Mirror of https://github.com/lemeow125/StudE-Frontend.git (synced 2024-11-17 06:19:25 +08:00)
Fixed filtering for study groups
commit 81bead43ff
parent 68778cea7a
1 changed file with 13 additions and 7 deletions
@@ -17,7 +17,8 @@ export default function ParseStudyGroupList(
   let result: any[] = [];
   // We first remove any instances that do not have a study group associated with it
   let data_filtered = data.filter(
-    (item: StudentStatusFilterType) => item.study_group !== ""
+    (item: StudentStatusFilterType) =>
+      item.study_group !== undefined && item.study_group.length > 0
   );
   // console.log("Filtered Data:", data_filtered);
   // Then we flatten the data so that all attributes are in the first layer
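For context: the old predicate item.study_group !== "" still accepted records whose study_group was undefined, since undefined !== "" is true. A minimal sketch of what the new check keeps and drops, assuming StudentStatusFilterType carries an optional study_group: string field (the type shape and sample data here are assumptions for illustration):

type StudentStatusFilterType = {
  study_group?: string;
  active?: boolean;
};

const sample: StudentStatusFilterType[] = [
  { study_group: "Group A", active: true }, // kept
  { study_group: "", active: true },        // empty string: rejected by the old and new checks
  { active: false },                        // undefined: only rejected by the new check
];

const data_filtered = sample.filter(
  (item: StudentStatusFilterType) =>
    item.study_group !== undefined && item.study_group.length > 0
);
console.log(data_filtered); // only the first entry remains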
@@ -35,19 +36,20 @@ export default function ParseStudyGroupList(
   }));
   // console.log("Flattened Data:", data_flattened);
 
-  // We take from the array all unique subject names
-  let unique_subjects = [
+  // We take from the array all unique study groups
+  let unique_studygroups = [
     ...new Set(
-      data_flattened.map((item: StudentStatusFilterType) => item.subject)
+      data_flattened.map((item: StudentStatusFilterType) => item.study_group)
     ),
   ];
 
   // Then we create arrays unique to each subject
-  unique_subjects.forEach((subject, index: number) => {
+  unique_studygroups.forEach((studygroup, index: number) => {
     // We build another array for each subject, including only those instances that are the same subject name
     let unique_subject_list = data_flattened
       .filter(
-        (item: StudentStatusFilterTypeFlattened) => item.subject === subject
+        (item: StudentStatusFilterTypeFlattened) =>
+          item.study_group === studygroup
       )
       .map((item: StudentStatusFilterTypeFlattened) => ({
         active: item.active,
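The hunk above switches the grouping key from subject to study_group. A small sketch of the Set-based de-duplication and the per-group filter it relies on, with the flattened record shape assumed for illustration:

type StudentStatusFilterTypeFlattened = {
  study_group: string;
  subject: string;
  active: boolean;
};

const data_flattened: StudentStatusFilterTypeFlattened[] = [
  { study_group: "Group A", subject: "Calculus", active: true },
  { study_group: "Group A", subject: "Calculus", active: false },
  { study_group: "Group B", subject: "Physics", active: true },
];

// Spreading a Set keeps each study group name exactly once
let unique_studygroups = [
  ...new Set(data_flattened.map((item) => item.study_group)),
]; // ["Group A", "Group B"]

// One member list per study group
unique_studygroups.forEach((studygroup) => {
  let unique_subject_list = data_flattened.filter(
    (item) => item.study_group === studygroup
  );
  console.log(studygroup, unique_subject_list.length); // "Group A" 2, "Group B" 1
});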
@@ -147,6 +149,7 @@ export default function ParseStudyGroupList(
     // We now build the object that we will return
     const subjectUserMap: subjectUserMapType = {
       subject: "",
+      study_group: "",
       users: [],
       latitude: 0,
       longitude: 0,
@@ -156,6 +159,9 @@ export default function ParseStudyGroupList(
     if (!subjectUserMap["users"]) {
       subjectUserMap["users"] = [];
     }
+    if (!subjectUserMap["study_group"]) {
+      subjectUserMap["study_group"] = unique_subject_list[0].study_group;
+    }
     subjectUserMap["subject"] = item.subject;
     subjectUserMap["latitude"] = avgLat;
     subjectUserMap["longitude"] = avgLng;
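Together with the study_group: "" default added two hunks up, the guard above fills the field only once per group, because an empty string is falsy. A sketch of that behavior, assuming subjectUserMapType has just the fields visible in the diff (any further fields are unknown here):

type subjectUserMapType = {
  subject: string;
  study_group: string;
  users: any[];
  latitude: number;
  longitude: number;
};

const subjectUserMap: subjectUserMapType = {
  subject: "",
  study_group: "",
  users: [],
  latitude: 0,
  longitude: 0,
};

// Stand-in for the per-group array built earlier in the function
const unique_subject_list = [{ study_group: "Group A" }];

if (!subjectUserMap["study_group"]) {
  // "" is falsy, so this assignment runs only while the field is still unset
  subjectUserMap["study_group"] = unique_subject_list[0].study_group;
}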
@@ -167,7 +173,7 @@ export default function ParseStudyGroupList(
     result = result.concat([subjectUserMap]);
   });
 
-  // console.log("Final Result:", result);
+  console.log("Final Result:", result);
 
   return result;
 }