Allow relevancy sorting of Meilisearch
parent 69efd3df76
commit f1c18e0b09
1 changed file with 61 additions and 19 deletions
@@ -1,4 +1,4 @@
-import { In } from "typeorm";
+import {FindManyOptions, In} from "typeorm";
 import { Notes } from "@/models/index.js";
 import { Note } from "@/models/entities/note.js";
 import config from "@/config/index.js";
@@ -58,6 +58,11 @@ export const paramDef = {
 			nullable: true,
 			default: null,
 		},
+		order: {
+			type: "string",
+			default: "chronological",
+			nullable: true,
+		},
 	},
 	required: ["query"],
 } as const;
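For context, the new `order` property lets API clients opt into relevancy ordering. Below is a minimal sketch of what a request body could look like; the host and the "notes/search" path are assumptions (the changed file's path is not shown in this excerpt), and only the parameter names come from the schema above:

// Hypothetical client call; endpoint path and host are assumptions, not part of this diff.
const res = await fetch("https://example.social/api/notes/search", {
	method: "POST",
	headers: { "Content-Type": "application/json" },
	body: JSON.stringify({
		query: "fediverse",
		limit: 10,
		order: "relevancy", // omit or pass "chronological" for the old behaviour
	}),
});
const notes = await res.json();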
@@ -156,9 +161,6 @@ export default define(meta, paramDef, async (ps, me) => {
 				where: {
 					id: In(chunk),
 				},
-				order: {
-					id: "DESC",
-				},
 			});
 
 			// The notes are checked for visibility and muted/blocked users when packed
@@ -175,19 +177,32 @@ export default define(meta, paramDef, async (ps, me) => {
 	} else if (meilisearch) {
 		let start = 0;
 		const chunkSize = 100;
-
-		// Use meilisearch to fetch and step through all search results that could match the requirements
-		const ids = [];
+		const sortByDate = ps.order !== "relevancy";
+
+		type NoteResult = {
+			id: string,
+			createdAt: number
+		}
+		const extractedNotes: NoteResult[] = [];
+
 		while (true) {
-			const results = await meilisearch.search(ps.query, chunkSize, start, me);
+			const searchRes = await meilisearch.search(
+				ps.query,
+				chunkSize,
+				start,
+				me,
+			);
+			const results: MeilisearchNote[] = searchRes.hits as MeilisearchNote[];
+
+			console.log(JSON.stringify(results));
 
 			start += chunkSize;
 
-			if (results.hits.length === 0) {
+			if (results.length === 0) {
 				break;
 			}
 
-			const res = results.hits
+			const res = results
 				.filter((key: MeilisearchNote) => {
 					if (ps.userId && key.userId !== ps.userId) {
 						return false;
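The loop above pages through the project's `meilisearch.search()` wrapper in fixed chunks of 100 until no more hits come back. As a rough standalone sketch only, the same offset/limit paging pattern expressed against the official `meilisearch` JS client, assuming a "notes" index; the wrapper's actual internals are not part of this diff:

import { MeiliSearch } from "meilisearch";

// Assumed connection details and index name, purely for illustration.
const client = new MeiliSearch({ host: "http://localhost:7700" });
const index = client.index("notes");

async function collectAllHits(query: string) {
	const chunkSize = 100;
	const hits: Record<string, unknown>[] = [];
	for (let offset = 0; ; offset += chunkSize) {
		// Each call returns at most chunkSize hits starting at the given offset.
		const res = await index.search(query, { limit: chunkSize, offset });
		if (res.hits.length === 0) break;
		hits.push(...res.hits);
	}
	return hits;
}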
@@ -203,34 +218,61 @@ export default define(meta, paramDef, async (ps, me) => {
 					}
 					return true;
 				})
-				.map((key) => key.id);
+				.map((key) => {
+					return {
+						id: key.id,
+						createdAt: key.createdAt
+					}
+				});
 
-			ids.push(...res);
+			extractedNotes.push(...res);
+			console.log(extractedNotes);
 		}
 
-		// Sort all the results by note id DESC (newest first)
-		ids.sort((a, b) => b - a);
+		// Depending on the ordering requested, return the notes sorted by relevancy as
+		// returned by Meilisearch or order chronologically
+		if (sortByDate) {
+			extractedNotes.sort((a, b) => b.createdAt - a.createdAt);
+		}
 
 		// Fetch the notes from the database until we have enough to satisfy the limit
 		start = 0;
 		const found = [];
-		while (found.length < ps.limit && start < ids.length) {
-			const chunk = ids.slice(start, start + chunkSize);
-			const notes: Note[] = await Notes.find({
+		const noteIDs = extractedNotes.map(note => note.id);
+
+		// Index the ID => index number into a map, so we can sort efficiently later
+		const idIndexMap = new Map(noteIDs.map((id, index) => [id, index]));
+
+		while (found.length < ps.limit && start < noteIDs.length) {
+			const chunk = noteIDs.slice(start, start + chunkSize);
+
+			let query : FindManyOptions = sortByDate ? {
 				where: {
 					id: In(chunk),
 				},
 				order: {
-					id: "DESC",
-				},
-			});
+					id: "DESC"
+				}
+			} : {
+				where: {
+					id: In(chunk),
+				},
+			}
+
+			console.log(JSON.stringify(query))
+
+			const notes: Note[] = await Notes.find(query);
+
+			// Re-order the note result according to the noteIDs array (cannot be undefined, we map this earlier)
+			// @ts-ignore
+			notes.sort((a, b) => idIndexMap.get(a.id) - idIndexMap.get(b.id));
 
 			// The notes are checked for visibility and muted/blocked users when packed
 			found.push(...(await Notes.packMany(notes, me)));
 			start += chunkSize;
 		}
 
-		// If we have more results than the limit, trim them
+		// If we have more results than the limit, trim the results down
 		if (found.length > ps.limit) {
 			found.length = ps.limit;
 		}
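The second half of the change is what actually preserves relevancy: a `WHERE id IN (...)` query returns rows in arbitrary order, so the fetched notes are sorted back into Meilisearch's order via an id → index map. A self-contained sketch of that technique follows; the `Row` type and sample data are invented for illustration and are not part of the diff:

type Row = { id: string };

function sortByIdOrder(rows: Row[], orderedIds: string[]): Row[] {
	// Map each id to its position in the desired ordering, so the comparator does O(1) lookups.
	const idIndexMap = new Map(orderedIds.map((id, index) => [id, index] as [string, number]));
	return [...rows].sort(
		(a, b) => (idIndexMap.get(a.id) ?? 0) - (idIndexMap.get(b.id) ?? 0),
	);
}

// The database may hand a chunk back in any order...
const fetched: Row[] = [{ id: "c" }, { id: "a" }, { id: "b" }];
// ...so restore the ranking Meilisearch produced:
sortByIdOrder(fetched, ["a", "b", "c"]); // → [{ id: "a" }, { id: "b" }, { id: "c" }]

Falling back to `?? 0` (or filtering out unknown ids first) also sidesteps the `@ts-ignore` in the diff, which is only needed because `Map.get()` can return `undefined` as far as the type checker is concerned.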