chore: formatting
This commit is contained in:
parent 5c56487802
commit 2cf1d9f630
6 changed files with 192 additions and 192 deletions
@@ -5,7 +5,7 @@ introMisskey: "Welcome! Calckey is an open source, decentralized social media pl
 \ that's free forever! \U0001F680"
 monthAndDay: "{month}/{day}"
 search: "Search"
-search_placeholder: "Enter search terms..."
+searchPlaceholder: "Search Calckey"
 notifications: "Notifications"
 username: "Username"
 password: "Password"
@@ -1,11 +1,11 @@
-import {Health, MeiliSearch, Stats} from "meilisearch";
-import {dbLogger} from "./logger.js";
+import { Health, MeiliSearch, Stats } from "meilisearch";
+import { dbLogger } from "./logger.js";
 
 import config from "@/config/index.js";
-import {Note} from "@/models/entities/note.js";
+import { Note } from "@/models/entities/note.js";
 import * as url from "url";
-import {ILocalUser, User} from "@/models/entities/user.js";
-import {Followings, Users} from "@/models/index.js";
+import { ILocalUser, User } from "@/models/entities/user.js";
+import { Followings, Users } from "@/models/index.js";
 
 const logger = dbLogger.createSubLogger("meilisearch", "gray", false);
@@ -72,195 +72,195 @@ export type MeilisearchNote = {

export default hasConfig
	? {
			search: async (
				query: string,
				limit: number,
				offset: number,
				userCtx: ILocalUser | null,
			) => {
				/// Advanced search syntax
				/// from:user => filter by user + optional domain
				/// has:image/video/audio/text/file => filter by attachment types
				/// domain:domain.com => filter by domain
				/// before:Date => show posts made before Date
				/// after: Date => show posts made after Date
				/// "text" => get posts with exact text between quotes
				/// filter:following => show results only from users you follow
				/// filter:followers => show results only from followers

				let constructedFilters: string[] = [];

				let splitSearch = query.split(" ");

				// Detect search operators and remove them from the actual query
				let filteredSearchTerms = (
					await Promise.all(
						splitSearch.map(async (term) => {
							if (term.startsWith("has:")) {
								let fileType = term.slice(4);
								constructedFilters.push(`mediaAttachment = "${fileType}"`);
								return null;
							} else if (term.startsWith("from:")) {
								let user = term.slice(5);
								constructedFilters.push(`userName = ${user}`);
								return null;
							} else if (term.startsWith("domain:")) {
								let domain = term.slice(7);
								constructedFilters.push(`userHost = ${domain}`);
								return null;
							} else if (term.startsWith("after:")) {
								let timestamp = term.slice(6);
								// Try to parse the timestamp as JavaScript Date
								let date = Date.parse(timestamp);
								if (isNaN(date)) return null;
								constructedFilters.push(`createdAt > ${date / 1000}`);
								return null;
							} else if (term.startsWith("before:")) {
								let timestamp = term.slice(7);
								// Try to parse the timestamp as JavaScript Date
								let date = Date.parse(timestamp);
								if (isNaN(date)) return null;
								constructedFilters.push(`createdAt < ${date / 1000}`);
								return null;
							} else if (term.startsWith("filter:following")) {
								// Check if we got a context user
								if (userCtx) {
									// Fetch user follows from DB
									let followedUsers = await Followings.find({
										where: {
											followerId: userCtx.id,
										},
										select: {
											followeeId: true,
										},
									});
									let followIDs = followedUsers.map((user) => user.followeeId);

									if (followIDs.length === 0) return null;

									constructedFilters.push(`userId IN [${followIDs.join(",")}]`);
								} else {
									logger.warn(
										"search filtered to follows called without user context",
									);
								}

								return null;
							} else if (term.startsWith("filter:followers")) {
								// Check if we got a context user
								if (userCtx) {
									// Fetch users follows from DB
									let followedUsers = await Followings.find({
										where: {
											followeeId: userCtx.id,
										},
										select: {
											followerId: true,
										},
									});
									let followIDs = followedUsers.map((user) => user.followerId);

									if (followIDs.length === 0) return null;

									constructedFilters.push(`userId IN [${followIDs.join(",")}]`);
								} else {
									logger.warn(
										"search filtered to followers called without user context",
									);
								}

								return null;
							}

							return term;
						}),
					)
				).filter((term) => term !== null);

				let sortRules = [];

				// An empty search term with defined filters means we have a placeholder search => https://www.meilisearch.com/docs/reference/api/search#placeholder-search
				// These have to be ordered manually, otherwise the *oldest* posts are returned first, which we don't want
				if (filteredSearchTerms.length === 0 && constructedFilters.length > 0) {
					sortRules.push("createdAt:desc");
				}

				logger.info(`Searching for ${filteredSearchTerms.join(" ")}`);
				logger.info(`Limit: ${limit}`);
				logger.info(`Offset: ${offset}`);
				logger.info(`Filters: ${constructedFilters}`);
				logger.info(`Ordering: ${sortRules}`);

				return posts.search(filteredSearchTerms.join(" "), {
					limit: limit,
					offset: offset,
					filter: constructedFilters,
					sort: sortRules,
				});
			},
			ingestNote: async (ingestNotes: Note | Note[]) => {
				if (ingestNotes instanceof Note) {
					ingestNotes = [ingestNotes];
				}

				let indexingBatch: MeilisearchNote[] = [];

				for (let note of ingestNotes) {
					if (note.user === undefined) {
						note.user = await Users.findOne({
							where: {
								id: note.userId,
							},
						});
					}

					let attachmentType = "";
					if (note.attachedFileTypes.length > 0) {
						attachmentType = note.attachedFileTypes[0].split("/")[0];
						switch (attachmentType) {
							case "image":
							case "video":
							case "audio":
							case "text":
								break;
							default:
								attachmentType = "file";
								break;
						}
					}

					indexingBatch.push(<MeilisearchNote>{
						id: note.id.toString(),
						text: note.text ? note.text : "",
						userId: note.userId,
						userHost:
							note.userHost !== ""
								? note.userHost
								: url.parse(config.host).host,
						channelId: note.channelId ? note.channelId : "",
						mediaAttachment: attachmentType,
						userName: note.user?.username ?? "UNKNOWN",
						createdAt: note.createdAt.getTime() / 1000, // division by 1000 is necessary because Node returns in ms-accuracy
					});
				}

				return posts
					.addDocuments(indexingBatch, {
						primaryKey: "id",
					})
					.then(() =>
						console.log(`sent ${indexingBatch.length} posts for indexing`),
					);
			},
			serverStats: async () => {
				let health: Health = await client.health();
				let stats: Stats = await client.getStats();

				return {
					health: health.status,
					size: stats.databaseSize,
					indexed_count: stats.indexes["posts"].numberOfDocuments,
				};
			},
	  }
	: null;
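For context, here is a minimal, standalone sketch of what the search branch above does with the Meilisearch JS client. It is not part of this commit: the host, API key, and "posts" index name are placeholder assumptions, only the has:/domain: operators are handled, and the filter/sort calls only succeed if the corresponding attributes (mediaAttachment, userHost, createdAt) are configured as filterable/sortable on the index.

```ts
// Standalone sketch, not Calckey code: host, apiKey, and index name are placeholders.
import { MeiliSearch } from "meilisearch";

const client = new MeiliSearch({
	host: "http://localhost:7700",
	apiKey: "masterKey",
});
const posts = client.index("posts");

async function searchExample(query: string) {
	const filters: string[] = [];

	// Strip operators out of the query and turn them into Meilisearch filter
	// strings, mirroring the has:/domain: handling in the diff above.
	const terms = query.split(" ").filter((term) => {
		if (term.startsWith("has:")) {
			filters.push(`mediaAttachment = "${term.slice(4)}"`);
			return false;
		}
		if (term.startsWith("domain:")) {
			filters.push(`userHost = ${term.slice(7)}`);
			return false;
		}
		return true;
	});

	// With no remaining terms this becomes a Meilisearch "placeholder search",
	// so results are sorted explicitly to avoid getting the oldest posts first.
	const sort =
		terms.length === 0 && filters.length > 0 ? ["createdAt:desc"] : [];

	return posts.search(terms.join(" "), {
		limit: 10,
		offset: 0,
		filter: filters,
		sort,
	});
}

searchExample("cats has:image domain:example.com").then((res) =>
	console.log(res.hits),
);
```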
@@ -1,10 +1,10 @@
 import type Bull from "bull";
 
-import {queueLogger} from "../../logger.js";
-import {Notes} from "@/models/index.js";
-import {MoreThan} from "typeorm";
-import {index} from "@/services/note/create.js";
-import {Note} from "@/models/entities/note.js";
+import { queueLogger } from "../../logger.js";
+import { Notes } from "@/models/index.js";
+import { MoreThan } from "typeorm";
+import { index } from "@/services/note/create.js";
+import { Note } from "@/models/entities/note.js";
 import meilisearch from "../../../db/meilisearch.js";
 
 const logger = queueLogger.createSubLogger("index-all-notes");
@@ -33,7 +33,7 @@ export default async function indexAllNotes(
 	try {
 		notes = await Notes.find({
 			where: {
-				...(cursor ? {id: MoreThan(cursor)} : {}),
+				...(cursor ? { id: MoreThan(cursor) } : {}),
 			},
 			take: take,
 			order: {
@@ -69,7 +69,7 @@ export default async function indexAllNotes(
 
 	indexedCount += chunk.length;
 	const pct = (indexedCount / total) * 100;
-	job.update({indexedCount, cursor, total});
+	job.update({ indexedCount, cursor, total });
 	job.progress(+pct.toFixed(1));
 	logger.info(`Indexed notes ${indexedCount}/${total ? total : "?"}`);
 }
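The two hunks above touch the back-fill job's cursor-based pagination with TypeORM's MoreThan. A simplified, self-contained sketch of that pattern follows; the Post entity, repository, and console logging are illustrative placeholders, not the actual Calckey job.

```ts
// Sketch of cursor-based pagination with TypeORM, assuming an initialized DataSource.
import { Column, DataSource, Entity, MoreThan, PrimaryColumn } from "typeorm";

@Entity()
class Post {
	@PrimaryColumn()
	id!: string;

	@Column()
	text!: string;
}

async function indexAllPosts(db: DataSource, take = 100) {
	const repo = db.getRepository(Post);
	let cursor: string | null = null;
	let indexedCount = 0;

	while (true) {
		// Fetch the next chunk strictly after the last seen id, mirroring
		// `...(cursor ? { id: MoreThan(cursor) } : {})` in the diff above.
		const chunk = await repo.find({
			where: cursor ? { id: MoreThan(cursor) } : {},
			take,
			order: { id: "ASC" },
		});
		if (chunk.length === 0) break;

		// ...hand the chunk to the search backend here...

		indexedCount += chunk.length;
		cursor = chunk[chunk.length - 1].id;
		console.log(`indexed ${indexedCount} posts so far`);
	}
}
```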
@@ -4,7 +4,7 @@ import { Note } from "@/models/entities/note.js";
 import config from "@/config/index.js";
 import es from "../../../../db/elasticsearch.js";
 import sonic from "../../../../db/sonic.js";
-import meilisearch, {MeilisearchNote} from "../../../../db/meilisearch.js";
+import meilisearch, { MeilisearchNote } from "../../../../db/meilisearch.js";
 import define from "../../define.js";
 import { makePaginationQuery } from "../../common/make-pagination-query.js";
 import { generateVisibilityQuery } from "../../common/generate-visibility-query.js";
@@ -5,7 +5,7 @@ import { mainRouter } from "@/router";
 export async function search() {
 	const { canceled, result: query } = await os.inputText({
 		title: i18n.ts.search,
-		placeholder: i18n.ts.search_placeholder,
+		placeholder: i18n.ts.searchPlaceholder,
 		text:
 			"Advanced search operators\n" +
 			"from:user => filter by user\n" +
@@ -11,9 +11,9 @@
 </template>
 
 <script lang="ts" setup>
-import {onBeforeUnmount, onMounted} from "vue";
+import { onBeforeUnmount, onMounted } from "vue";
 import bytes from "@/filters/bytes";
-import {i18n} from "@/i18n";
+import { i18n } from "@/i18n";
 
 const props = defineProps<{
 	connection: any;