Revert "refactor (backend): port extract-hashtags to backend-rs"

This reverts commit 332b6cd620.

It worked perfectly fine, but the mfm crate is not licensed properly.
Also, we may want to use libmfm instead of mfm-rs.
This commit is contained in:
naskya 2024-07-24 19:27:03 +09:00
parent 0595dccbfd
commit 5752254929
No known key found for this signature in database
GPG key ID: 712D413B3A9FED5C
16 changed files with 32 additions and 108 deletions

23
Cargo.lock generated
View file

@ -211,7 +211,6 @@ dependencies = [
"image",
"isahc",
"macros",
"mfm",
"napi",
"napi-build",
"napi-derive",
@ -1807,18 +1806,6 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "mfm"
version = "0.1.0"
source = "git+https://firefish.dev/firefish/mfm.git?rev=1dc2cc408176e6559db08cb568e63236597341b4#1dc2cc408176e6559db08cb568e63236597341b4"
dependencies = [
"emojis",
"nom",
"nom-regex",
"once_cell",
"unicode-segmentation",
]
[[package]]
name = "mime"
version = "0.3.17"
@ -1942,16 +1929,6 @@ dependencies = [
"thiserror",
]
[[package]]
name = "nom-regex"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72e5c7731c4c1370b61604ed52a2475e861aac9e08dec9f23903d4ddfdc91c18"
dependencies = [
"nom",
"regex",
]
[[package]]
name = "noop_proc_macro"
version = "0.3.0"

View file

@ -26,7 +26,6 @@ emojis = { version = "0.6.2", default-features = false }
idna = { version = "1.0.2", default-features = false }
image = { version = "0.25.2", default-features = false }
isahc = { version = "1.7.2", default-features = false }
mfm = { git = "https://firefish.dev/firefish/mfm.git", rev = "1dc2cc408176e6559db08cb568e63236597341b4" }
nom-exif = { version = "1.2.6", default-features = false }
once_cell = { version = "1.19.0", default-features = false }
pretty_assertions = { version = "1.4.0", default-features = false }

View file

@ -30,7 +30,6 @@ emojis = { workspace = true }
idna = { workspace = true, features = ["std", "compiled_data"] }
image = { workspace = true, features = ["avif", "bmp", "gif", "ico", "jpeg", "png", "tiff", "webp"] }
isahc = { workspace = true, features = ["http2", "text-decoding", "json"] }
mfm = { workspace = true }
nom-exif = { workspace = true }
once_cell = { workspace = true }
openssl = { workspace = true, features = ["vendored"] }

View file

@ -369,8 +369,6 @@ export interface Emoji {
height: number | null
}
export declare function extractHashtags(text: string): Array<string>
export declare function extractHost(uri: string): string
export declare function fetchMeta(): Promise<Meta>

View file

@ -374,7 +374,6 @@ module.exports.decodeReaction = nativeBinding.decodeReaction
module.exports.DriveFileEvent = nativeBinding.DriveFileEvent
module.exports.DriveFileUsageHint = nativeBinding.DriveFileUsageHint
module.exports.DriveFolderEvent = nativeBinding.DriveFolderEvent
module.exports.extractHashtags = nativeBinding.extractHashtags
module.exports.extractHost = nativeBinding.extractHost
module.exports.fetchMeta = nativeBinding.fetchMeta
module.exports.fetchNodeinfo = nativeBinding.fetchNodeinfo

View file

@ -4,7 +4,6 @@ pub mod config;
pub mod database;
pub mod federation;
pub mod init;
pub mod mfm;
pub mod misc;
pub mod model;
pub mod service;

View file

@ -1,14 +0,0 @@
use mfm::{node, parse, MfmParseError};
/// Collects the distinct hashtags appearing in `text`.
///
/// Parses `text` as MFM and returns every hashtag at most once,
/// in no particular order (deduplicated via a `HashSet`).
///
/// # Errors
/// Returns an [`MfmParseError`] when `text` cannot be parsed as MFM.
#[macros::export]
pub fn extract_hashtags(text: &str) -> Result<Vec<String>, MfmParseError> {
    let nodes = parse(text)?;
    let mut seen = std::collections::HashSet::new();
    for parsed_node in nodes {
        if let node::Node::Inline(node::Inline::Hashtag(node::Hashtag { hashtag })) = parsed_node {
            seen.insert(hashtag);
        }
    }
    Ok(seen.into_iter().collect())
}

View file

@ -1 +0,0 @@
pub mod hashtag;

View file

@ -0,0 +1,9 @@
import * as mfm from "mfm-js";
import { unique } from "@/prelude/array.js";
/**
 * Returns the distinct hashtag strings found in a parsed MFM node tree.
 *
 * @param nodes - MFM nodes produced by `mfm.parse`.
 * @returns Deduplicated hashtag names (without the leading `#`).
 */
export function extractHashtags(nodes: mfm.MfmNode[]): string[] {
	const tagNodes = mfm.extract(nodes, (node) => node.type === "hashtag");
	const tagNames = tagNodes.map((node) => node.props.hashtag);
	return unique(tagNames);
}

View file

@ -8,14 +8,13 @@ import { resolveImage } from "./image.js";
import type { CacheableRemoteUser } from "@/models/entities/user.js";
import { htmlToMfm } from "../misc/html-to-mfm.js";
import { extractApHashtags } from "./tag.js";
import { unique, toArray, toSingle, concat } from "@/prelude/array.js";
import { unique, toArray, toSingle } from "@/prelude/array.js";
import { extractPollFromQuestion } from "./question.js";
import vote from "@/services/note/polls/vote.js";
import { apLogger } from "../logger.js";
import type { DriveFile } from "@/models/entities/drive-file.js";
import {
type ImageSize,
extractHashtags,
extractHost,
genId,
getImageSizeFromUrl,
@ -49,6 +48,7 @@ import { extractApMentions } from "./mention.js";
import DbResolver from "../db-resolver.js";
import { StatusError } from "@/misc/fetch.js";
import { publishNoteStream } from "@/services/stream.js";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import { UserProfiles } from "@/models/index.js";
import { In } from "typeorm";
import { config } from "@/config.js";
@ -663,36 +663,19 @@ export async function updateNote(value: string | IObject, resolver?: Resolver) {
const apMentions = await extractApMentions(post.tag);
const apHashtags = await extractApHashtags(post.tag);
let hashTags: string[];
const poll = await extractPollFromQuestion(post, resolver).catch(
() => undefined,
);
if (apHashtags) {
hashTags = apHashtags;
} else {
hashTags = unique(
(text ? extractHashtags(text) : [])
.concat(cw ? extractHashtags(cw) : [])
.concat(
poll?.choices
? concat(
poll.choices.map((choice: string) => extractHashtags(choice)),
)
: [],
),
);
}
const choices =
poll?.choices.flatMap((choice: string) => mfm.parse(choice)) ?? [];
const choices = poll?.choices.flatMap((choice) => mfm.parse(choice)) ?? [];
const tokens = mfm
.parse(text || "")
.concat(mfm.parse(cw || ""))
.concat(choices);
const hashTags: string[] = apHashtags || extractHashtags(tokens);
const mentionUsers =
apMentions || (await extractMentionedUsers(actor, tokens));

View file

@ -3,7 +3,7 @@ import { publishMainStream, publishUserEvent } from "@/services/stream.js";
import acceptAllFollowRequests from "@/services/following/requests/accept-all.js";
import { publishToFollowers } from "@/services/i/update.js";
import { extractCustomEmojisFromMfm } from "@/misc/extract-custom-emojis-from-mfm.js";
import { extractHashtags } from "backend-rs";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import { updateUsertags } from "@/services/update-hashtag.js";
import { Users, DriveFiles, UserProfiles, Pages } from "@/models/index.js";
import type { User } from "@/models/entities/user.js";
@ -309,7 +309,7 @@ export default define(meta, paramDef, async (ps, _user, token) => {
if (newDescription != null) {
const tokens = mfm.parse(newDescription);
emojis = emojis.concat(extractCustomEmojisFromMfm(tokens!));
tags = extractHashtags(newDescription)
tags = extractHashtags(tokens!)
.map((tag) => normalizeForSearch(tag))
.splice(0, 32);
}

View file

@ -23,8 +23,8 @@ import { HOUR } from "@/const.js";
import { getNote } from "@/server/api/common/getters.js";
import { Poll } from "@/models/entities/poll.js";
import * as mfm from "mfm-js";
import { concat, unique } from "@/prelude/array.js";
import { extractHashtags } from "backend-rs";
import { concat } from "@/prelude/array.js";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import { extractCustomEmojisFromMfm } from "@/misc/extract-custom-emojis-from-mfm.js";
import { extractMentionedUsers } from "@/services/note/create.js";
import { publishNoteStream } from "@/services/stream.js";
@ -400,15 +400,7 @@ export default define(meta, paramDef, async (ps, user) => {
const combinedTokens = tokens.concat(cwTokens).concat(choiceTokens);
tags = unique(
(ps.text ? extractHashtags(ps.text) : [])
.concat(ps.cw ? extractHashtags(ps.cw) : [])
.concat(
ps.poll?.choices
? concat(ps.poll.choices.map((choice) => extractHashtags(choice)))
: [],
),
);
tags = extractHashtags(combinedTokens);
emojis = extractCustomEmojisFromMfm(combinedTokens);

View file

@ -44,5 +44,9 @@ export default define(meta, paramDef, async (ps, user) => {
return 204;
}
return translate(note.text, note.lang as string | null, ps.targetLang);
return translate(
note.text,
note.lang as string | null,
ps.targetLang,
);
});

View file

@ -8,7 +8,7 @@ import { deliverToRelays } from "@/services/relay.js";
import type { UserProfile } from "@/models/entities/user-profile.js";
import { extractCustomEmojisFromMfm } from "@/misc/extract-custom-emojis-from-mfm.js";
import mfm from "mfm-js";
import { extractHashtags } from "backend-rs";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import { normalizeForSearch } from "@/misc/normalize-for-search.js";
import { updateUsertags } from "@/services/update-hashtag.js";
import { publishMainStream, publishUserEvent } from "@/services/stream.js";
@ -56,7 +56,7 @@ export async function updateUserProfileData(
if (newDescription != null) {
const tokens = mfm.parse(newDescription);
emojis = emojis.concat(extractCustomEmojisFromMfm(tokens!));
tags = extractHashtags(newDescription)
tags = extractHashtags(tokens!)
.map((tag) => normalizeForSearch(tag))
.splice(0, 32);
}

View file

@ -8,11 +8,12 @@ import { renderActivity } from "@/remote/activitypub/renderer/index.js";
import { resolveUser } from "@/remote/resolve-user.js";
import { config } from "@/config.js";
import { updateHashtags } from "@/services/update-hashtag.js";
import { concat, unique } from "@/prelude/array.js";
import { concat } from "@/prelude/array.js";
import { insertNoteUnread } from "@/services/note/unread.js";
import { registerOrFetchInstanceDoc } from "@/services/register-or-fetch-instance-doc.js";
import { extractMentions } from "@/misc/extract-mentions.js";
import { extractCustomEmojisFromMfm } from "@/misc/extract-custom-emojis-from-mfm.js";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import type { IMentionedRemoteUsers } from "@/models/entities/note.js";
import { Note } from "@/models/entities/note.js";
import {
@ -38,7 +39,6 @@ import { Poll } from "@/models/entities/poll.js";
import { createNotification } from "@/services/create-notification.js";
import { isDuplicateKeyValueError } from "@/misc/is-duplicate-key-value-error.js";
import {
extractHashtags,
updateAntennasOnNewNote,
checkWordMute,
genId,
@ -291,21 +291,7 @@ export default async (
const combinedTokens = tokens.concat(cwTokens).concat(choiceTokens);
tags =
data.apHashtags ||
unique(
(data.text ? extractHashtags(data.text) : [])
.concat(data.cw ? extractHashtags(data.cw) : [])
.concat(
data.poll?.choices
? concat(
data.poll.choices.map((choice: string) =>
extractHashtags(choice),
),
)
: [],
),
);
tags = data.apHashtags || extractHashtags(combinedTokens);
emojis = data.apEmojis || extractCustomEmojisFromMfm(combinedTokens);

View file

@ -7,6 +7,7 @@ import DeliverManager from "@/remote/activitypub/deliver-manager.js";
import renderNote from "@/remote/activitypub/renderer/note.js";
import { renderActivity } from "@/remote/activitypub/renderer/index.js";
import { extractCustomEmojisFromMfm } from "@/misc/extract-custom-emojis-from-mfm.js";
import { extractHashtags } from "@/misc/extract-hashtags.js";
import type { IMentionedRemoteUsers } from "@/models/entities/note.js";
import type { Note } from "@/models/entities/note.js";
import {
@ -20,13 +21,12 @@ import {
import type { DriveFile } from "@/models/entities/drive-file.js";
import { In } from "typeorm";
import type { ILocalUser, IRemoteUser } from "@/models/entities/user.js";
import { extractHashtags, genId } from "backend-rs";
import { genId } from "backend-rs";
import type { IPoll } from "@/models/entities/poll.js";
import { deliverToRelays } from "../relay.js";
import renderUpdate from "@/remote/activitypub/renderer/update.js";
import { extractMentionedUsers } from "@/services/note/create.js";
import { normalizeForSearch } from "@/misc/normalize-for-search.js";
import { unique } from "@/prelude/array.js";
type Option = {
text?: string | null;
@ -51,13 +51,7 @@ export default async function (
const tokens = mfm.parse(data.text || "").concat(mfm.parse(data.cw || ""));
const extractedTags = unique(
(data.text ? extractHashtags(data.text) : []).concat(
data.cw ? extractHashtags(data.cw) : [],
),
);
const tags: string[] = extractedTags
const tags: string[] = extractHashtags(tokens)
.filter((tag) => Array.from(tag || "").length <= 128)
.splice(0, 32)
.map(normalizeForSearch);