Merge branch 'develop' of https://firefish.dev/firefish/firefish into refactor/types
This commit is contained in:
commit
fec1a800b6
171 changed files with 1487 additions and 1335 deletions
|
@ -19,10 +19,4 @@ id: 'aid'
|
|||
# '10.69.1.0/24'
|
||||
#]
|
||||
|
||||
logLevel: [
|
||||
'error',
|
||||
'success',
|
||||
'warning',
|
||||
'debug',
|
||||
'info'
|
||||
]
|
||||
maxLogLevel: 'debug'
|
||||
|
|
|
@ -145,16 +145,11 @@ reservedUsernames: [
|
|||
# IP address family used for outgoing request (ipv4, ipv6 or dual)
|
||||
#outgoingAddressFamily: ipv4
|
||||
|
||||
# Log Option
|
||||
# Production env: ['error', 'success', 'warning', 'info']
|
||||
# Debug/Test env or Troubleshooting: ['error', 'success', 'warning', 'debug' ,'info']
|
||||
# Production env which storage space or IO is tight: ['error', 'warning']
|
||||
logLevel: [
|
||||
'error',
|
||||
'success',
|
||||
'warning',
|
||||
'info'
|
||||
]
|
||||
# Log level (error, warning, info, debug, trace)
|
||||
# Production env: info
|
||||
# Production env whose storage space or IO is tight: warning
|
||||
# Debug/Test env or Troubleshooting: debug (or trace)
|
||||
maxLogLevel: info
|
||||
|
||||
# Syslog option
|
||||
#syslog:
|
||||
|
|
653
Cargo.lock
generated
653
Cargo.lock
generated
|
@ -37,7 +37,6 @@ dependencies = [
|
|||
"cfg-if",
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"serde",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
@ -78,60 +77,6 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
"anstyle-query",
|
||||
"anstyle-wincon",
|
||||
"colorchoice",
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
|
||||
dependencies = [
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-query"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-wincon"
|
||||
version = "3.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.82"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
|
||||
|
||||
[[package]]
|
||||
name = "argon2"
|
||||
version = "0.5.3"
|
||||
|
@ -203,27 +148,22 @@ name = "backend-rs"
|
|||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"argon2",
|
||||
"async-trait",
|
||||
"basen",
|
||||
"bcrypt",
|
||||
"cfg-if",
|
||||
"chrono",
|
||||
"cuid2",
|
||||
"emojis",
|
||||
"idna",
|
||||
"jsonschema",
|
||||
"macro_rs",
|
||||
"napi",
|
||||
"napi-build",
|
||||
"napi-derive",
|
||||
"once_cell",
|
||||
"parse-display",
|
||||
"pretty_assertions",
|
||||
"rand",
|
||||
"redis",
|
||||
"regex",
|
||||
"rmp-serde",
|
||||
"schemars",
|
||||
"sea-orm",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -231,6 +171,8 @@ dependencies = [
|
|||
"strum 0.26.2",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"url",
|
||||
"urlencoding",
|
||||
]
|
||||
|
@ -298,21 +240,6 @@ dependencies = [
|
|||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bit-set"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
|
||||
dependencies = [
|
||||
"bit-vec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bit-vec"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.3.2"
|
||||
|
@ -420,12 +347,6 @@ dependencies = [
|
|||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bytecount"
|
||||
version = "0.6.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e1e5f035d16fc623ae5f74981db80a439803888314e3a555fd6f04acd51a3205"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.5.0"
|
||||
|
@ -481,52 +402,6 @@ dependencies = [
|
|||
"inout",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
"clap_lex",
|
||||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
||||
|
||||
[[package]]
|
||||
name = "combine"
|
||||
version = "4.6.7"
|
||||
|
@ -552,16 +427,6 @@ dependencies = [
|
|||
"unicode-segmentation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
|
||||
dependencies = [
|
||||
"core-foundation-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "core-foundation-sys"
|
||||
version = "0.8.6"
|
||||
|
@ -701,12 +566,6 @@ version = "0.15.7"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
|
||||
|
||||
[[package]]
|
||||
name = "dyn-clone"
|
||||
version = "1.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.11.0"
|
||||
|
@ -725,15 +584,6 @@ dependencies = [
|
|||
"phf",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "encoding_rs"
|
||||
version = "0.8.34"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "equivalent"
|
||||
version = "1.0.1"
|
||||
|
@ -767,16 +617,6 @@ version = "2.5.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
|
||||
|
||||
[[package]]
|
||||
name = "fancy-regex"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2"
|
||||
dependencies = [
|
||||
"bit-set",
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastrand"
|
||||
version = "2.0.2"
|
||||
|
@ -800,12 +640,6 @@ dependencies = [
|
|||
"spin 0.9.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fnv"
|
||||
version = "1.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
||||
|
||||
[[package]]
|
||||
name = "form_urlencoded"
|
||||
version = "1.2.1"
|
||||
|
@ -815,16 +649,6 @@ dependencies = [
|
|||
"percent-encoding",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fraction"
|
||||
version = "0.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3027ae1df8d41b4bed2241c8fdad4acc1e7af60c8e17743534b545e77182d678"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"num",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
|
@ -935,10 +759,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
"libc",
|
||||
"wasi",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -947,25 +769,6 @@ version = "0.28.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
|
||||
|
||||
[[package]]
|
||||
name = "h2"
|
||||
version = "0.3.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"futures-util",
|
||||
"http",
|
||||
"indexmap",
|
||||
"slab",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
|
@ -1003,12 +806,6 @@ dependencies = [
|
|||
"unicode-segmentation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.3.9"
|
||||
|
@ -1048,64 +845,6 @@ dependencies = [
|
|||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "0.2.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
"itoa",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "http-body"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"http",
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httparse"
|
||||
version = "1.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
|
||||
|
||||
[[package]]
|
||||
name = "httpdate"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.14.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"h2",
|
||||
"http",
|
||||
"http-body",
|
||||
"httparse",
|
||||
"httpdate",
|
||||
"itoa",
|
||||
"pin-project-lite",
|
||||
"socket2",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
"want",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iana-time-zone"
|
||||
version = "0.1.60"
|
||||
|
@ -1169,21 +908,6 @@ dependencies = [
|
|||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3"
|
||||
|
||||
[[package]]
|
||||
name = "iso8601"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "924e5d73ea28f59011fec52a0d12185d496a9b075d360657aed2a5707f701153"
|
||||
dependencies = [
|
||||
"nom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.12.1"
|
||||
|
@ -1208,36 +932,6 @@ dependencies = [
|
|||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "0.17.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a071f4f7efc9a9118dfb627a0a94ef247986e1ab8606a4c806ae2b3aa3b6978"
|
||||
dependencies = [
|
||||
"ahash 0.8.11",
|
||||
"anyhow",
|
||||
"base64 0.21.7",
|
||||
"bytecount",
|
||||
"clap",
|
||||
"fancy-regex",
|
||||
"fraction",
|
||||
"getrandom",
|
||||
"iso8601",
|
||||
"itoa",
|
||||
"memchr",
|
||||
"num-cmp",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"percent-encoding",
|
||||
"regex",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"time",
|
||||
"url",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "keccak"
|
||||
version = "0.1.5"
|
||||
|
@ -1339,12 +1033,6 @@ version = "2.7.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
|
||||
|
||||
[[package]]
|
||||
name = "mime"
|
||||
version = "0.3.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
|
||||
|
||||
[[package]]
|
||||
name = "minimal-lexical"
|
||||
version = "0.2.1"
|
||||
|
@ -1442,6 +1130,16 @@ dependencies = [
|
|||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nu-ansi-term"
|
||||
version = "0.46.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
|
||||
dependencies = [
|
||||
"overload",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num"
|
||||
version = "0.4.2"
|
||||
|
@ -1484,12 +1182,6 @@ dependencies = [
|
|||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-cmp"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa"
|
||||
|
||||
[[package]]
|
||||
name = "num-complex"
|
||||
version = "0.4.5"
|
||||
|
@ -1598,13 +1290,19 @@ version = "0.17.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec4c6225c69b4ca778c0aea097321a64c421cf4577b331c61b229267edabb6f8"
|
||||
dependencies = [
|
||||
"heck 0.4.1",
|
||||
"heck",
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "overload"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.12.1"
|
||||
|
@ -1628,31 +1326,6 @@ dependencies = [
|
|||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parse-display"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06af5f9333eb47bd9ba8462d612e37a8328a5cb80b13f0af4de4c3b89f52dee5"
|
||||
dependencies = [
|
||||
"parse-display-derive",
|
||||
"regex",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parse-display-derive"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc9252f259500ee570c75adcc4e317fa6f57a1e47747d622e0bf838002a7b790"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"regex-syntax",
|
||||
"structmeta",
|
||||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "password-hash"
|
||||
version = "0.5.0"
|
||||
|
@ -1933,42 +1606,6 @@ dependencies = [
|
|||
"bytecheck",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.11.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"bytes",
|
||||
"encoding_rs",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"h2",
|
||||
"http",
|
||||
"http-body",
|
||||
"hyper",
|
||||
"ipnet",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"sync_wrapper",
|
||||
"system-configuration",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
"url",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
"web-sys",
|
||||
"winreg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ring"
|
||||
version = "0.17.8"
|
||||
|
@ -2132,31 +1769,6 @@ version = "1.0.17"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
|
||||
|
||||
[[package]]
|
||||
name = "schemars"
|
||||
version = "0.8.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"dyn-clone",
|
||||
"schemars_derive",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "0.8.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde_derive_internals",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.2.0"
|
||||
|
@ -2179,7 +1791,7 @@ version = "0.2.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3bd3534a9978d0aa7edd2808dc1f8f31c4d0ecd31ddf71d997b3c98e9f3c9114"
|
||||
dependencies = [
|
||||
"heck 0.4.1",
|
||||
"heck",
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -2220,7 +1832,7 @@ version = "0.12.15"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e115c6b078e013aa963cc2d38c196c2c40b05f03d0ac872fe06b6e0d5265603"
|
||||
dependencies = [
|
||||
"heck 0.4.1",
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"sea-bae",
|
||||
|
@ -2293,17 +1905,6 @@ dependencies = [
|
|||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive_internals"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.115"
|
||||
|
@ -2315,18 +1916,6 @@ dependencies = [
|
|||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_urlencoded"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
|
||||
dependencies = [
|
||||
"form_urlencoded",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_yaml"
|
||||
version = "0.9.34+deprecated"
|
||||
|
@ -2378,6 +1967,15 @@ dependencies = [
|
|||
"keccak",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sharded-slab"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "signal-hook-registry"
|
||||
version = "1.4.1"
|
||||
|
@ -2551,7 +2149,7 @@ checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8"
|
|||
dependencies = [
|
||||
"dotenvy",
|
||||
"either",
|
||||
"heck 0.4.1",
|
||||
"heck",
|
||||
"hex",
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
|
@ -2703,35 +2301,6 @@ dependencies = [
|
|||
"unicode-normalization",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "structmeta"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"structmeta-derive",
|
||||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "structmeta-derive"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strum"
|
||||
version = "0.25.0"
|
||||
|
@ -2753,7 +2322,7 @@ version = "0.26.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946"
|
||||
dependencies = [
|
||||
"heck 0.4.1",
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
|
@ -2800,33 +2369,6 @@ dependencies = [
|
|||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sync_wrapper"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
|
||||
|
||||
[[package]]
|
||||
name = "system-configuration"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"core-foundation",
|
||||
"system-configuration-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "system-configuration-sys"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
|
||||
dependencies = [
|
||||
"core-foundation-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tap"
|
||||
version = "1.0.1"
|
||||
|
@ -2865,6 +2407,16 @@ dependencies = [
|
|||
"syn 2.0.58",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "1.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.3.36"
|
||||
|
@ -2952,20 +2504,6 @@ dependencies = [
|
|||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-util"
|
||||
version = "0.7.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_datetime"
|
||||
version = "0.6.5"
|
||||
|
@ -2983,12 +2521,6 @@ dependencies = [
|
|||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-service"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.40"
|
||||
|
@ -3019,13 +2551,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "try-lock"
|
||||
version = "0.2.5"
|
||||
name = "tracing-log"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
|
||||
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
|
||||
dependencies = [
|
||||
"log",
|
||||
"once_cell",
|
||||
"tracing-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-subscriber"
|
||||
version = "0.3.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
|
||||
dependencies = [
|
||||
"nu-ansi-term",
|
||||
"sharded-slab",
|
||||
"smallvec",
|
||||
"thread_local",
|
||||
"tracing-core",
|
||||
"tracing-log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
|
@ -3095,12 +2647,6 @@ version = "2.1.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
|
||||
|
||||
[[package]]
|
||||
name = "utf8parse"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.8.0"
|
||||
|
@ -3110,6 +2656,12 @@ dependencies = [
|
|||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "valuable"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
|
||||
|
||||
[[package]]
|
||||
name = "vcpkg"
|
||||
version = "0.2.15"
|
||||
|
@ -3122,15 +2674,6 @@ version = "0.9.4"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "want"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
|
||||
dependencies = [
|
||||
"try-lock",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.11.0+wasi-snapshot-preview1"
|
||||
|
@ -3168,18 +2711,6 @@ dependencies = [
|
|||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-futures"
|
||||
version = "0.4.42"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
"web-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.92"
|
||||
|
@ -3209,16 +2740,6 @@ version = "0.2.92"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
|
||||
|
||||
[[package]]
|
||||
name = "web-sys"
|
||||
version = "0.3.69"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "0.25.4"
|
||||
|
@ -3235,6 +2756,28 @@ dependencies = [
|
|||
"wasite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.52.0"
|
||||
|
@ -3392,16 +2935,6 @@ dependencies = [
|
|||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winreg"
|
||||
version = "0.50.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wyz"
|
||||
version = "0.5.1"
|
||||
|
|
|
@ -10,18 +10,14 @@ napi-derive = "2.16.2"
|
|||
napi-build = "2.1.3"
|
||||
|
||||
argon2 = "0.5.3"
|
||||
async-trait = "0.1.80"
|
||||
basen = "0.1.0"
|
||||
bcrypt = "0.15.1"
|
||||
cfg-if = "1.0.0"
|
||||
chrono = "0.4.37"
|
||||
convert_case = "0.6.0"
|
||||
cuid2 = "0.1.2"
|
||||
emojis = "0.6.1"
|
||||
idna = "0.5.0"
|
||||
jsonschema = "0.17.1"
|
||||
once_cell = "1.19.0"
|
||||
parse-display = "0.9.0"
|
||||
pretty_assertions = "1.4.0"
|
||||
proc-macro2 = "1.0.79"
|
||||
quote = "1.0.36"
|
||||
|
@ -29,7 +25,6 @@ rand = "0.8.5"
|
|||
redis = "0.25.3"
|
||||
regex = "1.10.4"
|
||||
rmp-serde = "1.2.0"
|
||||
schemars = "0.8.16"
|
||||
sea-orm = "0.12.15"
|
||||
serde = "1.0.197"
|
||||
serde_json = "1.0.115"
|
||||
|
@ -38,6 +33,8 @@ strum = "0.26.2"
|
|||
syn = "2.0.58"
|
||||
thiserror = "1.0.58"
|
||||
tokio = "1.37.0"
|
||||
tracing = "0.1.40"
|
||||
tracing-subscriber = "0.3.1"
|
||||
url = "2.5.0"
|
||||
urlencoding = "2.1.3"
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
Breaking changes are indicated by the :warning: icon.
|
||||
|
||||
## Unreleased
|
||||
## v20240424
|
||||
|
||||
- Added `antennaLimit` field to the response of `meta` and `admin/meta`, and the request of `admin/update-meta` (optional).
|
||||
- Added `filter` optional parameter to `notes/renotes` endpoint to filter the types of renotes. It can take the following values:
|
||||
|
|
|
@ -5,6 +5,12 @@ Critical security updates are indicated by the :warning: icon.
|
|||
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
|
||||
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
|
||||
|
||||
## [v20240424](https://firefish.dev/firefish/firefish/-/merge_requests/10765/commits)
|
||||
|
||||
- Improve the usability of the feature to prevent forgetting to write alt texts
|
||||
- Add a server-wide setting for the maximum number of antennas each user can create
|
||||
- Fix bugs
|
||||
|
||||
## [v20240421](https://firefish.dev/firefish/firefish/-/merge_requests/10756/commits)
|
||||
|
||||
- Fix bugs
|
||||
|
|
|
@ -2,6 +2,12 @@
|
|||
|
||||
You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md).
|
||||
|
||||
## Unreleased
|
||||
|
||||
### For all users
|
||||
|
||||
You can control the verbosity of the server log by adding `maxLogLevel` in `.config/default.yml`. `logLevels` has been deprecated in favor of this setting. (see also: <https://firefish.dev/firefish/firefish/-/blob/eac0c1c47cd23789dcc395ab08b074934409fd96/.config/example.yml#L152>)
|
||||
|
||||
## v20240413
|
||||
|
||||
### For all users
|
||||
|
|
|
@ -2289,3 +2289,6 @@ autocorrectNoteLanguage: Mostra un avís si l'idioma de la publicació no coinci
|
|||
amb el resultat de l'idioma detectat automàticament
|
||||
noteEditHistory: Historial d'edicions
|
||||
media: Multimèdia
|
||||
antennaLimit: El nombre màxim d'antenes que pot crear un usuari
|
||||
showAddFileDescriptionAtFirstPost: Obra de forma automàtica un formulari per escriure
|
||||
una descripció quant intentes publicar un fitxer que no en té
|
||||
|
|
|
@ -645,6 +645,7 @@ deletedNote: "Deleted post"
|
|||
invisibleNote: "Invisible post"
|
||||
enableInfiniteScroll: "Automatically load more"
|
||||
visibility: "Visiblility"
|
||||
cannotEditVisibility: "You can't edit the visibility"
|
||||
poll: "Poll"
|
||||
useCw: "Hide content"
|
||||
enablePlayer: "Open video player"
|
||||
|
@ -2146,6 +2147,7 @@ _notification:
|
|||
reacted: "reacted to your post"
|
||||
renoted: "boosted your post"
|
||||
voted: "voted on your poll"
|
||||
andCountUsers: "and {count} more users {acted}"
|
||||
_types:
|
||||
all: "All"
|
||||
follow: "New followers"
|
||||
|
@ -2232,3 +2234,4 @@ autocorrectNoteLanguage: "Show a warning if the post language does not match the
|
|||
incorrectLanguageWarning: "It looks like your post is in {detected}, but you selected
|
||||
{current}.\nWould you like to set the language to {detected} instead?"
|
||||
noteEditHistory: "Post edit history"
|
||||
foldNotification: "Group similar notifications"
|
||||
|
|
|
@ -564,6 +564,7 @@ deletedNote: "已删除的帖子"
|
|||
invisibleNote: "隐藏的帖子"
|
||||
enableInfiniteScroll: "滚动页面以载入更多内容"
|
||||
visibility: "可见性"
|
||||
cannotEditVisibility: "不能编辑帖子的可见性"
|
||||
poll: "调查问卷"
|
||||
useCw: "隐藏内容"
|
||||
enablePlayer: "打开播放器"
|
||||
|
@ -1386,7 +1387,7 @@ _poll:
|
|||
_visibility:
|
||||
public: "公开"
|
||||
publicDescription: "您的帖子将出现在公共时间线上"
|
||||
home: "不公开"
|
||||
home: "悄悄公开"
|
||||
homeDescription: "仅发送至首页时间线"
|
||||
followers: "仅关注者"
|
||||
followersDescription: "仅对您的关注者和提及的用户可见"
|
||||
|
@ -1787,6 +1788,7 @@ _notification:
|
|||
reacted: 回应了您的帖子
|
||||
voted: 在您的问卷调查中投了票
|
||||
renoted: 转发了您的帖子
|
||||
andCountUsers: "和其他 {count} 名用户{acted}"
|
||||
_deck:
|
||||
alwaysShowMainColumn: "总是显示主列"
|
||||
columnAlign: "列对齐"
|
||||
|
@ -2059,3 +2061,4 @@ autocorrectNoteLanguage: 当帖子语言不符合自动检测的结果的时候
|
|||
incorrectLanguageWarning: "看上去您帖子使用的语言是{detected},但您选择的语言是{current}。\n要改为以{detected}发帖吗?"
|
||||
noteEditHistory: "帖子编辑历史"
|
||||
media: 媒体
|
||||
foldNotification: "将通知按同类型分组"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "firefish",
|
||||
"version": "20240421",
|
||||
"version": "20240424",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://firefish.dev/firefish/firefish.git"
|
||||
|
|
|
@ -18,22 +18,17 @@ napi = { workspace = true, optional = true, default-features = false, features =
|
|||
napi-derive = { workspace = true, optional = true }
|
||||
|
||||
argon2 = { workspace = true, features = ["std"] }
|
||||
async-trait = { workspace = true }
|
||||
basen = { workspace = true }
|
||||
bcrypt = { workspace = true }
|
||||
cfg-if = { workspace = true }
|
||||
chrono = { workspace = true }
|
||||
cuid2 = { workspace = true }
|
||||
emojis = { workspace = true }
|
||||
idna = { workspace = true }
|
||||
jsonschema = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
parse-display = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
redis = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
rmp-serde = { workspace = true }
|
||||
schemars = { workspace = true, features = ["chrono"] }
|
||||
sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
|
@ -41,6 +36,8 @@ serde_yaml = { workspace = true }
|
|||
strum = { workspace = true, features = ["derive"] }
|
||||
thiserror = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
url = { workspace = true }
|
||||
urlencoding = { workspace = true }
|
||||
|
||||
|
|
41
packages/backend-rs/index.d.ts
vendored
41
packages/backend-rs/index.d.ts
vendored
|
@ -3,6 +3,21 @@
|
|||
|
||||
/* auto-generated by NAPI-RS */
|
||||
|
||||
export const SECOND: number
|
||||
export const MINUTE: number
|
||||
export const HOUR: number
|
||||
export const DAY: number
|
||||
export const USER_ONLINE_THRESHOLD: number
|
||||
export const USER_ACTIVE_THRESHOLD: number
|
||||
/**
|
||||
* List of file types allowed to be viewed directly in the browser
|
||||
* Anything not included here will be responded as application/octet-stream
|
||||
* SVG is not allowed because it generates XSS <- we need to fix this and later allow it to be viewed directly
|
||||
* https://github.com/sindresorhus/file-type/blob/main/supported.js
|
||||
* https://github.com/sindresorhus/file-type/blob/main/core.js
|
||||
* https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers
|
||||
*/
|
||||
export const FILE_TYPE_BROWSERSAFE: string[]
|
||||
export interface EnvConfig {
|
||||
onlyQueue: boolean
|
||||
onlyServer: boolean
|
||||
|
@ -38,7 +53,9 @@ export interface ServerConfig {
|
|||
inboxJobPerSec?: number
|
||||
deliverJobMaxAttempts?: number
|
||||
inboxJobMaxAttempts?: number
|
||||
/** deprecated */
|
||||
logLevel?: Array<string>
|
||||
maxLogLevel?: string
|
||||
syslog?: SysLogConfig
|
||||
proxyRemoteFiles?: boolean
|
||||
mediaProxy?: string
|
||||
|
@ -148,7 +165,9 @@ export interface Config {
|
|||
inboxJobPerSec?: number
|
||||
deliverJobMaxAttempts?: number
|
||||
inboxJobMaxAttempts?: number
|
||||
/** deprecated */
|
||||
logLevel?: Array<string>
|
||||
maxLogLevel?: string
|
||||
syslog?: SysLogConfig
|
||||
proxyRemoteFiles?: boolean
|
||||
mediaProxy?: string
|
||||
|
@ -156,8 +175,8 @@ export interface Config {
|
|||
reservedUsernames?: Array<string>
|
||||
maxUserSignups?: number
|
||||
isManagedHosting?: boolean
|
||||
maxNoteLength?: number
|
||||
maxCaptionLength?: number
|
||||
maxNoteLength: number
|
||||
maxCaptionLength: number
|
||||
deepl?: DeepLConfig
|
||||
libreTranslate?: LibreTranslateConfig
|
||||
email?: EmailConfig
|
||||
|
@ -264,6 +283,8 @@ export interface DecodedReaction {
|
|||
export function decodeReaction(reaction: string): DecodedReaction
|
||||
export function countReactions(reactions: Record<string, number>): Record<string, number>
|
||||
export function toDbReaction(reaction?: string | undefined | null, host?: string | undefined | null): Promise<string>
|
||||
/** Delete all entries in the "attestation_challenge" table created at more than 5 minutes ago */
|
||||
export function removeOldAttestationChallenges(): Promise<void>
|
||||
export interface AbuseUserReport {
|
||||
id: string
|
||||
createdAt: Date
|
||||
|
@ -1120,8 +1141,16 @@ export interface Webhook {
|
|||
latestSentAt: Date | null
|
||||
latestStatus: number | null
|
||||
}
|
||||
/** Initializes Cuid2 generator. Must be called before any [create_id]. */
|
||||
export function initIdGenerator(length: number, fingerprint: string): void
|
||||
export function initializeRustLogger(): void
|
||||
export function watchNote(watcherId: string, noteAuthorId: string, noteId: string): Promise<void>
|
||||
export function unwatchNote(watcherId: string, noteId: string): Promise<void>
|
||||
export enum ChatEvent {
|
||||
Message = 'message',
|
||||
Read = 'read',
|
||||
Deleted = 'deleted',
|
||||
Typing = 'typing'
|
||||
}
|
||||
export function publishToChatStream(senderUserId: string, receiverUserId: string, kind: ChatEvent, object: any): void
|
||||
export function getTimestamp(id: string): number
|
||||
/**
|
||||
* The generated ID results in the form of `[8 chars timestamp] + [cuid2]`.
|
||||
|
@ -1131,5 +1160,7 @@ export function getTimestamp(id: string): number
|
|||
*
|
||||
* Ref: https://github.com/paralleldrive/cuid2#parameterized-length
|
||||
*/
|
||||
export function genId(date?: Date | undefined | null): string
|
||||
export function genId(): string
|
||||
/** Generate an ID using a specific datetime */
|
||||
export function genIdAt(date: Date): string
|
||||
export function secureRndstr(length?: number | undefined | null): string
|
||||
|
|
|
@ -310,8 +310,15 @@ if (!nativeBinding) {
|
|||
throw new Error(`Failed to load native binding`)
|
||||
}
|
||||
|
||||
const { loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initIdGenerator, getTimestamp, genId, secureRndstr } = nativeBinding
|
||||
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, ChatEvent, publishToChatStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
|
||||
|
||||
module.exports.SECOND = SECOND
|
||||
module.exports.MINUTE = MINUTE
|
||||
module.exports.HOUR = HOUR
|
||||
module.exports.DAY = DAY
|
||||
module.exports.USER_ONLINE_THRESHOLD = USER_ONLINE_THRESHOLD
|
||||
module.exports.USER_ACTIVE_THRESHOLD = USER_ACTIVE_THRESHOLD
|
||||
module.exports.FILE_TYPE_BROWSERSAFE = FILE_TYPE_BROWSERSAFE
|
||||
module.exports.loadEnv = loadEnv
|
||||
module.exports.loadConfig = loadConfig
|
||||
module.exports.stringToAcct = stringToAcct
|
||||
|
@ -342,6 +349,7 @@ module.exports.isOldPasswordAlgorithm = isOldPasswordAlgorithm
|
|||
module.exports.decodeReaction = decodeReaction
|
||||
module.exports.countReactions = countReactions
|
||||
module.exports.toDbReaction = toDbReaction
|
||||
module.exports.removeOldAttestationChallenges = removeOldAttestationChallenges
|
||||
module.exports.AntennaSrcEnum = AntennaSrcEnum
|
||||
module.exports.DriveFileUsageHintEnum = DriveFileUsageHintEnum
|
||||
module.exports.MutedNoteReasonEnum = MutedNoteReasonEnum
|
||||
|
@ -353,7 +361,12 @@ module.exports.RelayStatusEnum = RelayStatusEnum
|
|||
module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum
|
||||
module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum
|
||||
module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum
|
||||
module.exports.initIdGenerator = initIdGenerator
|
||||
module.exports.initializeRustLogger = initializeRustLogger
|
||||
module.exports.watchNote = watchNote
|
||||
module.exports.unwatchNote = unwatchNote
|
||||
module.exports.ChatEvent = ChatEvent
|
||||
module.exports.publishToChatStream = publishToChatStream
|
||||
module.exports.getTimestamp = getTimestamp
|
||||
module.exports.genId = genId
|
||||
module.exports.genIdAt = genIdAt
|
||||
module.exports.secureRndstr = secureRndstr
|
||||
|
|
67
packages/backend-rs/src/config/constant.rs
Normal file
67
packages/backend-rs/src/config/constant.rs
Normal file
|
@ -0,0 +1,67 @@
|
|||
#[crate::export]
|
||||
pub const SECOND: i32 = 1000;
|
||||
#[crate::export]
|
||||
pub const MINUTE: i32 = 60 * SECOND;
|
||||
#[crate::export]
|
||||
pub const HOUR: i32 = 60 * MINUTE;
|
||||
#[crate::export]
|
||||
pub const DAY: i32 = 24 * HOUR;
|
||||
|
||||
#[crate::export]
|
||||
pub const USER_ONLINE_THRESHOLD: i32 = 10 * MINUTE;
|
||||
#[crate::export]
|
||||
pub const USER_ACTIVE_THRESHOLD: i32 = 3 * DAY;
|
||||
|
||||
/// List of file types allowed to be viewed directly in the browser
|
||||
/// Anything not included here will be responded as application/octet-stream
|
||||
/// SVG is not allowed because it generates XSS <- we need to fix this and later allow it to be viewed directly
|
||||
/// https://github.com/sindresorhus/file-type/blob/main/supported.js
|
||||
/// https://github.com/sindresorhus/file-type/blob/main/core.js
|
||||
/// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers
|
||||
#[crate::export]
|
||||
pub const FILE_TYPE_BROWSERSAFE: [&str; 41] = [
|
||||
// Images
|
||||
"image/png",
|
||||
"image/gif", // TODO: deprecated, but still used by old posts, new gifs should be converted to webp in the future
|
||||
"image/jpeg",
|
||||
"image/webp", // TODO: make this the default image format
|
||||
"image/apng",
|
||||
"image/bmp",
|
||||
"image/tiff",
|
||||
"image/x-icon",
|
||||
"image/avif", // not as good supported now, but its good to introduce initial support for the future
|
||||
// OggS
|
||||
"audio/opus",
|
||||
"video/ogg",
|
||||
"audio/ogg",
|
||||
"application/ogg",
|
||||
// ISO/IEC base media file format
|
||||
"video/quicktime",
|
||||
"video/mp4", // TODO: we need to check for av1 later
|
||||
"video/vnd.avi", // also av1
|
||||
"audio/mp4",
|
||||
"video/x-m4v",
|
||||
"audio/x-m4a",
|
||||
"video/3gpp",
|
||||
"video/3gpp2",
|
||||
"video/3gp2",
|
||||
"audio/3gpp",
|
||||
"audio/3gpp2",
|
||||
"audio/3gp2",
|
||||
"video/mpeg",
|
||||
"audio/mpeg",
|
||||
"video/webm",
|
||||
"audio/webm",
|
||||
"audio/aac",
|
||||
"audio/x-flac",
|
||||
"audio/flac",
|
||||
"audio/vnd.wave",
|
||||
"audio/mod",
|
||||
"audio/x-mod",
|
||||
"audio/s3m",
|
||||
"audio/x-s3m",
|
||||
"audio/xm",
|
||||
"audio/x-xm",
|
||||
"audio/it",
|
||||
"audio/x-it",
|
||||
];
|
|
@ -1,4 +1,5 @@
|
|||
pub use server::CONFIG;
|
||||
|
||||
pub mod constant;
|
||||
pub mod environment;
|
||||
pub mod server;
|
||||
|
|
|
@ -36,8 +36,11 @@ struct ServerConfig {
|
|||
pub deliver_job_max_attempts: Option<u32>,
|
||||
pub inbox_job_max_attempts: Option<u32>,
|
||||
|
||||
/// deprecated
|
||||
pub log_level: Option<Vec<String>>,
|
||||
|
||||
pub max_log_level: Option<String>,
|
||||
|
||||
pub syslog: Option<SysLogConfig>,
|
||||
|
||||
pub proxy_remote_files: Option<bool>,
|
||||
|
@ -197,7 +200,11 @@ pub struct Config {
|
|||
pub inbox_job_per_sec: Option<u32>,
|
||||
pub deliver_job_max_attempts: Option<u32>,
|
||||
pub inbox_job_max_attempts: Option<u32>,
|
||||
|
||||
/// deprecated
|
||||
pub log_level: Option<Vec<String>>,
|
||||
|
||||
pub max_log_level: Option<String>,
|
||||
pub syslog: Option<SysLogConfig>,
|
||||
pub proxy_remote_files: Option<bool>,
|
||||
pub media_proxy: Option<String>,
|
||||
|
@ -205,8 +212,8 @@ pub struct Config {
|
|||
pub reserved_usernames: Option<Vec<String>>,
|
||||
pub max_user_signups: Option<u32>,
|
||||
pub is_managed_hosting: Option<bool>,
|
||||
pub max_note_length: Option<u32>,
|
||||
pub max_caption_length: Option<u32>,
|
||||
pub max_note_length: u32,
|
||||
pub max_caption_length: u32,
|
||||
pub deepl: Option<DeepLConfig>,
|
||||
pub libre_translate: Option<LibreTranslateConfig>,
|
||||
pub email: Option<EmailConfig>,
|
||||
|
@ -346,6 +353,7 @@ fn load_config() -> Config {
|
|||
deliver_job_max_attempts: server_config.deliver_job_max_attempts,
|
||||
inbox_job_max_attempts: server_config.inbox_job_max_attempts,
|
||||
log_level: server_config.log_level,
|
||||
max_log_level: server_config.max_log_level,
|
||||
syslog: server_config.syslog,
|
||||
proxy_remote_files: server_config.proxy_remote_files,
|
||||
media_proxy: server_config.media_proxy,
|
||||
|
@ -353,8 +361,8 @@ fn load_config() -> Config {
|
|||
reserved_usernames: server_config.reserved_usernames,
|
||||
max_user_signups: server_config.max_user_signups,
|
||||
is_managed_hosting: server_config.is_managed_hosting,
|
||||
max_note_length: server_config.max_note_length,
|
||||
max_caption_length: server_config.max_caption_length,
|
||||
max_note_length: server_config.max_note_length.unwrap_or(3000),
|
||||
max_caption_length: server_config.max_caption_length.unwrap_or(1500),
|
||||
deepl: server_config.deepl,
|
||||
libre_translate: server_config.libre_translate,
|
||||
email: server_config.email,
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use crate::config::CONFIG;
|
||||
use sea_orm::{Database, DbConn, DbErr};
|
||||
use sea_orm::{ConnectOptions, Database, DbConn, DbErr};
|
||||
use tracing::log::LevelFilter;
|
||||
|
||||
static DB_CONN: once_cell::sync::OnceCell<DbConn> = once_cell::sync::OnceCell::new();
|
||||
|
||||
|
@ -12,7 +13,13 @@ async fn init_database() -> Result<&'static DbConn, DbErr> {
|
|||
CONFIG.db.port,
|
||||
CONFIG.db.db,
|
||||
);
|
||||
let conn = Database::connect(database_uri).await?;
|
||||
let option: ConnectOptions = ConnectOptions::new(database_uri)
|
||||
.sqlx_logging_level(LevelFilter::Trace)
|
||||
.to_owned();
|
||||
|
||||
tracing::info!("Initializing PostgreSQL connection");
|
||||
|
||||
let conn = Database::connect(option).await?;
|
||||
Ok(DB_CONN.get_or_init(move || conn))
|
||||
}
|
||||
|
||||
|
|
|
@ -26,6 +26,8 @@ fn init_redis() -> Result<Client, RedisError> {
|
|||
params.concat()
|
||||
};
|
||||
|
||||
tracing::info!("Initializing Redis connection");
|
||||
|
||||
Client::open(redis_url)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,21 +1,31 @@
|
|||
use crate::database::{redis_conn, redis_key};
|
||||
use crate::model::entity::note;
|
||||
use crate::service::stream;
|
||||
use crate::util::id::get_timestamp;
|
||||
use redis::{streams::StreamMaxlen, Commands};
|
||||
use crate::util::id::{get_timestamp, InvalidIdErr};
|
||||
use redis::{streams::StreamMaxlen, Commands, RedisError};
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error("Redis error: {0}")]
|
||||
RedisErr(#[from] RedisError),
|
||||
#[error("Invalid ID: {0}")]
|
||||
InvalidIdErr(#[from] InvalidIdErr),
|
||||
#[error("Stream error: {0}")]
|
||||
StreamErr(#[from] stream::Error),
|
||||
}
|
||||
|
||||
type Note = note::Model;
|
||||
|
||||
#[crate::export]
|
||||
pub fn add_note_to_antenna(antenna_id: String, note: &Note) -> Result<(), stream::Error> {
|
||||
pub fn add_note_to_antenna(antenna_id: String, note: &Note) -> Result<(), Error> {
|
||||
// for timeline API
|
||||
redis_conn()?.xadd_maxlen(
|
||||
redis_key(format!("antennaTimeline:{}", antenna_id)),
|
||||
StreamMaxlen::Approx(200),
|
||||
format!("{}-*", get_timestamp(¬e.id)),
|
||||
format!("{}-*", get_timestamp(¬e.id)?),
|
||||
&[("note", ¬e.id)],
|
||||
)?;
|
||||
|
||||
// for streaming API
|
||||
stream::antenna::publish(antenna_id, note)
|
||||
Ok(stream::antenna::publish(antenna_id, note)?)
|
||||
}
|
||||
|
|
|
@ -39,7 +39,7 @@ async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
|
|||
.flatten(),
|
||||
);
|
||||
|
||||
if let Some(renote_id) = note.renote_id {
|
||||
if let Some(renote_id) = ¬e.renote_id {
|
||||
if let Some((text, cw)) = note::Entity::find_by_id(renote_id)
|
||||
.select_only()
|
||||
.columns([note::Column::Text, note::Column::Cw])
|
||||
|
@ -53,10 +53,12 @@ async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
|
|||
if let Some(c) = cw {
|
||||
texts.push(c);
|
||||
}
|
||||
} else {
|
||||
tracing::warn!("nonexistent renote id: {:#?}", renote_id);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(reply_id) = note.reply_id {
|
||||
if let Some(reply_id) = ¬e.reply_id {
|
||||
if let Some((text, cw)) = note::Entity::find_by_id(reply_id)
|
||||
.select_only()
|
||||
.columns([note::Column::Text, note::Column::Cw])
|
||||
|
@ -70,6 +72,8 @@ async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
|
|||
if let Some(c) = cw {
|
||||
texts.push(c);
|
||||
}
|
||||
} else {
|
||||
tracing::warn!("nonexistent reply id: {:#?}", reply_id);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#[crate::export]
|
||||
pub fn to_mastodon_id(firefish_id: &str) -> Option<String> {
|
||||
let decoded: [u8; 16] = basen::BASE36.decode_var_len(&firefish_id.to_ascii_lowercase())?;
|
||||
let decoded: [u8; 16] = basen::BASE36.decode_var_len(firefish_id)?;
|
||||
Some(basen::BASE10.encode_var_len(&decoded))
|
||||
}
|
||||
|
||||
|
|
|
@ -13,3 +13,4 @@ pub mod nyaify;
|
|||
pub mod password;
|
||||
pub mod reaction;
|
||||
pub mod redis_cache;
|
||||
pub mod remove_old_attestation_challenges;
|
||||
|
|
|
@ -97,6 +97,8 @@ pub async fn to_db_reaction(reaction: Option<&str>, host: Option<&str>) -> Resul
|
|||
{
|
||||
return Ok(format!(":{name}@{ascii_host}:"));
|
||||
}
|
||||
|
||||
tracing::info!("nonexistent remote custom emoji: :{name}@{ascii_host}:");
|
||||
} else {
|
||||
// local emoji
|
||||
// TODO: Does SeaORM have the `exists` method?
|
||||
|
@ -109,6 +111,8 @@ pub async fn to_db_reaction(reaction: Option<&str>, host: Option<&str>) -> Resul
|
|||
{
|
||||
return Ok(format!(":{name}:"));
|
||||
}
|
||||
|
||||
tracing::info!("nonexistent local custom emoji: :{name}:");
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
// TODO: We want to get rid of this
|
||||
|
||||
use crate::database::db_conn;
|
||||
use crate::model::entity::attestation_challenge;
|
||||
use chrono::{Duration, Local};
|
||||
use sea_orm::{ColumnTrait, DbErr, EntityTrait, QueryFilter};
|
||||
|
||||
/// Delete all entries in the "attestation_challenge" table created at more than 5 minutes ago
|
||||
#[crate::export]
|
||||
pub async fn remove_old_attestation_challenges() -> Result<(), DbErr> {
|
||||
let res = attestation_challenge::Entity::delete_many()
|
||||
.filter(attestation_challenge::Column::CreatedAt.lt(Local::now() - Duration::minutes(5)))
|
||||
.exec(db_conn().await?)
|
||||
.await?;
|
||||
|
||||
tracing::info!("{} attestation challenges are removed", res.rows_affected);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
|
||||
pub enum Error {
|
||||
#[error("Failed to parse string: {0}")]
|
||||
ParseError(#[from] parse_display::ParseError),
|
||||
#[error("Database error: {0}")]
|
||||
DbError(#[from] sea_orm::DbErr),
|
||||
#[error("Requested entity not found")]
|
||||
NotFound,
|
||||
}
|
|
@ -1,4 +1 @@
|
|||
pub mod entity;
|
||||
pub mod error;
|
||||
// pub mod repository;
|
||||
pub mod schema;
|
||||
|
|
|
@ -1,31 +0,0 @@
use async_trait::async_trait;
use schemars::JsonSchema;

use super::error::Error;

/// Repositories have a packer that converts a database model to its
/// corresponding API schema.
#[async_trait]
pub trait Repository<T: JsonSchema> {
async fn pack(self) -> Result<T, Error>;
/// Retrieves one model by its id and pack it.
async fn pack_by_id(id: String) -> Result<T, Error>;
}

mod macros {
/// Provides the default implementation of
/// [crate::model::repository::Repository::pack_by_id].
macro_rules! impl_pack_by_id {
($a:ty, $b:ident) => {
match <$a>::find_by_id($b)
.one(crate::database::get_database()?)
.await?
{
None => Err(Error::NotFound),
Some(m) => m.pack().await,
}
};
}

pub(crate) use impl_pack_by_id;
}
@ -1,18 +0,0 @@
use jsonschema::JSONSchema;
use schemars::{schema_for, JsonSchema};

/// Structs of schema defitions implement this trait in order to
/// provide the JSON Schema validator [`jsonschema::JSONSchema`].
pub trait Schema<T: JsonSchema> {
/// Returns the validator of [JSON Schema Draft
/// 7](https://json-schema.org/specification-links.html#draft-7) with the
/// default settings of [`schemars::gen::SchemaSettings`].
fn validator() -> JSONSchema {
let root = schema_for!(T);
let schema = serde_json::to_value(&root).expect("Schema definition invalid");
JSONSchema::options()
.with_draft(jsonschema::Draft::Draft7)
.compile(&schema)
.expect("Unable to compile schema")
}
}
packages/backend-rs/src/service/log.rs (new file, 39 lines)
@ -0,0 +1,39 @@
use crate::config::CONFIG;
use tracing::Level;
use tracing_subscriber::FmtSubscriber;

#[crate::export(js_name = "initializeRustLogger")]
pub fn initialize_logger() {
let mut builder = FmtSubscriber::builder();

if let Some(max_level) = &CONFIG.max_log_level {
builder = builder.with_max_level(match max_level.as_str() {
"error" => Level::ERROR,
"warning" => Level::WARN,
"info" => Level::INFO,
"debug" => Level::DEBUG,
"trace" => Level::TRACE,
_ => Level::INFO,
});
} else if let Some(levels) = &CONFIG.log_level {
// `logLevel` config is Deprecated
if levels.contains(&"trace".to_string()) {
builder = builder.with_max_level(Level::TRACE);
} else if levels.contains(&"debug".to_string()) {
builder = builder.with_max_level(Level::DEBUG);
} else if levels.contains(&"info".to_string()) {
builder = builder.with_max_level(Level::INFO);
} else if levels.contains(&"warning".to_string()) {
builder = builder.with_max_level(Level::WARN);
} else if levels.contains(&"error".to_string()) {
builder = builder.with_max_level(Level::ERROR);
}
} else {
// Fallback
builder = builder.with_max_level(Level::INFO);
};

let subscriber = builder.with_level(true).pretty().finish();

tracing::subscriber::set_global_default(subscriber).expect("Failed to initialize the logger");
}
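Note (not part of the diff): this subscriber reads the new `maxLogLevel` config key ('error', 'warning', 'info', 'debug', or 'trace'), falls back to the deprecated `logLevel` array, and must be installed exactly once per process. A minimal sketch of the Node-side call, using only the `initializeRustLogger` export shown above:

```ts
import { initializeRustLogger } from "backend-rs";

// Install the Rust tracing subscriber once during boot, after the config is
// loaded and before any Rust code logs. A second call would fail, because a
// global tracing subscriber can only be set once per process.
initializeRustLogger();
```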
@ -1 +1,3 @@
pub mod log;
pub mod note;
pub mod stream;
packages/backend-rs/src/service/note/mod.rs (new file, 1 line)

@ -0,0 +1 @@
pub mod watch;

packages/backend-rs/src/service/note/watch.rs (new file, 42 lines)
@ -0,0 +1,42 @@
use crate::database::db_conn;
use crate::model::entity::note_watching;
use crate::util::id::gen_id;
use sea_orm::{ActiveValue, ColumnTrait, DbErr, EntityTrait, ModelTrait, QueryFilter};

#[crate::export]
pub async fn watch_note(
watcher_id: &str,
note_author_id: &str,
note_id: &str,
) -> Result<(), DbErr> {
if watcher_id != note_author_id {
note_watching::Entity::insert(note_watching::ActiveModel {
id: ActiveValue::set(gen_id()),
created_at: ActiveValue::set(chrono::Local::now().naive_local()),
user_id: ActiveValue::Set(watcher_id.to_string()),
note_user_id: ActiveValue::Set(note_author_id.to_string()),
note_id: ActiveValue::Set(note_id.to_string()),
})
.exec(db_conn().await?)
.await?;
}

Ok(())
}

#[crate::export]
pub async fn unwatch_note(watcher_id: &str, note_id: &str) -> Result<(), DbErr> {
let db = db_conn().await?;

let entry = note_watching::Entity::find()
.filter(note_watching::Column::UserId.eq(watcher_id))
.filter(note_watching::Column::NoteId.eq(note_id))
.one(db)
.await?;

if let Some(entry) = entry {
entry.delete(db).await?;
}

Ok(())
}
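Note (not part of the diff): a sketch of how these helpers are expected to be used from TypeScript. The camelCase binding names (`watchNote`, `unwatchNote`) are an assumption based on the `#[crate::export]` macro and are not shown in this diff:

```ts
import { watchNote, unwatchNote } from "backend-rs"; // assumed export names

async function setWatching(meId: string, noteAuthorId: string, noteId: string, watch: boolean) {
	if (watch) {
		// No-op on the Rust side when the watcher is the note's author.
		await watchNote(meId, noteAuthorId, noteId);
	} else {
		// Deletes the row only if it exists, so repeated calls are safe.
		await unwatchNote(meId, noteId);
	}
}
```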
@ -1,4 +1,5 @@
pub mod antenna;
pub mod chat;

use crate::config::CONFIG;
use crate::database::redis_conn;

@ -51,7 +52,7 @@ pub enum Error {

pub fn publish_to_stream(
stream: &Stream,
kind: Option<&str>,
kind: Option<String>,
value: Option<String>,
) -> Result<(), Error> {
let message = if let Some(kind) = kind {
@ -4,7 +4,7 @@ use crate::service::stream::{publish_to_stream, Error, Stream};
pub fn publish(antenna_id: String, note: &note::Model) -> Result<(), Error> {
publish_to_stream(
&Stream::Antenna { antenna_id },
Some("note"),
Some("note".to_string()),
Some(serde_json::to_string(note)?),
)
}
packages/backend-rs/src/service/stream/chat.rs (new file, 31 lines)
@ -0,0 +1,31 @@
use crate::service::stream::{publish_to_stream, Error, Stream};

#[derive(strum::Display)]
#[crate::export(string_enum = "camelCase")]
pub enum ChatEvent {
#[strum(serialize = "message")]
Message,
#[strum(serialize = "read")]
Read,
#[strum(serialize = "deleted")]
Deleted,
#[strum(serialize = "typing")]
Typing,
}

#[crate::export(js_name = "publishToChatStream")]
pub fn publish(
sender_user_id: String,
receiver_user_id: String,
kind: ChatEvent,
object: &serde_json::Value, // TODO?: change this to enum
) -> Result<(), Error> {
publish_to_stream(
&Stream::Chat {
sender_user_id,
receiver_user_id,
},
Some(kind.to_string()),
Some(serde_json::to_string(object)?),
)
}
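Note (not part of the diff): the `js_name` above makes this available as `publishToChatStream`. A hedged TypeScript sketch of a call site, assuming the `ChatEvent` string enum is exposed to JavaScript as an object with `Message`, `Read`, `Deleted`, and `Typing` members:

```ts
import { publishToChatStream, ChatEvent } from "backend-rs"; // ChatEvent shape assumed

function notifyNewChatMessage(senderId: string, receiverId: string, message: object) {
	// The payload is serialized to JSON on the Rust side and published to the
	// chat stream shared by this sender/receiver pair.
	publishToChatStream(senderId, receiverId, ChatEvent.Message, message);
}
```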
@ -1,95 +1,109 @@
//! ID generation utility based on [cuid2]

use crate::config::CONFIG;
use basen::BASE36;
use cfg_if::cfg_if;
use chrono::NaiveDateTime;
use chrono::{DateTime, NaiveDateTime, Utc};
use once_cell::sync::OnceCell;
use std::cmp;

#[derive(thiserror::Error, Debug, PartialEq, Eq)]
#[error("ID generator has not been initialized yet")]
pub struct ErrorUninitialized;

static FINGERPRINT: OnceCell<String> = OnceCell::new();
static GENERATOR: OnceCell<cuid2::CuidConstructor> = OnceCell::new();

const TIME_2000: i64 = 946_684_800_000;
const TIMESTAMP_LENGTH: u16 = 8;
const TIMESTAMP_LENGTH: u8 = 8;

/// Initializes Cuid2 generator. Must be called before any [create_id].
#[crate::export]
pub fn init_id_generator(length: u16, fingerprint: &str) {
/// Initializes Cuid2 generator.
fn init_id_generator(length: u8, fingerprint: &str) {
FINGERPRINT.get_or_init(move || format!("{}{}", fingerprint, cuid2::create_id()));
GENERATOR.get_or_init(move || {
cuid2::CuidConstructor::new()
// length to pass shoule be greater than or equal to 8.
.with_length(cmp::max(length - TIMESTAMP_LENGTH, 8))
.with_length(cmp::max(length - TIMESTAMP_LENGTH, 8).into())
.with_fingerprinter(|| FINGERPRINT.get().unwrap().clone())
});
}

/// Returns Cuid2 with the length specified by [init_id]. Must be called after
/// [init_id], otherwise returns [ErrorUninitialized].
pub fn create_id(datetime: &NaiveDateTime) -> Result<String, ErrorUninitialized> {
match GENERATOR.get() {
None => Err(ErrorUninitialized),
Some(gen) => {
let date_num = cmp::max(0, datetime.and_utc().timestamp_millis() - TIME_2000) as u64;
Ok(format!(
"{:0>8}{}",
BASE36.encode_var_len(&date_num),
gen.create_id()
))
}
/// Returns Cuid2 with the length specified by [init_id_generator].
/// It automatically calls [init_id_generator], if the generator has not been initialized.
fn create_id(datetime: &NaiveDateTime) -> String {
if GENERATOR.get().is_none() {
let length = match &CONFIG.cuid {
Some(cuid) => cmp::min(cmp::max(cuid.length.unwrap_or(16), 16), 24),
None => 16,
};
let fingerprint = match &CONFIG.cuid {
Some(cuid) => cuid.fingerprint.as_deref().unwrap_or_default(),
None => "",
};
init_id_generator(length, fingerprint);
}
let date_num = cmp::max(0, datetime.and_utc().timestamp_millis() - TIME_2000) as u64;
format!(
"{:0>8}{}",
BASE36.encode_var_len(&date_num),
GENERATOR.get().unwrap().create_id()
)
}

#[derive(thiserror::Error, Debug)]
#[error("Invalid ID: {id}")]
pub struct InvalidIdErr {
id: String,
}

#[crate::export]
pub fn get_timestamp(id: &str) -> i64 {
pub fn get_timestamp(id: &str) -> Result<i64, InvalidIdErr> {
let n: Option<u64> = BASE36.decode_var_len(&id[0..8]);
match n {
None => -1,
Some(n) => n as i64 + TIME_2000,
if let Some(n) = n {
Ok(n as i64 + TIME_2000)
} else {
Err(InvalidIdErr { id: id.to_string() })
}
}

cfg_if! {
if #[cfg(feature = "napi")] {
use chrono::{DateTime, Utc};
/// The generated ID results in the form of `[8 chars timestamp] + [cuid2]`.
/// The minimum and maximum lengths are 16 and 24, respectively.
/// With the length of 16, namely 8 for cuid2, roughly 1427399 IDs are needed
/// in the same millisecond to reach 50% chance of collision.
///
/// Ref: https://github.com/paralleldrive/cuid2#parameterized-length
#[crate::export]
pub fn gen_id() -> String {
create_id(&Utc::now().naive_utc())
}

/// The generated ID results in the form of `[8 chars timestamp] + [cuid2]`.
/// The minimum and maximum lengths are 16 and 24, respectively.
/// With the length of 16, namely 8 for cuid2, roughly 1427399 IDs are needed
/// in the same millisecond to reach 50% chance of collision.
///
/// Ref: https://github.com/paralleldrive/cuid2#parameterized-length
#[napi_derive::napi]
pub fn gen_id(date: Option<DateTime<Utc>>) -> String {
create_id(&date.unwrap_or_else(Utc::now).naive_utc()).unwrap()
}
}
/// Generate an ID using a specific datetime
#[crate::export]
pub fn gen_id_at(date: DateTime<Utc>) -> String {
create_id(&date.naive_utc())
}

#[cfg(test)]
mod unit_test {
use crate::util::id;
use chrono::Utc;
use super::{gen_id, gen_id_at, get_timestamp};
use chrono::{Duration, Utc};
use pretty_assertions::{assert_eq, assert_ne};
use std::thread;

#[test]
fn can_create_and_decode_id() {
let now = Utc::now().naive_utc();
assert_eq!(id::create_id(&now), Err(id::ErrorUninitialized));
id::init_id_generator(16, "");
assert_eq!(id::create_id(&now).unwrap().len(), 16);
assert_ne!(id::create_id(&now).unwrap(), id::create_id(&now).unwrap());
let id1 = thread::spawn(move || id::create_id(&now).unwrap());
let id2 = thread::spawn(move || id::create_id(&now).unwrap());
let now = Utc::now();
assert_eq!(gen_id().len(), 16);
assert_ne!(gen_id_at(now), gen_id_at(now));
assert_ne!(gen_id(), gen_id());

let id1 = thread::spawn(move || gen_id_at(now));
let id2 = thread::spawn(move || gen_id_at(now));
assert_ne!(id1.join().unwrap(), id2.join().unwrap());

let test_id = id::create_id(&now).unwrap();
let timestamp = id::get_timestamp(&test_id);
assert_eq!(now.and_utc().timestamp_millis(), timestamp);
let test_id = gen_id_at(now);
let timestamp = get_timestamp(&test_id).unwrap();
assert_eq!(now.timestamp_millis(), timestamp);

let now_id = gen_id_at(now);
let old_id = gen_id_at(now - Duration::milliseconds(1));
let future_id = gen_id_at(now + Duration::milliseconds(1));
assert!(old_id < now_id);
assert!(now_id < future_id);
}
}
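Note (not part of the diff): `gen_id` is already consumed from TypeScript as `genId` elsewhere in this change set. A minimal usage sketch; the `genIdAt` and `getTimestamp` binding names are assumed from the `#[crate::export]` camelCase convention:

```ts
import { genId, genIdAt, getTimestamp } from "backend-rs"; // genIdAt/getTimestamp names assumed

// IDs start with an 8-character base36 timestamp, so they sort chronologically.
const id = genId();

// Backdating an ID, e.g. when importing old posts.
const oldId = genIdAt(new Date("2020-01-01T00:00:00Z"));

// Milliseconds since the Unix epoch; this now throws on malformed IDs
// instead of returning -1 as the old implementation did.
const createdAtMillis = getTimestamp(id);
```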
@ -9,7 +9,8 @@ import semver from "semver";

import Logger from "@/services/logger.js";
import type { Config } from "backend-rs";
import { fetchMeta } from "backend-rs";
import { initializeRustLogger } from "backend-rs";
import { fetchMeta, removeOldAttestationChallenges } from "backend-rs";
import { config, envOption } from "@/config.js";
import { showMachineInfo } from "@/misc/show-machine-info.js";
import { db, initDb } from "@/db/postgre.js";

@ -94,6 +95,7 @@ export async function masterMain() {
await showMachineInfo(bootLogger);
showNodejsVersion();
await connectDb();
initializeRustLogger();
} catch (e) {
bootLogger.error(
`Fatal error occurred during initialization:\n${inspect(e)}`,

@ -103,30 +105,26 @@ export async function masterMain() {
process.exit(1);
}

bootLogger.succ("Firefish initialized");
bootLogger.info("Firefish initialized");

if (!envOption.disableClustering) {
await spawnWorkers(config.clusterLimits);
}

bootLogger.succ(
bootLogger.info(
`Now listening on port ${config.port} on ${config.url}`,
null,
true,
);

if (
!envOption.noDaemons &&
config.clusterLimits?.web &&
config.clusterLimits?.web >= 1
) {
if (!envOption.noDaemons) {
import("../daemons/server-stats.js").then((x) => x.default());
import("../daemons/queue-stats.js").then((x) => x.default());
import("../daemons/janitor.js").then((x) => x.default());
// Update meta cache every 5 minitues
setInterval(() => fetchMeta(false), 1000 * 60 * 5);
// Remove old attestation challenges
setInterval(() => removeOldAttestationChallenges(), 1000 * 60 * 30);
}

// Update meta cache every 5 minitues
setInterval(() => fetchMeta(false), 1000 * 60 * 5);
}

function showEnvironment(): void {

@ -164,7 +162,7 @@ async function connectDb(): Promise<void> {
const v = await db
.query("SHOW server_version")
.then((x) => x[0].server_version);
dbLogger.succ(`Connected: v${v}`);
dbLogger.info(`Connected: v${v}`);
} catch (e) {
dbLogger.error("Failed to connect to the database", null, true);
dbLogger.error(inspect(e));

@ -200,7 +198,7 @@ async function spawnWorkers(
`Starting ${clusterLimits.web} web workers and ${clusterLimits.queue} queue workers (total ${total})...`,
);
await Promise.all(workers.map((mode) => spawnWorker(mode)));
bootLogger.succ("All workers started");
bootLogger.info("All workers started");
}

function spawnWorker(mode: "web" | "queue"): Promise<void> {
@ -1,17 +1,11 @@
import cluster from "node:cluster";
import { config } from "@/config.js";
import { initDb } from "@/db/postgre.js";
import { initIdGenerator } from "backend-rs";
import os from "node:os";

/**
* Init worker process
*/
export async function workerMain() {
const length = Math.min(Math.max(config.cuid?.length ?? 16, 16), 24);
const fingerprint = config.cuid?.fingerprint ?? "";
initIdGenerator(length, fingerprint);

await initDb();

if (!process.env.mode || process.env.mode === "web") {
@ -1,83 +0,0 @@
|
|||
import { config } from "@/config.js";
|
||||
import {
|
||||
DB_MAX_IMAGE_COMMENT_LENGTH,
|
||||
DB_MAX_NOTE_TEXT_LENGTH,
|
||||
} from "@/misc/hard-limits.js";
|
||||
|
||||
export const MAX_NOTE_TEXT_LENGTH = Math.min(
|
||||
config.maxNoteLength ?? 3000,
|
||||
DB_MAX_NOTE_TEXT_LENGTH,
|
||||
);
|
||||
export const MAX_CAPTION_TEXT_LENGTH = Math.min(
|
||||
config.maxCaptionLength ?? 1500,
|
||||
DB_MAX_IMAGE_COMMENT_LENGTH,
|
||||
);
|
||||
|
||||
export const SECOND = 1000;
|
||||
export const MINUTE = 60 * SECOND;
|
||||
export const HOUR = 60 * MINUTE;
|
||||
export const DAY = 24 * HOUR;
|
||||
|
||||
export const USER_ONLINE_THRESHOLD = 10 * MINUTE;
|
||||
export const USER_ACTIVE_THRESHOLD = 3 * DAY;
|
||||
|
||||
// List of file types allowed to be viewed directly in the browser
|
||||
// Anything not included here will be responded as application/octet-stream
|
||||
// SVG is not allowed because it generates XSS <- we need to fix this and later allow it to be viewed directly
|
||||
export const FILE_TYPE_BROWSERSAFE = [
|
||||
// Images
|
||||
"image/png",
|
||||
"image/gif", // TODO: deprecated, but still used by old notes, new gifs should be converted to webp in the future
|
||||
"image/jpeg",
|
||||
"image/webp", // TODO: make this the default image format
|
||||
"image/apng",
|
||||
"image/bmp",
|
||||
"image/tiff",
|
||||
"image/x-icon",
|
||||
"image/avif", // not as good supported now, but its good to introduce initial support for the future
|
||||
|
||||
// OggS
|
||||
"audio/opus",
|
||||
"video/ogg",
|
||||
"audio/ogg",
|
||||
"application/ogg",
|
||||
|
||||
// ISO/IEC base media file format
|
||||
"video/quicktime",
|
||||
"video/mp4", // TODO: we need to check for av1 later
|
||||
"video/vnd.avi", // also av1
|
||||
"audio/mp4",
|
||||
"video/x-m4v",
|
||||
"audio/x-m4a",
|
||||
"video/3gpp",
|
||||
"video/3gpp2",
|
||||
"video/3gp2",
|
||||
"audio/3gpp",
|
||||
"audio/3gpp2",
|
||||
"audio/3gp2",
|
||||
|
||||
"video/mpeg",
|
||||
"audio/mpeg",
|
||||
|
||||
"video/webm",
|
||||
"audio/webm",
|
||||
|
||||
"audio/aac",
|
||||
"audio/x-flac",
|
||||
"audio/flac",
|
||||
"audio/vnd.wave",
|
||||
|
||||
"audio/mod",
|
||||
"audio/x-mod",
|
||||
"audio/s3m",
|
||||
"audio/x-s3m",
|
||||
"audio/xm",
|
||||
"audio/x-xm",
|
||||
"audio/it",
|
||||
"audio/x-it",
|
||||
];
|
||||
/*
|
||||
https://github.com/sindresorhus/file-type/blob/main/supported.js
|
||||
https://github.com/sindresorhus/file-type/blob/main/core.js
|
||||
https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers
|
||||
*/
|
|
@ -1,20 +0,0 @@
|
|||
// TODO: 消したい
|
||||
|
||||
const interval = 30 * 60 * 1000;
|
||||
import { AttestationChallenges } from "@/models/index.js";
|
||||
import { LessThan } from "typeorm";
|
||||
|
||||
/**
|
||||
* Clean up database occasionally
|
||||
*/
|
||||
export default function () {
|
||||
async function tick() {
|
||||
await AttestationChallenges.delete({
|
||||
createdAt: LessThan(new Date(Date.now() - 5 * 60 * 1000)),
|
||||
});
|
||||
}
|
||||
|
||||
tick();
|
||||
|
||||
setInterval(tick, interval);
|
||||
}
|
|
@ -80,7 +80,7 @@ import { dbLogger } from "./logger.js";
|
|||
|
||||
const sqlLogger = dbLogger.createSubLogger("sql", "gray", false);
|
||||
|
||||
class MyCustomLogger implements Logger {
|
||||
class DbLogger implements Logger {
|
||||
private highlight(sql: string) {
|
||||
return highlight.highlight(sql, {
|
||||
language: "sql",
|
||||
|
@ -89,15 +89,16 @@ class MyCustomLogger implements Logger {
|
|||
}
|
||||
|
||||
public logQuery(query: string, parameters?: any[]) {
|
||||
sqlLogger.info(this.highlight(query).substring(0, 100));
|
||||
sqlLogger.trace(this.highlight(query).substring(0, 100));
|
||||
}
|
||||
|
||||
public logQueryError(error: string, query: string, parameters?: any[]) {
|
||||
sqlLogger.error(this.highlight(query));
|
||||
sqlLogger.error(error);
|
||||
sqlLogger.trace(this.highlight(query));
|
||||
}
|
||||
|
||||
public logQuerySlow(time: number, query: string, parameters?: any[]) {
|
||||
sqlLogger.warn(this.highlight(query));
|
||||
sqlLogger.trace(this.highlight(query));
|
||||
}
|
||||
|
||||
public logSchemaBuild(message: string) {
|
||||
|
@ -215,7 +216,7 @@ export const db = new DataSource({
|
|||
}
|
||||
: false,
|
||||
logging: log,
|
||||
logger: log ? new MyCustomLogger() : undefined,
|
||||
logger: log ? new DbLogger() : undefined,
|
||||
maxQueryExecutionTime: 300,
|
||||
entities: entities,
|
||||
migrations: ["../../migration/*.js"],
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import * as fs from "node:fs";
|
||||
import * as util from "node:util";
|
||||
import * as fs from "node:fs/promises";
|
||||
import Logger from "@/services/logger.js";
|
||||
import { createTemp } from "./create-temp.js";
|
||||
import { downloadUrl } from "./download-url.js";
|
||||
|
@ -16,7 +15,7 @@ export async function downloadTextFile(url: string): Promise<string> {
|
|||
// write content at URL to temp file
|
||||
await downloadUrl(url, path);
|
||||
|
||||
const text = await util.promisify(fs.readFile)(path, "utf8");
|
||||
const text = await fs.readFile(path, "utf-8");
|
||||
|
||||
return text;
|
||||
} finally {
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import * as fs from "node:fs";
|
||||
import * as stream from "node:stream";
|
||||
import * as util from "node:util";
|
||||
import * as stream from "node:stream/promises";
|
||||
import got, * as Got from "got";
|
||||
import { config } from "@/config.js";
|
||||
import { getAgentByHostname, StatusError } from "./fetch.js";
|
||||
|
@ -10,16 +9,14 @@ import IPCIDR from "ip-cidr";
|
|||
import PrivateIp from "private-ip";
|
||||
import { isValidUrl } from "./is-valid-url.js";
|
||||
|
||||
const pipeline = util.promisify(stream.pipeline);
|
||||
|
||||
export async function downloadUrl(url: string, path: string): Promise<void> {
|
||||
if (!isValidUrl(url)) {
|
||||
throw new StatusError("Invalid URL", 400);
|
||||
}
|
||||
|
||||
const logger = new Logger("download");
|
||||
const downloadLogger = new Logger("download");
|
||||
|
||||
logger.info(`Downloading ${chalk.cyan(url)} ...`);
|
||||
downloadLogger.debug(`Downloading ${chalk.cyan(url)} ...`);
|
||||
|
||||
const timeout = 30 * 1000;
|
||||
const operationTimeout = 60 * 1000;
|
||||
|
@ -48,7 +45,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
|
|||
})
|
||||
.on("redirect", (res: Got.Response, opts: Got.NormalizedOptions) => {
|
||||
if (!isValidUrl(opts.url)) {
|
||||
logger.warn(`Invalid URL: ${opts.url}`);
|
||||
downloadLogger.warn(`Invalid URL: ${opts.url}`);
|
||||
req.destroy();
|
||||
}
|
||||
})
|
||||
|
@ -60,7 +57,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
|
|||
res.ip
|
||||
) {
|
||||
if (isPrivateIp(res.ip)) {
|
||||
logger.warn(`Blocked address: ${res.ip}`);
|
||||
downloadLogger.warn(`Blocked address: ${res.ip}`);
|
||||
req.destroy();
|
||||
}
|
||||
}
|
||||
|
@ -69,14 +66,16 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
|
|||
if (contentLength != null) {
|
||||
const size = Number(contentLength);
|
||||
if (size > maxSize) {
|
||||
logger.warn(`maxSize exceeded (${size} > ${maxSize}) on response`);
|
||||
downloadLogger.warn(
|
||||
`maxSize exceeded (${size} > ${maxSize}) on response`,
|
||||
);
|
||||
req.destroy();
|
||||
}
|
||||
}
|
||||
})
|
||||
.on("downloadProgress", (progress: Got.Progress) => {
|
||||
if (progress.transferred > maxSize) {
|
||||
logger.warn(
|
||||
downloadLogger.warn(
|
||||
`maxSize exceeded (${progress.transferred} > ${maxSize}) on downloadProgress`,
|
||||
);
|
||||
req.destroy();
|
||||
|
@ -84,7 +83,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
|
|||
});
|
||||
|
||||
try {
|
||||
await pipeline(req, fs.createWriteStream(path));
|
||||
await stream.pipeline(req, fs.createWriteStream(path));
|
||||
} catch (e) {
|
||||
if (e instanceof Got.HTTPError) {
|
||||
throw new StatusError(
|
||||
|
@ -97,7 +96,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
|
|||
}
|
||||
}
|
||||
|
||||
logger.succ(`Download finished: ${chalk.cyan(url)}`);
|
||||
downloadLogger.debug(`Download finished: ${chalk.cyan(url)}`);
|
||||
}
|
||||
|
||||
export function isPrivateIp(ip: string): boolean {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import probeImageSize from "probe-image-size";
|
||||
import { Mutex } from "redis-semaphore";
|
||||
|
||||
import { FILE_TYPE_BROWSERSAFE } from "@/const.js";
|
||||
import { FILE_TYPE_BROWSERSAFE } from "backend-rs";
|
||||
import Logger from "@/services/logger.js";
|
||||
import { Cache } from "./cache.js";
|
||||
import { redisClient } from "@/db/redis.js";
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import * as fs from "node:fs";
|
||||
import * as fs from "node:fs/promises";
|
||||
import { createReadStream } from "node:fs";
|
||||
import * as crypto from "node:crypto";
|
||||
import * as stream from "node:stream";
|
||||
import * as util from "node:util";
|
||||
import * as stream from "node:stream/promises";
|
||||
import { fileTypeFromFile } from "file-type";
|
||||
import probeImageSize from "probe-image-size";
|
||||
import isSvg from "is-svg";
|
||||
|
@ -9,8 +9,6 @@ import sharp from "sharp";
|
|||
import { encode } from "blurhash";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const pipeline = util.promisify(stream.pipeline);
|
||||
|
||||
export type FileInfo = {
|
||||
size: number;
|
||||
md5: string;
|
||||
|
@ -163,7 +161,7 @@ export async function checkSvg(path: string) {
|
|||
try {
|
||||
const size = await getFileSize(path);
|
||||
if (size > 1 * 1024 * 1024) return false;
|
||||
return isSvg(fs.readFileSync(path));
|
||||
return isSvg(await fs.readFile(path, "utf-8"));
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
@ -173,8 +171,7 @@ export async function checkSvg(path: string) {
|
|||
* Get file size
|
||||
*/
|
||||
export async function getFileSize(path: string): Promise<number> {
|
||||
const getStat = util.promisify(fs.stat);
|
||||
return (await getStat(path)).size;
|
||||
return (await fs.stat(path)).size;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -182,7 +179,7 @@ export async function getFileSize(path: string): Promise<number> {
|
|||
*/
|
||||
async function calcHash(path: string): Promise<string> {
|
||||
const hash = crypto.createHash("md5").setEncoding("hex");
|
||||
await pipeline(fs.createReadStream(path), hash);
|
||||
await stream.pipeline(createReadStream(path), hash);
|
||||
return hash.read();
|
||||
}
|
||||
|
||||
|
@ -196,7 +193,7 @@ async function detectImageSize(path: string): Promise<{
|
|||
hUnits: string;
|
||||
orientation?: number;
|
||||
}> {
|
||||
const readable = fs.createReadStream(path);
|
||||
const readable = createReadStream(path);
|
||||
const imageSize = await probeImageSize(readable);
|
||||
readable.destroy();
|
||||
return imageSize;
|
||||
|
@ -214,7 +211,7 @@ function getBlurhash(path: string): Promise<string> {
|
|||
.toBuffer((err, buffer, { width, height }) => {
|
||||
if (err) return reject(err);
|
||||
|
||||
let hash;
|
||||
let hash: string;
|
||||
|
||||
try {
|
||||
hash = encode(new Uint8ClampedArray(buffer), width, height, 7, 7);
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
// If you change DB_* values, you must also change the DB schema.
|
||||
|
||||
/**
|
||||
* Maximum note text length that can be stored in DB.
|
||||
* Surrogate pairs count as one
|
||||
*
|
||||
* NOTE: this can hypothetically be pushed further
|
||||
* (up to 250000000), but will likely cause truncations
|
||||
* and incompatibilities with other servers,
|
||||
* as well as potential performance issues.
|
||||
*/
|
||||
export const DB_MAX_NOTE_TEXT_LENGTH = 100000;
|
||||
|
||||
/**
|
||||
* Maximum image description length that can be stored in DB.
|
||||
* Surrogate pairs count as one
|
||||
*/
|
||||
export const DB_MAX_IMAGE_COMMENT_LENGTH = 8192;
|
|
@ -1,4 +1,4 @@
|
|||
import { FILE_TYPE_BROWSERSAFE } from "@/const.js";
|
||||
import { FILE_TYPE_BROWSERSAFE } from "backend-rs";
|
||||
|
||||
const dictionary = {
|
||||
"safe-file": FILE_TYPE_BROWSERSAFE,
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
import type { Note } from "@/models/entities/note.js";
|
||||
|
||||
export default function (note: Note): boolean {
|
||||
return (
|
||||
note.renoteId != null &&
|
||||
(note.text != null ||
|
||||
note.hasPoll ||
|
||||
(note.fileIds != null && note.fileIds.length > 0))
|
||||
);
|
||||
}
|
|
@ -2,7 +2,7 @@ import { Brackets } from "typeorm";
|
|||
import { isBlockedServer } from "backend-rs";
|
||||
import { Instances } from "@/models/index.js";
|
||||
import type { Instance } from "@/models/entities/instance.js";
|
||||
import { DAY } from "@/const.js";
|
||||
import { DAY } from "backend-rs";
|
||||
|
||||
// Threshold from last contact after which an instance will be considered
|
||||
// "dead" and should no longer get activities delivered to it.
|
||||
|
|
|
@ -13,7 +13,6 @@ import { id } from "../id.js";
|
|||
import { Note } from "./note.js";
|
||||
import { User } from "./user.js";
|
||||
import { DriveFolder } from "./drive-folder.js";
|
||||
import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
|
||||
import { NoteFile } from "./note-file.js";
|
||||
|
||||
export type DriveFileUsageHint = "userAvatar" | "userBanner" | null;
|
||||
|
@ -73,7 +72,7 @@ export class DriveFile {
|
|||
|
||||
@Index() // USING pgroonga pgroonga_varchar_full_text_search_ops_v2
|
||||
@Column("varchar", {
|
||||
length: DB_MAX_IMAGE_COMMENT_LENGTH,
|
||||
length: 8192,
|
||||
nullable: true,
|
||||
comment: "The comment of the DriveFile.",
|
||||
})
|
||||
|
|
|
@ -7,7 +7,7 @@ import type { Packed } from "@/misc/schema.js";
|
|||
import type { Promiseable } from "@/prelude/await-all.js";
|
||||
import { awaitAll } from "@/prelude/await-all.js";
|
||||
import { populateEmojis } from "@/misc/populate-emojis.js";
|
||||
import { USER_ACTIVE_THRESHOLD, USER_ONLINE_THRESHOLD } from "@/const.js";
|
||||
import { USER_ACTIVE_THRESHOLD, USER_ONLINE_THRESHOLD } from "backend-rs";
|
||||
import { Cache } from "@/misc/cache.js";
|
||||
import { db } from "@/db/postgre.js";
|
||||
import { isActor, getApId } from "@/remote/activitypub/type.js";
|
||||
|
|
|
@ -70,10 +70,10 @@ deliverQueue
|
|||
),
|
||||
)
|
||||
.on("failed", (job, err) =>
|
||||
deliverLogger.warn(`failed(${err}) ${getJobInfo(job)} to=${job.data.to}`),
|
||||
deliverLogger.info(`failed(${err}) ${getJobInfo(job)} to=${job.data.to}`),
|
||||
)
|
||||
.on("error", (job: any, err: Error) =>
|
||||
deliverLogger.error(`error ${err}`, { job, e: renderError(err) }),
|
||||
deliverLogger.warn(`error ${err}`, { job, e: renderError(err) }),
|
||||
)
|
||||
.on("stalled", (job) =>
|
||||
deliverLogger.warn(`stalled ${getJobInfo(job)} to=${job.data.to}`),
|
||||
|
@ -564,12 +564,12 @@ export default function () {
|
|||
|
||||
export function destroy() {
|
||||
deliverQueue.once("cleaned", (jobs, status) => {
|
||||
deliverLogger.succ(`Cleaned ${jobs.length} ${status} jobs`);
|
||||
deliverLogger.info(`Cleaned ${jobs.length} ${status} jobs`);
|
||||
});
|
||||
deliverQueue.clean(0, "delayed");
|
||||
|
||||
inboxQueue.once("cleaned", (jobs, status) => {
|
||||
inboxLogger.succ(`Cleaned ${jobs.length} ${status} jobs`);
|
||||
inboxLogger.info(`Cleaned ${jobs.length} ${status} jobs`);
|
||||
});
|
||||
inboxQueue.clean(0, "delayed");
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ const logger = queueLogger.createSubLogger("delete-account");
|
|||
export async function deleteAccount(
|
||||
job: Bull.Job<DbUserDeleteJobData>,
|
||||
): Promise<string | void> {
|
||||
logger.info(`Deleting account of ${job.data.user.id} ...`);
|
||||
logger.info(`Deleting account ${job.data.user.id} ...`);
|
||||
|
||||
const user = await Users.findOneBy({ id: job.data.user.id });
|
||||
if (!user) return;
|
||||
|
@ -43,7 +43,7 @@ export async function deleteAccount(
|
|||
await Notes.delete(notes.map((note) => note.id));
|
||||
}
|
||||
|
||||
logger.succ("All of notes deleted");
|
||||
logger.info(`All posts of user ${job.data.user.id} were deleted`);
|
||||
}
|
||||
|
||||
{
|
||||
|
@ -73,7 +73,7 @@ export async function deleteAccount(
|
|||
}
|
||||
}
|
||||
|
||||
logger.succ("All of files deleted");
|
||||
logger.info(`All files of user ${job.data.user.id} were deleted`);
|
||||
}
|
||||
|
||||
{
|
||||
|
|
|
@ -54,8 +54,6 @@ export async function deleteDriveFiles(
|
|||
job.progress(deletedCount / total);
|
||||
}
|
||||
|
||||
logger.succ(
|
||||
`All drive files (${deletedCount}) of ${user.id} has been deleted.`,
|
||||
);
|
||||
logger.info(`${deletedCount} drive files of user ${user.id} were deleted.`);
|
||||
done();
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
|
|||
import { Users, Blockings } from "@/models/index.js";
|
||||
import { MoreThan } from "typeorm";
|
||||
import type { DbUserJobData } from "@/queue/types.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = queueLogger.createSubLogger("export-blocking");
|
||||
|
||||
|
@ -27,7 +28,7 @@ export async function exportBlocking(
|
|||
// Create temp file
|
||||
const [path, cleanup] = await createTemp();
|
||||
|
||||
logger.info(`Temp file is ${path}`);
|
||||
logger.info(`temp file created: ${path}`);
|
||||
|
||||
try {
|
||||
const stream = fs.createWriteStream(path, { flags: "a" });
|
||||
|
@ -63,9 +64,10 @@ export async function exportBlocking(
|
|||
|
||||
const content = getFullApAccount(u.username, u.host);
|
||||
await new Promise<void>((res, rej) => {
|
||||
stream.write(content + "\n", (err) => {
|
||||
stream.write(`${content}\n`, (err) => {
|
||||
if (err) {
|
||||
logger.error(err);
|
||||
logger.warn("failed");
|
||||
logger.info(inspect(err));
|
||||
rej(err);
|
||||
} else {
|
||||
res();
|
||||
|
@ -83,7 +85,7 @@ export async function exportBlocking(
|
|||
}
|
||||
|
||||
stream.end();
|
||||
logger.succ(`Exported to: ${path}`);
|
||||
logger.info(`Exported to: ${path}`);
|
||||
|
||||
const fileName = `blocking-${dateFormat(
|
||||
new Date(),
|
||||
|
@ -96,7 +98,7 @@ export async function exportBlocking(
|
|||
force: true,
|
||||
});
|
||||
|
||||
logger.succ(`Exported to: ${driveFile.id}`);
|
||||
logger.info(`Exported to: ${driveFile.id}`);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ export async function exportCustomEmojis(
|
|||
|
||||
const [path, cleanup] = await createTempDir();
|
||||
|
||||
logger.info(`Temp dir is ${path}`);
|
||||
logger.info(`temp dir created: ${path}`);
|
||||
|
||||
const metaPath = `${path}/meta.json`;
|
||||
|
||||
|
@ -41,7 +41,8 @@ export async function exportCustomEmojis(
|
|||
return new Promise<void>((res, rej) => {
|
||||
metaStream.write(text, (err) => {
|
||||
if (err) {
|
||||
logger.error(err);
|
||||
logger.warn("Failed to export custom emojis");
|
||||
logger.info(inspect(err));
|
||||
rej(err);
|
||||
} else {
|
||||
res();
|
||||
|
@ -105,7 +106,7 @@ export async function exportCustomEmojis(
|
|||
zlib: { level: 0 },
|
||||
});
|
||||
archiveStream.on("close", async () => {
|
||||
logger.succ(`Exported to: ${archivePath}`);
|
||||
logger.info(`Exported to: ${archivePath}`);
|
||||
|
||||
const fileName = `custom-emojis-${dateFormat(
|
||||
new Date(),
|
||||
|
@ -118,7 +119,7 @@ export async function exportCustomEmojis(
|
|||
force: true,
|
||||
});
|
||||
|
||||
logger.succ(`Exported to: ${driveFile.id}`);
|
||||
logger.info(`Exported to: ${driveFile.id}`);
|
||||
cleanup();
|
||||
archiveCleanup();
|
||||
done();
|
||||
|
|
|
@ -10,6 +10,7 @@ import { Users, Followings, Mutings } from "@/models/index.js";
|
|||
import { In, MoreThan, Not } from "typeorm";
|
||||
import type { DbUserJobData } from "@/queue/types.js";
|
||||
import type { Following } from "@/models/entities/following.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = queueLogger.createSubLogger("export-following");
|
||||
|
||||
|
@ -28,7 +29,7 @@ export async function exportFollowing(
|
|||
// Create temp file
|
||||
const [path, cleanup] = await createTemp();
|
||||
|
||||
logger.info(`Temp file is ${path}`);
|
||||
logger.info(`temp file created: ${path}`);
|
||||
|
||||
try {
|
||||
const stream = fs.createWriteStream(path, { flags: "a" });
|
||||
|
@ -78,9 +79,12 @@ export async function exportFollowing(
|
|||
|
||||
const content = getFullApAccount(u.username, u.host);
|
||||
await new Promise<void>((res, rej) => {
|
||||
stream.write(content + "\n", (err) => {
|
||||
stream.write(`${content}\n`, (err) => {
|
||||
if (err) {
|
||||
logger.error(err);
|
||||
logger.warn(
|
||||
`failed to export following users of ${job.data.user.id}`,
|
||||
);
|
||||
logger.info(inspect(err));
|
||||
rej(err);
|
||||
} else {
|
||||
res();
|
||||
|
@ -91,7 +95,7 @@ export async function exportFollowing(
|
|||
}
|
||||
|
||||
stream.end();
|
||||
logger.succ(`Exported to: ${path}`);
|
||||
logger.info(`Exported to: ${path}`);
|
||||
|
||||
const fileName = `following-${dateFormat(
|
||||
new Date(),
|
||||
|
@ -104,7 +108,7 @@ export async function exportFollowing(
|
|||
force: true,
|
||||
});
|
||||
|
||||
logger.succ(`Exported to: ${driveFile.id}`);
|
||||
logger.info(`Exported to: ${driveFile.id}`);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
|
|||
import { Users, Mutings } from "@/models/index.js";
|
||||
import { IsNull, MoreThan } from "typeorm";
|
||||
import type { DbUserJobData } from "@/queue/types.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = queueLogger.createSubLogger("export-mute");
|
||||
|
||||
|
@ -16,7 +17,7 @@ export async function exportMute(
|
|||
job: Bull.Job<DbUserJobData>,
|
||||
done: any,
|
||||
): Promise<void> {
|
||||
logger.info(`Exporting mute of ${job.data.user.id} ...`);
|
||||
logger.info(`Exporting mutes of ${job.data.user.id} ...`);
|
||||
|
||||
const user = await Users.findOneBy({ id: job.data.user.id });
|
||||
if (user == null) {
|
||||
|
@ -27,7 +28,7 @@ export async function exportMute(
|
|||
// Create temp file
|
||||
const [path, cleanup] = await createTemp();
|
||||
|
||||
logger.info(`Temp file is ${path}`);
|
||||
logger.info(`temp file created: ${path}`);
|
||||
|
||||
try {
|
||||
const stream = fs.createWriteStream(path, { flags: "a" });
|
||||
|
@ -64,9 +65,10 @@ export async function exportMute(
|
|||
|
||||
const content = getFullApAccount(u.username, u.host);
|
||||
await new Promise<void>((res, rej) => {
|
||||
stream.write(content + "\n", (err) => {
|
||||
stream.write(`${content}\n`, (err) => {
|
||||
if (err) {
|
||||
logger.error(err);
|
||||
logger.warn("failed");
|
||||
logger.info(inspect(err));
|
||||
rej(err);
|
||||
} else {
|
||||
res();
|
||||
|
@ -84,7 +86,7 @@ export async function exportMute(
|
|||
}
|
||||
|
||||
stream.end();
|
||||
logger.succ(`Exported to: ${path}`);
|
||||
logger.info(`Exported to: ${path}`);
|
||||
|
||||
const fileName = `mute-${dateFormat(
|
||||
new Date(),
|
||||
|
@ -97,7 +99,7 @@ export async function exportMute(
|
|||
force: true,
|
||||
});
|
||||
|
||||
logger.succ(`Exported to: ${driveFile.id}`);
|
||||
logger.info(`Exported to: ${driveFile.id}`);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@ import type { Note } from "@/models/entities/note.js";
|
|||
import type { Poll } from "@/models/entities/poll.js";
|
||||
import type { DbUserJobData } from "@/queue/types.js";
|
||||
import { createTemp } from "@/misc/create-temp.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = queueLogger.createSubLogger("export-notes");
|
||||
|
||||
|
@ -28,7 +29,7 @@ export async function exportNotes(
|
|||
// Create temp file
|
||||
const [path, cleanup] = await createTemp();
|
||||
|
||||
logger.info(`Temp file is ${path}`);
|
||||
logger.info(`temp file created: ${path}`);
|
||||
|
||||
try {
|
||||
const stream = fs.createWriteStream(path, { flags: "a" });
|
||||
|
@ -37,7 +38,8 @@ export async function exportNotes(
|
|||
return new Promise<void>((res, rej) => {
|
||||
stream.write(text, (err) => {
|
||||
if (err) {
|
||||
logger.error(err);
|
||||
logger.warn(`failed to export posts of ${job.data.user.id}`);
|
||||
logger.info(inspect(err));
|
||||
rej(err);
|
||||
} else {
|
||||
res();
|
||||
|
@ -91,7 +93,7 @@ export async function exportNotes(
|
|||
await write("]");
|
||||
|
||||
stream.end();
|
||||
logger.succ(`Exported to: ${path}`);
|
||||
logger.info(`Exported to: ${path}`);
|
||||
|
||||
const fileName = `notes-${dateFormat(
|
||||
new Date(),
|
||||
|
@ -104,7 +106,7 @@ export async function exportNotes(
|
|||
force: true,
|
||||
});
|
||||
|
||||
logger.succ(`Exported to: ${driveFile.id}`);
|
||||
logger.info(`Exported to: ${driveFile.id}`);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
|
|||
import { Users, UserLists, UserListJoinings } from "@/models/index.js";
|
||||
import { In } from "typeorm";
|
||||
import type { DbUserJobData } from "@/queue/types.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = queueLogger.createSubLogger("export-user-lists");
|
||||
|
||||
|
@ -31,7 +32,7 @@ export async function exportUserLists(
|
|||
// Create temp file
|
||||
const [path, cleanup] = await createTemp();
|
||||
|
||||
logger.info(`Temp file is ${path}`);
|
||||
logger.info(`temp file created: ${path}`);
|
||||
|
||||
try {
|
||||
const stream = fs.createWriteStream(path, { flags: "a" });
|
||||
|
@ -46,9 +47,10 @@ export async function exportUserLists(
|
|||
const acct = getFullApAccount(u.username, u.host);
|
||||
const content = `${list.name},${acct}`;
|
||||
await new Promise<void>((res, rej) => {
|
||||
stream.write(content + "\n", (err) => {
|
||||
stream.write(`${content}\n`, (err) => {
|
||||
if (err) {
|
||||
logger.error(err);
|
||||
logger.warn(`failed to export ${list.id}`);
|
||||
logger.info(inspect(err));
|
||||
rej(err);
|
||||
} else {
|
||||
res();
|
||||
|
@ -59,7 +61,7 @@ export async function exportUserLists(
|
|||
}
|
||||
|
||||
stream.end();
|
||||
logger.succ(`Exported to: ${path}`);
|
||||
logger.info(`Exported to: ${path}`);
|
||||
|
||||
const fileName = `user-lists-${dateFormat(
|
||||
new Date(),
|
||||
|
@ -72,7 +74,7 @@ export async function exportUserLists(
|
|||
force: true,
|
||||
});
|
||||
|
||||
logger.succ(`Exported to: ${driveFile.id}`);
|
||||
logger.info(`Exported to: ${driveFile.id}`);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
|
|
|
@ -66,14 +66,15 @@ export async function importBlocking(
|
|||
// skip myself
|
||||
if (target.id === job.data.user.id) continue;
|
||||
|
||||
logger.info(`Block[${linenum}] ${target.id} ...`);
|
||||
logger.debug(`Block[${linenum}] ${target.id} ...`);
|
||||
|
||||
await block(user, target);
|
||||
} catch (e) {
|
||||
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
|
||||
logger.warn(`failed: error in line ${linenum}`);
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
}
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
done();
|
||||
}
|
||||
|
|
|
@ -11,14 +11,28 @@ import { addFile } from "@/services/drive/add-file.js";
|
|||
import { genId } from "backend-rs";
|
||||
import { db } from "@/db/postgre.js";
|
||||
import probeImageSize from "probe-image-size";
|
||||
import * as path from "path";
|
||||
import * as path from "node:path";
|
||||
|
||||
const logger = queueLogger.createSubLogger("import-custom-emojis");
|
||||
|
||||
// probeImageSize acceptable extensions
|
||||
// JPG, GIF, PNG, WebP, BMP, TIFF, SVG, PSD.
|
||||
const acceptableExtensions = [
|
||||
".jpeg",
|
||||
".jpg",
|
||||
".gif",
|
||||
".png",
|
||||
".webp",
|
||||
".bmp",
|
||||
// ".tiff", // Cannot be used as emoji
|
||||
// ".svg", // Disable for secure issues
|
||||
// ".psd", // Cannot be used as emoji
|
||||
];
|
||||
|
||||
// TODO: 名前衝突時の動作を選べるようにする
|
||||
export async function importCustomEmojis(
|
||||
job: Bull.Job<DbUserImportJobData>,
|
||||
done: any,
|
||||
done: () => void,
|
||||
): Promise<void> {
|
||||
logger.info("Importing custom emojis ...");
|
||||
|
||||
|
@ -32,7 +46,7 @@ export async function importCustomEmojis(
|
|||
|
||||
const [tempPath, cleanup] = await createTempDir();
|
||||
|
||||
logger.info(`Temp dir is ${tempPath}`);
|
||||
logger.debug(`temp dir created: ${tempPath}`);
|
||||
|
||||
const destPath = `${tempPath}/emojis.zip`;
|
||||
|
||||
|
@ -62,6 +76,14 @@ export async function importCustomEmojis(
|
|||
if (!record.downloaded) continue;
|
||||
const emojiInfo = record.emoji;
|
||||
const emojiPath = `${outputPath}/${record.fileName}`;
|
||||
|
||||
const extname = path.extname(record.fileName);
|
||||
|
||||
// Skip non-support files
|
||||
if (!acceptableExtensions.includes(extname.toLowerCase())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
await Emojis.delete({
|
||||
name: emojiInfo.name,
|
||||
});
|
||||
|
@ -92,7 +114,7 @@ export async function importCustomEmojis(
|
|||
} else {
|
||||
logger.info("starting emoji import without metadata");
|
||||
// Since we lack metadata, we import into a randomized category name instead
|
||||
let categoryName = genId();
|
||||
const categoryName = genId();
|
||||
|
||||
let containedEmojis = fs.readdirSync(outputPath);
|
||||
|
||||
|
@ -103,7 +125,14 @@ export async function importCustomEmojis(
|
|||
|
||||
for (const emojiFilename of containedEmojis) {
|
||||
// strip extension and get filename to use as name
|
||||
const name = path.basename(emojiFilename, path.extname(emojiFilename));
|
||||
const extname = path.extname(emojiFilename);
|
||||
|
||||
// Skip non-emoji files, such as LICENSE
|
||||
if (!acceptableExtensions.includes(extname.toLowerCase())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const name = path.basename(emojiFilename, extname);
|
||||
const emojiPath = `${outputPath}/${emojiFilename}`;
|
||||
|
||||
logger.info(`importing ${name}`);
|
||||
|
@ -143,8 +172,8 @@ export async function importCustomEmojis(
|
|||
|
||||
cleanup();
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
done();
|
||||
});
|
||||
logger.succ(`Unzipping to ${outputPath}`);
|
||||
logger.info(`Unzipping to ${outputPath}`);
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import * as Post from "@/misc/post.js";
|
||||
import create from "@/services/note/create.js";
|
||||
import { Users } from "@/models/index.js";
|
||||
import { NoteFiles, Users } from "@/models/index.js";
|
||||
import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
|
||||
import { queueLogger } from "../../logger.js";
|
||||
import { uploadFromUrl } from "@/services/drive/upload-from-url.js";
|
||||
|
@ -49,7 +49,7 @@ export async function importCkPost(
|
|||
});
|
||||
files.push(file);
|
||||
} catch (e) {
|
||||
logger.error(`Skipped adding file to drive: ${url}`);
|
||||
logger.info(`Skipped adding file to drive: ${url}`);
|
||||
}
|
||||
}
|
||||
const { text, cw, localOnly, createdAt, visibility } = Post.parse(post);
|
||||
|
@ -59,9 +59,18 @@ export async function importCkPost(
|
|||
userId: user.id,
|
||||
});
|
||||
|
||||
if (note && (note?.fileIds?.length || 0) < files.length) {
|
||||
// FIXME: What is this condition?
|
||||
if (note != null && (note.fileIds?.length || 0) < files.length) {
|
||||
const update: Partial<Note> = {};
|
||||
update.fileIds = files.map((x) => x.id);
|
||||
|
||||
if (update.fileIds != null) {
|
||||
await NoteFiles.delete({ noteId: note.id });
|
||||
await NoteFiles.insert(
|
||||
update.fileIds.map((fileId) => ({ noteId: note?.id, fileId })),
|
||||
);
|
||||
}
|
||||
|
||||
await Notes.update(note.id, update);
|
||||
await NoteEdits.insert({
|
||||
id: genId(),
|
||||
|
@ -71,12 +80,12 @@ export async function importCkPost(
|
|||
fileIds: note.fileIds,
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
logger.info(`Note file updated`);
|
||||
logger.info("Post updated");
|
||||
}
|
||||
if (!note) {
|
||||
if (note == null) {
|
||||
note = await create(user, {
|
||||
createdAt: createdAt,
|
||||
files: files.length == 0 ? undefined : files,
|
||||
files: files.length === 0 ? undefined : files,
|
||||
poll: undefined,
|
||||
text: text || undefined,
|
||||
reply: post.replyId ? job.data.parent : null,
|
||||
|
@ -90,11 +99,11 @@ export async function importCkPost(
|
|||
apHashtags: undefined,
|
||||
apEmojis: undefined,
|
||||
});
|
||||
logger.info(`Create new note`);
|
||||
logger.debug("New post has been created");
|
||||
} else {
|
||||
logger.info(`Note exist`);
|
||||
logger.info("This post already exists");
|
||||
}
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
if (post.childNotes) {
|
||||
for (const child of post.childNotes) {
|
||||
createImportCkPostJob(
|
||||
|
|
|
@ -64,11 +64,12 @@ export async function importFollowing(
|
|||
// skip myself
|
||||
if (target.id === job.data.user.id) continue;
|
||||
|
||||
logger.info(`Follow[${linenum}] ${target.id} ...`);
|
||||
logger.debug(`Follow[${linenum}] ${target.id} ...`);
|
||||
|
||||
follow(user, target);
|
||||
} catch (e) {
|
||||
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
|
||||
logger.warn(`Error in line ${linenum}`);
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -102,15 +103,16 @@ export async function importFollowing(
|
|||
// skip myself
|
||||
if (target.id === job.data.user.id) continue;
|
||||
|
||||
logger.info(`Follow[${linenum}] ${target.id} ...`);
|
||||
logger.debug(`Follow[${linenum}] ${target.id} ...`);
|
||||
|
||||
follow(user, target);
|
||||
} catch (e) {
|
||||
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
|
||||
logger.warn(`Error in line ${linenum}`);
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
done();
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import create from "@/services/note/create.js";
|
||||
import { Users } from "@/models/index.js";
|
||||
import { NoteFiles, Users } from "@/models/index.js";
|
||||
import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
|
||||
import { queueLogger } from "../../logger.js";
|
||||
import type Bull from "bull";
|
||||
|
@ -73,7 +73,7 @@ export async function importMastoPost(
|
|||
});
|
||||
files.push(file);
|
||||
} catch (e) {
|
||||
logger.error(`Skipped adding file to drive: ${url}`);
|
||||
logger.warn(`Skipped adding file to drive: ${url}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -85,9 +85,18 @@ export async function importMastoPost(
|
|||
userId: user.id,
|
||||
});
|
||||
|
||||
if (note && (note?.fileIds?.length || 0) < files.length) {
|
||||
// FIXME: What is this condition?
|
||||
if (note != null && (note.fileIds?.length || 0) < files.length) {
|
||||
const update: Partial<Note> = {};
|
||||
update.fileIds = files.map((x) => x.id);
|
||||
|
||||
if (update.fileIds != null) {
|
||||
await NoteFiles.delete({ noteId: note.id });
|
||||
await NoteFiles.insert(
|
||||
update.fileIds.map((fileId) => ({ noteId: note?.id, fileId })),
|
||||
);
|
||||
}
|
||||
|
||||
await Notes.update(note.id, update);
|
||||
await NoteEdits.insert({
|
||||
id: genId(),
|
||||
|
@ -97,14 +106,14 @@ export async function importMastoPost(
|
|||
fileIds: note.fileIds,
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
logger.info(`Note file updated`);
|
||||
logger.info("Post updated");
|
||||
}
|
||||
if (!note) {
|
||||
if (note == null) {
|
||||
note = await create(user, {
|
||||
createdAt: isRenote
|
||||
? new Date(post.published)
|
||||
: new Date(post.object.published),
|
||||
files: files.length == 0 ? undefined : files,
|
||||
files: files.length === 0 ? undefined : files,
|
||||
poll: undefined,
|
||||
text: text || undefined,
|
||||
reply,
|
||||
|
@ -118,12 +127,12 @@ export async function importMastoPost(
|
|||
apHashtags: undefined,
|
||||
apEmojis: undefined,
|
||||
});
|
||||
logger.info(`Create new note`);
|
||||
logger.debug("New post has been created");
|
||||
} else {
|
||||
logger.info(`Note exist`);
|
||||
logger.info("This post already exists");
|
||||
}
|
||||
job.progress(100);
|
||||
done();
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
}
|
||||
|
|
|
@ -66,15 +66,16 @@ export async function importMuting(
|
|||
// skip myself
|
||||
if (target.id === job.data.user.id) continue;
|
||||
|
||||
logger.info(`Mute[${linenum}] ${target.id} ...`);
|
||||
logger.debug(`Mute[${linenum}] ${target.id} ...`);
|
||||
|
||||
await mute(user, target);
|
||||
} catch (e) {
|
||||
logger.warn(`Error in line ${linenum}: ${inspect(e)}`);
|
||||
logger.warn(`Error in line ${linenum}`);
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
}
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
done();
|
||||
}
|
||||
|
||||
|
|
|
@ -45,9 +45,10 @@ export async function importPosts(
|
|||
}
|
||||
} catch (e) {
|
||||
// handle error
|
||||
logger.warn(`Failed to read Mastodon archive:\n${inspect(e)}`);
|
||||
logger.warn("Failed to read Mastodon archive");
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
logger.succ("Mastodon archive imported");
|
||||
logger.info("Mastodon archive imported");
|
||||
done();
|
||||
return;
|
||||
}
|
||||
|
@ -56,24 +57,25 @@ export async function importPosts(
|
|||
|
||||
try {
|
||||
const parsed = JSON.parse(json);
|
||||
if (parsed instanceof Array) {
|
||||
logger.info("Parsing key style posts");
|
||||
if (Array.isArray(parsed)) {
|
||||
logger.info("Parsing *key posts");
|
||||
const arr = recreateChain(parsed);
|
||||
for (const post of arr) {
|
||||
createImportCkPostJob(job.data.user, post, job.data.signatureCheck);
|
||||
}
|
||||
} else if (parsed instanceof Object) {
|
||||
logger.info("Parsing animal style posts");
|
||||
logger.info("Parsing Mastodon posts");
|
||||
for (const post of parsed.orderedItems) {
|
||||
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// handle error
|
||||
logger.warn(`Error occured while reading:\n${inspect(e)}`);
|
||||
logger.warn("an error occured while reading");
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
done();
|
||||
}
|
||||
|
||||
|
|
|
@ -86,10 +86,11 @@ export async function importUserLists(
|
|||
|
||||
pushUserToUserList(target, list!);
|
||||
} catch (e) {
|
||||
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
|
||||
logger.warn(`Error in line ${linenum}`);
|
||||
logger.info(inspect(e));
|
||||
}
|
||||
}
|
||||
|
||||
logger.succ("Imported");
|
||||
logger.info("Imported");
|
||||
done();
|
||||
}
|
||||
|
|
|
@@ -48,6 +48,6 @@ export default async function cleanRemoteFiles(
 		job.progress(deletedCount / total);
 	}

-	logger.succ("All cached remote files has been deleted.");
+	logger.info("All cached remote files are deleted.");
 	done();
 }

@@ -28,6 +28,6 @@ export async function checkExpiredMutings(
 		}
 	}

-	logger.succ("All expired mutings checked.");
+	logger.info("All expired mutings checked.");
 	done();
 }

@@ -11,6 +11,6 @@ export async function cleanCharts(
 ): Promise<void> {
 	logger.info("Cleaning active users chart...");
 	await activeUsersChart.clean();
-	logger.succ("Active users chart has been cleaned.");
+	logger.info("Active users chart has been cleaned.");
 	done();
 }

@@ -4,7 +4,7 @@ import { UserIps } from "@/models/index.js";

 import { queueLogger } from "../../logger.js";

-const logger = queueLogger.createSubLogger("clean");
+const logger = queueLogger.createSubLogger("clean-user-ip-log");

 export async function clean(
 	job: Bull.Job<Record<string, unknown>>,

@@ -16,6 +16,6 @@ export async function clean(
 		createdAt: LessThan(new Date(Date.now() - 1000 * 60 * 60 * 24 * 90)),
 	});

-	logger.succ("Cleaned.");
+	logger.info("Cleaned.");
 	done();
 }

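The rename above also illustrates how scoped loggers are produced in this codebase: a parent logger hands out named sub-loggers so every line carries the job's name. A small sketch, assuming only what the hunk itself shows (the relative import path and the createSubLogger call); the log messages are illustrative.

	// Sketch only: the import path and the createSubLogger() call mirror the hunk above,
	// which switches the sub-logger name from "clean" to "clean-user-ip-log".
	import { queueLogger } from "../../logger.js";

	const logger = queueLogger.createSubLogger("clean-user-ip-log");

	logger.info("Deleting old IP address logs...");
	logger.info("Cleaned.");
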
@@ -27,17 +27,16 @@ export async function setLocalEmojiSizes(
 				height: size.height || null,
 			});
 		} catch (e) {
-			logger.error(
-				`Unable to set emoji size (${i + 1}/${emojis.length}):\n${inspect(e)}`,
-			);
+			logger.warn(`Unable to set emoji size (${i + 1}/${emojis.length})`);
+			logger.info(inspect(e));
 			/* skip if any error happens */
 		} finally {
 			// wait for 1sec so that this would not overwhelm the object storage.
 			await new Promise((resolve) => setTimeout(resolve, 1000));
-			if (i % 10 === 9) logger.succ(`fetched ${i + 1}/${emojis.length} emojis`);
+			if (i % 10 === 9) logger.info(`fetched ${i + 1}/${emojis.length} emojis`);
 		}
 	}

-	logger.succ("Done.");
+	logger.info("Done.");
 	done();
 }

@@ -33,12 +33,13 @@ export async function verifyLinks(
 					fields: user.fields,
 				});
 			} catch (e) {
-				logger.error(`Failed to update user ${user.userId}:\n${inspect(e)}`);
+				logger.error(`Failed to update user ${user.userId}`);
+				logger.info(inspect(e));
 				done(e);
 			}
 		}
 	}

-	logger.succ("All links successfully verified.");
+	logger.info("All links successfully verified.");
 	done();
 }

@@ -133,7 +133,8 @@ export default class DeliverManager {
 					host: new URL(inbox).host,
 				});
 			} catch (error) {
-				apLogger.error(`Invalid Inbox ${inbox}:\n${inspect(error)}`);
+				apLogger.info(`Invalid Inbox ${inbox}`);
+				apLogger.debug(inspect(error));
 			}
 		}

@ -6,20 +6,19 @@ import { isFollow, getApType } from "../../type.js";
|
|||
import { apLogger } from "../../logger.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export default async (
|
||||
actor: CacheableRemoteUser,
|
||||
activity: IAccept,
|
||||
): Promise<string> => {
|
||||
const uri = activity.id || activity;
|
||||
|
||||
logger.info(`Accept: ${uri}`);
|
||||
apLogger.info(`Accept: ${uri}`);
|
||||
|
||||
const resolver = new Resolver();
|
||||
|
||||
const object = await resolver.resolve(activity.object).catch((e) => {
|
||||
logger.error(`Resolution failed:\n${inspect(e)}`);
|
||||
apLogger.info(`Failed to resolve AP object: ${e}`);
|
||||
apLogger.debug(inspect(e));
|
||||
throw e;
|
||||
});
|
||||
|
||||
|
|
|
@ -5,15 +5,13 @@ import type { IAnnounce } from "../../type.js";
|
|||
import { getApId } from "../../type.js";
|
||||
import { apLogger } from "../../logger.js";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export default async (
|
||||
actor: CacheableRemoteUser,
|
||||
activity: IAnnounce,
|
||||
): Promise<void> => {
|
||||
const uri = getApId(activity);
|
||||
|
||||
logger.info(`Announce: ${uri}`);
|
||||
apLogger.info(`Announce: ${uri}`);
|
||||
|
||||
const resolver = new Resolver();
|
||||
|
||||
|
|
|
@ -13,8 +13,6 @@ import { Notes } from "@/models/index.js";
|
|||
import { isBlockedServer } from "backend-rs";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
/**
|
||||
* Handle announcement activities
|
||||
*/
|
||||
|
@ -50,11 +48,14 @@ export default async function (
|
|||
// Skip if target is 4xx
|
||||
if (e instanceof StatusError) {
|
||||
if (e.isClientError) {
|
||||
logger.warn(`Ignored announce target ${targetUri} - ${e.statusCode}`);
|
||||
apLogger.info(
|
||||
`Ignored announce target ${targetUri} - ${e.statusCode}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.warn(`Error in announce target ${targetUri}:\n${inspect(e)}`);
|
||||
apLogger.warn(`Error in announce target ${targetUri}`);
|
||||
apLogger.debug(inspect(e));
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
|
@ -63,7 +64,7 @@ export default async function (
|
|||
console.log("skip: invalid actor for this activity");
|
||||
return;
|
||||
}
|
||||
logger.info(`Creating the (Re)Note: ${uri}`);
|
||||
apLogger.info(`Creating (re)note: ${uri}`);
|
||||
|
||||
const activityAudience = await parseAudience(
|
||||
actor,
|
||||
|
|
|
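The announce handler above treats a 4xx response from the remote target as a permanent failure that is only logged, while every other error is re-thrown so the job can be retried. A condensed, self-contained sketch of that branch; the StatusError class, apLogger object, fetchTarget callback, and resolveAnnounceTarget name here are local stand-ins, not the real Firefish implementations.

	import { inspect } from "node:util";

	// Minimal local stand-ins so the sketch compiles on its own; the real handler
	// uses Firefish's StatusError and apLogger instead.
	class StatusError extends Error {
		constructor(
			message: string,
			public statusCode: number,
			public isClientError: boolean,
		) {
			super(message);
		}
	}
	const apLogger = {
		info: (msg: string) => console.log(msg),
		warn: (msg: string) => console.warn(msg),
		debug: (msg: string) => console.debug(msg),
	};

	async function resolveAnnounceTarget(
		targetUri: string,
		fetchTarget: (uri: string) => Promise<void>,
	): Promise<void> {
		try {
			await fetchTarget(targetUri);
		} catch (e) {
			if (e instanceof StatusError) {
				if (e.isClientError) {
					// 4xx from the remote: the target is gone or private, so skip quietly
					apLogger.info(`Ignored announce target ${targetUri} - ${e.statusCode}`);
					return;
				}
				// other failures: summary at warn, details at debug
				apLogger.warn(`Error in announce target ${targetUri}`);
				apLogger.debug(inspect(e));
			}
			// re-throw so the queue can retry
			throw e;
		}
	}
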
@ -7,15 +7,13 @@ import { apLogger } from "../../logger.js";
|
|||
import { toArray, concat, unique } from "@/prelude/array.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export default async (
|
||||
actor: CacheableRemoteUser,
|
||||
activity: ICreate,
|
||||
): Promise<void> => {
|
||||
const uri = getApId(activity);
|
||||
|
||||
logger.info(`Create: ${uri}`);
|
||||
apLogger.info(`Create: ${uri}`);
|
||||
|
||||
// copy audiences between activity <=> object.
|
||||
if (typeof activity.object === "object") {
|
||||
|
@ -40,13 +38,14 @@ export default async (
|
|||
const resolver = new Resolver();
|
||||
|
||||
const object = await resolver.resolve(activity.object).catch((e) => {
|
||||
logger.error(`Resolution failed:\n${inspect(e)}`);
|
||||
apLogger.info(`Failed to resolve AP object: ${e}`);
|
||||
apLogger.debug(inspect(e));
|
||||
throw e;
|
||||
});
|
||||
|
||||
if (isPost(object)) {
|
||||
createNote(resolver, actor, object, false, activity);
|
||||
} else {
|
||||
logger.warn(`Unknown type: ${getApType(object)}`);
|
||||
apLogger.info(`Unknown type: ${getApType(object)}`);
|
||||
}
|
||||
};
|
||||
|
|
|
@ -3,13 +3,11 @@ import { createDeleteAccountJob } from "@/queue/index.js";
|
|||
import type { CacheableRemoteUser } from "@/models/entities/user.js";
|
||||
import { Users } from "@/models/index.js";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export async function deleteActor(
|
||||
actor: CacheableRemoteUser,
|
||||
uri: string,
|
||||
): Promise<string> {
|
||||
logger.info(`Deleting the Actor: ${uri}`);
|
||||
apLogger.info(`Deleting Actor: ${uri}`);
|
||||
|
||||
if (actor.uri !== uri) {
|
||||
return `skip: delete actor ${actor.uri} !== ${uri}`;
|
||||
|
|
|
@ -5,13 +5,11 @@ import DbResolver from "../../db-resolver.js";
|
|||
import { getApLock } from "@/misc/app-lock.js";
|
||||
import { deleteMessage } from "@/services/messages/delete.js";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export default async function (
|
||||
actor: CacheableRemoteUser,
|
||||
uri: string,
|
||||
): Promise<string> {
|
||||
logger.info(`Deleting the Note: ${uri}`);
|
||||
apLogger.info(`Deleting note: ${uri}`);
|
||||
|
||||
const lock = await getApLock(uri);
|
||||
|
||||
|
|
|
@@ -54,7 +54,8 @@ export async function performActivity(
 			try {
 				await performOneActivity(actor, act);
 			} catch (err) {
-				apLogger.error(inspect(err));
+				apLogger.info(`Failed to perform activity: ${err}`);
+				apLogger.debug(inspect(err));
 			}
 		}
 	} else {

@@ -88,9 +89,15 @@ async function performOneActivity(
 	} else if (isReject(activity)) {
 		await reject(actor, activity);
 	} else if (isAdd(activity)) {
-		await add(actor, activity).catch((err) => apLogger.error(inspect(err)));
+		await add(actor, activity).catch((err) => {
+			apLogger.warn(`Failed to perform 'add' activity: ${err}`);
+			apLogger.debug(inspect(err));
+		});
 	} else if (isRemove(activity)) {
-		await remove(actor, activity).catch((err) => apLogger.error(inspect(err)));
+		await remove(actor, activity).catch((err) => {
+			apLogger.warn(`Failed to perform 'remove' activity: ${err}`);
+			apLogger.debug(inspect(err));
+		});
 	} else if (isAnnounce(activity)) {
 		await announce(actor, activity);
 	} else if (isLike(activity)) {

@@ -104,7 +111,7 @@ async function performOneActivity(
 	} else if (isMove(activity)) {
 		await move(actor, activity);
 	} else {
-		apLogger.warn(
+		apLogger.info(
 			`Unrecognized activity type: ${(activity as IActivity).type}`,
 		);
 	}

@@ -14,12 +14,13 @@ export default async (
 ): Promise<string> => {
 	const uri = activity.id || activity;

-	logger.info(`Reject: ${uri}`);
+	apLogger.info(`Reject: ${uri}`);

 	const resolver = new Resolver();

 	const object = await resolver.resolve(activity.object).catch((e) => {
-		logger.error(`Resolution failed:\n${inspect(e)}`);
+		apLogger.info(`Failed to resolve AP object: ${e}`);
+		apLogger.debug(inspect(e));
 		throw e;
 	});

@ -17,8 +17,6 @@ import Resolver from "../../resolver.js";
|
|||
import { apLogger } from "../../logger.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export default async (
|
||||
actor: CacheableRemoteUser,
|
||||
activity: IUndo,
|
||||
|
@ -29,12 +27,13 @@ export default async (
|
|||
|
||||
const uri = activity.id || activity;
|
||||
|
||||
logger.info(`Undo: ${uri}`);
|
||||
apLogger.info(`Undo: ${uri}`);
|
||||
|
||||
const resolver = new Resolver();
|
||||
|
||||
const object = await resolver.resolve(activity.object).catch((e) => {
|
||||
logger.error(`Resolution failed:\n${inspect(e)}`);
|
||||
apLogger.info(`Failed to resolve AP object: ${e}`);
|
||||
apLogger.debug(inspect(e));
|
||||
throw e;
|
||||
});
|
||||
|
||||
|
|
|
@@ -18,12 +18,13 @@ export default async (
 		return "skip: invalid actor";
 	}

-	apLogger.debug("Update");
+	apLogger.info("Update");

 	const resolver = new Resolver();

 	const object = await resolver.resolve(activity.object).catch((e) => {
-		apLogger.error(`Resolution failed:\n${inspect(e)}`);
+		apLogger.info(`Failed to resolve AP object: ${e}`);
+		apLogger.debug(inspect(e));
 		throw e;
 	});

@@ -9,9 +9,7 @@ import type {
 } from "@/models/entities/drive-file.js";
 import { DriveFiles } from "@/models/index.js";
 import { truncate } from "@/misc/truncate.js";
-import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
-
-const logger = apLogger;
+import { config } from "@/config.js";

 /**
  * create an Image.

@@ -36,7 +34,7 @@ export async function createImage(
 		throw new Error(`Invalid image, unexpected schema: ${image.url}`);
 	}

-	logger.info(`Creating the Image: ${image.url}`);
+	apLogger.info(`Creating an image: ${image.url}`);

 	const instance = await fetchMeta(true);

@@ -46,7 +44,7 @@ export async function createImage(
 		uri: image.url,
 		sensitive: image.sensitive,
 		isLink: !instance.cacheRemoteFiles,
-		comment: truncate(image.name, DB_MAX_IMAGE_COMMENT_LENGTH),
+		comment: truncate(image.name, config.maxCaptionLength),
 		usageHint: usage,
 	});

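Both image hunks above replace the hard-coded DB_MAX_IMAGE_COMMENT_LENGTH constant with the instance-level config.maxCaptionLength when storing a remote image's alt text. A minimal sketch of that call, assuming only the import paths already shown in the hunks; the captionOf wrapper is illustrative, not part of the codebase.

	import { truncate } from "@/misc/truncate.js";
	import { config } from "@/config.js";

	// `name` stands in for the remote image's `name` field handled above.
	function captionOf(name: string | undefined): string | undefined {
		// alt text is cut down to the instance-wide caption limit before storage
		return truncate(name, config.maxCaptionLength);
	}
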
@ -44,14 +44,12 @@ import { publishNoteStream } from "@/services/stream.js";
|
|||
import { extractHashtags } from "@/misc/extract-hashtags.js";
|
||||
import { UserProfiles } from "@/models/index.js";
|
||||
import { In } from "typeorm";
|
||||
import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
|
||||
import { config } from "@/config.js";
|
||||
import { truncate } from "@/misc/truncate.js";
|
||||
import { type Size, getEmojiSize } from "@/misc/emoji-meta.js";
|
||||
import { langmap } from "@/misc/langmap.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
export function validateNote(object: any, uri: string) {
|
||||
const expectHost = extractHost(uri);
|
||||
|
||||
|
@ -112,13 +110,16 @@ export async function createNote(
|
|||
const entryUri = getApId(value);
|
||||
const err = validateNote(object, entryUri);
|
||||
if (err) {
|
||||
logger.error(`${err.message}`, {
|
||||
resolver: {
|
||||
history: resolver.getHistory(),
|
||||
},
|
||||
value: value,
|
||||
object: object,
|
||||
});
|
||||
apLogger.info(`${err.message}`);
|
||||
apLogger.debug(
|
||||
inspect({
|
||||
resolver: {
|
||||
history: resolver.getHistory(),
|
||||
},
|
||||
value: value,
|
||||
object: object,
|
||||
}),
|
||||
);
|
||||
throw new Error("invalid note");
|
||||
}
|
||||
|
||||
|
@@ -140,8 +141,8 @@ export async function createNote(
 		throw new Error(`unexpected schema of note url: ${url}`);
 	}

-	logger.debug(`Note fetched: ${JSON.stringify(note, null, 2)}`);
-	logger.info(`Creating the Note: ${note.id}`);
+	apLogger.trace(`Note fetched: ${JSON.stringify(note, null, 2)}`);
+	apLogger.info(`Creating the Note: ${note.id}`);

 	// Skip if note is made before 2007 (1yr before Fedi was created)
 	// OR skip if note is made 3 days in advance

@@ -150,13 +151,13 @@ export async function createNote(
 		const FutureCheck = new Date();
 		FutureCheck.setDate(FutureCheck.getDate() + 3); // Allow some wiggle room for misconfigured hosts
 		if (DateChecker.getFullYear() < 2007) {
-			logger.warn(
+			apLogger.info(
 				"Note somehow made before Activitypub was created; discarding",
 			);
 			return null;
 		}
 		if (DateChecker > FutureCheck) {
-			logger.warn("Note somehow made after today; discarding");
+			apLogger.info("Note somehow made after today; discarding");
 			return null;
 		}
 	}

@@ -169,8 +170,8 @@ export async function createNote(

 	// Skip if author is suspended.
 	if (actor.isSuspended) {
-		logger.debug(
-			`User ${actor.usernameLower}@${actor.host} suspended; discarding.`,
+		apLogger.info(
+			`User ${actor.usernameLower}@${actor.host} is suspended; discarding.`,
 		);
 		return null;
 	}

@@ -224,7 +225,7 @@ export async function createNote(
 		? await resolveNote(note.inReplyTo, resolver)
 				.then((x) => {
 					if (x == null) {
-						logger.warn("Specified inReplyTo, but nout found");
+						apLogger.info(`Specified inReplyTo not found: ${note.inReplyTo}`);
 						throw new Error("inReplyTo not found");
 					} else {
 						return x;

@@ -242,7 +243,8 @@ export async function createNote(
 						}
 					}

-					logger.warn(`Error in inReplyTo ${note.inReplyTo}:\n${inspect(e)}`);
+					apLogger.info(`Error in inReplyTo ${note.inReplyTo}`);
+					apLogger.debug(inspect(e));
 					throw e;
 				})
 		: null;

@@ -336,11 +338,11 @@ export async function createNote(
 		index: number,
 	): Promise<null> => {
 		if (poll.expiresAt && Date.now() > new Date(poll.expiresAt).getTime()) {
-			logger.warn(
-				`vote to expired poll from AP: actor=${actor.username}@${actor.host}, note=${note.id}, choice=${name}`,
+			apLogger.info(
+				`discarding vote to expired poll: actor=${actor.username}@${actor.host}, note=${note.id}, choice=${name}`,
 			);
 		} else if (index >= 0) {
-			logger.info(
+			apLogger.info(
 				`vote from AP: actor=${actor.username}@${actor.host}, note=${note.id}, choice=${name}`,
 			);
 			await vote(actor, reply, index);

@@ -357,7 +359,8 @@ export async function createNote(
 	}

 	const emojis = await extractEmojis(note.tag || [], actor.host).catch((e) => {
-		logger.info(`extractEmojis:\n${inspect(e)}`);
+		apLogger.info("Failed to extract emojis");
+		apLogger.debug(inspect(e));
 		return [] as Emoji[];
 	});

@@ -515,7 +518,7 @@ export async function extractEmojis(
 			return exists;
 		}

-		logger.info(`register emoji host=${host}, name=${name}`);
+		apLogger.info(`register emoji host=${host}, name=${name}`);

 		let size: Size = { width: 0, height: 0 };
 		try {

@@ -619,7 +622,7 @@ export async function updateNote(value: string | IObject, resolver?: Resolver) {
 			const file = await resolveImage(actor, x, null);
 			const update: Partial<DriveFile> = {};

-			const altText = truncate(x.name, DB_MAX_IMAGE_COMMENT_LENGTH);
+			const altText = truncate(x.name, config.maxCaptionLength);
 			if (file.comment !== altText) {
 				update.comment = altText;
 			}

@ -48,8 +48,6 @@ import { resolveNote, extractEmojis } from "./note.js";
|
|||
import { resolveImage } from "./image.js";
|
||||
import { inspect } from "node:util";
|
||||
|
||||
const logger = apLogger;
|
||||
|
||||
const nameLength = 128;
|
||||
const summaryLength = 2048;
|
||||
|
||||
|
@ -178,7 +176,7 @@ export async function createPerson(
|
|||
|
||||
const person = validateActor(object, uri);
|
||||
|
||||
logger.info(`Creating the Person: ${person.id}`);
|
||||
apLogger.info(`Creating Person: ${person.id}`);
|
||||
|
||||
const host = toPuny(new URL(object.id).hostname);
|
||||
|
||||
|
@ -347,7 +345,8 @@ export async function createPerson(
|
|||
throw new Error("already registered");
|
||||
}
|
||||
} else {
|
||||
logger.error(inspect(e));
|
||||
apLogger.info(`Failed to create a Person actor: ${person.url}`);
|
||||
apLogger.debug(inspect(e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
@ -388,7 +387,8 @@ export async function createPerson(
|
|||
|
||||
//#region Get custom emoji
|
||||
const emojis = await extractEmojis(person.tag || [], host).catch((e) => {
|
||||
logger.info(`extractEmojis:\n${inspect(e)}`);
|
||||
apLogger.info("Failed to extract emojis");
|
||||
apLogger.debug(inspect(e));
|
||||
return [] as Emoji[];
|
||||
});
|
||||
|
||||
|
@@ -399,9 +399,10 @@ export async function createPerson(
 	});
 	//#endregion

-	await updateFeatured(user!.id, resolver).catch((err) =>
-		logger.error(inspect(err)),
-	);
+	await updateFeatured(user!.id, resolver).catch((err) => {
+		apLogger.info(`Failed to update featured collection of ${user.uri}`);
+		apLogger.debug(inspect(err));
+	});

 	return user!;
 }

@@ -439,7 +440,7 @@ export async function updatePerson(

 	const person = validateActor(object, uri);

-	logger.info(`Updating the Person: ${person.id}`);
+	apLogger.info(`Updating the Person: ${person.id}`);

 	// Fetch avatar and header image
 	const [avatar, banner] = await Promise.all(

@@ -456,7 +457,8 @@ export async function updatePerson(

 	// Custom pictogram acquisition
 	const emojis = await extractEmojis(person.tag || [], user.host).catch((e) => {
-		logger.info(`extractEmojis:\n${inspect(e)}`);
+		apLogger.info("Failed to extract emojis");
+		apLogger.debug(inspect(e));
 		return [] as Emoji[];
 	});

@@ -626,9 +628,10 @@ export async function updatePerson(
 		},
 	);

-	await updateFeatured(user.id, resolver).catch((err) =>
-		logger.error(inspect(err)),
-	);
+	await updateFeatured(user.id, resolver).catch((err) => {
+		apLogger.info(`Failed to update featured collection of ${user.uri}`);
+		apLogger.debug(inspect(err));
+	});
 }

 /**

@@ -681,7 +684,7 @@ export async function updateFeatured(userId: User["id"], resolver?: Resolver) {
 	if (!Users.isRemoteUser(user)) return;
 	if (!user.featured) return;

-	logger.info(`Updating the featured: ${user.uri}`);
+	apLogger.info(`Updating the featured collection: ${user.uri}`);

 	if (resolver == null) resolver = new Resolver();

@@ -76,12 +76,12 @@ export default class Resolver {
 					throw new Error("instance is blocked");
 				}
 			}
-			apLogger.debug("Returning existing object:");
-			apLogger.debug(JSON.stringify(value, null, 2));
+			apLogger.debug("Returning the existing object");
+			apLogger.trace(JSON.stringify(value, null, 2));
 			return value;
 		}

-		apLogger.debug(`Resolving: ${value}`);
+		apLogger.info(`Resolving: ${value}`);

 		if (value.includes("#")) {
 			// URLs with fragment parts cannot be resolved correctly because

@@ -115,8 +115,10 @@ export default class Resolver {
 			this.user = await getInstanceActor();
 		}

-		apLogger.debug("Getting object from remote, authenticated as user:");
-		apLogger.debug(JSON.stringify(this.user, null, 2));
+		apLogger.info(
+			`Getting object from remote, authenticated as user ${this.user.id}`,
+		);
+		apLogger.trace(JSON.stringify(this.user, null, 2));

 		const { finalUrl, content: object } = await apGet(value, this.user);

@@ -56,7 +56,7 @@ export async function resolveUser(
 	if (user == null) {
 		const self = await resolveSelf(acctLower);

-		logger.succ(`return new remote user: ${chalk.magenta(acctLower)}`);
+		logger.info(`return new remote user: ${chalk.magenta(acctLower)}`);
 		return await createPerson(self.href);
 	}

@@ -43,8 +43,10 @@ const router = new Router();
 //#region Routing

 function inbox(ctx: Router.RouterContext) {
+	const inboxLogger = serverLogger.createSubLogger("inbox");
+
 	if (ctx.req.headers.host !== config.host) {
-		serverLogger.warn("inbox: Invalid Host");
+		inboxLogger.warn(`regecting invalid host (${ctx.req.headers.host})`);
 		ctx.status = 400;
 		ctx.message = "Invalid Host";
 		return;

@@ -57,7 +59,6 @@ function inbox(ctx: Router.RouterContext) {
 			headers: ["(request-target)", "digest", "host", "date"],
 		});
 	} catch (e) {
-		serverLogger.warn(`inbox: signature parse error:\n${inspect(e)}`);
 		ctx.status = 401;

 		if (e instanceof Error) {

@@ -67,6 +68,9 @@ function inbox(ctx: Router.RouterContext) {
 			ctx.message = "Missing Required Header";
 		}

+		inboxLogger.info(`signature parse error: ${ctx.message}`);
+		inboxLogger.debug(inspect(e));
+
 		return;
 	}

@@ -76,8 +80,8 @@ function inbox(ctx: Router.RouterContext) {
 			.toLowerCase()
 			.match(/^((dsa|rsa|ecdsa)-(sha256|sha384|sha512)|ed25519-sha512|hs2019)$/)
 	) {
-		serverLogger.warn(
-			`inbox: invalid signature algorithm ${signature.algorithm}`,
+		inboxLogger.info(
+			`rejecting signature: unknown algorithm (${signature.algorithm})`,
 		);
 		ctx.status = 401;
 		ctx.message = "Invalid Signature Algorithm";

@@ -92,8 +96,8 @@ function inbox(ctx: Router.RouterContext) {
 	const digest = ctx.req.headers.digest;

 	if (typeof digest !== "string") {
-		serverLogger.warn(
-			"inbox: zero or more than one digest header(s) are present",
+		inboxLogger.info(
+			"rejecting invalid signature: zero or more than one digest header(s)",
 		);
 		ctx.status = 401;
 		ctx.message = "Invalid Digest Header";

@@ -103,7 +107,7 @@ function inbox(ctx: Router.RouterContext) {
 	const match = digest.match(/^([0-9A-Za-z-]+)=(.+)$/);

 	if (match == null) {
-		serverLogger.warn("inbox: unrecognized digest header");
+		inboxLogger.info("rejecting signature: unrecognized digest header");
 		ctx.status = 401;
 		ctx.message = "Invalid Digest Header";
 		return;

@@ -113,7 +117,7 @@ function inbox(ctx: Router.RouterContext) {
 	const expectedDigest = match[2];

 	if (digestAlgo.toUpperCase() !== "SHA-256") {
-		serverLogger.warn("inbox: unsupported digest algorithm");
+		inboxLogger.info("rejecting signature: unsupported digest algorithm");
 		ctx.status = 401;
 		ctx.message = "Unsupported Digest Algorithm";
 		return;

@@ -125,7 +129,7 @@ function inbox(ctx: Router.RouterContext) {
 		.digest("base64");

 	if (expectedDigest !== actualDigest) {
-		serverLogger.warn("inbox: Digest Mismatch");
+		inboxLogger.info("rejecting invalid signature: Digest Mismatch");
 		ctx.status = 401;
 		ctx.message = "Digest Missmatch";
 		return;

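The inbox hunks above keep the existing digest check and only change where and at what level it logs: the request must carry exactly one Digest header, the algorithm must be SHA-256, and the value must match a base64 SHA-256 hash of the raw request body recomputed locally. A condensed sketch of that check; verifyDigest, digestHeader, and rawBody are hypothetical names standing in for the inline logic and the Koa context the real handler already has.

	import { createHash } from "node:crypto";

	// Returns true when the Digest header matches the request body.
	function verifyDigest(digestHeader: string, rawBody: string): boolean {
		// same header format as the regex used in the handler above
		const match = digestHeader.match(/^([0-9A-Za-z-]+)=(.+)$/);
		if (match == null) return false; // unrecognized digest header

		const [, digestAlgo, expectedDigest] = match;
		if (digestAlgo.toUpperCase() !== "SHA-256") return false; // unsupported algorithm

		const actualDigest = createHash("sha256").update(rawBody).digest("base64");
		return expectedDigest === actualDigest;
	}
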
@ -215,7 +219,9 @@ router.get("/notes/:note", async (ctx, next) => {
|
|||
serverLogger.debug(JSON.stringify(remoteUser, null, 2));
|
||||
|
||||
if (remoteUser == null) {
|
||||
serverLogger.debug("Rejecting: no user");
|
||||
serverLogger.info(
|
||||
"rejecting fetch attempt of private post: no authentication",
|
||||
);
|
||||
ctx.status = 401;
|
||||
return;
|
||||
}
|
||||
|
@ -225,14 +231,14 @@ router.get("/notes/:note", async (ctx, next) => {
|
|||
serverLogger.debug(JSON.stringify(relation, null, 2));
|
||||
|
||||
if (!relation.isFollowing || relation.isBlocked) {
|
||||
serverLogger.debug(
|
||||
"Rejecting: authenticated user is not following us or was blocked by us",
|
||||
serverLogger.info(
|
||||
"rejecting fetch attempt of private post: user is not a follower or is blocked",
|
||||
);
|
||||
ctx.status = 403;
|
||||
return;
|
||||
}
|
||||
|
||||
serverLogger.debug("Accepting: access criteria met");
|
||||
serverLogger.debug("accepting fetch attempt of private post");
|
||||
}
|
||||
|
||||
ctx.body = renderActivity(await renderNote(note, false));
|
||||
|
|
|
@ -2,7 +2,7 @@ import {
|
|||
publishMainStream,
|
||||
publishGroupMessagingStream,
|
||||
} from "@/services/stream.js";
|
||||
import { publishMessagingStream } from "@/services/stream.js";
|
||||
import { publishToChatStream, ChatEvent } from "backend-rs";
|
||||
import { publishMessagingIndexStream } from "@/services/stream.js";
|
||||
import { pushNotification } from "@/services/push-notification.js";
|
||||
import type { User, IRemoteUser } from "@/models/entities/user.js";
|
||||
|
@ -54,7 +54,7 @@ export async function readUserMessagingMessage(
|
|||
);
|
||||
|
||||
// Publish event
|
||||
publishMessagingStream(otherpartyId, userId, "read", messageIds);
|
||||
publishToChatStream(otherpartyId, userId, ChatEvent.Read, messageIds);
|
||||
publishMessagingIndexStream(userId, "read", messageIds);
|
||||
|
||||
if (!(await Users.getHasUnreadMessagingMessage(userId))) {
|
||||
|
|
|
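The read-marker hunks above swap the TypeScript publishMessagingStream helper for publishToChatStream from the backend-rs native module, with the event name moving from a plain string to the ChatEvent enum. A minimal sketch of the new call, using only the import and argument order shown in the hunks; the markAsRead wrapper and its parameters are hypothetical.

	import { publishToChatStream, ChatEvent } from "backend-rs";

	// `otherpartyId`, `userId`, and `messageIds` stand in for the values the real
	// service already has at this point.
	function markAsRead(otherpartyId: string, userId: string, messageIds: string[]): void {
		// notify the other party's chat stream that these messages have been read
		publishToChatStream(otherpartyId, userId, ChatEvent.Read, messageIds);
	}
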
@@ -1,6 +1,5 @@
 import { config } from "@/config.js";
 import { fetchMeta } from "backend-rs";
-import { MAX_NOTE_TEXT_LENGTH, MAX_CAPTION_TEXT_LENGTH } from "@/const.js";
 import define from "@/server/api/define.js";

 export const meta = {

@@ -506,8 +505,8 @@ export default define(meta, paramDef, async () => {
 		iconUrl: instance.iconUrl,
 		backgroundImageUrl: instance.backgroundImageUrl,
 		logoImageUrl: instance.logoImageUrl,
-		maxNoteTextLength: MAX_NOTE_TEXT_LENGTH, // 後方互換性のため
-		maxCaptionTextLength: MAX_CAPTION_TEXT_LENGTH,
+		maxNoteTextLength: config.maxNoteLength, // for backward compatibility
+		maxCaptionTextLength: config.maxCaptionLength,
 		defaultLightTheme: instance.defaultLightTheme,
 		defaultDarkTheme: instance.defaultDarkTheme,
 		enableEmail: instance.enableEmail,

|
@ -1,6 +1,6 @@
|
|||
import define from "@/server/api/define.js";
|
||||
import Resolver from "@/remote/activitypub/resolver.js";
|
||||
import { HOUR } from "@/const.js";
|
||||
import { HOUR } from "backend-rs";
|
||||
|
||||
export const meta = {
|
||||
tags: ["federation"],
|
||||
|
|
|
@ -4,13 +4,12 @@ import { createNote } from "@/remote/activitypub/models/note.js";
|
|||
import DbResolver from "@/remote/activitypub/db-resolver.js";
|
||||
import Resolver from "@/remote/activitypub/resolver.js";
|
||||
import { ApiError } from "@/server/api/error.js";
|
||||
import { extractHost, isBlockedServer } from "backend-rs";
|
||||
import { MINUTE, extractHost, isBlockedServer } from "backend-rs";
|
||||
import { Users, Notes } from "@/models/index.js";
|
||||
import type { Note } from "@/models/entities/note.js";
|
||||
import type { CacheableLocalUser, User } from "@/models/entities/user.js";
|
||||
import { isActor, isPost, getApId } from "@/remote/activitypub/type.js";
|
||||
import type { SchemaType } from "@/misc/schema.js";
|
||||
import { MINUTE } from "@/const.js";
|
||||
import { updateQuestion } from "@/remote/activitypub/models/question.js";
|
||||
import { populatePoll } from "@/models/repositories/note.js";
|
||||
import { redisClient } from "@/db/redis.js";
|
||||
|
|
|
@ -3,7 +3,7 @@ import define from "@/server/api/define.js";
|
|||
import { ApiError } from "@/server/api/error.js";
|
||||
import { getUser } from "@/server/api/common/getters.js";
|
||||
import { Blockings, NoteWatchings, Users } from "@/models/index.js";
|
||||
import { HOUR } from "@/const.js";
|
||||
import { HOUR } from "backend-rs";
|
||||
|
||||
export const meta = {
|
||||
tags: ["account"],
|
||||
|
|
|
@ -3,7 +3,7 @@ import define from "@/server/api/define.js";
|
|||
import { ApiError } from "@/server/api/error.js";
|
||||
import { getUser } from "@/server/api/common/getters.js";
|
||||
import { Blockings, Users } from "@/models/index.js";
|
||||
import { HOUR } from "@/const.js";
|
||||
import { HOUR } from "backend-rs";
|
||||
|
||||
export const meta = {
|
||||
tags: ["account"],
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { Emojis } from "@/models/index.js";
|
||||
import type { Emoji } from "@/models/entities/emoji.js";
|
||||
import { IsNull, In } from "typeorm";
|
||||
import { FILE_TYPE_BROWSERSAFE } from "@/const.js";
|
||||
import { FILE_TYPE_BROWSERSAFE } from "backend-rs";
|
||||
import define from "@/server/api/define.js";
|
||||
|
||||
export const meta = {
|
||||
|
|
|
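The endpoint hunks above only change where the HOUR and FILE_TYPE_BROWSERSAFE constants come from: they are now imported from the backend-rs native module rather than @/const.js. A small sketch of how such a time constant is typically consumed in an endpoint's rate-limit metadata; the limit values and the surrounding meta shape here are assumptions for illustration, not taken from the diff.

	import { HOUR } from "backend-rs";

	// Hypothetical endpoint meta showing the constant in a rate-limit block;
	// duration/max are example values only.
	export const meta = {
		tags: ["account"],
		limit: {
			duration: HOUR,
			max: 30,
		},
	} as const;
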
@@ -1,9 +1,8 @@
 import { addFile } from "@/services/drive/add-file.js";
 import { DriveFiles } from "@/models/index.js";
-import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
+import { config } from "@/config.js";
 import { IdentifiableError } from "@/misc/identifiable-error.js";
-import { fetchMeta } from "backend-rs";
-import { MINUTE } from "@/const.js";
+import { MINUTE, fetchMeta } from "backend-rs";
 import define from "@/server/api/define.js";
 import { apiLogger } from "@/server/api/logger.js";
 import { ApiError } from "@/server/api/error.js";

@@ -68,7 +67,7 @@ export const paramDef = {
 		comment: {
 			type: "string",
 			nullable: true,
-			maxLength: DB_MAX_IMAGE_COMMENT_LENGTH,
+			maxLength: config.maxCaptionLength,
 			default: null,
 		},
 		isSensitive: { type: "boolean", default: false },

Some files were not shown because too many files have changed in this diff.