Merge branch 'develop' into feat/schedule-create

naskya 2024-05-19 19:44:11 +09:00
commit ec152499bf
No known key found for this signature in database
GPG key ID: 712D413B3A9FED5C
199 changed files with 2542 additions and 1562 deletions


@ -59,7 +59,7 @@ test:build:
rules:
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend-rs/**/*
@ -84,7 +84,7 @@ test:build:backend_ts_only:
rules:
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend-rs/**/*
@ -94,7 +94,7 @@ test:build:backend_ts_only:
- Cargo.toml
- Cargo.lock
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend/**/*
@ -124,7 +124,7 @@ test:build:client_only:
rules:
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend-rs/**/*
@ -134,7 +134,7 @@ test:build:client_only:
- Cargo.toml
- Cargo.lock
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/client/**/*
@ -157,7 +157,7 @@ test:build:client_only:
build:container:
stage: build
image: docker.io/debian:bookworm-slim
image: quay.io/buildah/stable:latest
services: []
rules:
- if: $BUILD == 'false'
@ -181,20 +181,17 @@ build:container:
optional: true
- job: test:build:client_only
optional: true
variables:
STORAGE_DRIVER: overlay2
before_script:
- apt-get update && apt-get -y upgrade
- apt-get install -y --no-install-recommends buildah ca-certificates fuse-overlayfs
- buildah login --username "${CI_REGISTRY_USER}" --password "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
- buildah version
- buildah prune --all --force || true
- echo "${CI_REGISTRY_PASSWORD}" | buildah login --username "${CI_REGISTRY_USER}" --password-stdin "${CI_REGISTRY}"
- export IMAGE_TAG="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop:not-for-production"
- export IMAGE_CACHE="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop/cache"
script:
- |-
buildah build \
--isolation chroot \
--device /dev/fuse:rw \
--security-opt seccomp=unconfined \
--security-opt apparmor=unconfined \
--cap-add all \
--platform linux/amd64 \
--layers \
--cache-to "${IMAGE_CACHE}" \
@ -209,23 +206,17 @@ cargo:test:
rules:
- if: $TEST == 'false'
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- Cargo.toml
- Cargo.lock
- package.json
when: always
script:
- curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
- pnpm install --frozen-lockfile
- mkdir -p packages/backend-rs/built
- cp packages/backend-rs/index.js packages/backend-rs/built/index.js
- cp packages/backend-rs/index.d.ts packages/backend-rs/built/index.d.ts
- pnpm --filter='!backend-rs' run build:debug
- cargo test --doc
- cargo nextest run
@ -234,9 +225,7 @@ cargo:clippy:
rules:
- if: $TEST == 'false'
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend-rs/**/*


@ -30,6 +30,10 @@ Chiptune2.js by Simon Gündling
License: MIT
https://github.com/deskjet/chiptune2.js#license
bb8-redis by Kyle Huey
License: MIT
https://github.com/djc/bb8/blob/62597aa45ac1746780b08cb6a68cf7d65452a23a/LICENSE
Licenses for all software and software libraries installed via the Node Package Manager ("npm") can be found by running the following shell command in the root directory of this repository:
pnpm licenses list

Cargo.lock (generated)

@ -85,9 +85,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.83"
version = "1.0.84"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3"
checksum = "18b8795de6d09abb2b178fa5a9e3bb10da935750f33449a132b328b9391b2c6a"
[[package]]
name = "arbitrary"
@ -103,7 +103,7 @@ checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -154,7 +154,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -165,7 +165,7 @@ checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -211,7 +211,9 @@ name = "backend-rs"
version = "0.0.0"
dependencies = [
"argon2",
"async-trait",
"basen",
"bb8",
"bcrypt",
"chrono",
"cuid2",
@ -219,7 +221,7 @@ dependencies = [
"idna",
"image",
"isahc",
"macro_rs",
"macro-rs",
"napi",
"napi-build",
"napi-derive",
@ -239,6 +241,7 @@ dependencies = [
"sysinfo",
"thiserror",
"tokio",
"tokio-test",
"tracing",
"tracing-subscriber",
"url",
@ -297,6 +300,19 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dbe4bb73fd931c4d1aaf53b35d1286c8a948ad00ec92c8e3c856f15fd027f43"
[[package]]
name = "bb8"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df7c2093d15d6a1d33b1f972e1c5ea3177748742b97a5f392aa83a65262c6780"
dependencies = [
"async-trait",
"futures-channel",
"futures-util",
"parking_lot",
"tokio",
]
[[package]]
name = "bcrypt"
version = "0.15.1"
@ -414,7 +430,7 @@ dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
"syn_derive",
]
@ -564,7 +580,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
dependencies = [
"bytes",
"futures-core",
"memchr",
"pin-project-lite",
"tokio",
"tokio-util",
]
[[package]]
@ -711,7 +731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
dependencies = [
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -896,9 +916,9 @@ dependencies = [
[[package]]
name = "either"
version = "1.11.0"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2"
checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b"
dependencies = [
"serde",
]
@ -1203,9 +1223,9 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.15"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
dependencies = [
"cfg-if",
"js-sys",
@ -1458,7 +1478,7 @@ checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -1472,9 +1492,9 @@ dependencies = [
[[package]]
name = "instant"
version = "0.1.12"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
dependencies = [
"cfg-if",
]
@ -1487,7 +1507,7 @@ checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -1622,9 +1642,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8"
[[package]]
name = "libc"
version = "0.2.154"
version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "libfuzzer-sys"
@ -1688,9 +1708,9 @@ dependencies = [
[[package]]
name = "linux-raw-sys"
version = "0.4.13"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
[[package]]
name = "lock_api"
@ -1718,14 +1738,16 @@ dependencies = [
]
[[package]]
name = "macro_rs"
name = "macro-rs"
version = "0.0.0"
dependencies = [
"convert_case",
"napi",
"proc-macro2",
"quote",
"syn 2.0.63",
"serde",
"serde_json",
"syn 2.0.64",
"thiserror",
]
@ -1769,9 +1791,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.7.2"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7"
checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae"
dependencies = [
"adler",
"simd-adler32",
@ -1790,14 +1812,12 @@ dependencies = [
[[package]]
name = "napi"
version = "2.16.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfc300228808a0e6aea5a58115c82889240bcf8dab16fc25ad675b33e454b368"
version = "3.0.0-alpha.2"
source = "git+https://github.com/napi-rs/napi-rs.git?rev=ca2cd5c35a0c39ec4a94e93c6c5695b681046df2#ca2cd5c35a0c39ec4a94e93c6c5695b681046df2"
dependencies = [
"bitflags 2.5.0",
"chrono",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
"serde",
@ -1813,23 +1833,23 @@ checksum = "e1c0f5d67ee408a4685b61f5ab7e58605c8ae3f2b4189f0127d804ff13d5560a"
[[package]]
name = "napi-derive"
version = "2.16.4"
version = "2.16.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bb613535cde46cff231e53cd819c1694a32d48946bc2dda6b41174ace52ac08"
checksum = "e0e034ddf6155192cf83f267ede763fe6c164dfa9971585436b16173718d94c4"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.66"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da041b19246ab4240998774e987fd9a7d92cc7406b91b5eddb6691e81feac044"
checksum = "bff2c00437f3b3266391eb5e6aa25d0029187daf5caf05b8e3271468fb5ae73e"
dependencies = [
"convert_case",
"once_cell",
@ -1837,14 +1857,13 @@ dependencies = [
"quote",
"regex",
"semver",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
name = "napi-sys"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3"
source = "git+https://github.com/napi-rs/napi-rs.git?rev=ca2cd5c35a0c39ec4a94e93c6c5695b681046df2#ca2cd5c35a0c39ec4a94e93c6c5695b681046df2"
dependencies = [
"libloading",
]
@ -1966,7 +1985,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -2058,7 +2077,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -2119,7 +2138,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -2277,7 +2296,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -2459,7 +2478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8021cf59c8ec9c432cfc2526ac6b8aa508ecaf29cd415f271b8406c1b851c3fd"
dependencies = [
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -2618,12 +2637,16 @@ version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6472825949c09872e8f2c50bde59fcefc17748b6be5c90fd67cd8b4daca73bfd"
dependencies = [
"async-trait",
"bytes",
"combine",
"futures-util",
"itoa",
"percent-encoding",
"pin-project-lite",
"ryu",
"sha1_smol",
"socket2",
"tokio",
"tokio-util",
"url",
]
@ -2921,7 +2944,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -2962,7 +2985,7 @@ dependencies = [
"proc-macro2",
"quote",
"sea-bae",
"syn 2.0.63",
"syn 2.0.64",
"unicode-ident",
]
@ -3053,7 +3076,7 @@ checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -3100,12 +3123,6 @@ dependencies = [
"digest",
]
[[package]]
name = "sha1_smol"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
[[package]]
name = "sha2"
version = "0.10.8"
@ -3532,7 +3549,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -3554,9 +3571,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.63"
version = "2.0.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704"
checksum = "7ad3dee41f36859875573074334c200d1add8e4a87bb37113ebd31d926b7b11f"
dependencies = [
"proc-macro2",
"quote",
@ -3572,7 +3589,7 @@ dependencies = [
"proc-macro-error",
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -3641,22 +3658,22 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.60"
version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18"
checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.60"
version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524"
checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -3753,7 +3770,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -3767,6 +3784,32 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
dependencies = [
"async-stream",
"bytes",
"futures-core",
"tokio",
"tokio-stream",
]
[[package]]
name = "tokio-util"
version = "0.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1"
dependencies = [
"bytes",
"futures-core",
"futures-sink",
"pin-project-lite",
"tokio",
]
[[package]]
name = "toml"
version = "0.8.13"
@ -3832,7 +3875,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]
@ -4046,7 +4089,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
"wasm-bindgen-shared",
]
@ -4068,7 +4111,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@ -4082,7 +4125,7 @@ checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
[[package]]
name = "web-push"
version = "0.10.1"
source = "git+https://github.com/pimeys/rust-web-push?rev=40febe4085e3cef9cdfd539c315e3e945aba0656#40febe4085e3cef9cdfd539c315e3e945aba0656"
source = "git+https://github.com/pimeys/rust-web-push.git?rev=40febe4085e3cef9cdfd539c315e3e945aba0656#40febe4085e3cef9cdfd539c315e3e945aba0656"
dependencies = [
"async-trait",
"base64 0.13.1",
@ -4352,7 +4395,7 @@ checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.63",
"syn 2.0.64",
]
[[package]]


@ -3,14 +3,16 @@ members = ["packages/backend-rs", "packages/macro-rs"]
resolver = "2"
[workspace.dependencies]
macro_rs = { path = "packages/macro-rs" }
macro-rs = { path = "packages/macro-rs" }
napi = { version = "2.16.6", default-features = false }
napi-derive = "2.16.4"
napi = { git = "https://github.com/napi-rs/napi-rs.git", rev = "ca2cd5c35a0c39ec4a94e93c6c5695b681046df2", default-features = false }
napi-derive = "2.16.5"
napi-build = "2.1.3"
argon2 = "0.5.3"
async-trait = "0.1.80"
basen = "0.1.0"
bb8 = "0.8.3"
bcrypt = "0.15.1"
chrono = "0.4.38"
convert_case = "0.6.0"
@ -26,7 +28,7 @@ pretty_assertions = "1.4.0"
proc-macro2 = "1.0.82"
quote = "1.0.36"
rand = "0.8.5"
redis = "0.25.3"
redis = { version = "0.25.3", default-features = false }
regex = "1.10.4"
rmp-serde = "1.3.0"
sea-orm = "0.12.15"
@ -34,15 +36,16 @@ serde = "1.0.202"
serde_json = "1.0.117"
serde_yaml = "0.9.34"
strum = "0.26.2"
syn = "2.0.63"
syn = "2.0.64"
sysinfo = "0.30.12"
thiserror = "1.0.60"
thiserror = "1.0.61"
tokio = "1.37.0"
tokio-test = "0.4.4"
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
url = "2.5.0"
urlencoding = "2.1.3"
web-push = { git = "https://github.com/pimeys/rust-web-push", rev = "40febe4085e3cef9cdfd539c315e3e945aba0656" }
web-push = { git = "https://github.com/pimeys/rust-web-push.git", rev = "40febe4085e3cef9cdfd539c315e3e945aba0656" }
[profile.release]
lto = true


@ -5,6 +5,10 @@ Critical security updates are indicated by the :warning: icon.
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
## Unreleased
- Fix bugs
## [v20240516](https://firefish.dev/firefish/firefish/-/merge_requests/10854/commits)
- Improve timeline UX (you can restore the original appearance in the settings)


@ -2,6 +2,10 @@ BEGIN;
DELETE FROM "migrations" WHERE name IN (
'CreateScheduledNoteCreation1714728200194',
'AddBackTimezone1715351290096',
'UserprofileJsonbToArray1714270605574',
'DropUnusedUserprofileColumns1714259023878',
'AntennaJsonbToArray1714192520471',
'AddUserProfileLanguage1714888400293',
'DropUnusedIndexes1714643926317',
'AlterAkaType1714099399879',
@ -31,6 +35,45 @@ DELETE FROM "migrations" WHERE name IN (
-- create-scheduled-note-creation
DROP TABLE "scheduled_note_creation";
-- userprofile-jsonb-to-array
ALTER TABLE "user_profile" RENAME COLUMN "mutedInstances" TO "mutedInstances_old";
ALTER TABLE "user_profile" ADD COLUMN "mutedInstances" jsonb NOT NULL DEFAULT '[]';
UPDATE "user_profile" SET "mutedInstances" = to_jsonb("mutedInstances_old");
ALTER TABLE "user_profile" DROP COLUMN "mutedInstances_old";
ALTER TABLE "user_profile" RENAME COLUMN "mutedWords" TO "mutedWords_old";
ALTER TABLE "user_profile" ADD COLUMN "mutedWords" jsonb NOT NULL DEFAULT '[]';
CREATE TEMP TABLE "BCrsGgLCUeMMLARy" ("userId" character varying(32), "kws" jsonb NOT NULL DEFAULT '[]');
INSERT INTO "BCrsGgLCUeMMLARy" ("userId", "kws") SELECT "userId", jsonb_agg("X"."w") FROM (SELECT "userId", to_jsonb(string_to_array(unnest("mutedWords_old"), ' ')) AS "w" FROM "user_profile") AS "X" GROUP BY "userId";
UPDATE "user_profile" SET "mutedWords" = "kws" FROM "BCrsGgLCUeMMLARy" WHERE "user_profile"."userId" = "BCrsGgLCUeMMLARy"."userId";
ALTER TABLE "user_profile" DROP COLUMN "mutedWords_old";
-- drop-unused-userprofile-columns
ALTER TABLE "user_profile" ADD "room" jsonb NOT NULL DEFAULT '{}';
COMMENT ON COLUMN "user_profile"."room" IS 'The room data of the User.';
ALTER TABLE "user_profile" ADD "clientData" jsonb NOT NULL DEFAULT '{}';
COMMENT ON COLUMN "user_profile"."clientData" IS 'The client-specific data of the User.';
-- antenna-jsonb-to-array
UPDATE "antenna" SET "instances" = '{""}' WHERE "instances" = '{}';
ALTER TABLE "antenna" RENAME COLUMN "instances" TO "instances_old";
ALTER TABLE "antenna" ADD COLUMN "instances" jsonb NOT NULL DEFAULT '[]';
UPDATE "antenna" SET "instances" = to_jsonb("instances_old");
ALTER TABLE "antenna" DROP COLUMN "instances_old";
UPDATE "antenna" SET "keywords" = '{""}' WHERE "keywords" = '{}';
ALTER TABLE "antenna" RENAME COLUMN "keywords" TO "keywords_old";
ALTER TABLE "antenna" ADD COLUMN "keywords" jsonb NOT NULL DEFAULT '[]';
CREATE TEMP TABLE "QvPNcMitBFkqqBgm" ("id" character varying(32), "kws" jsonb NOT NULL DEFAULT '[]');
INSERT INTO "QvPNcMitBFkqqBgm" ("id", "kws") SELECT "id", jsonb_agg("X"."w") FROM (SELECT "id", to_jsonb(string_to_array(unnest("keywords_old"), ' ')) AS "w" FROM "antenna") AS "X" GROUP BY "id";
UPDATE "antenna" SET "keywords" = "kws" FROM "QvPNcMitBFkqqBgm" WHERE "antenna"."id" = "QvPNcMitBFkqqBgm"."id";
ALTER TABLE "antenna" DROP COLUMN "keywords_old";
UPDATE "antenna" SET "excludeKeywords" = '{""}' WHERE "excludeKeywords" = '{}';
ALTER TABLE "antenna" RENAME COLUMN "excludeKeywords" TO "excludeKeywords_old";
ALTER TABLE "antenna" ADD COLUMN "excludeKeywords" jsonb NOT NULL DEFAULT '[]';
CREATE TEMP TABLE "MZvVSjHzYcGXmGmz" ("id" character varying(32), "kws" jsonb NOT NULL DEFAULT '[]');
INSERT INTO "MZvVSjHzYcGXmGmz" ("id", "kws") SELECT "id", jsonb_agg("X"."w") FROM (SELECT "id", to_jsonb(string_to_array(unnest("excludeKeywords_old"), ' ')) AS "w" FROM "antenna") AS "X" GROUP BY "id";
UPDATE "antenna" SET "excludeKeywords" = "kws" FROM "MZvVSjHzYcGXmGmz" WHERE "antenna"."id" = "MZvVSjHzYcGXmGmz"."id";
ALTER TABLE "antenna" DROP COLUMN "excludeKeywords_old";
-- drop-unused-indexes
CREATE INDEX "IDX_01f4581f114e0ebd2bbb876f0b" ON "note_reaction" ("createdAt");
CREATE INDEX "IDX_0610ebcfcfb4a18441a9bcdab2" ON "poll" ("userId");
@ -67,83 +110,6 @@ CREATE INDEX "IDX_8e3bbbeb3df04d1a8105da4c8f" ON "note" USING "pgroonga" ("cw" p
ALTER TABLE "messaging_message" DROP CONSTRAINT "FK_535def119223ac05ad3fa9ef64b";
ALTER TABLE "messaging_message" ADD CONSTRAINT "FK_535def119223ac05ad3fa9ef64b" FOREIGN KEY ("fileId") REFERENCES "drive_file"("id") ON DELETE CASCADE ON UPDATE NO ACTION;
-- drop-time-zone
ALTER TABLE "abuse_user_report" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "access_token" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "access_token" ALTER "lastUsedAt" TYPE timestamp with time zone;
ALTER TABLE "ad" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "ad" ALTER "expiresAt" TYPE timestamp with time zone;
ALTER TABLE "announcement" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "announcement" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "announcement_read" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "antenna" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "app" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "attestation_challenge" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "auth_session" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "blocking" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "channel" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "channel" ALTER "lastNotedAt" TYPE timestamp with time zone;
ALTER TABLE "channel_following" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "channel_note_pining" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "clip" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "drive_file" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "drive_folder" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "emoji" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "following" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "follow_request" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "gallery_like" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "gallery_post" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "gallery_post" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "instance" ALTER "caughtAt" TYPE timestamp with time zone;
ALTER TABLE "instance" ALTER "infoUpdatedAt" TYPE timestamp with time zone;
ALTER TABLE "instance" ALTER "lastCommunicatedAt" TYPE timestamp with time zone;
ALTER TABLE "instance" ALTER "latestRequestReceivedAt" TYPE timestamp with time zone;
ALTER TABLE "instance" ALTER "latestRequestSentAt" TYPE timestamp with time zone;
ALTER TABLE "messaging_message" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "moderation_log" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "muting" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "muting" ALTER "expiresAt" TYPE timestamp with time zone;
ALTER TABLE "note" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "note" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "note_edit" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "note_favorite" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "note_reaction" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "note_thread_muting" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "note_watching" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "notification" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "page" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "page" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "page_like" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "password_reset_request" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "poll" ALTER "expiresAt" TYPE timestamp with time zone;
ALTER TABLE "poll_vote" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "promo_note" ALTER "expiresAt" TYPE timestamp with time zone;
ALTER TABLE "promo_read" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "registration_ticket" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "registry_item" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "registry_item" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "renote_muting" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "reply_muting" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "signin" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "sw_subscription" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "used_username" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user" ALTER "lastActiveDate" TYPE timestamp with time zone;
ALTER TABLE "user" ALTER "lastFetchedAt" TYPE timestamp with time zone;
ALTER TABLE "user" ALTER "updatedAt" TYPE timestamp with time zone;
ALTER TABLE "user_group" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_group_invitation" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_group_invite" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_group_joining" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_ip" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_list" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_list_joining" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_note_pining" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_pending" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "user_security_key" ALTER "lastUsed" TYPE timestamp with time zone;
ALTER TABLE "webhook" ALTER "createdAt" TYPE timestamp with time zone;
ALTER TABLE "webhook" ALTER "latestSentAt" TYPE timestamp with time zone;
-- expand-note-edit
ALTER TABLE "note_edit" DROP COLUMN "emojis";


@ -2,6 +2,28 @@
You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md).
## Unreleased
### For all users
We regret to inform you that this upgrade may take a long time, as it fixes a regression we introduced. The time required to upgrade should be the same as for [v20240413](<https://firefish.dev/firefish/firefish/-/blob/main/docs/notice-for-admins.md#v20240413>). This is not a security fix, so please upgrade your server when you have enough time. We are sorry for the inconvenience.
<details>
There are two data types in PostgreSQL to store time: `timestamptz` (`timestamp with time zone`) and `timestamp` (`timestamp without time zone`) [[ref]](<https://www.postgresql.org/docs/current/datatype-datetime.html>).
In Node.js, we manipulate the database using [TypeORM](<https://typeorm.io/>). TypeORM handles time data as a JavaScript `Date` object. Since `Date` doesn't have timezone information [[ref]](<https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date#date_components_and_time_zones>), we don't use the timezone information in the Node.js backend, and both `timestamptz` and `timestamp` behave in the same way. (Technically, the type names are a little confusing, and `timestamptz` (`timestamp with time zone`) doesn't store the timezone data. Please read PostgreSQL documentation for more information.)
In Rust, we manipulate the database using [SeaORM](<https://www.sea-ql.org/SeaORM/>), which does distinguish between `timestamptz` and `timestamp`. `timestamptz` is converted to [`DateTime<FixedOffset>`](<https://docs.rs/chrono/latest/chrono/struct.DateTime.html>) type, whereas `timestamp` is converted to [`NaiveDateTime`](<https://docs.rs/chrono/latest/chrono/struct.NaiveDateTime.html>).
We use [napi-rs](<https://napi.rs/>) to implement some of the backend features in Rust, and napi-rs did not support `DateTime<FixedOffset>`. We used to store time data as `timestamptz`, but converted it to `timestamp` for this reason. As we don't use timezone data, we thought this was okay, and indeed it worked fine.
However, we did not consider the case of migrating a server (hardware) to another timezone. With `timestamp`, there may be inconsistencies in the time data if you migrate your server to another system with a different timezone setting (Docker/Podman users should not be affected by this, as UTC is always used in containers unless you explicitly set one).
Therefore, we have contributed to napi-rs to add support for `DateTime<FixedOffset>` (<https://github.com/napi-rs/napi-rs/pull/2074>) and decided to migrate back from `timestamp` to `timestamptz` to properly address this problem. The migration process takes time roughly proportional to the number of stored posts.
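As a rough illustration of the distinction described above (a standalone chrono sketch, not code from this repository): a `timestamp` column comes back as a `NaiveDateTime`, whose meaning depends on whatever offset the reading side assumes, while a `timestamptz` column maps to `DateTime<FixedOffset>`, which carries its offset with it.

```rust
// Illustrative only (not Firefish or SeaORM code): why a bare `timestamp` is
// ambiguous once Rust reads it, while `timestamptz` is not.
use chrono::{DateTime, FixedOffset, NaiveDate, TimeZone};

fn main() {
    // What a `timestamp without time zone` column yields: wall-clock fields only.
    let naive = NaiveDate::from_ymd_opt(2024, 5, 19)
        .unwrap()
        .and_hms_opt(19, 44, 11)
        .unwrap();

    // The same wall-clock value denotes different instants depending on which
    // offset the reading process assumes (e.g. after moving the server to a
    // machine with a different timezone setting).
    let read_as_utc = FixedOffset::east_opt(0)
        .unwrap()
        .from_local_datetime(&naive)
        .unwrap();
    let read_as_jst = FixedOffset::east_opt(9 * 3600)
        .unwrap()
        .from_local_datetime(&naive)
        .unwrap();
    assert_ne!(read_as_utc, read_as_jst); // nine hours apart

    // What a `timestamp with time zone` column maps to on the Rust side:
    // the offset travels with the value, so there is nothing to reinterpret.
    let aware: DateTime<FixedOffset> = "2024-05-19T19:44:11+09:00".parse().unwrap();
    println!("{naive} (ambiguous) vs {aware} (unambiguous)");
}
```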
</details>
## v20240516
### For all users


@ -13,13 +13,15 @@ ci = []
crate-type = ["cdylib", "lib"]
[dependencies]
macro_rs = { workspace = true }
macro-rs = { workspace = true }
napi = { workspace = true, optional = true, default-features = false, features = ["napi9", "tokio_rt", "chrono_date", "serde-json"] }
napi-derive = { workspace = true, optional = true }
argon2 = { workspace = true, features = ["std"] }
async-trait = { workspace = true }
basen = { workspace = true }
bb8 = { workspace = true }
bcrypt = { workspace = true }
chrono = { workspace = true }
cuid2 = { workspace = true }
@ -31,7 +33,7 @@ nom-exif = { workspace = true }
once_cell = { workspace = true }
openssl = { workspace = true, features = ["vendored"] }
rand = { workspace = true }
redis = { workspace = true }
redis = { workspace = true, default-features = false, features = ["streams", "tokio-comp"] }
regex = { workspace = true }
rmp-serde = { workspace = true }
sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] }
@ -50,6 +52,7 @@ web-push = { workspace = true }
[dev-dependencies]
pretty_assertions = { workspace = true }
tokio-test = { workspace = true }
[build-dependencies]
napi-build = { workspace = true }


@ -1,5 +1,9 @@
extern crate napi_build;
fn main() {
// watch the version in the project root package.json
println!("cargo:rerun-if-changed=../../package.json");
// napi
napi_build::setup();
}


@ -191,18 +191,6 @@ export interface Config {
authUrl: string
driveUrl: string
userAgent: string
clientEntry: Manifest
}
export interface Manifest {
file: string
name: string
src: string
isEntry: boolean
isDynamicEntry: boolean
imports: Array<string>
dynamicImports: Array<string>
css: Array<string>
assets: Array<string>
}
export function loadConfig(): Config
export interface Acct {
@ -211,9 +199,9 @@ export interface Acct {
}
export function stringToAcct(acct: string): Acct
export function acctToString(acct: Acct): string
export function showServerInfo(): void
export function greet(): void
export function initializeRustLogger(): void
export function addNoteToAntenna(antennaId: string, note: Note): void
export function showServerInfo(): void
/**
* Checks if a server is blocked.
*
@ -236,15 +224,7 @@ export function isSilencedServer(host: string): Promise<boolean>
* `host` - punycoded instance host
*/
export function isAllowedServer(host: string): Promise<boolean>
export interface NoteLikeForCheckWordMute {
fileIds: Array<string>
userId: string | null
text: string | null
cw: string | null
renoteId: string | null
replyId: string | null
}
export function checkWordMute(note: NoteLikeForCheckWordMute, mutedWordLists: Array<Array<string>>, mutedPatterns: Array<string>): Promise<boolean>
export function checkWordMute(note: NoteLike, mutedWords: Array<string>, mutedPatterns: Array<string>): Promise<boolean>
export function getFullApAccount(username: string, host?: string | undefined | null): string
export function isSelfHost(host?: string | undefined | null): boolean
export function isSameOrigin(uri: string): boolean
@ -260,6 +240,15 @@ export interface ImageSize {
height: number
}
export function getImageSizeFromUrl(url: string): Promise<ImageSize>
/** TODO: handle name collisions better */
export interface NoteLikeForAllTexts {
fileIds: Array<string>
userId: string
text: string | null
cw: string | null
renoteId: string | null
replyId: string | null
}
export interface NoteLikeForGetNoteSummary {
fileIds: Array<string>
text: string | null
@ -267,28 +256,7 @@ export interface NoteLikeForGetNoteSummary {
hasPoll: boolean
}
export function getNoteSummary(note: NoteLikeForGetNoteSummary): string
export interface Cpu {
model: string
cores: number
}
export interface Memory {
/** Total memory amount in bytes */
total: number
/** Used memory amount in bytes */
used: number
/** Available (for (re)use) memory amount in bytes */
available: number
}
export interface Storage {
/** Total storage space in bytes */
total: number
/** Used storage space in bytes */
used: number
}
export function cpuInfo(): Cpu
export function cpuUsage(): number
export function memoryUsage(): Memory
export function storageUsage(): Storage | null
export function isQuote(note: Note): boolean
export function isSafeUrl(url: string): boolean
export function latestVersion(): Promise<string>
export function toMastodonId(firefishId: string): string | null
@ -320,9 +288,31 @@ export function countReactions(reactions: Record<string, number>): Record<string
export function toDbReaction(reaction?: string | undefined | null, host?: string | undefined | null): Promise<string>
/** Delete all entries in the "attestation_challenge" table created at more than 5 minutes ago */
export function removeOldAttestationChallenges(): Promise<void>
export interface Cpu {
model: string
cores: number
}
export interface Memory {
/** Total memory amount in bytes */
total: number
/** Used memory amount in bytes */
used: number
/** Available (for (re)use) memory amount in bytes */
available: number
}
export interface Storage {
/** Total storage space in bytes */
total: number
/** Used storage space in bytes */
used: number
}
export function cpuInfo(): Cpu
export function cpuUsage(): number
export function memoryUsage(): Memory
export function storageUsage(): Storage | null
export interface AbuseUserReport {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
targetUserId: string
reporterId: string
assigneeId: string | null
@ -334,12 +324,12 @@ export interface AbuseUserReport {
}
export interface AccessToken {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
token: string
hash: string
userId: string
appId: string | null
lastUsedAt: Date | null
lastUsedAt: DateTimeWithTimeZone | null
session: string | null
name: string | null
description: string | null
@ -349,8 +339,8 @@ export interface AccessToken {
}
export interface Ad {
id: string
createdAt: Date
expiresAt: Date
createdAt: DateTimeWithTimeZone
expiresAt: DateTimeWithTimeZone
place: string
priority: string
url: string
@ -360,11 +350,11 @@ export interface Ad {
}
export interface Announcement {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
text: string
title: string
imageUrl: string | null
updatedAt: Date | null
updatedAt: DateTimeWithTimeZone | null
showPopup: boolean
isGoodNews: boolean
}
@ -372,16 +362,15 @@ export interface AnnouncementRead {
id: string
userId: string
announcementId: string
createdAt: Date
createdAt: DateTimeWithTimeZone
}
export interface Antenna {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
name: string
src: AntennaSrcEnum
userListId: string | null
keywords: Json
withFile: boolean
expression: string | null
notify: boolean
@ -389,12 +378,13 @@ export interface Antenna {
withReplies: boolean
userGroupJoiningId: string | null
users: Array<string>
excludeKeywords: Json
instances: Json
instances: Array<string>
keywords: Array<string>
excludeKeywords: Array<string>
}
export interface App {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string | null
secret: string
name: string
@ -406,26 +396,26 @@ export interface AttestationChallenge {
id: string
userId: string
challenge: string
createdAt: Date
createdAt: DateTimeWithTimeZone
registrationChallenge: boolean
}
export interface AuthSession {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
token: string
userId: string | null
appId: string
}
export interface Blocking {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
blockeeId: string
blockerId: string
}
export interface Channel {
id: string
createdAt: Date
lastNotedAt: Date | null
createdAt: DateTimeWithTimeZone
lastNotedAt: DateTimeWithTimeZone | null
userId: string | null
name: string
description: string | null
@ -435,19 +425,19 @@ export interface Channel {
}
export interface ChannelFollowing {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
followeeId: string
followerId: string
}
export interface ChannelNotePining {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
channelId: string
noteId: string
}
export interface Clip {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
name: string
isPublic: boolean
@ -460,7 +450,7 @@ export interface ClipNote {
}
export interface DriveFile {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string | null
userHost: string | null
md5: string
@ -489,14 +479,14 @@ export interface DriveFile {
}
export interface DriveFolder {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
name: string
userId: string | null
parentId: string | null
}
export interface Emoji {
id: string
updatedAt: Date | null
updatedAt: DateTimeWithTimeZone | null
name: string
host: string | null
originalUrl: string
@ -511,7 +501,7 @@ export interface Emoji {
}
export interface FollowRequest {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
followeeId: string
followerId: string
requestId: string | null
@ -524,7 +514,7 @@ export interface FollowRequest {
}
export interface Following {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
followeeId: string
followerId: string
followerHost: string | null
@ -536,14 +526,14 @@ export interface Following {
}
export interface GalleryLike {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
postId: string
}
export interface GalleryPost {
id: string
createdAt: Date
updatedAt: Date
createdAt: DateTimeWithTimeZone
updatedAt: DateTimeWithTimeZone
title: string
description: string | null
userId: string
@ -570,16 +560,16 @@ export interface Hashtag {
}
export interface Instance {
id: string
caughtAt: Date
caughtAt: DateTimeWithTimeZone
host: string
usersCount: number
notesCount: number
followingCount: number
followersCount: number
latestRequestSentAt: Date | null
latestRequestSentAt: DateTimeWithTimeZone | null
latestStatus: number | null
latestRequestReceivedAt: Date | null
lastCommunicatedAt: Date
latestRequestReceivedAt: DateTimeWithTimeZone | null
lastCommunicatedAt: DateTimeWithTimeZone
isNotResponding: boolean
softwareName: string | null
softwareVersion: string | null
@ -588,7 +578,7 @@ export interface Instance {
description: string | null
maintainerName: string | null
maintainerEmail: string | null
infoUpdatedAt: Date | null
infoUpdatedAt: DateTimeWithTimeZone | null
isSuspended: boolean
iconUrl: string | null
themeColor: string | null
@ -596,7 +586,7 @@ export interface Instance {
}
export interface MessagingMessage {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
recipientId: string | null
text: string | null
@ -700,7 +690,7 @@ export interface Migrations {
}
export interface ModerationLog {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
type: string
info: Json
@ -713,14 +703,14 @@ export interface MutedNote {
}
export interface Muting {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
muteeId: string
muterId: string
expiresAt: Date | null
expiresAt: DateTimeWithTimeZone | null
}
export interface Note {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
replyId: string | null
renoteId: string | null
text: string | null
@ -750,7 +740,7 @@ export interface Note {
url: string | null
channelId: string | null
threadId: string | null
updatedAt: Date | null
updatedAt: DateTimeWithTimeZone | null
lang: string | null
}
export interface NoteEdit {
@ -759,12 +749,12 @@ export interface NoteEdit {
text: string | null
cw: string | null
fileIds: Array<string>
updatedAt: Date
updatedAt: DateTimeWithTimeZone
emojis: Array<string>
}
export interface NoteFavorite {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
noteId: string
}
@ -775,14 +765,14 @@ export interface NoteFile {
}
export interface NoteReaction {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
noteId: string
reaction: string
}
export interface NoteThreadMuting {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
threadId: string
}
@ -797,14 +787,14 @@ export interface NoteUnread {
}
export interface NoteWatching {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
noteId: string
noteUserId: string
}
export interface Notification {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
notifieeId: string
notifierId: string | null
isRead: boolean
@ -821,8 +811,8 @@ export interface Notification {
}
export interface Page {
id: string
createdAt: Date
updatedAt: Date
createdAt: DateTimeWithTimeZone
updatedAt: DateTimeWithTimeZone
title: string
name: string
summary: string | null
@ -841,19 +831,19 @@ export interface Page {
}
export interface PageLike {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
pageId: string
}
export interface PasswordResetRequest {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
token: string
userId: string
}
export interface Poll {
noteId: string
expiresAt: Date | null
expiresAt: DateTimeWithTimeZone | null
multiple: boolean
choices: Array<string>
votes: Array<number>
@ -863,31 +853,31 @@ export interface Poll {
}
export interface PollVote {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
noteId: string
choice: number
}
export interface PromoNote {
noteId: string
expiresAt: Date
expiresAt: DateTimeWithTimeZone
userId: string
}
export interface PromoRead {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
noteId: string
}
export interface RegistrationTicket {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
code: string
}
export interface RegistryItem {
id: string
createdAt: Date
updatedAt: Date
createdAt: DateTimeWithTimeZone
updatedAt: DateTimeWithTimeZone
userId: string
key: string
scope: Array<string>
@ -901,13 +891,13 @@ export interface Relay {
}
export interface RenoteMuting {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
muteeId: string
muterId: string
}
export interface ReplyMuting {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
muteeId: string
muterId: string
}
@ -993,7 +983,7 @@ export enum UserProfileMutingnotificationtypesEnum {
}
export interface Signin {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
ip: string
headers: Json
@ -1001,7 +991,7 @@ export interface Signin {
}
export interface SwSubscription {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
endpoint: string
auth: string
@ -1010,13 +1000,13 @@ export interface SwSubscription {
}
export interface UsedUsername {
username: string
createdAt: Date
createdAt: DateTimeWithTimeZone
}
export interface User {
id: string
createdAt: Date
updatedAt: Date | null
lastFetchedAt: Date | null
createdAt: DateTimeWithTimeZone
updatedAt: DateTimeWithTimeZone | null
lastFetchedAt: DateTimeWithTimeZone | null
username: string
usernameLower: string
name: string | null
@ -1042,7 +1032,7 @@ export interface User {
token: string | null
isExplorable: boolean
followersUri: string | null
lastActiveDate: Date | null
lastActiveDate: DateTimeWithTimeZone | null
hideOnlineStatus: boolean
isDeleted: boolean
driveCapacityOverrideMb: number | null
@ -1054,32 +1044,32 @@ export interface User {
}
export interface UserGroup {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
name: string
userId: string
isPrivate: boolean
}
export interface UserGroupInvitation {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
userGroupId: string
}
export interface UserGroupInvite {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
userGroupId: string
}
export interface UserGroupJoining {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
userGroupId: string
}
export interface UserIp {
id: number
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
ip: string
}
@ -1090,25 +1080,25 @@ export interface UserKeypair {
}
export interface UserList {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
name: string
}
export interface UserListJoining {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
userListId: string
}
export interface UserNotePining {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
noteId: string
}
export interface UserPending {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
code: string
username: string
email: string
@ -1128,7 +1118,6 @@ export interface UserProfile {
twoFactorSecret: string | null
twoFactorEnabled: boolean
password: string | null
clientData: Json
autoAcceptFollowed: boolean
alwaysMarkNsfw: boolean
carefulBot: boolean
@ -1136,21 +1125,20 @@ export interface UserProfile {
securityKeysAvailable: boolean
usePasswordLessLogin: boolean
pinnedPageId: string | null
room: Json
injectFeaturedNote: boolean
enableWordMute: boolean
mutedWords: Json
mutingNotificationTypes: Array<UserProfileMutingnotificationtypesEnum>
noCrawle: boolean
receiveAnnouncementEmail: boolean
emailNotificationTypes: Json
mutedInstances: Json
publicReactions: boolean
ffVisibility: UserProfileFfvisibilityEnum
moderationNote: string
preventAiLearning: boolean
isIndexable: boolean
mutedPatterns: Array<string>
mutedInstances: Array<string>
mutedWords: Array<string>
lang: string | null
}
export interface UserPublickey {
@ -1162,21 +1150,22 @@ export interface UserSecurityKey {
id: string
userId: string
publicKey: string
lastUsed: Date
lastUsed: DateTimeWithTimeZone
name: string
}
export interface Webhook {
id: string
createdAt: Date
createdAt: DateTimeWithTimeZone
userId: string
name: string
on: Array<string>
url: string
secret: string
active: boolean
latestSentAt: Date | null
latestSentAt: DateTimeWithTimeZone | null
latestStatus: number | null
}
export function updateAntennasOnNewNote(note: Note, noteAuthor: Acct, noteMutedUsers: Array<string>): Promise<void>
export function fetchNodeinfo(host: string): Promise<Nodeinfo>
export function nodeinfo_2_1(): Promise<any>
export function nodeinfo_2_0(): Promise<any>
@ -1285,22 +1274,23 @@ export enum PushNotificationKind {
ReadAllChats = 'readAllChats',
ReadAllChatsInTheRoom = 'readAllChatsInTheRoom',
ReadNotifications = 'readNotifications',
ReadAllNotifications = 'readAllNotifications'
ReadAllNotifications = 'readAllNotifications',
Mastodon = 'mastodon'
}
export function sendPushNotification(receiverUserId: string, kind: PushNotificationKind, content: any): Promise<void>
export function publishToChannelStream(channelId: string, userId: string): void
export function publishToChannelStream(channelId: string, userId: string): Promise<void>
export enum ChatEvent {
Message = 'message',
Read = 'read',
Deleted = 'deleted',
Typing = 'typing'
}
export function publishToChatStream(senderUserId: string, receiverUserId: string, kind: ChatEvent, object: any): void
export function publishToChatStream(senderUserId: string, receiverUserId: string, kind: ChatEvent, object: any): Promise<void>
export enum ChatIndexEvent {
Message = 'message',
Read = 'read'
}
export function publishToChatIndexStream(userId: string, kind: ChatIndexEvent, object: any): void
export function publishToChatIndexStream(userId: string, kind: ChatIndexEvent, object: any): Promise<void>
export interface PackedEmoji {
id: string
aliases: Array<string>
@ -1312,15 +1302,15 @@ export interface PackedEmoji {
width: number | null
height: number | null
}
export function publishToBroadcastStream(emoji: PackedEmoji): void
export function publishToGroupChatStream(groupId: string, kind: ChatEvent, object: any): void
export function publishToBroadcastStream(emoji: PackedEmoji): Promise<void>
export function publishToGroupChatStream(groupId: string, kind: ChatEvent, object: any): Promise<void>
export interface AbuseUserReportLike {
id: string
targetUserId: string
reporterId: string
comment: string
}
export function publishToModerationStream(moderatorId: string, report: AbuseUserReportLike): void
export function publishToModerationStream(moderatorId: string, report: AbuseUserReportLike): Promise<void>
export function getTimestamp(id: string): number
/**
* The generated ID results in the form of `[8 chars timestamp] + [cuid2]`.


@ -310,7 +310,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, showServerInfo, initializeRustLogger, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, cpuInfo, cpuUsage, memoryUsage, storageUsage, isSafeUrl, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, fetchNodeinfo, nodeinfo_2_1, nodeinfo_2_0, Protocol, Inbound, Outbound, watchNote, unwatchNote, PushNotificationKind, sendPushNotification, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, generateSecureRandomString, generateUserToken } = nativeBinding
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, greet, initializeRustLogger, showServerInfo, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, isQuote, isSafeUrl, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, cpuInfo, cpuUsage, memoryUsage, storageUsage, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, updateAntennasOnNewNote, fetchNodeinfo, nodeinfo_2_1, nodeinfo_2_0, Protocol, Inbound, Outbound, watchNote, unwatchNote, PushNotificationKind, sendPushNotification, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, generateSecureRandomString, generateUserToken } = nativeBinding
module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE
@ -323,9 +323,9 @@ module.exports.loadEnv = loadEnv
module.exports.loadConfig = loadConfig
module.exports.stringToAcct = stringToAcct
module.exports.acctToString = acctToString
module.exports.showServerInfo = showServerInfo
module.exports.greet = greet
module.exports.initializeRustLogger = initializeRustLogger
module.exports.addNoteToAntenna = addNoteToAntenna
module.exports.showServerInfo = showServerInfo
module.exports.isBlockedServer = isBlockedServer
module.exports.isSilencedServer = isSilencedServer
module.exports.isAllowedServer = isAllowedServer
@ -341,10 +341,7 @@ module.exports.safeForSql = safeForSql
module.exports.formatMilliseconds = formatMilliseconds
module.exports.getImageSizeFromUrl = getImageSizeFromUrl
module.exports.getNoteSummary = getNoteSummary
module.exports.cpuInfo = cpuInfo
module.exports.cpuUsage = cpuUsage
module.exports.memoryUsage = memoryUsage
module.exports.storageUsage = storageUsage
module.exports.isQuote = isQuote
module.exports.isSafeUrl = isSafeUrl
module.exports.latestVersion = latestVersion
module.exports.toMastodonId = toMastodonId
@ -359,6 +356,10 @@ module.exports.decodeReaction = decodeReaction
module.exports.countReactions = countReactions
module.exports.toDbReaction = toDbReaction
module.exports.removeOldAttestationChallenges = removeOldAttestationChallenges
module.exports.cpuInfo = cpuInfo
module.exports.cpuUsage = cpuUsage
module.exports.memoryUsage = memoryUsage
module.exports.storageUsage = storageUsage
module.exports.AntennaSrcEnum = AntennaSrcEnum
module.exports.DriveFileUsageHintEnum = DriveFileUsageHintEnum
module.exports.MutedNoteReasonEnum = MutedNoteReasonEnum
@ -370,6 +371,7 @@ module.exports.RelayStatusEnum = RelayStatusEnum
module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum
module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum
module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum
module.exports.updateAntennasOnNewNote = updateAntennasOnNewNote
module.exports.fetchNodeinfo = fetchNodeinfo
module.exports.nodeinfo_2_1 = nodeinfo_2_1
module.exports.nodeinfo_2_0 = nodeinfo_2_0


@ -3,6 +3,8 @@ use serde::Deserialize;
use std::env;
use std::fs;
pub const VERSION: &str = macro_rs::read_version_from_package_json!();
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
@ -231,34 +233,6 @@ pub struct Config {
pub auth_url: String,
pub drive_url: String,
pub user_agent: String,
pub client_entry: Manifest,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Meta {
pub version: String,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
struct ManifestJson {
#[serde(rename = "src/init.ts")]
pub init_ts: Manifest,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
pub struct Manifest {
pub file: String,
pub name: String,
pub src: String,
pub is_entry: bool,
pub is_dynamic_entry: bool,
pub imports: Vec<String>,
pub dynamic_imports: Vec<String>,
pub css: Vec<String>,
pub assets: Vec<String>,
}
fn read_config_file() -> ServerConfig {
@ -280,28 +254,10 @@ fn read_config_file() -> ServerConfig {
data
}
fn read_meta() -> Meta {
let cwd = env::current_dir().unwrap();
let meta_json = fs::File::open(cwd.join("../../built/meta.json"))
.expect("Failed to open 'built/meta.json'");
serde_json::from_reader(meta_json).expect("Failed to parse built/meta.json")
}
fn read_manifest() -> Manifest {
let cwd = env::current_dir().unwrap();
let manifest_json = fs::File::open(cwd.join("../../built/_client_dist_/manifest.json"))
.expect("Failed to open 'built/_client_dist_/manifest.json'");
let manifest: ManifestJson = serde_json::from_reader(manifest_json)
.expect("Failed to parse built/_client_dist_/manifest.json");
manifest.init_ts
}
#[crate::export]
pub fn load_config() -> Config {
let server_config = read_config_file();
let version = read_meta().version;
let manifest = read_manifest();
let version = VERSION.to_owned();
let url = url::Url::parse(&server_config.url).expect("Config url is invalid");
let hostname = url
.host_str()
@ -379,7 +335,6 @@ pub fn load_config() -> Config {
redis_key_prefix,
scheme,
ws_scheme,
client_entry: manifest,
}
}

View file

@ -1,11 +1,15 @@
use crate::database::{redis_conn, redis_key};
use redis::{Commands, RedisError};
use crate::database::{redis_conn, redis_key, RedisConnError};
use redis::{AsyncCommands, RedisError};
use serde::{Deserialize, Serialize};
#[derive(strum::Display, Debug)]
pub enum Category {
#[strum(serialize = "fetchUrl")]
FetchUrl,
#[strum(serialize = "blocking")]
Block,
#[strum(serialize = "following")]
Follow,
#[cfg(test)]
#[strum(serialize = "usedOnlyForTesting")]
Test,
@ -14,11 +18,13 @@ pub enum Category {
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Redis error: {0}")]
RedisError(#[from] RedisError),
RedisErr(#[from] RedisError),
#[error("Redis connection error: {0}")]
RedisConnErr(#[from] RedisConnError),
#[error("Data serialization error: {0}")]
SerializeError(#[from] rmp_serde::encode::Error),
SerializeErr(#[from] rmp_serde::encode::Error),
#[error("Data deserialization error: {0}")]
DeserializeError(#[from] rmp_serde::decode::Error),
DeserializeErr(#[from] rmp_serde::decode::Error),
}
#[inline]
@ -50,26 +56,31 @@ fn wildcard(category: Category) -> String {
///
/// ```
/// # use backend_rs::database::cache;
/// # tokio_test::block_on(async {
/// let key = "apple";
/// let data = "I want to cache this string".to_string();
///
/// // caches the data for 10 seconds
/// cache::set(key, &data, 10);
/// cache::set(key, &data, 10).await;
///
/// // get the cache
/// let cached_data = cache::get::<String>(key).unwrap();
/// let cached_data = cache::get::<String>(key).await.unwrap();
/// assert_eq!(data, cached_data.unwrap());
/// # })
/// ```
pub fn set<V: for<'a> Deserialize<'a> + Serialize>(
pub async fn set<V: for<'a> Deserialize<'a> + Serialize>(
key: &str,
value: &V,
expire_seconds: u64,
) -> Result<(), Error> {
redis_conn()?.set_ex(
prefix_key(key),
rmp_serde::encode::to_vec(&value)?,
expire_seconds,
)?;
redis_conn()
.await?
.set_ex(
prefix_key(key),
rmp_serde::encode::to_vec(&value)?,
expire_seconds,
)
.await?;
Ok(())
}
@ -86,22 +97,24 @@ pub fn set<V: for<'a> Deserialize<'a> + Serialize>(
///
/// ```
/// # use backend_rs::database::cache;
/// # tokio_test::block_on(async {
/// let key = "banana";
/// let data = "I want to cache this string".to_string();
///
/// // set cache
/// cache::set(key, &data, 10).unwrap();
/// cache::set(key, &data, 10).await.unwrap();
///
/// // get cache
/// let cached_data = cache::get::<String>(key).unwrap();
/// let cached_data = cache::get::<String>(key).await.unwrap();
/// assert_eq!(data, cached_data.unwrap());
///
/// // get nonexistent (or expired) cache
/// let no_cache = cache::get::<String>("nonexistent").unwrap();
/// let no_cache = cache::get::<String>("nonexistent").await.unwrap();
/// assert!(no_cache.is_none());
/// # })
/// ```
pub fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V>, Error> {
let serialized_value: Option<Vec<u8>> = redis_conn()?.get(prefix_key(key))?;
pub async fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V>, Error> {
let serialized_value: Option<Vec<u8>> = redis_conn().await?.get(prefix_key(key)).await?;
Ok(match serialized_value {
Some(v) => Some(rmp_serde::from_slice::<V>(v.as_ref())?),
None => None,
@ -121,22 +134,24 @@ pub fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V
///
/// ```
/// # use backend_rs::database::cache;
/// # tokio_test::block_on(async {
/// let key = "chocolate";
/// let value = "I want to cache this string".to_string();
///
/// // set cache
/// cache::set(key, &value, 10).unwrap();
/// cache::set(key, &value, 10).await.unwrap();
///
/// // delete the cache
/// cache::delete("foo").unwrap();
/// cache::delete("nonexistent").unwrap(); // this is okay
/// cache::delete("foo").await.unwrap();
/// cache::delete("nonexistent").await.unwrap(); // this is okay
///
/// // the cache is gone
/// let cached_value = cache::get::<String>("foo").unwrap();
/// let cached_value = cache::get::<String>("foo").await.unwrap();
/// assert!(cached_value.is_none());
/// # })
/// ```
pub fn delete(key: &str) -> Result<(), Error> {
Ok(redis_conn()?.del(prefix_key(key))?)
pub async fn delete(key: &str) -> Result<(), Error> {
Ok(redis_conn().await?.del(prefix_key(key)).await?)
}
/// Sets a Redis cache under a `category`.
@ -150,13 +165,13 @@ pub fn delete(key: &str) -> Result<(), Error> {
/// * `key` - key (will be prefixed automatically)
/// * `value` - (de)serializable value
/// * `expire_seconds` - TTL
pub fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
pub async fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
value: &V,
expire_seconds: u64,
) -> Result<(), Error> {
set(&categorize(category, key), value, expire_seconds)
set(&categorize(category, key), value, expire_seconds).await
}
/// Gets a Redis cache under a `category`.
@ -167,11 +182,11 @@ pub fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
pub fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
pub async fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
) -> Result<Option<V>, Error> {
get(&categorize(category, key))
get(&categorize(category, key)).await
}
/// Deletes a Redis cache under a `category`.
@ -182,8 +197,8 @@ pub fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
pub fn delete_one(category: Category, key: &str) -> Result<(), Error> {
delete(&categorize(category, key))
pub async fn delete_one(category: Category, key: &str) -> Result<(), Error> {
delete(&categorize(category, key)).await
}
/// Deletes all Redis caches under a `category`.
@ -191,28 +206,27 @@ pub fn delete_one(category: Category, key: &str) -> Result<(), Error> {
/// ## Arguments
///
/// * `category` - one of [Category]
pub fn delete_all(category: Category) -> Result<(), Error> {
let mut redis = redis_conn()?;
let keys: Vec<Vec<u8>> = redis.keys(wildcard(category))?;
pub async fn delete_all(category: Category) -> Result<(), Error> {
let mut redis = redis_conn().await?;
let keys: Vec<Vec<u8>> = redis.keys(wildcard(category)).await?;
if !keys.is_empty() {
redis.del(keys)?
redis.del(keys).await?
}
Ok(())
}
// TODO: set_all(), get_all()
// TODO: get_all()
#[cfg(test)]
mod unit_test {
use crate::database::cache::delete_one;
use super::{delete_all, get, get_one, set, set_one, Category::Test};
use crate::database::cache::delete_one;
use pretty_assertions::assert_eq;
#[test]
fn set_get_expire() {
#[tokio::test]
async fn set_get_expire() {
#[derive(serde::Deserialize, serde::Serialize, PartialEq, Debug)]
struct Data {
id: u32,
@ -231,13 +245,13 @@ mod unit_test {
kind: "prime number".to_string(),
};
set(key_1, &value_1, 1).unwrap();
set(key_2, &value_2, 1).unwrap();
set(key_3, &value_3, 1).unwrap();
set(key_1, &value_1, 1).await.unwrap();
set(key_2, &value_2, 1).await.unwrap();
set(key_3, &value_3, 1).await.unwrap();
let cached_value_1: Vec<i32> = get(key_1).unwrap().unwrap();
let cached_value_2: String = get(key_2).unwrap().unwrap();
let cached_value_3: Data = get(key_3).unwrap().unwrap();
let cached_value_1: Vec<i32> = get(key_1).await.unwrap().unwrap();
let cached_value_2: String = get(key_2).await.unwrap().unwrap();
let cached_value_3: Data = get(key_3).await.unwrap().unwrap();
assert_eq!(value_1, cached_value_1);
assert_eq!(value_2, cached_value_2);
@ -246,17 +260,17 @@ mod unit_test {
// wait for the cache to expire
std::thread::sleep(std::time::Duration::from_millis(1100));
let expired_value_1: Option<Vec<i32>> = get(key_1).unwrap();
let expired_value_2: Option<Vec<i32>> = get(key_2).unwrap();
let expired_value_3: Option<Vec<i32>> = get(key_3).unwrap();
let expired_value_1: Option<Vec<i32>> = get(key_1).await.unwrap();
let expired_value_2: Option<Vec<i32>> = get(key_2).await.unwrap();
let expired_value_3: Option<Vec<i32>> = get(key_3).await.unwrap();
assert!(expired_value_1.is_none());
assert!(expired_value_2.is_none());
assert!(expired_value_3.is_none());
}
#[test]
fn use_category() {
#[tokio::test]
async fn use_category() {
let key_1 = "fire";
let key_2 = "fish";
let key_3 = "awawa";
@ -265,24 +279,30 @@ mod unit_test {
let value_2 = 998244353u32;
let value_3 = 'あ';
set_one(Test, key_1, &value_1, 5 * 60).unwrap();
set_one(Test, key_2, &value_2, 5 * 60).unwrap();
set_one(Test, key_3, &value_3, 5 * 60).unwrap();
set_one(Test, key_1, &value_1, 5 * 60).await.unwrap();
set_one(Test, key_2, &value_2, 5 * 60).await.unwrap();
set_one(Test, key_3, &value_3, 5 * 60).await.unwrap();
assert_eq!(get_one::<String>(Test, key_1).unwrap().unwrap(), value_1);
assert_eq!(get_one::<u32>(Test, key_2).unwrap().unwrap(), value_2);
assert_eq!(get_one::<char>(Test, key_3).unwrap().unwrap(), value_3);
assert_eq!(
get_one::<String>(Test, key_1).await.unwrap().unwrap(),
value_1
);
assert_eq!(get_one::<u32>(Test, key_2).await.unwrap().unwrap(), value_2);
assert_eq!(
get_one::<char>(Test, key_3).await.unwrap().unwrap(),
value_3
);
delete_one(Test, key_1).unwrap();
delete_one(Test, key_1).await.unwrap();
assert!(get_one::<String>(Test, key_1).unwrap().is_none());
assert!(get_one::<u32>(Test, key_2).unwrap().is_some());
assert!(get_one::<char>(Test, key_3).unwrap().is_some());
assert!(get_one::<String>(Test, key_1).await.unwrap().is_none());
assert!(get_one::<u32>(Test, key_2).await.unwrap().is_some());
assert!(get_one::<char>(Test, key_3).await.unwrap().is_some());
delete_all(Test).unwrap();
delete_all(Test).await.unwrap();
assert!(get_one::<String>(Test, key_1).unwrap().is_none());
assert!(get_one::<u32>(Test, key_2).unwrap().is_none());
assert!(get_one::<char>(Test, key_3).unwrap().is_none());
assert!(get_one::<String>(Test, key_1).await.unwrap().is_none());
assert!(get_one::<u32>(Test, key_2).await.unwrap().is_none());
assert!(get_one::<char>(Test, key_3).await.unwrap().is_none());
}
}
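
The cache helpers above are now async and come in a category-scoped flavor. A minimal usage sketch, assuming the crate is consumed as `backend_rs` (as in the doctests above) and that the configured Redis server is reachable; the user id and follow list are placeholders:

```
use backend_rs::database::cache::{self, Category};

#[tokio::main]
async fn main() -> Result<(), cache::Error> {
    // Cache the follow list of a (placeholder) user id for ten minutes.
    let following = vec!["9abc123".to_string(), "9def456".to_string()];
    cache::set_one(Category::Follow, "9user000", &following, 10 * 60).await?;

    // Read it back; `None` means the entry expired or was never set.
    let cached: Option<Vec<String>> = cache::get_one(Category::Follow, "9user000").await?;
    assert_eq!(cached, Some(following));

    // Remove a single entry, or wipe the whole category at once.
    cache::delete_one(Category::Follow, "9user000").await?;
    cache::delete_all(Category::Follow).await?;

    Ok(())
}
```

Because `categorize()` prefixes the key with the category name, `delete_all(Category::Follow)` only clears keys created through the `*_one` helpers of that category.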

View file

@ -1,6 +1,7 @@
pub use postgresql::db_conn;
pub use redis::key as redis_key;
pub use redis::redis_conn;
pub use redis::RedisConnError;
pub mod cache;
pub mod postgresql;

View file

@ -1,10 +1,50 @@
use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use redis::{Client, Connection, RedisError};
use async_trait::async_trait;
use bb8::{ManageConnection, Pool, PooledConnection, RunError};
use redis::{aio::MultiplexedConnection, Client, ErrorKind, IntoConnectionInfo, RedisError};
use tokio::sync::OnceCell;
static REDIS_CLIENT: OnceCell<Client> = OnceCell::new();
/// A `bb8::ManageConnection` for `redis::Client::get_multiplexed_async_connection`.
#[derive(Clone, Debug)]
pub struct RedisConnectionManager {
client: Client,
}
fn init_redis() -> Result<Client, RedisError> {
impl RedisConnectionManager {
/// Create a new `RedisConnectionManager`.
/// See `redis::Client::open` for a description of the parameter types.
pub fn new<T: IntoConnectionInfo>(info: T) -> Result<Self, RedisError> {
Ok(Self {
client: Client::open(info.into_connection_info()?)?,
})
}
}
#[async_trait]
impl ManageConnection for RedisConnectionManager {
type Connection = MultiplexedConnection;
type Error = RedisError;
async fn connect(&self) -> Result<Self::Connection, Self::Error> {
self.client.get_multiplexed_async_connection().await
}
async fn is_valid(&self, conn: &mut Self::Connection) -> Result<(), Self::Error> {
let pong: String = redis::cmd("PING").query_async(conn).await?;
match pong.as_str() {
"PONG" => Ok(()),
_ => Err((ErrorKind::ResponseError, "ping request").into()),
}
}
fn has_broken(&self, _: &mut Self::Connection) -> bool {
false
}
}
static CONN_POOL: OnceCell<Pool<RedisConnectionManager>> = OnceCell::const_new();
async fn init_conn_pool() -> Result<(), RedisError> {
let redis_url = {
let mut params = vec!["redis://".to_owned()];
@ -27,16 +67,40 @@ fn init_redis() -> Result<Client, RedisError> {
params.concat()
};
tracing::info!("Initializing Redis client");
tracing::info!("Initializing connection manager");
let manager = RedisConnectionManager::new(redis_url)?;
Client::open(redis_url)
tracing::info!("Creating connection pool");
let pool = Pool::builder().build(manager).await?;
CONN_POOL.get_or_init(|| async { pool }).await;
Ok(())
}
pub fn redis_conn() -> Result<Connection, RedisError> {
match REDIS_CLIENT.get() {
Some(client) => Ok(client.get_connection()?),
None => init_redis()?.get_connection(),
#[derive(thiserror::Error, Debug)]
pub enum RedisConnError {
#[error("Failed to initialize Redis connection pool: {0}")]
RedisErr(RedisError),
#[error("Redis connection pool error: {0}")]
Bb8PoolErr(RunError<RedisError>),
}
pub async fn redis_conn(
) -> Result<PooledConnection<'static, RedisConnectionManager>, RedisConnError> {
if !CONN_POOL.initialized() {
let init_res = init_conn_pool().await;
if let Err(err) = init_res {
return Err(RedisConnError::RedisErr(err));
}
}
CONN_POOL
.get()
.unwrap()
.get()
.await
.map_err(RedisConnError::Bb8PoolErr)
}
/// prefix redis key
@ -49,23 +113,26 @@ pub fn key(key: impl ToString) -> String {
mod unit_test {
use super::redis_conn;
use pretty_assertions::assert_eq;
use redis::Commands;
use redis::AsyncCommands;
#[test]
fn connect() {
assert!(redis_conn().is_ok());
assert!(redis_conn().is_ok());
#[tokio::test]
async fn connect() {
assert!(redis_conn().await.is_ok());
assert!(redis_conn().await.is_ok());
}
#[test]
fn access() {
let mut redis = redis_conn().unwrap();
#[tokio::test]
async fn access() {
let mut redis = redis_conn().await.unwrap();
let key = "CARGO_UNIT_TEST_KEY";
let value = "CARGO_UNIT_TEST_VALUE";
assert_eq!(redis.set::<&str, &str, String>(key, value).unwrap(), "OK");
assert_eq!(redis.get::<&str, String>(key).unwrap(), value);
assert_eq!(redis.del::<&str, u32>(key).unwrap(), 1);
assert_eq!(
redis.set::<&str, &str, String>(key, value).await.unwrap(),
"OK"
);
assert_eq!(redis.get::<&str, String>(key).await.unwrap(), value);
assert_eq!(redis.del::<&str, u32>(key).await.unwrap(), 1);
}
}
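
With the pool in place, call sites simply await `redis_conn()` and use `redis::AsyncCommands` on the returned guard, exactly as the `access` test above does. A small sketch of such a caller (the key name is a placeholder; a reachable Redis instance is assumed):

```
use backend_rs::database::{redis_conn, redis_key};
use redis::AsyncCommands;

async fn bump_example_counter() -> Result<i64, Box<dyn std::error::Error>> {
    // The pooled guard derefs to a MultiplexedConnection, so AsyncCommands
    // methods can be awaited on it directly.
    let mut redis = redis_conn().await?;

    // Keys still go through redis_key() so they carry the per-instance prefix.
    let hits: i64 = redis.incr(redis_key("exampleCounter"), 1).await?;
    Ok(hits)
}
```

With the default builder settings, bb8 runs the `PING` in `is_valid()` each time it hands out a connection, so callers receive a connection that has just been health-checked.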

View file

@ -0,0 +1,20 @@
use crate::config::server::VERSION;
const GREETING_MESSAGE: &str = "\
Firefish is an open-source decentralized microblogging platform.
If you like Firefish, please consider contributing to the repo. https://firefish.dev/firefish/firefish
";
#[crate::export]
pub fn greet() {
println!("{}", GREETING_MESSAGE);
tracing::info!("Welcome to Firefish!");
tracing::info!("Firefish {VERSION}");
}

View file

@ -1,2 +1,3 @@
pub mod hardware_stats;
pub mod greet;
pub mod log;
pub mod system_info;

View file

@ -1,16 +1,16 @@
use std::sync::{Mutex, MutexGuard, OnceLock, PoisonError};
use sysinfo::System;
pub type SystemMutexError = PoisonError<MutexGuard<'static, System>>;
pub type SysinfoPoisonError = PoisonError<MutexGuard<'static, System>>;
// TODO: handle this in a more proper way when we move the entry point to backend-rs
pub fn system() -> Result<MutexGuard<'static, System>, SystemMutexError> {
pub fn system() -> Result<MutexGuard<'static, System>, SysinfoPoisonError> {
pub static SYSTEM: OnceLock<Mutex<System>> = OnceLock::new();
SYSTEM.get_or_init(|| Mutex::new(System::new_all())).lock()
}
#[crate::export]
pub fn show_server_info() -> Result<(), SystemMutexError> {
pub fn show_server_info() -> Result<(), SysinfoPoisonError> {
let system_info = system()?;
tracing::info!(

View file

@ -1,31 +0,0 @@
use crate::database::{redis_conn, redis_key};
use crate::model::entity::note;
use crate::service::stream;
use crate::util::id::{get_timestamp, InvalidIdErr};
use redis::{streams::StreamMaxlen, Commands, RedisError};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Redis error: {0}")]
RedisErr(#[from] RedisError),
#[error("Invalid ID: {0}")]
InvalidIdErr(#[from] InvalidIdErr),
#[error("Stream error: {0}")]
StreamErr(#[from] stream::Error),
}
type Note = note::Model;
#[crate::export]
pub fn add_note_to_antenna(antenna_id: String, note: &Note) -> Result<(), Error> {
// for timeline API
redis_conn()?.xadd_maxlen(
redis_key(format!("antennaTimeline:{}", antenna_id)),
StreamMaxlen::Approx(200),
format!("{}-*", get_timestamp(&note.id)?),
&[("note", &note.id)],
)?;
// for streaming API
Ok(stream::antenna::publish(antenna_id, note)?)
}

View file

@ -0,0 +1,175 @@
use crate::config::CONFIG;
use crate::database::{cache, db_conn};
use crate::federation::acct::Acct;
use crate::misc::get_note_all_texts::{all_texts, NoteLike};
use crate::model::entity::{antenna, blocking, following, note, sea_orm_active_enums::*};
use sea_orm::{ColumnTrait, DbErr, EntityTrait, QueryFilter, QuerySelect};
#[derive(thiserror::Error, Debug)]
pub enum AntennaCheckError {
#[error("Database error: {0}")]
DbErr(#[from] DbErr),
#[error("Cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("User profile not found: {0}")]
UserProfileNotFoundErr(String),
}
fn match_all(space_separated_words: &str, text: &str, case_sensitive: bool) -> bool {
if case_sensitive {
space_separated_words
.split_whitespace()
.all(|word| text.contains(word))
} else {
space_separated_words
.to_lowercase()
.split_whitespace()
.all(|word| text.to_lowercase().contains(word))
}
}
pub async fn check_hit_antenna(
antenna: &antenna::Model,
note: note::Model,
note_author: &Acct,
) -> Result<bool, AntennaCheckError> {
if note.visibility == NoteVisibilityEnum::Specified {
return Ok(false);
}
if antenna.with_file && note.file_ids.is_empty() {
return Ok(false);
}
if !antenna.with_replies && note.reply_id.is_some() {
return Ok(false);
}
if antenna.src == AntennaSrcEnum::Users {
let is_from_one_of_specified_authors = antenna
.users
.iter()
.map(|s| s.parse::<Acct>().unwrap())
.any(|acct| acct.username == note_author.username && acct.host == note_author.host);
if !is_from_one_of_specified_authors {
return Ok(false);
}
} else if antenna.src == AntennaSrcEnum::Instances {
let is_from_one_of_specified_servers = antenna.instances.iter().any(|host| {
host.to_ascii_lowercase()
== note_author
.host
.clone()
.unwrap_or(CONFIG.host.clone())
.to_ascii_lowercase()
});
if !is_from_one_of_specified_servers {
return Ok(false);
}
}
// "Home", "Group", "List" sources are currently disabled
let note_texts = all_texts(NoteLike {
file_ids: note.file_ids,
user_id: note.user_id.clone(),
text: note.text,
cw: note.cw,
renote_id: note.renote_id,
reply_id: note.reply_id,
})
.await?;
let has_keyword = antenna.keywords.iter().any(|words| {
note_texts
.iter()
.any(|text| match_all(words, text, antenna.case_sensitive))
});
if !has_keyword {
return Ok(false);
}
let has_excluded_word = antenna.exclude_keywords.iter().any(|words| {
note_texts
.iter()
.any(|text| match_all(words, text, antenna.case_sensitive))
});
if has_excluded_word {
return Ok(false);
}
let db = db_conn().await?;
let blocked_user_ids: Vec<String> = cache::get_one(cache::Category::Block, &note.user_id)
.await?
.unwrap_or({
// cache miss
let blocks = blocking::Entity::find()
.select_only()
.column(blocking::Column::BlockeeId)
.filter(blocking::Column::BlockerId.eq(&note.user_id))
.into_tuple::<String>()
.all(db)
.await?;
cache::set_one(cache::Category::Block, &note.user_id, &blocks, 10 * 60).await?;
blocks
});
// if the antenna owner is blocked by the note author, return false
if blocked_user_ids.contains(&antenna.user_id) {
return Ok(false);
}
if [NoteVisibilityEnum::Home, NoteVisibilityEnum::Followers].contains(&note.visibility) {
let following_user_ids: Vec<String> =
cache::get_one(cache::Category::Follow, &antenna.user_id)
.await?
.unwrap_or({
// cache miss
let following = following::Entity::find()
.select_only()
.column(following::Column::FolloweeId)
.filter(following::Column::FollowerId.eq(&antenna.user_id))
.into_tuple::<String>()
.all(db)
.await?;
cache::set_one(
cache::Category::Follow,
&antenna.user_id,
&following,
10 * 60,
)
.await?;
following
});
// if the antenna owner is not following the note author, return false
if !following_user_ids.contains(&note.user_id) {
return Ok(false);
}
}
Ok(true)
}
#[cfg(test)]
mod unit_test {
use super::match_all;
use pretty_assertions::assert_eq;
#[test]
fn test_match_all() {
assert_eq!(match_all("Apple", "apple and banana", false), true);
assert_eq!(match_all("Apple", "apple and banana", true), false);
assert_eq!(match_all("Apple Banana", "apple and banana", false), true);
assert_eq!(match_all("Apple Banana", "apple and cinnamon", true), false);
assert_eq!(
match_all("Apple Banana", "apple and cinnamon", false),
false
);
}
}
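
The keyword check above treats each entry of `antenna.keywords` (and `exclude_keywords`) as an AND group of whitespace-separated words, with the entries themselves OR-ed together. A standalone sketch of that semantics, mirroring `match_all()` from this file (the sample text and keyword entries are made up):

```
// Mirrors match_all() above: every word of one entry must appear in the text.
fn match_all(space_separated_words: &str, text: &str, case_sensitive: bool) -> bool {
    if case_sensitive {
        space_separated_words
            .split_whitespace()
            .all(|word| text.contains(word))
    } else {
        space_separated_words
            .to_lowercase()
            .split_whitespace()
            .all(|word| text.to_lowercase().contains(word))
    }
}

fn main() {
    let text = "The quick brown fox jumps over the lazy dog.";
    let keywords = ["quick fox".to_string(), "purple cat".to_string()];

    // "quick fox" hits because both words occur somewhere in the text;
    // "purple cat" misses, but one matching entry is enough for the antenna.
    let hit = keywords
        .iter()
        .any(|entry| match_all(entry, text, false));
    assert!(hit);
}
```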

View file

@ -1,84 +1,7 @@
use crate::database::db_conn;
use crate::model::entity::{drive_file, note};
use crate::misc::get_note_all_texts::{all_texts, NoteLike};
use once_cell::sync::Lazy;
use regex::Regex;
use sea_orm::{prelude::*, QuerySelect};
// TODO: handle name collisions in a better way
#[crate::export(object, js_name = "NoteLikeForCheckWordMute")]
pub struct NoteLike {
pub file_ids: Vec<String>,
pub user_id: Option<String>,
pub text: Option<String>,
pub cw: Option<String>,
pub renote_id: Option<String>,
pub reply_id: Option<String>,
}
async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
let db = db_conn().await?;
let mut texts: Vec<String> = vec![];
if let Some(text) = note.text {
texts.push(text);
}
if let Some(cw) = note.cw {
texts.push(cw);
}
texts.extend(
drive_file::Entity::find()
.select_only()
.column(drive_file::Column::Comment)
.filter(drive_file::Column::Id.is_in(note.file_ids))
.into_tuple::<Option<String>>()
.all(db)
.await?
.into_iter()
.flatten(),
);
if let Some(renote_id) = &note.renote_id {
if let Some((text, cw)) = note::Entity::find_by_id(renote_id)
.select_only()
.columns([note::Column::Text, note::Column::Cw])
.into_tuple::<(Option<String>, Option<String>)>()
.one(db)
.await?
{
if let Some(t) = text {
texts.push(t);
}
if let Some(c) = cw {
texts.push(c);
}
} else {
tracing::warn!("nonexistent renote id: {:#?}", renote_id);
}
}
if let Some(reply_id) = &note.reply_id {
if let Some((text, cw)) = note::Entity::find_by_id(reply_id)
.select_only()
.columns([note::Column::Text, note::Column::Cw])
.into_tuple::<(Option<String>, Option<String>)>()
.one(db)
.await?
{
if let Some(t) = text {
texts.push(t);
}
if let Some(c) = cw {
texts.push(c);
}
} else {
tracing::warn!("nonexistent reply id: {:#?}", reply_id);
}
}
Ok(texts)
}
use sea_orm::DbErr;
fn convert_regex(js_regex: &str) -> String {
static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^/(.+)/(.*)$").unwrap());
@ -87,14 +10,13 @@ fn convert_regex(js_regex: &str) -> String {
fn check_word_mute_impl(
texts: &[String],
muted_word_lists: &[Vec<String>],
muted_words: &[String],
muted_patterns: &[String],
) -> bool {
muted_word_lists.iter().any(|muted_word_list| {
muted_words.iter().any(|item| {
texts.iter().any(|text| {
let text_lower = text.to_lowercase();
muted_word_list
.iter()
item.split_whitespace()
.all(|muted_word| text_lower.contains(&muted_word.to_lowercase()))
})
}) || muted_patterns.iter().any(|muted_pattern| {
@ -107,16 +29,138 @@ fn check_word_mute_impl(
#[crate::export]
pub async fn check_word_mute(
note: NoteLike,
muted_word_lists: Vec<Vec<String>>,
muted_patterns: Vec<String>,
muted_words: &[String],
muted_patterns: &[String],
) -> Result<bool, DbErr> {
if muted_word_lists.is_empty() && muted_patterns.is_empty() {
if muted_words.is_empty() && muted_patterns.is_empty() {
Ok(false)
} else {
Ok(check_word_mute_impl(
&all_texts(note).await?,
&muted_word_lists,
&muted_patterns,
muted_words,
muted_patterns,
))
}
}
#[cfg(test)]
mod unit_test {
use super::check_word_mute_impl;
#[test]
fn test_word_mute_match() {
let texts = vec![
"The quick brown fox jumps over the lazy dog.".to_string(),
"色は匂へど 散りぬるを 我が世誰ぞ 常ならむ".to_string(),
"😇".to_string(),
];
let hiragana_1 = r#"/[\u{3040}-\u{309f}]/u"#.to_string();
let hiragana_2 = r#"/[あ-ん]/u"#.to_string();
let katakana_1 = r#"/[\u{30a1}-\u{30ff}]/u"#.to_string();
let katakana_2 = r#"/[ア-ン]/u"#.to_string();
let emoji = r#"/[\u{1f300}-\u{1f5ff}\u{1f900}-\u{1f9ff}\u{1f600}-\u{1f64f}\u{1f680}-\u{1f6ff}\u{2600}-\u{26ff}\u{2700}-\u{27bf}\u{1f1e6}-\u{1f1ff}\u{1f191}-\u{1f251}\u{1f004}\u{1f0cf}\u{1f170}-\u{1f171}\u{1f17e}-\u{1f17f}\u{1f18e}\u{3030}\u{2b50}\u{2b55}\u{2934}-\u{2935}\u{2b05}-\u{2b07}\u{2b1b}-\u{2b1c}\u{3297}\u{3299}\u{303d}\u{00a9}\u{00ae}\u{2122}\u{23f3}\u{24c2}\u{23e9}-\u{23ef}\u{25b6}\u{23f8}-\u{23fa}]/u"#.to_string();
assert!(check_word_mute_impl(&texts, &[], &["/the/i".to_string()]));
assert!(!check_word_mute_impl(&texts, &[], &["/the/".to_string()]));
assert!(check_word_mute_impl(&texts, &[], &["/QuICk/i".to_string()]));
assert!(!check_word_mute_impl(&texts, &[], &["/QuICk/".to_string()]));
assert!(check_word_mute_impl(
&texts,
&[
"".to_string(),
"有為の奥山 今日越えて 浅き夢見し 酔ひもせず".to_string()
],
&[]
));
assert!(!check_word_mute_impl(
&texts,
&["有為の奥山 今日越えて 浅き夢見し 酔ひもせず".to_string()],
&[]
));
assert!(!check_word_mute_impl(
&texts,
&[
"有為の奥山".to_string(),
"今日越えて".to_string(),
"浅き夢見し".to_string(),
"酔ひもせず".to_string()
],
&[]
));
assert!(check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "mastodon".to_string()],
&[hiragana_1.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "mastodon".to_string()],
&[hiragana_2.clone()]
));
assert!(!check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "mastodon".to_string()],
&[katakana_1.clone()]
));
assert!(!check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "mastodon".to_string()],
&[katakana_2.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["brown fox".to_string(), "mastodon".to_string()],
&[katakana_1.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["brown fox".to_string(), "mastodon".to_string()],
&[katakana_2.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "dog".to_string()],
&[katakana_1.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "dog".to_string()],
&[katakana_2.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["yellow fox".to_string(), "mastodon".to_string()],
&[hiragana_1.clone(), katakana_1.clone()]
));
assert!(check_word_mute_impl(
&texts,
&["😇".to_string(), "🥲".to_string(), "🥴".to_string()],
&[]
));
assert!(!check_word_mute_impl(
&texts,
&["🙂".to_string(), "🥲".to_string(), "🥴".to_string()],
&[]
));
assert!(check_word_mute_impl(&texts, &[], &[emoji.clone()]));
}
}

View file

@ -55,10 +55,12 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
{
let _ = MTX_GUARD.lock().await;
attempted = cache::get_one::<bool>(cache::Category::FetchUrl, url)?.is_some();
attempted = cache::get_one::<bool>(cache::Category::FetchUrl, url)
.await?
.is_some();
if !attempted {
cache::set_one(cache::Category::FetchUrl, url, &true, 10 * 60)?;
cache::set_one(cache::Category::FetchUrl, url, &true, 10 * 60).await?;
}
}
@ -138,7 +140,7 @@ mod unit_test {
let mp3_url = "https://firefish.dev/firefish/firefish/-/blob/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/sounds/aisha/1.mp3";
// delete caches in case you run this test multiple times
cache::delete_all(cache::Category::FetchUrl).unwrap();
cache::delete_all(cache::Category::FetchUrl).await.unwrap();
let png_size_1 = ImageSize {
width: 1024,
@ -207,7 +209,9 @@ mod unit_test {
let url = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/splash.png";
// delete caches in case you run this test multiple times
cache::delete_one(cache::Category::FetchUrl, url).unwrap();
cache::delete_one(cache::Category::FetchUrl, url)
.await
.unwrap();
assert!(get_image_size_from_url(url).await.is_ok());
assert!(get_image_size_from_url(url).await.is_err());
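
The `MTX_GUARD` block above is a fetch-once guard: a boolean marker under `cache::Category::FetchUrl` records that a URL has been attempted, and a second call within the TTL fails instead of re-fetching, which is what the last two asserts in the test rely on. The same pattern in isolation, using the cache API shown earlier in this diff (the helper name is made up):

```
use backend_rs::database::cache::{self, Category};

/// Returns Ok(true) at most once per URL per ten minutes.
async fn should_attempt_fetch(url: &str) -> Result<bool, cache::Error> {
    // A cached marker means a fetch was already attempted recently.
    if cache::get_one::<bool>(Category::FetchUrl, url).await?.is_some() {
        return Ok(false);
    }

    // Record the attempt before doing the real work, so a retry within the
    // TTL is refused (the real code also serializes callers with a mutex).
    cache::set_one(Category::FetchUrl, url, &true, 10 * 60).await?;
    Ok(true)
}
```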

View file

@ -0,0 +1,79 @@
use crate::database::db_conn;
use crate::model::entity::{drive_file, note};
use sea_orm::{prelude::*, QuerySelect};
/// TODO: handle name collisions better
#[crate::export(object, js_name = "NoteLikeForAllTexts")]
pub struct NoteLike {
pub file_ids: Vec<String>,
pub user_id: String,
pub text: Option<String>,
pub cw: Option<String>,
pub renote_id: Option<String>,
pub reply_id: Option<String>,
}
pub async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
let db = db_conn().await?;
let mut texts: Vec<String> = vec![];
if let Some(text) = note.text {
texts.push(text);
}
if let Some(cw) = note.cw {
texts.push(cw);
}
texts.extend(
drive_file::Entity::find()
.select_only()
.column(drive_file::Column::Comment)
.filter(drive_file::Column::Id.is_in(note.file_ids))
.into_tuple::<Option<String>>()
.all(db)
.await?
.into_iter()
.flatten(),
);
if let Some(renote_id) = &note.renote_id {
if let Some((text, cw)) = note::Entity::find_by_id(renote_id)
.select_only()
.columns([note::Column::Text, note::Column::Cw])
.into_tuple::<(Option<String>, Option<String>)>()
.one(db)
.await?
{
if let Some(t) = text {
texts.push(t);
}
if let Some(c) = cw {
texts.push(c);
}
} else {
tracing::warn!("nonexistent renote id: {:#?}", renote_id);
}
}
if let Some(reply_id) = &note.reply_id {
if let Some((text, cw)) = note::Entity::find_by_id(reply_id)
.select_only()
.columns([note::Column::Text, note::Column::Cw])
.into_tuple::<(Option<String>, Option<String>)>()
.one(db)
.await?
{
if let Some(t) = text {
texts.push(t);
}
if let Some(c) = cw {
texts.push(c);
}
} else {
tracing::warn!("nonexistent reply id: {:#?}", reply_id);
}
}
Ok(texts)
}
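
`all_texts()` is the text source that both the antenna check and the word-mute check run their matching against: the note text, its CW, the comments of attached drive files, and the text/CW of any renoted or replied-to note. A hedged sketch of calling it (placeholder ids; `db_conn()` must be able to reach the configured PostgreSQL database, and the module path assumes `misc` is exported from the crate root like the other paths used in this diff):

```
use backend_rs::misc::get_note_all_texts::{all_texts, NoteLike};

async fn texts_for_matching() -> Result<Vec<String>, sea_orm::DbErr> {
    let note_like = NoteLike {
        file_ids: vec!["9examplefile".to_string()],
        user_id: "9exampleuser".to_string(),
        text: Some("note body".to_string()),
        cw: Some("content warning".to_string()),
        renote_id: None,
        reply_id: None,
    };

    // Returns the note text, the CW, drive file comments, and the text/CW of
    // the renoted and replied-to notes (when those ids are set), in that order.
    all_texts(note_like).await
}
```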

View file

@ -0,0 +1,9 @@
use crate::model::entity::note;
// https://github.com/napi-rs/napi-rs/issues/2060
type Note = note::Model;
#[crate::export]
pub fn is_quote(note: Note) -> bool {
note.renote_id.is_some() && (note.text.is_some() || note.has_poll || !note.file_ids.is_empty())
}

View file

@ -46,7 +46,7 @@ async fn get_latest_version() -> Result<String, Error> {
#[crate::export]
pub async fn latest_version() -> Result<String, Error> {
let version: Option<String> =
cache::get_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL)?;
cache::get_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL).await?;
if let Some(v) = version {
tracing::trace!("use cached value: {}", v);
@ -61,7 +61,8 @@ pub async fn latest_version() -> Result<String, Error> {
UPSTREAM_PACKAGE_JSON_URL,
&fetched_version,
3 * 60 * 60,
)?;
)
.await?;
Ok(fetched_version)
}
}
@ -97,7 +98,9 @@ mod unit_test {
#[tokio::test]
async fn check_version() {
// delete caches in case you run this test multiple times
cache::delete_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL).unwrap();
cache::delete_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL)
.await
.unwrap();
// fetch from firefish.dev
validate_version(latest_version().await.unwrap());

View file

@ -1,4 +1,4 @@
pub mod add_note_to_antenna;
pub mod check_hit_antenna;
pub mod check_server_block;
pub mod check_word_mute;
pub mod convert_host;
@ -6,8 +6,9 @@ pub mod emoji;
pub mod escape_sql;
pub mod format_milliseconds;
pub mod get_image_size;
pub mod get_note_all_texts;
pub mod get_note_summary;
pub mod hardware_stats;
pub mod is_quote;
pub mod is_safe_url;
pub mod latest_version;
pub mod mastodon_id;
@ -16,3 +17,4 @@ pub mod nyaify;
pub mod password;
pub mod reaction;
pub mod remove_old_attestation_challenges;
pub mod system_info;

View file

@ -1,4 +1,4 @@
use crate::init::hardware_stats::{system, SystemMutexError};
use crate::init::system_info::{system, SysinfoPoisonError};
use sysinfo::{Disks, MemoryRefreshKind};
// TODO: i64 -> u64 (we can't export u64 to Node.js)
@ -29,7 +29,7 @@ pub struct Storage {
}
#[crate::export]
pub fn cpu_info() -> Result<Cpu, SystemMutexError> {
pub fn cpu_info() -> Result<Cpu, SysinfoPoisonError> {
let system_info = system()?;
Ok(Cpu {
@ -45,7 +45,7 @@ pub fn cpu_info() -> Result<Cpu, SystemMutexError> {
}
#[crate::export]
pub fn cpu_usage() -> Result<f32, SystemMutexError> {
pub fn cpu_usage() -> Result<f32, SysinfoPoisonError> {
let mut system_info = system()?;
system_info.refresh_cpu_usage();
@ -56,7 +56,7 @@ pub fn cpu_usage() -> Result<f32, SystemMutexError> {
}
#[crate::export]
pub fn memory_usage() -> Result<Memory, SystemMutexError> {
pub fn memory_usage() -> Result<Memory, SysinfoPoisonError> {
let mut system_info = system()?;
system_info.refresh_memory_specifics(MemoryRefreshKind::new().with_ram());

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "targetUserId")]
pub target_user_id: String,
#[sea_orm(column_name = "reporterId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub token: String,
pub hash: String,
#[sea_orm(column_name = "userId")]
@ -21,7 +21,7 @@ pub struct Model {
#[sea_orm(column_name = "appId")]
pub app_id: Option<String>,
#[sea_orm(column_name = "lastUsedAt")]
pub last_used_at: Option<DateTime>,
pub last_used_at: Option<DateTimeWithTimeZone>,
pub session: Option<String>,
pub name: Option<String>,
pub description: Option<String>,

View file

@ -13,9 +13,9 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "expiresAt")]
pub expires_at: DateTime,
pub expires_at: DateTimeWithTimeZone,
pub place: String,
pub priority: String,
pub url: String,

View file

@ -13,13 +13,13 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub text: String,
pub title: String,
#[sea_orm(column_name = "imageUrl")]
pub image_url: Option<String>,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: Option<DateTime>,
pub updated_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "showPopup")]
pub show_popup: bool,
#[sea_orm(column_name = "isGoodNews")]

View file

@ -17,7 +17,7 @@ pub struct Model {
#[sea_orm(column_name = "announcementId")]
pub announcement_id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View file

@ -14,15 +14,13 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub name: String,
pub src: AntennaSrcEnum,
#[sea_orm(column_name = "userListId")]
pub user_list_id: Option<String>,
#[sea_orm(column_type = "JsonBinary")]
pub keywords: Json,
#[sea_orm(column_name = "withFile")]
pub with_file: bool,
pub expression: Option<String>,
@ -34,10 +32,10 @@ pub struct Model {
#[sea_orm(column_name = "userGroupJoiningId")]
pub user_group_joining_id: Option<String>,
pub users: Vec<String>,
#[sea_orm(column_name = "excludeKeywords", column_type = "JsonBinary")]
pub exclude_keywords: Json,
#[sea_orm(column_type = "JsonBinary")]
pub instances: Json,
pub instances: Vec<String>,
pub keywords: Vec<String>,
#[sea_orm(column_name = "excludeKeywords")]
pub exclude_keywords: Vec<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: Option<String>,
pub secret: String,

View file

@ -16,7 +16,7 @@ pub struct Model {
pub user_id: String,
pub challenge: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "registrationChallenge")]
pub registration_challenge: bool,
}

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub token: String,
#[sea_orm(column_name = "userId")]
pub user_id: Option<String>,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "blockeeId")]
pub blockee_id: String,
#[sea_orm(column_name = "blockerId")]

View file

@ -13,9 +13,9 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "lastNotedAt")]
pub last_noted_at: Option<DateTime>,
pub last_noted_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "userId")]
pub user_id: Option<String>,
pub name: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "followeeId")]
pub followee_id: String,
#[sea_orm(column_name = "followerId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "channelId")]
pub channel_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub name: String,

View file

@ -14,7 +14,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: Option<String>,
#[sea_orm(column_name = "userHost")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub name: String,
#[sea_orm(column_name = "userId")]
pub user_id: Option<String>,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: Option<DateTime>,
pub updated_at: Option<DateTimeWithTimeZone>,
pub name: String,
pub host: Option<String>,
#[sea_orm(column_name = "originalUrl")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "followeeId")]
pub followee_id: String,
#[sea_orm(column_name = "followerId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "followeeId")]
pub followee_id: String,
#[sea_orm(column_name = "followerId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "postId")]

View file

@ -13,9 +13,9 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: DateTime,
pub updated_at: DateTimeWithTimeZone,
pub title: String,
pub description: Option<String>,
#[sea_orm(column_name = "userId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "caughtAt")]
pub caught_at: DateTime,
pub caught_at: DateTimeWithTimeZone,
pub host: String,
#[sea_orm(column_name = "usersCount")]
pub users_count: i32,
@ -24,13 +24,13 @@ pub struct Model {
#[sea_orm(column_name = "followersCount")]
pub followers_count: i32,
#[sea_orm(column_name = "latestRequestSentAt")]
pub latest_request_sent_at: Option<DateTime>,
pub latest_request_sent_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "latestStatus")]
pub latest_status: Option<i32>,
#[sea_orm(column_name = "latestRequestReceivedAt")]
pub latest_request_received_at: Option<DateTime>,
pub latest_request_received_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "lastCommunicatedAt")]
pub last_communicated_at: DateTime,
pub last_communicated_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "isNotResponding")]
pub is_not_responding: bool,
#[sea_orm(column_name = "softwareName")]
@ -46,7 +46,7 @@ pub struct Model {
#[sea_orm(column_name = "maintainerEmail")]
pub maintainer_email: Option<String>,
#[sea_orm(column_name = "infoUpdatedAt")]
pub info_updated_at: Option<DateTime>,
pub info_updated_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "isSuspended")]
pub is_suspended: bool,
#[sea_orm(column_name = "iconUrl")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "recipientId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub r#type: String,

View file

@ -13,13 +13,13 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "muteeId")]
pub mutee_id: String,
#[sea_orm(column_name = "muterId")]
pub muter_id: String,
#[sea_orm(column_name = "expiresAt")]
pub expires_at: Option<DateTime>,
pub expires_at: Option<DateTimeWithTimeZone>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View file

@ -14,7 +14,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "replyId")]
pub reply_id: Option<String>,
#[sea_orm(column_name = "renoteId")]
@ -66,7 +66,7 @@ pub struct Model {
#[sea_orm(column_name = "threadId")]
pub thread_id: Option<String>,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: Option<DateTime>,
pub updated_at: Option<DateTimeWithTimeZone>,
pub lang: Option<String>,
}

View file

@ -20,7 +20,7 @@ pub struct Model {
#[sea_orm(column_name = "fileIds")]
pub file_ids: Vec<String>,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: DateTime,
pub updated_at: DateTimeWithTimeZone,
pub emojis: Vec<String>,
}

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "threadId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -14,7 +14,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "notifieeId")]
pub notifiee_id: String,
#[sea_orm(column_name = "notifierId")]

View file

@ -14,9 +14,9 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: DateTime,
pub updated_at: DateTimeWithTimeZone,
pub title: String,
pub name: String,
pub summary: Option<String>,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "pageId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub token: String,
#[sea_orm(column_name = "userId")]
pub user_id: String,

View file

@ -14,7 +14,7 @@ pub struct Model {
#[sea_orm(column_name = "noteId", primary_key, auto_increment = false, unique)]
pub note_id: String,
#[sea_orm(column_name = "expiresAt")]
pub expires_at: Option<DateTime>,
pub expires_at: Option<DateTimeWithTimeZone>,
pub multiple: bool,
pub choices: Vec<String>,
pub votes: Vec<i32>,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(column_name = "noteId", primary_key, auto_increment = false, unique)]
pub note_id: String,
#[sea_orm(column_name = "expiresAt")]
pub expires_at: DateTime,
pub expires_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
}

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub code: String,
}

View file

@ -13,9 +13,9 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: DateTime,
pub updated_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub key: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "muteeId")]
pub mutee_id: String,
#[sea_orm(column_name = "muterId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "muteeId")]
pub mutee_id: String,
#[sea_orm(column_name = "muterId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub ip: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub endpoint: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub username: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View file

@ -14,11 +14,11 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "updatedAt")]
pub updated_at: Option<DateTime>,
pub updated_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "lastFetchedAt")]
pub last_fetched_at: Option<DateTime>,
pub last_fetched_at: Option<DateTimeWithTimeZone>,
pub username: String,
#[sea_orm(column_name = "usernameLower")]
pub username_lower: String,
@ -62,7 +62,7 @@ pub struct Model {
#[sea_orm(column_name = "followersUri")]
pub followers_uri: Option<String>,
#[sea_orm(column_name = "lastActiveDate")]
pub last_active_date: Option<DateTime>,
pub last_active_date: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "hideOnlineStatus")]
pub hide_online_status: bool,
#[sea_orm(column_name = "isDeleted")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub name: String,
#[sea_orm(column_name = "userId")]
pub user_id: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "userGroupId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "userGroupId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "userGroupId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub ip: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub name: String,

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "userListId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
#[sea_orm(column_name = "noteId")]

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
pub code: String,
pub username: String,
pub email: String,

View file

@ -32,8 +32,6 @@ pub struct Model {
#[sea_orm(column_name = "twoFactorEnabled")]
pub two_factor_enabled: bool,
pub password: Option<String>,
#[sea_orm(column_name = "clientData", column_type = "JsonBinary")]
pub client_data: Json,
#[sea_orm(column_name = "autoAcceptFollowed")]
pub auto_accept_followed: bool,
#[sea_orm(column_name = "alwaysMarkNsfw")]
@ -48,14 +46,10 @@ pub struct Model {
pub use_password_less_login: bool,
#[sea_orm(column_name = "pinnedPageId", unique)]
pub pinned_page_id: Option<String>,
#[sea_orm(column_type = "JsonBinary")]
pub room: Json,
#[sea_orm(column_name = "injectFeaturedNote")]
pub inject_featured_note: bool,
#[sea_orm(column_name = "enableWordMute")]
pub enable_word_mute: bool,
#[sea_orm(column_name = "mutedWords", column_type = "JsonBinary")]
pub muted_words: Json,
#[sea_orm(column_name = "mutingNotificationTypes")]
pub muting_notification_types: Vec<UserProfileMutingnotificationtypesEnum>,
#[sea_orm(column_name = "noCrawle")]
@ -64,8 +58,6 @@ pub struct Model {
pub receive_announcement_email: bool,
#[sea_orm(column_name = "emailNotificationTypes", column_type = "JsonBinary")]
pub email_notification_types: Json,
#[sea_orm(column_name = "mutedInstances", column_type = "JsonBinary")]
pub muted_instances: Json,
#[sea_orm(column_name = "publicReactions")]
pub public_reactions: bool,
#[sea_orm(column_name = "ffVisibility")]
@ -78,6 +70,10 @@ pub struct Model {
pub is_indexable: bool,
#[sea_orm(column_name = "mutedPatterns")]
pub muted_patterns: Vec<String>,
#[sea_orm(column_name = "mutedInstances")]
pub muted_instances: Vec<String>,
#[sea_orm(column_name = "mutedWords")]
pub muted_words: Vec<String>,
pub lang: Option<String>,
}

View file

@ -17,7 +17,7 @@ pub struct Model {
#[sea_orm(column_name = "publicKey")]
pub public_key: String,
#[sea_orm(column_name = "lastUsed")]
pub last_used: DateTime,
pub last_used: DateTimeWithTimeZone,
pub name: String,
}

View file

@ -13,7 +13,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(column_name = "createdAt")]
pub created_at: DateTime,
pub created_at: DateTimeWithTimeZone,
#[sea_orm(column_name = "userId")]
pub user_id: String,
pub name: String,
@ -22,7 +22,7 @@ pub struct Model {
pub secret: String,
pub active: bool,
#[sea_orm(column_name = "latestSentAt")]
pub latest_sent_at: Option<DateTime>,
pub latest_sent_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_name = "latestStatus")]
pub latest_status: Option<i32>,
}

View file

@ -0,0 +1,78 @@
use crate::database::cache;
use crate::database::{db_conn, redis_conn, redis_key, RedisConnError};
use crate::federation::acct::Acct;
use crate::misc::check_hit_antenna::{check_hit_antenna, AntennaCheckError};
use crate::model::entity::{antenna, note};
use crate::service::stream;
use crate::util::id::{get_timestamp, InvalidIdErr};
use redis::{streams::StreamMaxlen, AsyncCommands, RedisError};
use sea_orm::{DbErr, EntityTrait};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Database error: {0}")]
DbErr(#[from] DbErr),
#[error("Cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("Redis error: {0}")]
RedisErr(#[from] RedisError),
#[error("Redis connection error: {0}")]
RedisConnErr(#[from] RedisConnError),
#[error("Invalid ID: {0}")]
InvalidIdErr(#[from] InvalidIdErr),
#[error("Stream error: {0}")]
StreamErr(#[from] stream::Error),
#[error("Failed to check if the note should be added to antenna: {0}")]
AntennaCheckErr(#[from] AntennaCheckError),
}
// https://github.com/napi-rs/napi-rs/issues/2060
type Antenna = antenna::Model;
type Note = note::Model;
// TODO?: it might be better to store this directly in memory
// (like fetch_meta) instead of Redis as it's used so much
async fn antennas() -> Result<Vec<Antenna>, Error> {
const CACHE_KEY: &str = "antennas";
// `unwrap_or` evaluates its argument eagerly, so the query and cache refresh
// below would run even on a cache hit; check for a hit explicitly instead.
if let Some(hit) = cache::get::<Vec<Antenna>>(CACHE_KEY).await? {
return Ok(hit);
}
let antennas = antenna::Entity::find().all(db_conn().await?).await?;
cache::set(CACHE_KEY, &antennas, 5 * 60).await?;
Ok(antennas)
}
#[crate::export]
pub async fn update_antennas_on_new_note(
note: Note,
note_author: &Acct,
note_muted_users: Vec<String>,
) -> Result<(), Error> {
// TODO: do this in parallel
for antenna in antennas().await?.iter() {
if note_muted_users.contains(&antenna.user_id) {
continue;
}
if check_hit_antenna(antenna, note.clone(), note_author).await? {
add_note_to_antenna(&antenna.id, &note).await?;
}
}
Ok(())
}
pub async fn add_note_to_antenna(antenna_id: &str, note: &Note) -> Result<(), Error> {
// for timeline API
redis_conn()
.await?
.xadd_maxlen(
redis_key(format!("antennaTimeline:{}", antenna_id)),
StreamMaxlen::Approx(200),
format!("{}-*", get_timestamp(&note.id)?),
&[("note", &note.id)],
)
.await?;
// for streaming API
Ok(stream::antenna::publish(antenna_id.to_string(), note).await?)
}
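
The TODO above notes that the antenna checks could run in parallel. A minimal sketch of one way to do that, not part of this commit, assuming the futures crate is available and the signatures stay as shown:

use futures::future::try_join_all;

// Hypothetical parallel variant of update_antennas_on_new_note:
// run every antenna check concurrently and fail on the first error.
pub async fn update_antennas_on_new_note_parallel(
    note: Note,
    note_author: &Acct,
    note_muted_users: Vec<String>,
) -> Result<(), Error> {
    let antennas = antennas().await?;
    let checks = antennas
        .iter()
        .filter(|antenna| !note_muted_users.contains(&antenna.user_id))
        .map(|antenna| {
            let note = note.clone();
            async move {
                if check_hit_antenna(antenna, note.clone(), note_author).await? {
                    add_note_to_antenna(&antenna.id, &note).await?;
                }
                Ok::<(), Error>(())
            }
        });
    try_join_all(checks).await?;
    Ok(())
}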

View file

@ -1,3 +1,4 @@
pub mod antenna;
pub mod nodeinfo;
pub mod note;
pub mod push_notification;

View file

@ -116,13 +116,13 @@ async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
pub async fn nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
const NODEINFO_2_1_CACHE_KEY: &str = "nodeinfo_2_1";
let cached = cache::get::<Nodeinfo21>(NODEINFO_2_1_CACHE_KEY)?;
let cached = cache::get::<Nodeinfo21>(NODEINFO_2_1_CACHE_KEY).await?;
if let Some(nodeinfo) = cached {
Ok(nodeinfo)
} else {
let nodeinfo = generate_nodeinfo_2_1().await?;
cache::set(NODEINFO_2_1_CACHE_KEY, &nodeinfo, 60 * 60)?;
cache::set(NODEINFO_2_1_CACHE_KEY, &nodeinfo, 60 * 60).await?;
Ok(nodeinfo)
}
}
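
Both antennas() and nodeinfo_2_1() now follow the same get-or-compute-and-cache shape. A minimal sketch, not part of this commit, of how that pattern could be factored out; the TTL type and the trait bounds on cache::get/cache::set are assumptions based on how they are called here:

// Hypothetical generic wrapper around the Redis-backed cache module.
async fn cached<T, E, F, Fut>(key: &str, ttl_seconds: u64, fetch: F) -> Result<T, E>
where
    T: serde::Serialize + serde::de::DeserializeOwned,
    E: From<cache::Error>,
    F: FnOnce() -> Fut,
    Fut: std::future::Future<Output = Result<T, E>>,
{
    // Return the cached value if present.
    if let Some(hit) = cache::get::<T>(key).await? {
        return Ok(hit);
    }
    // Otherwise compute it, store it with the given TTL, and return it.
    let value = fetch().await?;
    cache::set(key, &value, ttl_seconds).await?;
    Ok(value)
}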

View file

@ -12,7 +12,7 @@ pub async fn watch_note(
if watcher_id != note_author_id {
note_watching::Entity::insert(note_watching::ActiveModel {
id: ActiveValue::set(gen_id()),
created_at: ActiveValue::set(chrono::Local::now().naive_local()),
created_at: ActiveValue::set(chrono::Utc::now().into()),
user_id: ActiveValue::Set(watcher_id.to_string()),
note_user_id: ActiveValue::Set(note_author_id.to_string()),
note_id: ActiveValue::Set(note_id.to_string()),

View file

@ -47,6 +47,7 @@ pub enum PushNotificationKind {
ReadNotifications,
#[strum(serialize = "readAllNotifications")]
ReadAllNotifications,
Mastodon,
}
fn compact_content(
@ -158,15 +159,29 @@ pub async fn send_push_notification(
.all(db)
.await?;
let payload = format!(
"{{\"type\":\"{}\",\"userId\":\"{}\",\"dateTime\":{},\"body\":{}}}",
kind,
receiver_user_id,
chrono::Utc::now().timestamp_millis(),
serde_json::to_string(&compact_content(&kind, content.clone())?)?
);
// TODO: refactoring
let payload = if kind == PushNotificationKind::Mastodon {
// Leave the `content` as it is
serde_json::to_string(content)?
} else {
// Format the `content` passed from the TypeScript backend
// for Firefish push notifications
format!(
"{{\"type\":\"{}\",\"userId\":\"{}\",\"dateTime\":{},\"body\":{}}}",
kind,
receiver_user_id,
chrono::Utc::now().timestamp_millis(),
serde_json::to_string(&compact_content(&kind, content.clone())?)?
)
};
tracing::trace!("payload: {:#?}", payload);
let encoding = if kind == PushNotificationKind::Mastodon {
ContentEncoding::AesGcm
} else {
ContentEncoding::Aes128Gcm
};
for subscription in subscriptions.iter() {
if !subscription.send_read_message
&& [
@ -211,7 +226,7 @@ pub async fn send_push_notification(
let mut message_builder = WebPushMessageBuilder::new(&subscription_info);
message_builder.set_ttl(1000);
message_builder.set_payload(ContentEncoding::Aes128Gcm, payload.as_bytes());
message_builder.set_payload(encoding, payload.as_bytes());
message_builder.set_vapid_signature(signature.unwrap());
let message = message_builder.build();
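
The payload and content encoding now depend on whether the notification targets a Mastodon API client: Mastodon Web Push subscriptions still use the older aesgcm scheme, while Firefish's own clients use aes128gcm (RFC 8188). A small sketch, not part of this commit, restating that choice as a standalone helper:

use web_push::ContentEncoding;

// Sketch of the encoding selection above as a helper function.
fn encoding_for(kind: &PushNotificationKind) -> ContentEncoding {
    match kind {
        // Mastodon-compatible clients subscribe with the legacy aesgcm encoding.
        PushNotificationKind::Mastodon => ContentEncoding::AesGcm,
        // Everything else goes through the newer aes128gcm encoding.
        _ => ContentEncoding::Aes128Gcm,
    }
}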

View file

@ -7,8 +7,8 @@ pub mod group_chat;
pub mod moderation;
use crate::config::CONFIG;
use crate::database::redis_conn;
use redis::{Commands, RedisError};
use crate::database::{redis_conn, RedisConnError};
use redis::{AsyncCommands, RedisError};
#[derive(strum::Display)]
pub enum Stream {
@ -49,13 +49,15 @@ pub enum Stream {
pub enum Error {
#[error("Redis error: {0}")]
RedisError(#[from] RedisError),
#[error("Redis connection error: {0}")]
RedisConnErr(#[from] RedisConnError),
#[error("Json (de)serialization error: {0}")]
JsonError(#[from] serde_json::Error),
#[error("Value error: {0}")]
ValueError(String),
}
pub fn publish_to_stream(
pub async fn publish_to_stream(
stream: &Stream,
kind: Option<String>,
value: Option<String>,
@ -70,10 +72,13 @@ pub fn publish_to_stream(
value.ok_or(Error::ValueError("Invalid streaming message".to_string()))?
};
redis_conn()?.publish(
&CONFIG.host,
format!("{{\"channel\":\"{}\",\"message\":{}}}", stream, message),
)?;
redis_conn()
.await?
.publish(
&CONFIG.host,
format!("{{\"channel\":\"{}\",\"message\":{}}}", stream, message),
)
.await?;
Ok(())
}
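
publish_to_stream still assembles the channel envelope with format!, which relies on message already being valid JSON. A sketch, not part of this commit and with a hypothetical helper name, of building the same envelope through serde_json, which would also validate the message and escape the channel name:

// Hypothetical alternative to the format! call above; `message` is expected
// to hold an already-serialized JSON document, as in publish_to_stream.
fn build_envelope(stream: &Stream, message: &str) -> Result<String, Error> {
    let mut envelope = serde_json::Map::new();
    envelope.insert("channel".to_owned(), serde_json::Value::String(stream.to_string()));
    envelope.insert("message".to_owned(), serde_json::from_str(message)?);
    Ok(serde_json::to_string(&envelope)?)
}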

View file

@ -1,10 +1,11 @@
use crate::model::entity::note;
use crate::service::stream::{publish_to_stream, Error, Stream};
pub fn publish(antenna_id: String, note: &note::Model) -> Result<(), Error> {
pub async fn publish(antenna_id: String, note: &note::Model) -> Result<(), Error> {
publish_to_stream(
&Stream::Antenna { antenna_id },
Some("note".to_string()),
Some(serde_json::to_string(note)?),
)
.await
}

View file

@ -1,10 +1,11 @@
use crate::service::stream::{publish_to_stream, Error, Stream};
#[crate::export(js_name = "publishToChannelStream")]
pub fn publish(channel_id: String, user_id: String) -> Result<(), Error> {
pub async fn publish(channel_id: String, user_id: String) -> Result<(), Error> {
publish_to_stream(
&Stream::Channel { channel_id },
Some("typing".to_string()),
Some(format!("\"{}\"", user_id)),
)
.await
}

View file

@ -17,7 +17,7 @@ pub enum ChatEvent {
// https://github.com/napi-rs/napi-rs/issues/2036
#[crate::export(js_name = "publishToChatStream")]
pub fn publish(
pub async fn publish(
sender_user_id: String,
receiver_user_id: String,
kind: ChatEvent,
@ -31,4 +31,5 @@ pub fn publish(
Some(kind.to_string()),
Some(serde_json::to_string(object)?),
)
.await
}

View file

@ -13,7 +13,7 @@ pub enum ChatIndexEvent {
// https://github.com/napi-rs/napi-rs/issues/2036
#[crate::export(js_name = "publishToChatIndexStream")]
pub fn publish(
pub async fn publish(
user_id: String,
kind: ChatIndexEvent,
object: &serde_json::Value,
@ -23,4 +23,5 @@ pub fn publish(
Some(kind.to_string()),
Some(serde_json::to_string(object)?),
)
.await
}

View file

@ -18,10 +18,11 @@ pub struct PackedEmoji {
}
#[crate::export(js_name = "publishToBroadcastStream")]
pub fn publish(emoji: &PackedEmoji) -> Result<(), Error> {
pub async fn publish(emoji: &PackedEmoji) -> Result<(), Error> {
publish_to_stream(
&Stream::CustomEmoji,
Some("emojiAdded".to_string()),
Some(format!("{{\"emoji\":{}}}", serde_json::to_string(emoji)?)),
)
.await
}

View file

@ -4,10 +4,15 @@ use crate::service::stream::{chat::ChatEvent, publish_to_stream, Error, Stream};
// https://github.com/napi-rs/napi-rs/issues/2036
#[crate::export(js_name = "publishToGroupChatStream")]
pub fn publish(group_id: String, kind: ChatEvent, object: &serde_json::Value) -> Result<(), Error> {
pub async fn publish(
group_id: String,
kind: ChatEvent,
object: &serde_json::Value,
) -> Result<(), Error> {
publish_to_stream(
&Stream::GroupChat { group_id },
Some(kind.to_string()),
Some(serde_json::to_string(object)?),
)
.await
}

View file

@ -12,10 +12,11 @@ pub struct AbuseUserReportLike {
}
#[crate::export(js_name = "publishToModerationStream")]
pub fn publish(moderator_id: String, report: &AbuseUserReportLike) -> Result<(), Error> {
pub async fn publish(moderator_id: String, report: &AbuseUserReportLike) -> Result<(), Error> {
publish_to_stream(
&Stream::Moderation { moderator_id },
Some("newAbuseUserReport".to_string()),
Some(serde_json::to_string(report)?),
)
.await
}

View file

@ -36,15 +36,14 @@
"adm-zip": "0.5.10",
"ajv": "8.13.0",
"archiver": "7.0.1",
"aws-sdk": "2.1621.0",
"aws-sdk": "2.1623.0",
"axios": "1.6.8",
"backend-rs": "workspace:*",
"blurhash": "2.0.5",
"bull": "4.12.4",
"bull": "4.12.6",
"cacheable-lookup": "TheEssem/cacheable-lookup",
"cbor-x": "1.5.9",
"chalk": "5.3.0",
"chalk-template": "1.1.0",
"cli-highlight": "2.1.11",
"color-convert": "2.0.1",
"content-disposition": "0.5.4",
@ -58,7 +57,7 @@
"firefish-js": "workspace:*",
"fluent-ffmpeg": "2.1.2",
"form-data": "4.0.0",
"got": "14.2.1",
"got": "14.3.0",
"gunzip-maybe": "1.4.2",
"hpagent": "1.2.0",
"ioredis": "5.4.1",
@ -107,7 +106,7 @@
"rss-parser": "3.13.0",
"sanitize-html": "2.13.0",
"semver": "7.6.2",
"sharp": "0.33.3",
"sharp": "0.33.4",
"stringz": "2.1.0",
"summaly": "2.7.0",
"syslog-pro": "1.0.0",
@ -169,7 +168,7 @@
"@types/websocket": "1.0.10",
"@types/ws": "8.5.10",
"cross-env": "7.0.3",
"eslint": "9.2.0",
"eslint": "9.3.0",
"mocha": "10.4.0",
"pug": "3.0.2",
"strict-event-emitter-types": "2.0.0",

Some files were not shown because too many files have changed in this diff.