Merge branch 'develop' into refactor/types

This commit is contained in:
naskya 2024-05-16 16:45:03 +09:00
commit 9298a6252d
No known key found for this signature in database
GPG key ID: 712D413B3A9FED5C
197 changed files with 15244 additions and 14917 deletions

View file

@ -1,195 +1,11 @@
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ url: http://localhost:3000
# Firefish configuration
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# ┌─────┐
#───┘ URL └─────────────────────────────────────────────────────
# Final accessible URL seen by a user.
url: https://example.tld/
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# URL SETTINGS AFTER THAT!
# ┌───────────────────────┐
#───┘ Port and TLS settings └───────────────────────────────────
#
# Misskey requires a reverse proxy to support HTTPS connections.
#
# +----- https://example.tld/ ------------+
# +------+ |+-------------+ +----------------+|
# | User | ---> || Proxy (443) | ---> | Misskey (3000) ||
# +------+ |+-------------+ +----------------+|
# +---------------------------------------+
#
# You need to set up a reverse proxy. (e.g. nginx)
# An encrypted connection with HTTPS is highly recommended
# because tokens may be transferred in GET requests.
# The port that your Misskey server should listen on.
port: 3000 port: 3000
# ┌──────────────────────────┐
#───┘ PostgreSQL configuration └────────────────────────────────
db: db:
host: postgres host: postgres
port: 5432 port: 5432
db: firefish_db
# Database name user: firefish
db: postgres pass: password
# Auth
user: postgres
pass: test
# Whether disable Caching queries
#disableCache: true
# Extra Connection options
#extra:
# ssl: true
# ┌─────────────────────┐
#───┘ Redis configuration └─────────────────────────────────────
redis: redis:
host: redis host: redis
port: 6379 port: 6379
#family: 0 # 0=Both, 4=IPv4, 6=IPv6
#pass: example-pass
#prefix: example-prefix
#db: 1
# ┌─────────────────────────────┐
#───┘ Elasticsearch configuration └─────────────────────────────
#elasticsearch:
# host: localhost
# port: 9200
# ssl: false
# user:
# pass:
# ┌───────────────┐
#───┘ ID generation └───────────────────────────────────────────
# You can select the ID generation method.
# You don't usually need to change this setting, but you can
# change it according to your preferences.
# Available methods:
# aid ... Short, Millisecond accuracy
# meid ... Similar to ObjectID, Millisecond accuracy
# ulid ... Millisecond accuracy
# objectid ... This is left for backward compatibility
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE THE
# ID SETTINGS AFTER THAT!
id: 'aid'
# ┌─────────────────────┐
#───┘ Other configuration └─────────────────────────────────────
# Max note length, should be < 8000.
#maxNoteLength: 3000
# Whether disable HSTS
#disableHsts: true
# Number of worker processes
#clusterLimit: 1
# Job concurrency per worker
# deliverJobConcurrency: 128
# inboxJobConcurrency: 16
# Job rate limiter
# deliverJobPerSec: 128
# inboxJobPerSec: 16
# Job attempts
# deliverJobMaxAttempts: 12
# inboxJobMaxAttempts: 8
# IP address family used for outgoing request (ipv4, ipv6 or dual)
#outgoingAddressFamily: ipv4
# Syslog option
#syslog:
# host: localhost
# port: 514
# Proxy for HTTP/HTTPS
#proxy: http://127.0.0.1:3128
#proxyBypassHosts: [
# 'example.com',
# '192.0.2.8'
#]
# Proxy for SMTP/SMTPS
#proxySmtp: http://127.0.0.1:3128 # use HTTP/1.1 CONNECT
#proxySmtp: socks4://127.0.0.1:1080 # use SOCKS4
#proxySmtp: socks5://127.0.0.1:1080 # use SOCKS5
# Media Proxy
#mediaProxy: https://example.com/proxy
# Proxy remote files (default: false)
#proxyRemoteFiles: true
#allowedPrivateNetworks: [
# '127.0.0.1/32'
#]
# Upload or download file size limits (bytes)
#maxFileSize: 262144000
# Managed hosting settings
# !!!!!!!!!!
# >>>>>> NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS! <<<<<<
# !!!!!!!!!!
# Each category is optional, but if each item in each category is mandatory!
# If you mess this up, that's on you, you've been warned...
#maxUserSignups: 100
#isManagedHosting: true
#deepl:
# managed: true
# authKey: ''
# isPro: false
#
#email:
# managed: true
# address: 'example@email.com'
# host: 'email.com'
# port: 587
# user: 'example@email.com'
# pass: ''
# useImplicitSslTls: false
#
#objectStorage:
# managed: true
# baseUrl: ''
# bucket: ''
# prefix: ''
# endpoint: ''
# region: ''
# accessKey: ''
# secretKey: ''
# useSsl: true
# connnectOverProxy: false
# setPublicReadOnUpload: true
# s3ForcePathStyle: true
# !!!!!!!!!!
# >>>>>> AGAIN, NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS, ABOVE SETTINGS ARE FOR MANAGED HOSTING ONLY! <<<<<<
# !!!!!!!!!!
# Seriously. Do NOT fill out the above settings if you're self-hosting.
# They're much better off being set from the control panel.

View file

@ -51,12 +51,11 @@ title.svg
/dev /dev
/docs /docs
/scripts /scripts
!/scripts/copy-assets.mjs
biome.json biome.json
COPYING
CODE_OF_CONDUCT.md CODE_OF_CONDUCT.md
CONTRIBUTING.md CONTRIBUTING.md
Dockerfile Dockerfile
LICENSE
Procfile Procfile
README.md README.md
SECURITY.md SECURITY.md

265
.gitlab-ci.yml Normal file
View file

@ -0,0 +1,265 @@
image: docker.io/rust:slim-bookworm
services:
- name: docker.io/groonga/pgroonga:latest-alpine-12-slim
alias: postgres
pull_policy: if-not-present
- name: docker.io/redis:7-alpine
alias: redis
pull_policy: if-not-present
workflow:
rules:
- if: $CI_PROJECT_PATH == 'firefish/firefish' || $CI_MERGE_REQUEST_PROJECT_PATH == 'firefish/firefish'
changes:
paths:
- packages/**/*
- locales/**/*
- scripts/**/*
- package.json
- Cargo.toml
- Cargo.lock
- Dockerfile
- .dockerignore
when: always
- when: never
stages:
- dependency
- test
- build
variables:
POSTGRES_DB: 'firefish_db'
POSTGRES_USER: 'firefish'
POSTGRES_PASSWORD: 'password'
POSTGRES_HOST_AUTH_METHOD: 'trust'
DEBIAN_FRONTEND: 'noninteractive'
CARGO_PROFILE_DEV_OPT_LEVEL: '0'
CARGO_PROFILE_DEV_LTO: 'off'
CARGO_PROFILE_DEV_DEBUG: 'none'
CARGO_TERM_COLOR: 'always'
GIT_CLEAN_FLAGS: -ffdx -e node_modules/ -e built/ -e target/ -e packages/backend-rs/built/
default:
before_script:
- apt-get update && apt-get -y upgrade
- apt-get -y --no-install-recommends install curl
- curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
- apt-get install -y --no-install-recommends build-essential clang mold python3 perl nodejs postgresql-client
- corepack enable
- corepack prepare pnpm@latest --activate
- cp .config/ci.yml .config/default.yml
- cp ci/cargo/config.toml /usr/local/cargo/config.toml
- export PGPASSWORD="${POSTGRES_PASSWORD}"
- psql --host postgres --user "${POSTGRES_USER}" --dbname "${POSTGRES_DB}" --command 'CREATE EXTENSION pgroonga'
test:build:
stage: test
rules:
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- scripts/**/*
- package.json
- Cargo.toml
- Cargo.lock
when: always
needs:
- job: cargo:clippy
optional: true
- job: cargo:test
optional: true
script:
- pnpm install --frozen-lockfile
- pnpm run build:debug
- pnpm run migrate
test:build:backend_ts_only:
stage: test
rules:
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- scripts/**/*
- package.json
- Cargo.toml
- Cargo.lock
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/backend/**/*
- packages/megalodon/**/*
when: always
before_script:
- apt-get update && apt-get -y upgrade
- apt-get -y --no-install-recommends install curl
- curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
- apt-get install -y --no-install-recommends build-essential python3 nodejs postgresql-client
- corepack enable
- corepack prepare pnpm@latest --activate
- mkdir -p packages/backend-rs/built
- cp packages/backend-rs/index.js packages/backend-rs/built/index.js
- cp packages/backend-rs/index.d.ts packages/backend-rs/built/index.d.ts
- cp .config/ci.yml .config/default.yml
- export PGPASSWORD="${POSTGRES_PASSWORD}"
- psql --host postgres --user "${POSTGRES_USER}" --dbname "${POSTGRES_DB}" --command 'CREATE EXTENSION pgroonga'
script:
- pnpm install --frozen-lockfile
- pnpm --filter 'backend' --filter 'megalodon' run build:debug
- pnpm run migrate
test:build:client_only:
stage: test
rules:
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- scripts/**/*
- package.json
- Cargo.toml
- Cargo.lock
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/client/**/*
- packages/firefish-js/**/*
- packages/sw/**/*
- locales/**/*
when: always
services: []
before_script:
- apt-get update && apt-get -y upgrade
- apt-get -y --no-install-recommends install curl
- curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
- apt-get install -y --no-install-recommends build-essential python3 perl nodejs
- corepack enable
- corepack prepare pnpm@latest --activate
- cp .config/ci.yml .config/default.yml
script:
- pnpm install --frozen-lockfile
- pnpm --filter 'firefish-js' --filter 'client' --filter 'sw' run build:debug
build:container:
stage: build
image: docker.io/debian:bookworm-slim
services: []
rules:
- if: $BUILD == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop'
changes:
paths:
- packages/**/*
- locales/**/*
- scripts/copy-assets.mjs
- package.json
- Cargo.toml
- Cargo.lock
- Dockerfile
- .dockerignore
when: always
needs:
- job: test:build
optional: true
- job: test:build:backend_ts_only
optional: true
- job: test:build:client_only
optional: true
before_script:
- apt-get update && apt-get -y upgrade
- apt-get install -y --no-install-recommends buildah ca-certificates fuse-overlayfs
- buildah login --username "${CI_REGISTRY_USER}" --password "${CI_REGISTRY_PASSWORD}" "${CI_REGISTRY}"
- export IMAGE_TAG="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop:not-for-production"
- export IMAGE_CACHE="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop/cache"
script:
- |-
buildah build \
--isolation chroot \
--device /dev/fuse:rw \
--security-opt seccomp=unconfined \
--security-opt apparmor=unconfined \
--cap-add all \
--platform linux/amd64 \
--layers \
--cache-to "${IMAGE_CACHE}" \
--cache-from "${IMAGE_CACHE}" \
--tag "${IMAGE_TAG}" \
.
- buildah inspect "${IMAGE_TAG}"
- buildah push "${IMAGE_TAG}"
cargo:test:
stage: test
rules:
- if: $TEST == 'false'
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- Cargo.toml
- Cargo.lock
when: always
script:
- curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
- pnpm install --frozen-lockfile
- mkdir -p packages/backend-rs/built
- cp packages/backend-rs/index.js packages/backend-rs/built/index.js
- cp packages/backend-rs/index.d.ts packages/backend-rs/built/index.d.ts
- pnpm --filter='!backend-rs' run build:debug
- cargo test --doc
- cargo nextest run
cargo:clippy:
stage: test
rules:
- if: $TEST == 'false'
when: never
- if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'main'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- Cargo.toml
- Cargo.lock
when: always
services: []
before_script:
- apt-get update && apt-get -y upgrade
- apt-get install -y --no-install-recommends build-essential clang mold perl
- cp ci/cargo/config.toml /usr/local/cargo/config.toml
- rustup component add clippy
script:
- cargo clippy -- -D warnings
renovate:
stage: dependency
image:
name: docker.io/renovate/renovate:37-slim
entrypoint: [""]
rules:
- if: $RENOVATE && $CI_PIPELINE_SOURCE == 'schedule'
services: []
before_script: []
script:
- renovate --platform gitlab --token "${API_TOKEN}" --endpoint "${CI_SERVER_URL}/api/v4" "${CI_PROJECT_PATH}"

View file

@ -26,10 +26,6 @@ RsaSignature2017 implementation by Transmute Industries Inc
License: MIT License: MIT
https://github.com/transmute-industries/RsaSignature2017/blob/master/LICENSE https://github.com/transmute-industries/RsaSignature2017/blob/master/LICENSE
Machine learning model for sensitive images by Infinite Red, Inc.
License: MIT
https://github.com/infinitered/nsfwjs/blob/master/LICENSE
Chiptune2.js by Simon Gündling Chiptune2.js by Simon Gündling
License: MIT License: MIT
https://github.com/deskjet/chiptune2.js#license https://github.com/deskjet/chiptune2.js#license

1225
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -5,8 +5,8 @@ resolver = "2"
[workspace.dependencies] [workspace.dependencies]
macro_rs = { path = "packages/macro-rs" } macro_rs = { path = "packages/macro-rs" }
napi = { version = "2.16.4", default-features = false } napi = { version = "2.16.6", default-features = false }
napi-derive = "2.16.3" napi-derive = "2.16.4"
napi-build = "2.1.3" napi-build = "2.1.3"
argon2 = "0.5.3" argon2 = "0.5.3"
@ -18,29 +18,31 @@ cuid2 = "0.1.2"
emojis = "0.6.2" emojis = "0.6.2"
idna = "0.5.0" idna = "0.5.0"
image = "0.25.1" image = "0.25.1"
isahc = "1.7.2"
nom-exif = "1.2.0" nom-exif = "1.2.0"
once_cell = "1.19.0" once_cell = "1.19.0"
openssl = "0.10.64" openssl = "0.10.64"
pretty_assertions = "1.4.0" pretty_assertions = "1.4.0"
proc-macro2 = "1.0.81" proc-macro2 = "1.0.82"
quote = "1.0.36" quote = "1.0.36"
rand = "0.8.5" rand = "0.8.5"
redis = "0.25.3" redis = "0.25.3"
regex = "1.10.4" regex = "1.10.4"
reqwest = "0.12.4" rmp-serde = "1.3.0"
rmp-serde = "1.2.0"
sea-orm = "0.12.15" sea-orm = "0.12.15"
serde = "1.0.198" serde = "1.0.202"
serde_json = "1.0.116" serde_json = "1.0.117"
serde_yaml = "0.9.34" serde_yaml = "0.9.34"
strum = "0.26.2" strum = "0.26.2"
syn = "2.0.60" syn = "2.0.63"
thiserror = "1.0.59" sysinfo = "0.30.12"
thiserror = "1.0.60"
tokio = "1.37.0" tokio = "1.37.0"
tracing = "0.1.40" tracing = "0.1.40"
tracing-subscriber = "0.3.18" tracing-subscriber = "0.3.18"
url = "2.5.0" url = "2.5.0"
urlencoding = "2.1.3" urlencoding = "2.1.3"
web-push = { git = "https://github.com/pimeys/rust-web-push", rev = "40febe4085e3cef9cdfd539c315e3e945aba0656" }
[profile.release] [profile.release]
lto = true lto = true

View file

@ -7,7 +7,12 @@ RUN apk update && apk add --no-cache build-base linux-headers curl ca-certificat
RUN curl --proto '=https' --tlsv1.2 --silent --show-error --fail https://sh.rustup.rs | sh -s -- -y RUN curl --proto '=https' --tlsv1.2 --silent --show-error --fail https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}" ENV PATH="/root/.cargo/bin:${PATH}"
# Copy only the cargo dependency-related files first, to cache efficiently # Copy only backend-rs dependency-related files first, to cache efficiently
COPY package.json pnpm-workspace.yaml ./
COPY packages/backend-rs/package.json packages/backend-rs/package.json
COPY packages/backend-rs/npm/linux-x64-musl/package.json packages/backend-rs/npm/linux-x64-musl/package.json
COPY packages/backend-rs/npm/linux-arm64-musl/package.json packages/backend-rs/npm/linux-arm64-musl/package.json
COPY Cargo.toml Cargo.toml COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock COPY Cargo.lock Cargo.lock
COPY packages/backend-rs/Cargo.toml packages/backend-rs/Cargo.toml COPY packages/backend-rs/Cargo.toml packages/backend-rs/Cargo.toml
@ -15,22 +20,9 @@ COPY packages/backend-rs/src/lib.rs packages/backend-rs/src/
COPY packages/macro-rs/Cargo.toml packages/macro-rs/Cargo.toml COPY packages/macro-rs/Cargo.toml packages/macro-rs/Cargo.toml
COPY packages/macro-rs/src/lib.rs packages/macro-rs/src/ COPY packages/macro-rs/src/lib.rs packages/macro-rs/src/
# Install cargo dependencies # Configure pnpm, and install backend-rs dependencies
RUN cargo fetch --locked --manifest-path /firefish/packages/backend-rs/Cargo.toml RUN corepack enable && corepack prepare pnpm@latest --activate && pnpm --filter backend-rs install
RUN cargo fetch --locked --manifest-path Cargo.toml
# Copy only the dependency-related files first, to cache efficiently
COPY package.json pnpm*.yaml ./
COPY packages/backend/package.json packages/backend/package.json
COPY packages/client/package.json packages/client/package.json
COPY packages/sw/package.json packages/sw/package.json
COPY packages/firefish-js/package.json packages/firefish-js/package.json
COPY packages/megalodon/package.json packages/megalodon/package.json
COPY packages/backend-rs/package.json packages/backend-rs/package.json
COPY packages/backend-rs/npm/linux-x64-musl/package.json packages/backend-rs/npm/linux-x64-musl/package.json
COPY packages/backend-rs/npm/linux-arm64-musl/package.json packages/backend-rs/npm/linux-arm64-musl/package.json
# Configure pnpm, and install dev mode dependencies for compilation
RUN corepack enable && corepack prepare pnpm@latest --activate && pnpm install --frozen-lockfile
# Copy in the rest of the rust files # Copy in the rest of the rust files
COPY packages/backend-rs packages/backend-rs/ COPY packages/backend-rs packages/backend-rs/
@ -39,10 +31,25 @@ COPY packages/backend-rs packages/backend-rs/
# Compile backend-rs # Compile backend-rs
RUN NODE_ENV='production' pnpm run --filter backend-rs build RUN NODE_ENV='production' pnpm run --filter backend-rs build
# Copy in the rest of the files to compile # Copy/Overwrite index.js to mitigate the bug in napi-rs codegen
COPY packages/backend-rs/index.js packages/backend-rs/built/index.js
# Copy only the dependency-related files first, to cache efficiently
COPY packages/backend/package.json packages/backend/package.json
COPY packages/client/package.json packages/client/package.json
COPY packages/sw/package.json packages/sw/package.json
COPY packages/firefish-js/package.json packages/firefish-js/package.json
COPY packages/megalodon/package.json packages/megalodon/package.json
COPY pnpm-lock.yaml ./
# Install dev mode dependencies for compilation
RUN pnpm install --frozen-lockfile
# Copy in the rest of the files to build
COPY . ./ COPY . ./
RUN NODE_ENV='production' pnpm run --filter firefish-js build
RUN NODE_ENV='production' pnpm run --recursive --parallel --filter '!backend-rs' --filter '!firefish-js' build && pnpm run gulp # Build other workspaces
RUN NODE_ENV='production' pnpm run --recursive --filter '!backend-rs' build && pnpm run build:assets
# Trim down the dependencies to only those for production # Trim down the dependencies to only those for production
RUN find . -path '*/node_modules/*' -delete && pnpm install --prod --frozen-lockfile RUN find . -path '*/node_modules/*' -delete && pnpm install --prod --frozen-lockfile

View file

@ -3,7 +3,7 @@ export
.PHONY: pre-commit .PHONY: pre-commit
pre-commit: format entities napi-index pre-commit: format entities napi
.PHONY: format .PHONY: format
format: format:
@ -11,11 +11,12 @@ format:
.PHONY: entities .PHONY: entities
entities: entities:
pnpm --filter=backend run build:debug
pnpm run migrate pnpm run migrate
$(MAKE) -C ./packages/backend-rs regenerate-entities $(MAKE) -C ./packages/backend-rs regenerate-entities
.PHONY: napi-index .PHONY: napi
napi-index: napi:
$(MAKE) -C ./packages/backend-rs update-index $(MAKE) -C ./packages/backend-rs update-index

3
ci/cargo/config.toml Normal file
View file

@ -0,0 +1,3 @@
[target.x86_64-unknown-linux-gnu]
linker = "/usr/bin/clang"
rustflags = ["-C", "link-arg=--ld-path=/usr/bin/mold"]

View file

@ -1,4 +1,4 @@
COMPOSE='docker compose' COMPOSE='podman-compose'
POSTGRES_PASSWORD=password POSTGRES_PASSWORD=password
POSTGRES_USER=firefish POSTGRES_USER=firefish
POSTGRES_DB=firefish_db POSTGRES_DB=firefish_db

View file

@ -7,6 +7,8 @@
- Node.js - Node.js
- pnpm - pnpm
- Rust toolchain - Rust toolchain
- Python 3
- Perl
- FFmpeg - FFmpeg
- Container runtime - Container runtime
- [Docker](https://docs.docker.com/get-docker/) - [Docker](https://docs.docker.com/get-docker/)
@ -31,7 +33,7 @@ You can refer to [local-installation.md](./local-installation.md) to install the
1. Copy example config file 1. Copy example config file
```sh ```sh
cp dev/config.example.env dev/config.env cp dev/config.example.env dev/config.env
# If you use container runtime other than Docker, you need to modify the "COMPOSE" variable # If you use container runtime other than Podman, you need to modify the "COMPOSE" variable
# vim dev/config.env # vim dev/config.env
``` ```
1. Create `.config/default.yml` with the following content 1. Create `.config/default.yml` with the following content
@ -51,12 +53,7 @@ You can refer to [local-installation.md](./local-installation.md) to install the
host: localhost host: localhost
port: 26379 port: 26379
logLevel: [ maxlogLevel: 'debug' # or 'trace'
'error',
'success',
'warning',
'info'
]
``` ```
1. Start database containers 1. Start database containers
```sh ```sh
@ -84,6 +81,19 @@ You can refer to [local-installation.md](./local-installation.md) to install the
DONE * [core boot] Now listening on port 3000 on http://localhost:3000 DONE * [core boot] Now listening on port 3000 on http://localhost:3000
``` ```
## Update auto-generated files in `package/backend-rs`
You need to install `sea-orm-cli` to regenerate database entities.
```sh
cargo install sea-orm-cli
```
```sh
make entities
make napi
```
## Reset the environment ## Reset the environment
You can recreate a fresh local Firefish environment by recreating the database containers: You can recreate a fresh local Firefish environment by recreating the database containers:

View file

@ -141,12 +141,7 @@ sudo apt install ffmpeg
host: localhost host: localhost
port: 6379 port: 6379
logLevel: [ maxLogLevel: 'debug' # or 'trace'
'error',
'success',
'warning',
'info'
]
``` ```
## 4. Build and start Firefish ## 4. Build and start Firefish

View file

@ -2,6 +2,16 @@
Breaking changes are indicated by the :warning: icon. Breaking changes are indicated by the :warning: icon.
## v20240516
- :warning: `server-info` (an endpoint to get server hardware information) now requires credentials.
- :warning: `net` (server's default network interface) has been removed from `admin/server-info`.
- Adding `lang` to the response of `i` and the request parameter of `i/update`.
## v20240504
- :warning: Removed `release` endpoint.
## v20240424 ## v20240424
- Added `antennaLimit` field to the response of `meta` and `admin/meta`, and the request of `admin/update-meta` (optional). - Added `antennaLimit` field to the response of `meta` and `admin/meta`, and the request of `admin/update-meta` (optional).

View file

@ -5,17 +5,31 @@ Critical security updates are indicated by the :warning: icon.
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well. - Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well. - Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
## Unreleased ## [v20240516](https://firefish.dev/firefish/firefish/-/merge_requests/10854/commits)
- Improve timeline UX (you can restore the original appearance by settings)
- Remove `$[center]` MFM function
- This function was suddenly added last year (https://firefish.dev/firefish/firefish/-/commit/1a971efa689323d54eebb4d3646e102fb4d1d95a), but according to the [MFM spec](https://github.com/misskey-dev/mfm.js/blob/6aaf68089023c6adebe44123eebbc4dcd75955e0/docs/syntax.md#fn), `$[something]` must be an inline element (while `center` is a block element), so such a syntax is not expected by MFM renderers. Please use `<center></center>` instead.
- Fix bugs
## [v20240504](https://firefish.dev/firefish/firefish/-/merge_requests/10790/commits)
- Fix bugs
## :warning: [v20240430](https://firefish.dev/firefish/firefish/-/merge_requests/10781/commits)
- Add ability to group similar notifications
- Add features to share links to an account in the three dots menu on the profile page - Add features to share links to an account in the three dots menu on the profile page
- Improve server logs - Improve server logs
- Fix bugs - Fix bugs (including a critical security issue)
- We are very thankful to @tesaguri and Laura Hausmann for helping to fix the security issue.
## [v20240424](https://firefish.dev/firefish/firefish/-/merge_requests/10765/commits) ## [v20240424](https://firefish.dev/firefish/firefish/-/merge_requests/10765/commits)
- Improve the usability of the feature to prevent forgetting to write alt texts - Improve the usability of the feature to prevent forgetting to write alt texts
- Add a server-wide setting for the maximum number of antennas each user can create - Add a server-wide setting for the maximum number of antennas each user can create
- Fix bugs - Fix bugs (including a medium severity security issue)
- We are very thankful to @mei23 for kindly sharing the information about the security issue.
## [v20240421](https://firefish.dev/firefish/firefish/-/merge_requests/10756/commits) ## [v20240421](https://firefish.dev/firefish/firefish/-/merge_requests/10756/commits)

View file

@ -1,6 +1,8 @@
BEGIN; BEGIN;
DELETE FROM "migrations" WHERE name IN ( DELETE FROM "migrations" WHERE name IN (
'AddUserProfileLanguage1714888400293',
'DropUnusedIndexes1714643926317',
'AlterAkaType1714099399879', 'AlterAkaType1714099399879',
'AddDriveFileUsage1713451569342', 'AddDriveFileUsage1713451569342',
'ConvertCwVarcharToText1713225866247', 'ConvertCwVarcharToText1713225866247',
@ -25,6 +27,22 @@ DELETE FROM "migrations" WHERE name IN (
'RemoveNativeUtilsMigration1705877093218' 'RemoveNativeUtilsMigration1705877093218'
); );
-- drop-unused-indexes
CREATE INDEX "IDX_01f4581f114e0ebd2bbb876f0b" ON "note_reaction" ("createdAt");
CREATE INDEX "IDX_0610ebcfcfb4a18441a9bcdab2" ON "poll" ("userId");
CREATE INDEX "IDX_25dfc71b0369b003a4cd434d0b" ON "note" ("attachedFileTypes");
CREATE INDEX "IDX_2710a55f826ee236ea1a62698f" ON "hashtag" ("mentionedUsersCount");
CREATE INDEX "IDX_4c02d38a976c3ae132228c6fce" ON "hashtag" ("mentionedRemoteUsersCount");
CREATE INDEX "IDX_51c063b6a133a9cb87145450f5" ON "note" ("fileIds");
CREATE INDEX "IDX_54ebcb6d27222913b908d56fd8" ON "note" ("mentions");
CREATE INDEX "IDX_7fa20a12319c7f6dc3aed98c0a" ON "poll" ("userHost");
CREATE INDEX "IDX_88937d94d7443d9a99a76fa5c0" ON "note" ("tags");
CREATE INDEX "IDX_b11a5e627c41d4dc3170f1d370" ON "notification" ("createdAt");
CREATE INDEX "IDX_c8dfad3b72196dd1d6b5db168a" ON "drive_file" ("createdAt");
CREATE INDEX "IDX_d57f9030cd3af7f63ffb1c267c" ON "hashtag" ("attachedUsersCount");
CREATE INDEX "IDX_e5848eac4940934e23dbc17581" ON "drive_file" ("uri");
CREATE INDEX "IDX_fa99d777623947a5b05f394cae" ON "user" ("tags");
-- alter-aka-type -- alter-aka-type
ALTER TABLE "user" RENAME COLUMN "alsoKnownAs" TO "alsoKnownAsOld"; ALTER TABLE "user" RENAME COLUMN "alsoKnownAs" TO "alsoKnownAsOld";
ALTER TABLE "user" ADD COLUMN "alsoKnownAs" text; ALTER TABLE "user" ADD COLUMN "alsoKnownAs" text;
@ -747,9 +765,6 @@ CREATE SEQUENCE public.__chart_day__users_id_seq
CACHE 1; CACHE 1;
ALTER SEQUENCE public.__chart_day__users_id_seq OWNED BY public.__chart_day__users.id; ALTER SEQUENCE public.__chart_day__users_id_seq OWNED BY public.__chart_day__users.id;
-- drop-user-profile-language
ALTER TABLE "user_profile" ADD COLUMN "lang" character varying(32);
-- emoji-moderator -- emoji-moderator
ALTER TABLE "user" DROP COLUMN "emojiModPerm"; ALTER TABLE "user" DROP COLUMN "emojiModPerm";
DROP TYPE "public"."user_emojimodperm_enum"; DROP TYPE "public"."user_emojimodperm_enum";

View file

@ -24,6 +24,7 @@ Firefish depends on the following software.
- `build-essential` on Debian/Ubuntu Linux - `build-essential` on Debian/Ubuntu Linux
- `base-devel` on Arch Linux - `base-devel` on Arch Linux
- [Python 3](https://www.python.org/) - [Python 3](https://www.python.org/)
- [Perl](https://www.perl.org/)
This document shows an example procedure for installing these dependencies and Firefish on Debian 12. Note that there is much room for customizing the server setup; this document merely demonstrates a simple installation. This document shows an example procedure for installing these dependencies and Firefish on Debian 12. Note that there is much room for customizing the server setup; this document merely demonstrates a simple installation.
@ -325,7 +326,7 @@ cd ~/firefish
- To add custom locales, place them in the `./custom/locales/` directory. If you name your custom locale the same as an existing locale, it will overwrite it. If you give it a unique name, it will be added to the list. Also make sure that the first part of the filename matches the locale you're basing it on. (Example: `en-FOO.yml`) - To add custom locales, place them in the `./custom/locales/` directory. If you name your custom locale the same as an existing locale, it will overwrite it. If you give it a unique name, it will be added to the list. Also make sure that the first part of the filename matches the locale you're basing it on. (Example: `en-FOO.yml`)
- To add custom error images, place them in the `./custom/assets/badges` directory, replacing the files already there. - To add custom error images, place them in the `./custom/assets/badges` directory, replacing the files already there.
- To add custom sounds, place only mp3 files in the `./custom/assets/sounds` directory. - To add custom sounds, place only mp3 files in the `./custom/assets/sounds` directory.
- To update custom assets without rebuilding, just run `pnpm run gulp`. - To update custom assets without rebuilding, just run `pnpm run build:assets`.
- To block ChatGPT, CommonCrawl, or other crawlers from indexing your instance, uncomment the respective rules in `./custom/robots.txt`. - To block ChatGPT, CommonCrawl, or other crawlers from indexing your instance, uncomment the respective rules in `./custom/robots.txt`.
## Tips & Tricks ## Tips & Tricks

View file

@ -2,12 +2,39 @@
You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md). You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md).
## Unreleased ## v20240516
### For all users
Firefish is now compatible with [Node v22](https://nodejs.org/en/blog/announcements/v22-release-announce). The pre-built OCI container image will still be using the latest LTS version (v20.13.1 as of now).
## v20240430
### For all users ### For all users
You can control the verbosity of the server log by adding `maxLogLevel` in `.config/default.yml`. `logLevels` has been deprecated in favor of this setting. (see also: <https://firefish.dev/firefish/firefish/-/blob/eac0c1c47cd23789dcc395ab08b074934409fd96/.config/example.yml#L152>) You can control the verbosity of the server log by adding `maxLogLevel` in `.config/default.yml`. `logLevels` has been deprecated in favor of this setting. (see also: <https://firefish.dev/firefish/firefish/-/blob/eac0c1c47cd23789dcc395ab08b074934409fd96/.config/example.yml#L152>)
### For systemd/pm2 users
- You need to install Perl to build Firefish. Since Git depends on Perl in many packaging systems, you probably already have Perl installed on your system. You can check the Perl version by this command:
```sh
perl --version
```
- Not only Firefish but also Node.js has recently fixed a few security issues:
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
So, it is highly recommended that you upgrade your Node.js version as well. The new versions are
- Node v18.20.2 (v18.x LTS)
- Node v20.12.2 (v20.x LTS)
- Node v21.7.3 (v21.x)
You can check your Node.js version by this command:
```sh
node --version
```
[Node v22](https://nodejs.org/en/blog/announcements/v22-release-announce) was also released several days ago, but we have not yet tested Firefish with this version.
## v20240413 ## v20240413
### For all users ### For all users

View file

@ -1,101 +0,0 @@
/**
* Gulp tasks
*/
const fs = require("fs");
const gulp = require("gulp");
const replace = require("gulp-replace");
const terser = require("gulp-terser");
const cssnano = require("gulp-cssnano");
const meta = require("./package.json");
// Copy the backend's Pug view templates into the build output directory.
gulp.task("copy:backend:views", function copyBackendViews() {
	const source = "./packages/backend/src/server/web/views/**/*";
	const destination = "./packages/backend/built/server/web/views";
	return gulp.src(source).pipe(gulp.dest(destination));
});
// Copy instance-specific custom assets into the backend assets directory.
gulp.task("copy:backend:custom", function copyBackendCustom() {
	const source = "./custom/assets/**/*";
	const destination = "./packages/backend/assets/";
	return gulp.src(source).pipe(gulp.dest(destination));
});
// Copy the three.js example fonts shipped with the client's node_modules
// into the client distribution directory.
gulp.task("copy:client:fonts", function copyClientFonts() {
	const source = "./packages/client/node_modules/three/examples/fonts/**/*";
	const destination = "./built/_client_dist_/fonts/";
	return gulp.src(source).pipe(gulp.dest(destination));
});
// Write one versioned JSON locale file per language into the client
// distribution directory.
//
// Fix: the task was declared `async (cb) => { ...; cb(); }`, which mixes
// promise-based and callback-based completion signalling — gulp's
// async-done then observes the task completing twice. An async task must
// signal completion via its returned promise only, so the `cb` parameter
// and call are removed.
gulp.task("copy:client:locales", async () => {
	// Ensure the output directory exists before any file is written.
	fs.mkdirSync("./built/_client_dist_/locales", { recursive: true });
	const { default: locales } = await import("./locales/index.mjs");
	// Embed the package version into every locale payload (and the file
	// name) so clients can cache-bust per release.
	const versionTag = { _version_: meta.version };
	for (const [lang, locale] of Object.entries(locales)) {
		fs.writeFileSync(
			`./built/_client_dist_/locales/${lang}.${meta.version}.json`,
			JSON.stringify({ ...locale, ...versionTag }),
			"utf-8",
		);
	}
});
// Inject the supported language list into the boot/bios/cli scripts,
// minify them with terser, and write them to the backend build output.
//
// Fix: the original async task `return`ed the gulp stream. An async
// function wraps that stream in a promise which resolves as soon as the
// stream object is *constructed*, so gulp considered the task finished
// before the files were actually written. We now explicitly await stream
// completion with `finished` from node's stream/promises.
gulp.task("build:backend:script", async () => {
	const { finished } = require("node:stream/promises");
	const { default: locales } = await import("./locales/index.mjs");
	const stream = gulp
		.src([
			"./packages/backend/src/server/web/boot.js",
			"./packages/backend/src/server/web/bios.js",
			"./packages/backend/src/server/web/cli.js",
		])
		.pipe(replace("SUPPORTED_LANGS", JSON.stringify(Object.keys(locales))))
		.pipe(
			terser({
				toplevel: true,
			}),
		)
		.pipe(gulp.dest("./packages/backend/built/server/web/"));
	// Resolve only once the destination stream has fully flushed.
	await finished(stream);
});
// Minify the backend's static stylesheets with cssnano and write them to
// the backend build output. Returning the stream lets gulp detect
// completion when the destination has flushed.
gulp.task("build:backend:style", function buildBackendStyle() {
	const stylesheets = [
		"./packages/backend/src/server/web/style.css",
		"./packages/backend/src/server/web/bios.css",
		"./packages/backend/src/server/web/cli.css",
	];
	// zindex: false — do not let cssnano rebase z-index values.
	const minifier = cssnano({ zindex: false });
	return gulp
		.src(stylesheets)
		.pipe(minifier)
		.pipe(gulp.dest("./packages/backend/built/server/web/"));
});
// Aggregate task: run every copy/build step in parallel.
const buildEverything = gulp.parallel(
	"copy:client:locales",
	"copy:backend:views",
	"copy:backend:custom",
	"build:backend:script",
	"build:backend:style",
	"copy:client:fonts",
);
gulp.task("build", buildEverything);

// Running `gulp` with no arguments is equivalent to `gulp build`.
gulp.task("default", gulp.task("build"));
// Rebuild whenever any package source file changes; ignoreInitial: false
// triggers one build immediately on startup.
gulp.task("watch", function watchSources() {
	const sources = ["./packages/*/src/**/*"];
	gulp.watch(sources, { ignoreInitial: false }, gulp.task("build"));
});

View file

@ -738,6 +738,7 @@ _notification:
reacted: Ha reaccionat a la teva publicació reacted: Ha reaccionat a la teva publicació
renoted: Ha impulsat la teva publicació renoted: Ha impulsat la teva publicació
voted: Ha votat a la teva enquesta voted: Ha votat a la teva enquesta
andCountUsers: I {count} usuaris més {acted}
_deck: _deck:
_columns: _columns:
notifications: "Notificacions" notifications: "Notificacions"
@ -2292,3 +2293,17 @@ media: Multimèdia
antennaLimit: El nombre màxim d'antenes que pot crear un usuari antennaLimit: El nombre màxim d'antenes que pot crear un usuari
showAddFileDescriptionAtFirstPost: Obra de forma automàtica un formulari per escriure showAddFileDescriptionAtFirstPost: Obra de forma automàtica un formulari per escriure
una descripció quant intentes publicar un fitxer que no en té una descripció quant intentes publicar un fitxer que no en té
remoteFollow: Seguiment remot
cannotEditVisibility: No pots canviar la visibilitat
useThisAccountConfirm: Vols continuar amb aquest compte?
inputAccountId: Sisplau introdueix el teu compte (per exemple @firefish@info.firefish.dev)
getQrCode: Mostrar el codi QR
copyRemoteFollowUrl: Còpia la adreça URL del seguidor remot
foldNotification: Agrupar les notificacions similars
slashQuote: Cita encadenada
i18nServerInfo: Els nous clients els trobares en {language} per defecte.
i18nServerChange: Fes servir {language} en comptes.
i18nServerSet: Fes servir {language} per els nous clients.
mergeThreadInTimeline: Fusiona diferents publicacions en un mateix fil a les línies
de temps
mergeRenotesInTimeline: Agrupa diferents impulsos d'una mateixa publicació

View file

@ -766,6 +766,9 @@ confirmToUnclipAlreadyClippedNote: "This post is already part of the \"{name}\"
public: "Public" public: "Public"
i18nInfo: "Firefish is being translated into various languages by volunteers. You i18nInfo: "Firefish is being translated into various languages by volunteers. You
can help at {link}." can help at {link}."
i18nServerInfo: "New clients will be in {language} by default."
i18nServerChange: "Use {language} instead."
i18nServerSet: "Use {language} for new clients."
manageAccessTokens: "Manage access tokens" manageAccessTokens: "Manage access tokens"
accountInfo: "Account Info" accountInfo: "Account Info"
notesCount: "Number of posts" notesCount: "Number of posts"
@ -2239,4 +2242,7 @@ autocorrectNoteLanguage: "Show a warning if the post language does not match the
incorrectLanguageWarning: "It looks like your post is in {detected}, but you selected incorrectLanguageWarning: "It looks like your post is in {detected}, but you selected
{current}.\nWould you like to set the language to {detected} instead?" {current}.\nWould you like to set the language to {detected} instead?"
noteEditHistory: "Post edit history" noteEditHistory: "Post edit history"
slashQuote: "Chain quote"
foldNotification: "Group similar notifications" foldNotification: "Group similar notifications"
mergeThreadInTimeline: "Merge multiple posts in the same thread in timelines"
mergeRenotesInTimeline: "Group multiple boosts of the same post"

1
locales/eo.yml Normal file
View file

@ -0,0 +1 @@
_lang_: "Esperanto"

View file

@ -928,7 +928,7 @@ colored: "Coloré"
label: "Étiquette" label: "Étiquette"
localOnly: "Local seulement" localOnly: "Local seulement"
account: "Comptes" account: "Comptes"
getQrCode: "Obtenir le code QR" getQrCode: "Afficher le code QR"
_emailUnavailable: _emailUnavailable:
used: "Adresse non disponible" used: "Adresse non disponible"
@ -1142,8 +1142,8 @@ _wordMute:
mutedNotes: "Publications masquées" mutedNotes: "Publications masquées"
muteLangsDescription2: Utiliser les codes de langue (i.e en, fr, ja, zh). muteLangsDescription2: Utiliser les codes de langue (i.e en, fr, ja, zh).
lang: Langue lang: Langue
langDescription: Cacher du fil de publication les publications qui correspondent langDescription: Cachez les publications qui correspondent à la langue définie dans
à ces langues. le fil d'actualité.
muteLangs: Langues filtrées muteLangs: Langues filtrées
muteLangsDescription: Séparer avec des espaces ou des retours à la ligne pour une muteLangsDescription: Séparer avec des espaces ou des retours à la ligne pour une
condition OU (OR). condition OU (OR).
@ -1260,7 +1260,7 @@ _tutorial:
step2_2: "En fournissant quelques informations sur qui vous êtes, il sera plus facile step2_2: "En fournissant quelques informations sur qui vous êtes, il sera plus facile
pour les autres de savoir s'ils veulent voir vos publcations ou s'abonner à vous." pour les autres de savoir s'ils veulent voir vos publcations ou s'abonner à vous."
step3_1: "Maintenant il est temps de vous abonner à des gens!" step3_1: "Maintenant il est temps de vous abonner à des gens!"
step3_2: "Vos fils d'actualités Principal et Social sont basés sur les personnes step3_2: "Vos fils d'actualité Principal et Social sont basés sur les personnes
que vous êtes abonné, alors essayez de vous abonner à quelques comptes pour commencer.\n que vous êtes abonné, alors essayez de vous abonner à quelques comptes pour commencer.\n
Cliquez sur le cercle « plus » en haut à droite d'un profil pour vous abonner." Cliquez sur le cercle « plus » en haut à droite d'un profil pour vous abonner."
step4_1: "On y va." step4_1: "On y va."
@ -1836,6 +1836,7 @@ _notification:
reacted: a réagit à votre publication reacted: a réagit à votre publication
renoted: a boosté votre publication renoted: a boosté votre publication
voted: a voté pour votre sondage voted: a voté pour votre sondage
andCountUsers: et {count} utilisateur(s) de plus {acted}
_deck: _deck:
alwaysShowMainColumn: "Toujours afficher la colonne principale" alwaysShowMainColumn: "Toujours afficher la colonne principale"
columnAlign: "Aligner les colonnes" columnAlign: "Aligner les colonnes"
@ -2321,3 +2322,19 @@ markLocalFilesNsfwByDefaultDescription: Indépendamment de ce réglage, les util
ne sont pas affectés. ne sont pas affectés.
noteEditHistory: Historique des publications noteEditHistory: Historique des publications
media: Multimédia media: Multimédia
antennaLimit: Le nombre maximal d'antennes que chaque utilisateur peut créer
showAddFileDescriptionAtFirstPost: Ouvrez automatiquement un formulaire pour écrire
une description lorsque vous tentez de publier des fichiers sans description
foldNotification: Grouper les notifications similaires
cannotEditVisibility: Vous ne pouvez pas modifier la visibilité
useThisAccountConfirm: Voulez-vous continuer avec ce compte?
inputAccountId: Veuillez saisir votre compte (par exemple, @firefish@info.firefish.dev)
remoteFollow: Abonnement à distance
copyRemoteFollowUrl: Copier l'URL d'abonnement à distance
slashQuote: Citation enchaînée
i18nServerInfo: Les nouveaux clients seront en {language} par défaut.
i18nServerChange: Utilisez {language} à la place.
i18nServerSet: Utilisez {language} pour les nouveaux clients.
mergeThreadInTimeline: Fusionner plusieurs publications dans le même fil dans les
fils d'actualité
mergeRenotesInTimeline: Regrouper plusieurs boosts du même publication

View file

@ -2278,3 +2278,4 @@ cannotEditVisibility: Kamu tidak bisa menyunting keterlihatan
useThisAccountConfirm: Apakah kamu ingin melanjutkan dengan akun ini? useThisAccountConfirm: Apakah kamu ingin melanjutkan dengan akun ini?
inputAccountId: Silakan memasukkan akunmu (misalnya, @firefish@info.firefish.dev) inputAccountId: Silakan memasukkan akunmu (misalnya, @firefish@info.firefish.dev)
copyRemoteFollowUrl: Salin URL ikuti jarak jauh copyRemoteFollowUrl: Salin URL ikuti jarak jauh
slashQuote: Kutipan rantai

View file

@ -685,6 +685,9 @@ unclip: "クリップ解除"
confirmToUnclipAlreadyClippedNote: "この投稿はすでにクリップ「{name}」に含まれています。投稿をこのクリップから除外しますか?" confirmToUnclipAlreadyClippedNote: "この投稿はすでにクリップ「{name}」に含まれています。投稿をこのクリップから除外しますか?"
public: "公開" public: "公開"
i18nInfo: "Firefishは有志によって様々な言語に翻訳されています。{link}で翻訳に協力できます。" i18nInfo: "Firefishは有志によって様々な言語に翻訳されています。{link}で翻訳に協力できます。"
i18nServerInfo: "新しい端末では{language}が既定の言語になります。"
i18nServerChange: "{language}に変更する。"
i18nServerSet: "新しい端末での表示言語を{language}にします。"
manageAccessTokens: "アクセストークンの管理" manageAccessTokens: "アクセストークンの管理"
accountInfo: "アカウント情報" accountInfo: "アカウント情報"
notesCount: "投稿の数" notesCount: "投稿の数"
@ -1902,7 +1905,7 @@ _notification:
reacted: がリアクションしました reacted: がリアクションしました
renoted: がブーストしました renoted: がブーストしました
voted: が投票しました voted: が投票しました
andCountUsers: と{count}人{acted}しました andCountUsers: と{count}人{acted}
_deck: _deck:
alwaysShowMainColumn: "常にメインカラムを表示" alwaysShowMainColumn: "常にメインカラムを表示"
columnAlign: "カラムの寄せ" columnAlign: "カラムの寄せ"
@ -2067,3 +2070,6 @@ useThisAccountConfirm: このアカウントで操作を続けますか?
getQrCode: QRコードを表示 getQrCode: QRコードを表示
copyRemoteFollowUrl: リモートからフォローするURLをコピー copyRemoteFollowUrl: リモートからフォローするURLをコピー
foldNotification: 同じ種類の通知をまとめて表示する foldNotification: 同じ種類の通知をまとめて表示する
slashQuote: 繋げて引用
mergeRenotesInTimeline: タイムラインで同じ投稿のブーストをまとめる
mergeThreadInTimeline: タイムラインで同じスレッドの投稿をまとめる

View file

@ -667,6 +667,9 @@ unclip: "移除便签"
confirmToUnclipAlreadyClippedNote: "本帖已包含在便签 \"{name}\" 里。您想要将本帖从该便签中移除吗?" confirmToUnclipAlreadyClippedNote: "本帖已包含在便签 \"{name}\" 里。您想要将本帖从该便签中移除吗?"
public: "公开" public: "公开"
i18nInfo: "Firefish 已经被志愿者们翻译成了各种语言。如果您也有兴趣,可以通过 {link} 帮助翻译。" i18nInfo: "Firefish 已经被志愿者们翻译成了各种语言。如果您也有兴趣,可以通过 {link} 帮助翻译。"
i18nServerInfo: "新客户端将默认使用 {language}。"
i18nServerChange: "改为 {language}。"
i18nServerSet: "设定新客户端使用 {language}。"
manageAccessTokens: "管理访问令牌" manageAccessTokens: "管理访问令牌"
accountInfo: "账号信息" accountInfo: "账号信息"
notesCount: "帖子数量" notesCount: "帖子数量"
@ -2066,4 +2069,7 @@ autocorrectNoteLanguage: 当帖子语言不符合自动检测的结果的时候
incorrectLanguageWarning: "看上去您帖子使用的语言是{detected},但您选择的语言是{current}。\n要改为以{detected}发帖吗?" incorrectLanguageWarning: "看上去您帖子使用的语言是{detected},但您选择的语言是{current}。\n要改为以{detected}发帖吗?"
noteEditHistory: "帖子编辑历史" noteEditHistory: "帖子编辑历史"
media: 媒体 media: 媒体
slashQuote: "斜杠引用"
foldNotification: "将通知按同类型分组" foldNotification: "将通知按同类型分组"
mergeThreadInTimeline: "将时间线内的连续回复合并成一串"
mergeRenotesInTimeline: "合并同一个帖子的转发"

View file

@ -661,6 +661,9 @@ unclip: "解除摘錄"
confirmToUnclipAlreadyClippedNote: "此貼文已包含在摘錄「{name}」中。 你想將貼文從這個摘錄中排除嗎?" confirmToUnclipAlreadyClippedNote: "此貼文已包含在摘錄「{name}」中。 你想將貼文從這個摘錄中排除嗎?"
public: "公開" public: "公開"
i18nInfo: "Firefish已經被志願者們翻譯成各種語言版本如果想要幫忙的話可以進入{link}幫助翻譯。" i18nInfo: "Firefish已經被志願者們翻譯成各種語言版本如果想要幫忙的話可以進入{link}幫助翻譯。"
i18nServerInfo: "新客戶端將默認使用 {language}。"
i18nServerChange: "改為 {language}。"
i18nServerSet: "設定新客戶端使用 {language}。"
manageAccessTokens: "管理存取權杖" manageAccessTokens: "管理存取權杖"
accountInfo: "帳戶資訊" accountInfo: "帳戶資訊"
notesCount: "貼文數量" notesCount: "貼文數量"

View file

@ -1,22 +1,23 @@
{ {
"name": "firefish", "name": "firefish",
"version": "20240424", "version": "20240516",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://firefish.dev/firefish/firefish.git" "url": "https://firefish.dev/firefish/firefish.git"
}, },
"packageManager": "pnpm@8.15.7", "packageManager": "pnpm@9.1.1",
"private": true, "private": true,
"scripts": { "scripts": {
"rebuild": "pnpm run clean && pnpm run build", "rebuild": "pnpm run clean && pnpm run build",
"build": "pnpm node ./scripts/build.mjs && pnpm run gulp", "build": "pnpm --recursive --color run build && pnpm node ./scripts/copy-index.mjs && pnpm run build:assets",
"build:assets": "pnpm node ./scripts/copy-assets.mjs",
"build:debug": "pnpm run clean && pnpm --recursive --color run build:debug && pnpm node ./scripts/copy-index-dev.mjs && pnpm run build:assets",
"start": "pnpm --filter backend run start", "start": "pnpm --filter backend run start",
"start:container": "pnpm run gulp && pnpm run migrate && pnpm run start", "start:container": "pnpm run build:assets && pnpm run migrate && pnpm run start",
"start:test": "pnpm --filter backend run start:test", "start:test": "pnpm --filter backend run start:test",
"init": "pnpm run migrate", "init": "pnpm run migrate",
"migrate": "pnpm --filter backend run migration:run", "migrate": "pnpm --filter backend run migration:run",
"revertmigration": "pnpm --filter backend run migration:revert", "revertmigration": "pnpm --filter backend run migration:revert",
"gulp": "gulp build",
"watch": "pnpm run dev", "watch": "pnpm run dev",
"dev": "pnpm node ./scripts/dev.mjs", "dev": "pnpm node ./scripts/dev.mjs",
"dev:staging": "NODE_OPTIONS=--max_old_space_size=3072 NODE_ENV=development pnpm run build && pnpm run start", "dev:staging": "NODE_OPTIONS=--max_old_space_size=3072 NODE_ENV=development pnpm run build && pnpm run start",
@ -24,7 +25,6 @@
"lint:ts": "pnpm --filter !firefish-js -r --parallel run lint", "lint:ts": "pnpm --filter !firefish-js -r --parallel run lint",
"lint:rs": "cargo clippy --fix --allow-dirty --allow-staged && cargo fmt --all --", "lint:rs": "cargo clippy --fix --allow-dirty --allow-staged && cargo fmt --all --",
"debug": "pnpm run build:debug && pnpm run start", "debug": "pnpm run build:debug && pnpm run start",
"build:debug": "pnpm run clean && pnpm node ./scripts/dev-build.mjs && pnpm run gulp",
"mocha": "pnpm --filter backend run mocha", "mocha": "pnpm --filter backend run mocha",
"test": "pnpm run test:ts && pnpm run test:rs", "test": "pnpm run test:ts && pnpm run test:rs",
"test:ts": "pnpm run mocha", "test:ts": "pnpm run mocha",
@ -38,21 +38,17 @@
"clean-all": "pnpm run clean && pnpm run clean-cargo && pnpm run clean-npm" "clean-all": "pnpm run clean && pnpm run clean-cargo && pnpm run clean-npm"
}, },
"dependencies": { "dependencies": {
"gulp": "4.0.2",
"gulp-cssnano": "2.1.3",
"gulp-replace": "1.1.4",
"gulp-terser": "2.1.0",
"js-yaml": "4.1.0" "js-yaml": "4.1.0"
}, },
"devDependencies": { "devDependencies": {
"@biomejs/biome": "1.7.1", "@biomejs/biome": "1.7.3",
"@biomejs/cli-darwin-arm64": "^1.7.1", "@biomejs/cli-darwin-arm64": "1.7.3",
"@biomejs/cli-darwin-x64": "^1.7.1", "@biomejs/cli-darwin-x64": "1.7.3",
"@biomejs/cli-linux-arm64": "^1.7.1", "@biomejs/cli-linux-arm64": "1.7.3",
"@biomejs/cli-linux-x64": "^1.7.1", "@biomejs/cli-linux-x64": "1.7.3",
"@types/node": "20.12.7", "@types/node": "20.12.12",
"execa": "8.0.1", "execa": "9.1.0",
"pnpm": "8.15.7", "pnpm": "9.1.1",
"typescript": "5.4.5" "typescript": "5.4.5"
} }
} }

View file

@ -1,12 +0,0 @@
target
Cargo.lock
.cargo
.github
npm
.eslintrc
rustfmt.toml
yarn.lock
*.node
.yarn
__test__
renovate.json

View file

@ -7,6 +7,7 @@ rust-version = "1.74"
[features] [features]
default = [] default = []
napi = ["dep:napi", "dep:napi-derive"] napi = ["dep:napi", "dep:napi-derive"]
ci = []
[lib] [lib]
crate-type = ["cdylib", "lib"] crate-type = ["cdylib", "lib"]
@ -25,25 +26,27 @@ cuid2 = { workspace = true }
emojis = { workspace = true } emojis = { workspace = true }
idna = { workspace = true } idna = { workspace = true }
image = { workspace = true } image = { workspace = true }
isahc = { workspace = true }
nom-exif = { workspace = true } nom-exif = { workspace = true }
once_cell = { workspace = true } once_cell = { workspace = true }
openssl = { workspace = true, features = ["vendored"] } openssl = { workspace = true, features = ["vendored"] }
rand = { workspace = true } rand = { workspace = true }
redis = { workspace = true } redis = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
reqwest = { workspace = true, features = ["blocking"] }
rmp-serde = { workspace = true } rmp-serde = { workspace = true }
sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] } sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] }
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true } serde_json = { workspace = true }
serde_yaml = { workspace = true } serde_yaml = { workspace = true }
strum = { workspace = true, features = ["derive"] } strum = { workspace = true, features = ["derive"] }
sysinfo = { workspace = true }
thiserror = { workspace = true } thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
tracing = { workspace = true } tracing = { workspace = true }
tracing-subscriber = { workspace = true } tracing-subscriber = { workspace = true }
url = { workspace = true } url = { workspace = true }
urlencoding = { workspace = true } urlencoding = { workspace = true }
web-push = { workspace = true }
[dev-dependencies] [dev-dependencies]
pretty_assertions = { workspace = true } pretty_assertions = { workspace = true }

View file

@ -1,32 +0,0 @@
import test from "ava";
import {
convertId,
IdConvertType,
nativeInitIdGenerator,
nativeCreateId,
nativeRandomStr,
} from "../built/index.js";
// Table-driven check that Firefish IDs convert to the expected Mastodon
// (numeric) IDs.
test("convert to mastodon id", (t) => {
	const cases = [
		["9gf61ehcxv", "960365976481219"],
		["9fbr9z0wbrjqyd3u", "2083785058661759970208986"],
		["9fbs680oyviiqrol9md73p8g", "5878598648988104013828532260828151168"],
	];
	for (const [firefishId, expected] of cases) {
		t.is(convertId(firefishId, IdConvertType.MastodonId), expected);
	}
});
// Generated IDs must be unique across calls and have the configured length.
test("create cuid2 with timestamp prefix", (t) => {
	nativeInitIdGenerator(16, "");
	const first = nativeCreateId(Date.now());
	const second = nativeCreateId(Date.now());
	t.not(first, second);
	t.is(nativeCreateId(Date.now()).length, 16);
});
// Random strings must differ between calls and honor the requested length.
test("create random string", (t) => {
	t.not(nativeRandomStr(16), nativeRandomStr(16));
	const sample = nativeRandomStr(24);
	t.is(sample.length, 24);
});

View file

@ -41,7 +41,6 @@ export interface ServerConfig {
proxySmtp?: string proxySmtp?: string
proxyBypassHosts?: Array<string> proxyBypassHosts?: Array<string>
allowedPrivateNetworks?: Array<string> allowedPrivateNetworks?: Array<string>
/** `NapiValue` is not implemented for `u64` */
maxFileSize?: number maxFileSize?: number
accessLog?: string accessLog?: string
clusterLimits?: WorkerConfigInternal clusterLimits?: WorkerConfigInternal
@ -212,23 +211,31 @@ export interface Acct {
} }
export function stringToAcct(acct: string): Acct export function stringToAcct(acct: string): Acct
export function acctToString(acct: Acct): string export function acctToString(acct: Acct): string
export function showServerInfo(): void
export function initializeRustLogger(): void
export function addNoteToAntenna(antennaId: string, note: Note): void export function addNoteToAntenna(antennaId: string, note: Note): void
/** /**
* @param host punycoded instance host * Checks if a server is blocked.
* @returns whether the given host should be blocked *
* ## Argument
* `host` - punycoded instance host
*/ */
export function isBlockedServer(host: string): Promise<boolean> export function isBlockedServer(host: string): Promise<boolean>
/** /**
* @param host punycoded instance host * Checks if a server is silenced.
* @returns whether the given host should be limited *
* ## Argument
* `host` - punycoded instance host
*/ */
export function isSilencedServer(host: string): Promise<boolean> export function isSilencedServer(host: string): Promise<boolean>
/** /**
* @param host punycoded instance host * Checks if a server is allowlisted.
* @returns whether the given host is allowlisted (this is always true if private mode is disabled) * Returns `Ok(true)` if private mode is disabled.
*
* ## Argument
* `host` - punycoded instance host
*/ */
export function isAllowedServer(host: string): Promise<boolean> export function isAllowedServer(host: string): Promise<boolean>
/** TODO: handle name collisions better */
export interface NoteLikeForCheckWordMute { export interface NoteLikeForCheckWordMute {
fileIds: Array<string> fileIds: Array<string>
userId: string | null userId: string | null
@ -253,7 +260,6 @@ export interface ImageSize {
height: number height: number
} }
export function getImageSizeFromUrl(url: string): Promise<ImageSize> export function getImageSizeFromUrl(url: string): Promise<ImageSize>
/** TODO: handle name collisions better */
export interface NoteLikeForGetNoteSummary { export interface NoteLikeForGetNoteSummary {
fileIds: Array<string> fileIds: Array<string>
text: string | null text: string | null
@ -261,6 +267,30 @@ export interface NoteLikeForGetNoteSummary {
hasPoll: boolean hasPoll: boolean
} }
export function getNoteSummary(note: NoteLikeForGetNoteSummary): string export function getNoteSummary(note: NoteLikeForGetNoteSummary): string
/** Basic CPU information of the host machine. */
export interface Cpu {
  /** CPU model name string */
  model: string
  /** Number of cores — NOTE(review): logical vs. physical not visible here; confirm against the Rust implementation */
  cores: number
}
export interface Memory {
  /** Total memory amount in bytes */
  total: number
  /** Used memory amount in bytes */
  used: number
  /** Available (for (re)use) memory amount in bytes */
  available: number
}
export interface Storage {
  /** Total storage space in bytes */
  total: number
  /** Used storage space in bytes */
  used: number
}
/** Returns static CPU information (model and core count). */
export function cpuInfo(): Cpu
/** Returns current CPU usage — presumably a percentage; confirm units against the Rust implementation. */
export function cpuUsage(): number
/** Returns current memory usage in bytes. */
export function memoryUsage(): Memory
/** Returns storage usage, or `null` — presumably when it cannot be determined; verify against the Rust implementation. */
export function storageUsage(): Storage | null
/** Checks whether a URL is considered safe — NOTE(review): criteria not visible here; see the Rust implementation. */
export function isSafeUrl(url: string): boolean
/** Resolves to the latest Firefish version string — NOTE(review): likely performs a network request. */
export function latestVersion(): Promise<string>
export function toMastodonId(firefishId: string): string | null export function toMastodonId(firefishId: string): string | null
export function fromMastodonId(mastodonId: string): string | null export function fromMastodonId(mastodonId: string): string | null
export function fetchMeta(useCache: boolean): Promise<Meta> export function fetchMeta(useCache: boolean): Promise<Meta>
@ -1121,6 +1151,7 @@ export interface UserProfile {
preventAiLearning: boolean preventAiLearning: boolean
isIndexable: boolean isIndexable: boolean
mutedPatterns: Array<string> mutedPatterns: Array<string>
lang: string | null
} }
export interface UserPublickey { export interface UserPublickey {
userId: string userId: string
@ -1146,9 +1177,117 @@ export interface Webhook {
latestSentAt: Date | null latestSentAt: Date | null
latestStatus: number | null latestStatus: number | null
} }
export function initializeRustLogger(): void export function fetchNodeinfo(host: string): Promise<Nodeinfo>
/** Builds this server's NodeInfo document, schema version 2.1 — TODO(review): confirm response shape against the Rust implementation. */
export function nodeinfo_2_1(): Promise<any>
/** Builds this server's NodeInfo document, schema version 2.0 — TODO(review): confirm response shape against the Rust implementation. */
export function nodeinfo_2_0(): Promise<any>
/** NodeInfo schema version 2.0. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0 */
export interface Nodeinfo {
  /** The schema version, must be 2.0. */
  version: string
  /** Metadata about server software in use. */
  software: Software20
  /** The protocols supported on this server. */
  protocols: Array<Protocol>
  /** The third party sites this server can connect to via their application API. */
  services: Services
  /** Whether this server allows open self-registration. */
  openRegistrations: boolean
  /** Usage statistics for this server. */
  usage: Usage
  /** Free form key value pairs for software specific values. Clients should not rely on any specific key present. */
  metadata: Record<string, any>
}
/** Metadata about server software in use (version 2.0). */
export interface Software20 {
  /** The canonical name of this server software. */
  name: string
  /** The version of this server software. */
  version: string
}
/** Federation protocols a server may support, as enumerated by the NodeInfo schema. */
export enum Protocol {
  Activitypub = 'activitypub',
  Buddycloud = 'buddycloud',
  Dfrn = 'dfrn',
  Diaspora = 'diaspora',
  Libertree = 'libertree',
  Ostatus = 'ostatus',
  Pumpio = 'pumpio',
  Tent = 'tent',
  Xmpp = 'xmpp',
  Zot = 'zot'
}
/** The third party sites this server can connect to via their application API. */
export interface Services {
  /** The third party sites this server can retrieve messages from for combined display with regular traffic. */
  inbound: Array<Inbound>
  /** The third party sites this server can publish messages to on the behalf of a user. */
  outbound: Array<Outbound>
}
/** The third party sites this server can retrieve messages from for combined display with regular traffic. */
export enum Inbound {
  Atom1 = 'atom1',
  Gnusocial = 'gnusocial',
  Imap = 'imap',
  Pnut = 'pnut',
  Pop3 = 'pop3',
  Pumpio = 'pumpio',
  Rss2 = 'rss2',
  Twitter = 'twitter'
}
/** The third party sites this server can publish messages to on the behalf of a user. */
export enum Outbound {
  Atom1 = 'atom1',
  Blogger = 'blogger',
  Buddycloud = 'buddycloud',
  Diaspora = 'diaspora',
  Dreamwidth = 'dreamwidth',
  Drupal = 'drupal',
  Facebook = 'facebook',
  Friendica = 'friendica',
  Gnusocial = 'gnusocial',
  Google = 'google',
  Insanejournal = 'insanejournal',
  Libertree = 'libertree',
  Linkedin = 'linkedin',
  Livejournal = 'livejournal',
  Mediagoblin = 'mediagoblin',
  Myspace = 'myspace',
  Pinterest = 'pinterest',
  Pnut = 'pnut',
  Posterous = 'posterous',
  Pumpio = 'pumpio',
  Redmatrix = 'redmatrix',
  Rss2 = 'rss2',
  Smtp = 'smtp',
  Tent = 'tent',
  Tumblr = 'tumblr',
  Twitter = 'twitter',
  Wordpress = 'wordpress',
  Xmpp = 'xmpp'
}
/** Usage statistics for this server. */
export interface Usage {
  users: Users
  localPosts: number | null
  localComments: number | null
}
/** statistics about the users of this server. */
export interface Users {
  total: number | null
  activeHalfyear: number | null
  activeMonth: number | null
}
export function watchNote(watcherId: string, noteAuthorId: string, noteId: string): Promise<void> export function watchNote(watcherId: string, noteAuthorId: string, noteId: string): Promise<void>
export function unwatchNote(watcherId: string, noteId: string): Promise<void> export function unwatchNote(watcherId: string, noteId: string): Promise<void>
/** Kinds of web push notifications the server can deliver to a client. */
export enum PushNotificationKind {
  Generic = 'generic',
  Chat = 'chat',
  ReadAllChats = 'readAllChats',
  ReadAllChatsInTheRoom = 'readAllChatsInTheRoom',
  ReadNotifications = 'readNotifications',
  ReadAllNotifications = 'readAllNotifications'
}
/** Sends a web push notification of the given kind to the receiving user — NOTE(review): `content` schema depends on `kind`; see the Rust implementation. */
export function sendPushNotification(receiverUserId: string, kind: PushNotificationKind, content: any): Promise<void>
export function publishToChannelStream(channelId: string, userId: string): void export function publishToChannelStream(channelId: string, userId: string): void
export enum ChatEvent { export enum ChatEvent {
Message = 'message', Message = 'message',
@ -1194,4 +1333,6 @@ export function getTimestamp(id: string): number
export function genId(): string export function genId(): string
/** Generate an ID using a specific datetime */ /** Generate an ID using a specific datetime */
export function genIdAt(date: Date): string export function genIdAt(date: Date): string
export function secureRndstr(length?: number | undefined | null): string /** Generate random string based on [thread_rng] and [Alphanumeric]. */
export function generateSecureRandomString(length: number): string
export function generateUserToken(): string

View file

@ -310,7 +310,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`) throw new Error(`Failed to load native binding`)
} }
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, showServerInfo, initializeRustLogger, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, cpuInfo, cpuUsage, memoryUsage, storageUsage, isSafeUrl, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, 
RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, fetchNodeinfo, nodeinfo_2_1, nodeinfo_2_0, Protocol, Inbound, Outbound, watchNote, unwatchNote, PushNotificationKind, sendPushNotification, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, generateSecureRandomString, generateUserToken } = nativeBinding
module.exports.SECOND = SECOND module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE module.exports.MINUTE = MINUTE
@ -323,6 +323,8 @@ module.exports.loadEnv = loadEnv
module.exports.loadConfig = loadConfig module.exports.loadConfig = loadConfig
module.exports.stringToAcct = stringToAcct module.exports.stringToAcct = stringToAcct
module.exports.acctToString = acctToString module.exports.acctToString = acctToString
module.exports.showServerInfo = showServerInfo
module.exports.initializeRustLogger = initializeRustLogger
module.exports.addNoteToAntenna = addNoteToAntenna module.exports.addNoteToAntenna = addNoteToAntenna
module.exports.isBlockedServer = isBlockedServer module.exports.isBlockedServer = isBlockedServer
module.exports.isSilencedServer = isSilencedServer module.exports.isSilencedServer = isSilencedServer
@ -339,6 +341,12 @@ module.exports.safeForSql = safeForSql
module.exports.formatMilliseconds = formatMilliseconds module.exports.formatMilliseconds = formatMilliseconds
module.exports.getImageSizeFromUrl = getImageSizeFromUrl module.exports.getImageSizeFromUrl = getImageSizeFromUrl
module.exports.getNoteSummary = getNoteSummary module.exports.getNoteSummary = getNoteSummary
module.exports.cpuInfo = cpuInfo
module.exports.cpuUsage = cpuUsage
module.exports.memoryUsage = memoryUsage
module.exports.storageUsage = storageUsage
module.exports.isSafeUrl = isSafeUrl
module.exports.latestVersion = latestVersion
module.exports.toMastodonId = toMastodonId module.exports.toMastodonId = toMastodonId
module.exports.fromMastodonId = fromMastodonId module.exports.fromMastodonId = fromMastodonId
module.exports.fetchMeta = fetchMeta module.exports.fetchMeta = fetchMeta
@ -362,9 +370,16 @@ module.exports.RelayStatusEnum = RelayStatusEnum
module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum
module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum
module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum
module.exports.initializeRustLogger = initializeRustLogger module.exports.fetchNodeinfo = fetchNodeinfo
module.exports.nodeinfo_2_1 = nodeinfo_2_1
module.exports.nodeinfo_2_0 = nodeinfo_2_0
module.exports.Protocol = Protocol
module.exports.Inbound = Inbound
module.exports.Outbound = Outbound
module.exports.watchNote = watchNote module.exports.watchNote = watchNote
module.exports.unwatchNote = unwatchNote module.exports.unwatchNote = unwatchNote
module.exports.PushNotificationKind = PushNotificationKind
module.exports.sendPushNotification = sendPushNotification
module.exports.publishToChannelStream = publishToChannelStream module.exports.publishToChannelStream = publishToChannelStream
module.exports.ChatEvent = ChatEvent module.exports.ChatEvent = ChatEvent
module.exports.publishToChatStream = publishToChatStream module.exports.publishToChatStream = publishToChatStream
@ -376,4 +391,5 @@ module.exports.publishToModerationStream = publishToModerationStream
module.exports.getTimestamp = getTimestamp module.exports.getTimestamp = getTimestamp
module.exports.genId = genId module.exports.genId = genId
module.exports.genIdAt = genIdAt module.exports.genIdAt = genIdAt
module.exports.secureRndstr = secureRndstr module.exports.generateSecureRandomString = generateSecureRandomString
module.exports.generateUserToken = generateUserToken

View file

@ -22,25 +22,14 @@
} }
}, },
"devDependencies": { "devDependencies": {
"@napi-rs/cli": "2.18.1", "@napi-rs/cli": "2.18.3"
"ava": "6.1.2"
},
"ava": {
"timeout": "3m"
},
"engines": {
"node": ">= 10"
}, },
"scripts": { "scripts": {
"artifacts": "napi artifacts", "artifacts": "napi artifacts",
"build": "napi build --features napi --no-const-enum --platform --release ./built/", "build": "napi build --features napi --no-const-enum --platform --release ./built/",
"build:debug": "napi build --features napi --no-const-enum --platform ./built/", "build:debug": "napi build --features napi --no-const-enum --platform ./built/",
"prepublishOnly": "napi prepublish -t npm", "prepublishOnly": "napi prepublish -t npm",
"test": "pnpm run cargo:test && pnpm run build:debug && ava",
"universal": "napi universal", "universal": "napi universal",
"version": "napi version", "version": "napi version"
"cargo:test": "pnpm run cargo:unit && pnpm run cargo:integration",
"cargo:unit": "cargo test unit_test && cargo test -F napi unit_test",
"cargo:integration": "cargo test int_test"
} }
} }

View file

@ -22,7 +22,7 @@ struct ServerConfig {
pub proxy_bypass_hosts: Option<Vec<String>>, pub proxy_bypass_hosts: Option<Vec<String>>,
pub allowed_private_networks: Option<Vec<String>>, pub allowed_private_networks: Option<Vec<String>>,
/// `NapiValue` is not implemented for `u64` // TODO: i64 -> u64 (NapiValue is not implemented for u64)
pub max_file_size: Option<i64>, pub max_file_size: Option<i64>,
pub access_log: Option<String>, pub access_log: Option<String>,
pub cluster_limits: Option<WorkerConfigInternal>, pub cluster_limits: Option<WorkerConfigInternal>,
@ -298,7 +298,7 @@ fn read_manifest() -> Manifest {
} }
#[crate::export] #[crate::export]
fn load_config() -> Config { pub fn load_config() -> Config {
let server_config = read_config_file(); let server_config = read_config_file();
let version = read_meta().version; let version = read_meta().version;
let manifest = read_manifest(); let manifest = read_manifest();

View file

@ -0,0 +1,288 @@
use crate::database::{redis_conn, redis_key};
use redis::{Commands, RedisError};
use serde::{Deserialize, Serialize};
/// Cache categories, used to namespace keys so that a whole
/// category can be invalidated at once (see [delete_all]).
#[derive(strum::Display, Debug)]
pub enum Category {
    /// Entries recording URL fetch attempts/results
    #[strum(serialize = "fetchUrl")]
    FetchUrl,
    /// Only compiled for tests, so test entries never collide with real ones
    #[cfg(test)]
    #[strum(serialize = "usedOnlyForTesting")]
    Test,
}
/// Errors that can occur while reading or writing Redis caches.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// Connection/command failure reported by the Redis client
    #[error("Redis error: {0}")]
    RedisError(#[from] RedisError),
    /// MessagePack encoding of the value failed (on write)
    #[error("Data serialization error: {0}")]
    SerializeError(#[from] rmp_serde::encode::Error),
    /// MessagePack decoding of the stored bytes failed (on read)
    #[error("Data deserialization error: {0}")]
    DeserializeError(#[from] rmp_serde::decode::Error),
}
/// Prepends the `cache:` namespace (plus the instance-wide Redis key
/// prefix applied by `redis_key`) to `key`.
#[inline]
fn prefix_key(key: &str) -> String {
    redis_key(format!("cache:{}", key))
}
/// Builds the `<category>:<key>` form used by the `*_one` helpers.
#[inline]
fn categorize(category: Category, key: &str) -> String {
    format!("{}:{}", category, key)
}
/// Returns a Redis `KEYS` pattern that matches every cache entry in `category`.
#[inline]
fn wildcard(category: Category) -> String {
    prefix_key(&categorize(category, "*"))
}
/// Sets a Redis cache.
///
/// This overwrites the existing cache with the same key.
///
/// ## Arguments
///
/// * `key` - key (will be prefixed automatically)
/// * `value` - (de)serializable value
/// * `expire_seconds` - TTL in seconds
///
/// ## Example
///
/// ```
/// # use backend_rs::database::cache;
/// let key = "apple";
/// let data = "I want to cache this string".to_string();
///
/// // caches the data for 10 seconds
/// cache::set(key, &data, 10);
///
/// // get the cache
/// let cached_data = cache::get::<String>(key).unwrap();
/// assert_eq!(data, cached_data.unwrap());
/// ```
pub fn set<V: for<'a> Deserialize<'a> + Serialize>(
    key: &str,
    value: &V,
    expire_seconds: u64,
) -> Result<(), Error> {
    // SETEX: value is stored as MessagePack bytes with a TTL
    redis_conn()?.set_ex(
        prefix_key(key),
        rmp_serde::encode::to_vec(&value)?,
        expire_seconds,
    )?;
    Ok(())
}
/// Gets a Redis cache.
///
/// If the Redis connection is fine, this returns `Ok(data)` where `data`
/// is the cached value. Returns `Ok(None)` if there is no value corresponding to `key`.
///
/// ## Arguments
///
/// * `key` - key (will be prefixed automatically)
///
/// ## Example
///
/// ```
/// # use backend_rs::database::cache;
/// let key = "banana";
/// let data = "I want to cache this string".to_string();
///
/// // set cache
/// cache::set(key, &data, 10).unwrap();
///
/// // get cache
/// let cached_data = cache::get::<String>(key).unwrap();
/// assert_eq!(data, cached_data.unwrap());
///
/// // get nonexistent (or expired) cache
/// let no_cache = cache::get::<String>("nonexistent").unwrap();
/// assert!(no_cache.is_none());
/// ```
pub fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V>, Error> {
let serialized_value: Option<Vec<u8>> = redis_conn()?.get(prefix_key(key))?;
Ok(match serialized_value {
Some(v) => Some(rmp_serde::from_slice::<V>(v.as_ref())?),
None => None,
})
}
/// Deletes a Redis cache.
///
/// If the Redis connection is fine, this returns `Ok(())`
/// regardless of whether the cache exists.
///
/// ## Arguments
///
/// * `key` - key (will be prefixed automatically)
///
/// ## Example
///
/// ```
/// # use backend_rs::database::cache;
/// let key = "chocolate";
/// let value = "I want to cache this string".to_string();
///
/// // set cache
/// cache::set(key, &value, 10).unwrap();
///
/// // delete the cache
/// cache::delete(key).unwrap();
/// cache::delete("nonexistent").unwrap(); // this is okay
///
/// // the cache is gone
/// let cached_value = cache::get::<String>(key).unwrap();
/// assert!(cached_value.is_none());
/// ```
pub fn delete(key: &str) -> Result<(), Error> {
    Ok(redis_conn()?.del(prefix_key(key))?)
}
/// Stores a category-scoped Redis cache entry.
///
/// Works like [set], but the entry lives under `category`, so it must be
/// read/removed with [get_one]/[delete_one] (or wiped via [delete_all]).
///
/// ## Arguments
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
/// * `value` - (de)serializable value
/// * `expire_seconds` - TTL in seconds
pub fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
    category: Category,
    key: &str,
    value: &V,
    expire_seconds: u64,
) -> Result<(), Error> {
    let scoped_key = categorize(category, key);
    set(&scoped_key, value, expire_seconds)
}
/// Retrieves a category-scoped Redis cache entry.
///
/// Works like [get], but looks the key up under `category`.
///
/// ## Arguments
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
pub fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
    category: Category,
    key: &str,
) -> Result<Option<V>, Error> {
    let scoped_key = categorize(category, key);
    get(&scoped_key)
}
/// Removes a category-scoped Redis cache entry.
///
/// Works like [delete], but removes the key under `category`.
///
/// ## Arguments
///
/// * `category` - one of [Category]
/// * `key` - key (will be prefixed automatically)
pub fn delete_one(category: Category, key: &str) -> Result<(), Error> {
    let scoped_key = categorize(category, key);
    delete(&scoped_key)
}
/// Removes every Redis cache entry stored under `category`.
///
/// ## Arguments
///
/// * `category` - one of [Category]
pub fn delete_all(category: Category) -> Result<(), Error> {
    let mut redis = redis_conn()?;
    let matched_keys: Vec<Vec<u8>> = redis.keys(wildcard(category))?;

    // DEL with an empty key list would be a Redis error, so bail out early
    if matched_keys.is_empty() {
        return Ok(());
    }

    redis.del(matched_keys)?;
    Ok(())
}
// TODO: set_all(), get_all()
// NOTE(review): these tests talk to a live Redis instance via `redis_conn`.
#[cfg(test)]
mod unit_test {
    use crate::database::cache::delete_one;
    use super::{delete_all, get, get_one, set, set_one, Category::Test};
    use pretty_assertions::assert_eq;

    /// Round-trips values of several types through the cache and
    /// checks that they are gone after their 1-second TTL elapses.
    #[test]
    fn set_get_expire() {
        #[derive(serde::Deserialize, serde::Serialize, PartialEq, Debug)]
        struct Data {
            id: u32,
            kind: String,
        }
        let key_1 = "CARGO_TEST_CACHE_KEY_1";
        let value_1: Vec<i32> = vec![1, 2, 3, 4, 5];
        let key_2 = "CARGO_TEST_CACHE_KEY_2";
        let value_2 = "Hello fedizens".to_string();
        let key_3 = "CARGO_TEST_CACHE_KEY_3";
        let value_3 = Data {
            id: 1000000007,
            kind: "prime number".to_string(),
        };
        set(key_1, &value_1, 1).unwrap();
        set(key_2, &value_2, 1).unwrap();
        set(key_3, &value_3, 1).unwrap();
        let cached_value_1: Vec<i32> = get(key_1).unwrap().unwrap();
        let cached_value_2: String = get(key_2).unwrap().unwrap();
        let cached_value_3: Data = get(key_3).unwrap().unwrap();
        assert_eq!(value_1, cached_value_1);
        assert_eq!(value_2, cached_value_2);
        assert_eq!(value_3, cached_value_3);
        // wait for the cache to expire (TTL is 1 second)
        std::thread::sleep(std::time::Duration::from_millis(1100));
        let expired_value_1: Option<Vec<i32>> = get(key_1).unwrap();
        let expired_value_2: Option<Vec<i32>> = get(key_2).unwrap();
        let expired_value_3: Option<Vec<i32>> = get(key_3).unwrap();
        assert!(expired_value_1.is_none());
        assert!(expired_value_2.is_none());
        assert!(expired_value_3.is_none());
    }

    /// Checks category scoping: `delete_one` removes exactly one entry,
    /// while `delete_all` wipes every entry in the category.
    #[test]
    fn use_category() {
        let key_1 = "fire";
        let key_2 = "fish";
        let key_3 = "awawa";
        let value_1 = "hello".to_string();
        let value_2 = 998244353u32;
        let value_3 = 'あ';
        set_one(Test, key_1, &value_1, 5 * 60).unwrap();
        set_one(Test, key_2, &value_2, 5 * 60).unwrap();
        set_one(Test, key_3, &value_3, 5 * 60).unwrap();
        assert_eq!(get_one::<String>(Test, key_1).unwrap().unwrap(), value_1);
        assert_eq!(get_one::<u32>(Test, key_2).unwrap().unwrap(), value_2);
        assert_eq!(get_one::<char>(Test, key_3).unwrap().unwrap(), value_3);
        delete_one(Test, key_1).unwrap();
        assert!(get_one::<String>(Test, key_1).unwrap().is_none());
        assert!(get_one::<u32>(Test, key_2).unwrap().is_some());
        assert!(get_one::<char>(Test, key_3).unwrap().is_some());
        delete_all(Test).unwrap();
        assert!(get_one::<String>(Test, key_1).unwrap().is_none());
        assert!(get_one::<u32>(Test, key_2).unwrap().is_none());
        assert!(get_one::<char>(Test, key_3).unwrap().is_none());
    }
}

View file

@ -2,5 +2,6 @@ pub use postgresql::db_conn;
pub use redis::key as redis_key; pub use redis::key as redis_key;
pub use redis::redis_conn; pub use redis::redis_conn;
pub mod cache;
pub mod postgresql; pub mod postgresql;
pub mod redis; pub mod redis;

View file

@ -1,8 +1,9 @@
use crate::config::CONFIG; use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use sea_orm::{ConnectOptions, Database, DbConn, DbErr}; use sea_orm::{ConnectOptions, Database, DbConn, DbErr};
use tracing::log::LevelFilter; use tracing::log::LevelFilter;
static DB_CONN: once_cell::sync::OnceCell<DbConn> = once_cell::sync::OnceCell::new(); static DB_CONN: OnceCell<DbConn> = OnceCell::new();
async fn init_database() -> Result<&'static DbConn, DbErr> { async fn init_database() -> Result<&'static DbConn, DbErr> {
let database_uri = format!( let database_uri = format!(

View file

@ -1,7 +1,8 @@
use crate::config::CONFIG; use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use redis::{Client, Connection, RedisError}; use redis::{Client, Connection, RedisError};
static REDIS_CLIENT: once_cell::sync::OnceCell<Client> = once_cell::sync::OnceCell::new(); static REDIS_CLIENT: OnceCell<Client> = OnceCell::new();
fn init_redis() -> Result<Client, RedisError> { fn init_redis() -> Result<Client, RedisError> {
let redis_url = { let redis_url = {
@ -26,7 +27,7 @@ fn init_redis() -> Result<Client, RedisError> {
params.concat() params.concat()
}; };
tracing::info!("Initializing Redis connection"); tracing::info!("Initializing Redis client");
Client::open(redis_url) Client::open(redis_url)
} }
@ -38,8 +39,8 @@ pub fn redis_conn() -> Result<Connection, RedisError> {
} }
} }
#[inline]
/// prefix redis key /// prefix redis key
#[inline]
pub fn key(key: impl ToString) -> String { pub fn key(key: impl ToString) -> String {
format!("{}:{}", CONFIG.redis_key_prefix, key.to_string()) format!("{}:{}", CONFIG.redis_key_prefix, key.to_string())
} }

View file

@ -0,0 +1,105 @@
use std::fmt;
use std::str::FromStr;
/// A fediverse account handle: a `username` plus an optional remote `host`.
#[derive(Debug, PartialEq)]
#[crate::export(object)]
pub struct Acct {
    pub username: String,
    /// `None` for local accounts
    pub host: Option<String>,
}
impl FromStr for Acct {
    type Err = ();

    /// Parses `[@]username[@host]` into an [Acct].
    ///
    /// This never fails, so it is fine to `.unwrap()` the result.
    fn from_str(value: &str) -> Result<Self, Self::Err> {
        // the leading '@' is optional
        let stripped = value.strip_prefix('@').unwrap_or(value);
        let mut parts = stripped.split('@');

        // `split` always yields at least one item, so this default is unreachable
        let username = parts.next().unwrap_or_default().to_string();
        // absent => local account; only the segment right after the first '@' is used
        let host = parts.next().map(str::to_string);

        Ok(Self { username, host })
    }
}
impl fmt::Display for Acct {
    /// Formats the account as `username@host` (remote) or `username` (local).
    ///
    /// Writes straight into the formatter instead of building an
    /// intermediate `String`, avoiding a needless allocation per call.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.host {
            Some(host) => write!(f, "{}@{}", self.username, host),
            None => write!(f, "{}", self.username),
        }
    }
}
impl From<Acct> for String {
    /// Delegates to the [fmt::Display] implementation.
    fn from(value: Acct) -> Self {
        value.to_string()
    }
}
/// Parses an acct string. Kept as a shim for TypeScript callers;
/// Rust code should use `acct.parse()` / `Acct::from_str` directly.
#[crate::ts_only_warn("Use `acct.parse().unwrap()` or `Acct::from_str(acct).unwrap()` instead.")]
#[crate::export]
pub fn string_to_acct(acct: &str) -> Acct {
    Acct::from_str(acct).unwrap()
}
/// Serializes an [Acct] back to a string. Kept as a shim for TypeScript
/// callers; Rust code should use `acct.to_string()` directly.
#[crate::ts_only_warn("Use `acct.to_string()` instead.")]
#[crate::export]
pub fn acct_to_string(acct: &Acct) -> String {
    acct.to_string()
}
#[cfg(test)]
mod unit_test {
    use super::Acct;
    use pretty_assertions::assert_eq;
    use std::str::FromStr;

    /// Serialization: remote accounts render as `user@host`,
    /// local accounts as the bare username.
    #[test]
    fn test_acct_to_string() {
        let remote_acct = Acct {
            username: "firefish".to_string(),
            host: Some("example.com".to_string()),
        };
        let local_acct = Acct {
            username: "MisakaMikoto".to_string(),
            host: None,
        };
        assert_eq!(remote_acct.to_string(), "firefish@example.com");
        assert_ne!(remote_acct.to_string(), "mastodon@example.com");
        assert_eq!(local_acct.to_string(), "MisakaMikoto");
        assert_ne!(local_acct.to_string(), "ShiraiKuroko");
    }

    /// Parsing: the leading '@' is optional, and a missing host
    /// yields a local account.
    #[test]
    fn test_string_to_acct() {
        let remote_acct = Acct {
            username: "firefish".to_string(),
            host: Some("example.com".to_string()),
        };
        let local_acct = Acct {
            username: "MisakaMikoto".to_string(),
            host: None,
        };
        assert_eq!(
            Acct::from_str("@firefish@example.com").unwrap(),
            remote_acct
        );
        assert_eq!(Acct::from_str("firefish@example.com").unwrap(), remote_acct);
        assert_eq!(Acct::from_str("@MisakaMikoto").unwrap(), local_acct);
        assert_eq!(Acct::from_str("MisakaMikoto").unwrap(), local_acct);
    }
}

View file

@ -0,0 +1 @@
pub mod acct;

View file

@ -0,0 +1,39 @@
use std::sync::{Mutex, MutexGuard, OnceLock, PoisonError};
use sysinfo::System;
// Error returned when the SYSTEM mutex is poisoned (a thread panicked while holding it).
pub type SystemMutexError = PoisonError<MutexGuard<'static, System>>;

// TODO: handle this in a more proper way when we move the entry point to backend-rs
/// Returns a guard for the process-wide [`sysinfo::System`] singleton,
/// initializing it on first use.
pub fn system() -> Result<MutexGuard<'static, System>, SystemMutexError> {
    // declared inside the function so the singleton is only reachable via this accessor
    pub static SYSTEM: OnceLock<Mutex<System>> = OnceLock::new();
    SYSTEM.get_or_init(|| Mutex::new(System::new_all())).lock()
}
/// Logs the host machine's specs (hostname, OS, kernel, CPU, RAM, swap)
/// at the `info` level.
///
/// Fails only if the [system] mutex is poisoned.
#[crate::export]
pub fn show_server_info() -> Result<(), SystemMutexError> {
    let system_info = system()?;

    // `as_deref().unwrap_or(..)` avoids eagerly allocating the fallback
    // `String` (clippy::or_fun_call) while logging the exact same text.
    tracing::info!(
        "Hostname: {}",
        System::host_name().as_deref().unwrap_or("unknown")
    );
    tracing::info!(
        "OS: {}",
        System::long_os_version().as_deref().unwrap_or("unknown")
    );
    tracing::info!(
        "Kernel: {}",
        System::kernel_version().as_deref().unwrap_or("unknown")
    );
    tracing::info!(
        "CPU architecture: {}",
        System::cpu_arch().as_deref().unwrap_or("unknown")
    );
    tracing::info!("CPU threads: {}", system_info.cpus().len());
    // 1048576 = 1024 * 1024 (bytes per MiB)
    tracing::info!("Total memory: {} MiB", system_info.total_memory() / 1048576);
    tracing::info!("Free memory: {} MiB", system_info.free_memory() / 1048576);
    tracing::info!("Total swap: {} MiB", system_info.total_swap() / 1048576);
    tracing::info!("Free swap: {} MiB", system_info.free_swap() / 1048576);

    Ok(())
}

View file

@ -0,0 +1,2 @@
pub mod hardware_stats;
pub mod log;

View file

@ -1,7 +1,9 @@
pub use macro_rs::export; pub use macro_rs::{export, ts_only_warn};
pub mod config; pub mod config;
pub mod database; pub mod database;
pub mod federation;
pub mod init;
pub mod misc; pub mod misc;
pub mod model; pub mod model;
pub mod service; pub mod service;

View file

@ -1,74 +0,0 @@
#[derive(Debug, PartialEq)]
#[crate::export(object)]
pub struct Acct {
pub username: String,
pub host: Option<String>,
}
#[crate::export]
pub fn string_to_acct(acct: &str) -> Acct {
let split: Vec<&str> = if let Some(stripped) = acct.strip_prefix('@') {
stripped
} else {
acct
}
.split('@')
.collect();
Acct {
username: split[0].to_string(),
host: if split.len() == 1 {
None
} else {
Some(split[1].to_string())
},
}
}
#[crate::export]
pub fn acct_to_string(acct: &Acct) -> String {
match &acct.host {
Some(host) => format!("{}@{}", acct.username, host),
None => acct.username.clone(),
}
}
#[cfg(test)]
mod unit_test {
use super::{acct_to_string, string_to_acct, Acct};
use pretty_assertions::assert_eq;
#[test]
fn test_acct_to_string() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
};
let local_acct = Acct {
username: "MisakaMikoto".to_string(),
host: None,
};
assert_eq!(acct_to_string(&remote_acct), "firefish@example.com");
assert_ne!(acct_to_string(&remote_acct), "mastodon@example.com");
assert_eq!(acct_to_string(&local_acct), "MisakaMikoto");
assert_ne!(acct_to_string(&local_acct), "ShiraiKuroko");
}
#[test]
fn test_string_to_acct() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
};
let local_acct = Acct {
username: "MisakaMikoto".to_string(),
host: None,
};
assert_eq!(string_to_acct("@firefish@example.com"), remote_acct);
assert_eq!(string_to_acct("firefish@example.com"), remote_acct);
assert_eq!(string_to_acct("@MisakaMikoto"), local_acct);
assert_eq!(string_to_acct("MisakaMikoto"), local_acct);
}
}

View file

@ -1,10 +1,10 @@
use crate::misc::meta::fetch_meta; use crate::misc::meta::fetch_meta;
use sea_orm::DbErr; use sea_orm::DbErr;
/** /// Checks if a server is blocked.
* @param host punycoded instance host ///
* @returns whether the given host should be blocked /// ## Argument
*/ /// `host` - punycoded instance host
#[crate::export] #[crate::export]
pub async fn is_blocked_server(host: &str) -> Result<bool, DbErr> { pub async fn is_blocked_server(host: &str) -> Result<bool, DbErr> {
Ok(fetch_meta(true) Ok(fetch_meta(true)
@ -16,10 +16,10 @@ pub async fn is_blocked_server(host: &str) -> Result<bool, DbErr> {
})) }))
} }
/** /// Checks if a server is silenced.
* @param host punycoded instance host ///
* @returns whether the given host should be limited /// ## Argument
*/ /// `host` - punycoded instance host
#[crate::export] #[crate::export]
pub async fn is_silenced_server(host: &str) -> Result<bool, DbErr> { pub async fn is_silenced_server(host: &str) -> Result<bool, DbErr> {
Ok(fetch_meta(true) Ok(fetch_meta(true)
@ -31,10 +31,11 @@ pub async fn is_silenced_server(host: &str) -> Result<bool, DbErr> {
})) }))
} }
/** /// Checks if a server is allowlisted.
* @param host punycoded instance host /// Returns `Ok(true)` if private mode is disabled.
* @returns whether the given host is allowlisted (this is always true if private mode is disabled) ///
*/ /// ## Argument
/// `host` - punycoded instance host
#[crate::export] #[crate::export]
pub async fn is_allowed_server(host: &str) -> Result<bool, DbErr> { pub async fn is_allowed_server(host: &str) -> Result<bool, DbErr> {
let meta = fetch_meta(true).await?; let meta = fetch_meta(true).await?;

View file

@ -4,7 +4,7 @@ use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use sea_orm::{prelude::*, QuerySelect}; use sea_orm::{prelude::*, QuerySelect};
/// TODO: handle name collisions better // TODO: handle name collisions in a better way
#[crate::export(object, js_name = "NoteLikeForCheckWordMute")] #[crate::export(object, js_name = "NoteLikeForCheckWordMute")]
pub struct NoteLike { pub struct NoteLike {
pub file_ids: Vec<String>, pub file_ids: Vec<String>,

View file

@ -1,6 +1,7 @@
use crate::misc::redis_cache::{get_cache, set_cache, CacheError}; use crate::database::cache;
use crate::util::http_client; use crate::util::http_client;
use image::{io::Reader, ImageError, ImageFormat}; use image::{io::Reader, ImageError, ImageFormat};
use isahc::ReadResponseExt;
use nom_exif::{parse_jpeg_exif, EntryValue, ExifTag}; use nom_exif::{parse_jpeg_exif, EntryValue, ExifTag};
use std::io::Cursor; use std::io::Cursor;
use tokio::sync::Mutex; use tokio::sync::Mutex;
@ -8,9 +9,13 @@ use tokio::sync::Mutex;
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum Error { pub enum Error {
#[error("Redis cache error: {0}")] #[error("Redis cache error: {0}")]
CacheErr(#[from] CacheError), CacheErr(#[from] cache::Error),
#[error("Reqwest error: {0}")] #[error("HTTP client aquisition error: {0}")]
ReqwestErr(#[from] reqwest::Error), HttpClientErr(#[from] http_client::Error),
#[error("Isahc error: {0}")]
IsahcErr(#[from] isahc::Error),
#[error("HTTP error: {0}")]
HttpErr(String),
#[error("Image decoding error: {0}")] #[error("Image decoding error: {0}")]
ImageErr(#[from] ImageError), ImageErr(#[from] ImageError),
#[error("Image decoding error: {0}")] #[error("Image decoding error: {0}")]
@ -50,11 +55,10 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
{ {
let _ = MTX_GUARD.lock().await; let _ = MTX_GUARD.lock().await;
let key = format!("fetchImage:{}", url); attempted = cache::get_one::<bool>(cache::Category::FetchUrl, url)?.is_some();
attempted = get_cache::<bool>(&key)?.is_some();
if !attempted { if !attempted {
set_cache(&key, &true, 10 * 60)?; cache::set_one(cache::Category::FetchUrl, url, &true, 10 * 60)?;
} }
} }
@ -65,7 +69,16 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
tracing::info!("retrieving image size from {}", url); tracing::info!("retrieving image size from {}", url);
let image_bytes = http_client()?.get(url).send().await?.bytes().await?; let mut response = http_client::client()?.get(url)?;
if !response.status().is_success() {
tracing::info!("status: {}", response.status());
tracing::debug!("response body: {:#?}", response.body());
return Err(Error::HttpErr(format!("Failed to get image from {}", url)));
}
let image_bytes = response.bytes()?;
let reader = Reader::new(Cursor::new(&image_bytes)).with_guessed_format()?; let reader = Reader::new(Cursor::new(&image_bytes)).with_guessed_format()?;
let format = reader.format(); let format = reader.format();
@ -109,7 +122,7 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
#[cfg(test)] #[cfg(test)]
mod unit_test { mod unit_test {
use super::{get_image_size_from_url, ImageSize}; use super::{get_image_size_from_url, ImageSize};
use crate::misc::redis_cache::delete_cache; use crate::database::cache;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
#[tokio::test] #[tokio::test]
@ -124,17 +137,8 @@ mod unit_test {
let gif_url = "https://firefish.dev/firefish/firefish/-/raw/b9c3dfbd3d473cb2cee20c467eeae780bc401271/packages/backend/test/resources/anime.gif"; let gif_url = "https://firefish.dev/firefish/firefish/-/raw/b9c3dfbd3d473cb2cee20c467eeae780bc401271/packages/backend/test/resources/anime.gif";
let mp3_url = "https://firefish.dev/firefish/firefish/-/blob/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/sounds/aisha/1.mp3"; let mp3_url = "https://firefish.dev/firefish/firefish/-/blob/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/sounds/aisha/1.mp3";
// Delete caches in case you run this test multiple times // delete caches in case you run this test multiple times
// (should be disabled in CI tasks) cache::delete_all(cache::Category::FetchUrl).unwrap();
delete_cache(&format!("fetchImage:{}", png_url_1)).unwrap();
delete_cache(&format!("fetchImage:{}", png_url_2)).unwrap();
delete_cache(&format!("fetchImage:{}", png_url_3)).unwrap();
delete_cache(&format!("fetchImage:{}", rotated_jpeg_url)).unwrap();
delete_cache(&format!("fetchImage:{}", webp_url_1)).unwrap();
delete_cache(&format!("fetchImage:{}", webp_url_2)).unwrap();
delete_cache(&format!("fetchImage:{}", ico_url)).unwrap();
delete_cache(&format!("fetchImage:{}", gif_url)).unwrap();
delete_cache(&format!("fetchImage:{}", mp3_url)).unwrap();
let png_size_1 = ImageSize { let png_size_1 = ImageSize {
width: 1024, width: 1024,
@ -197,4 +201,15 @@ mod unit_test {
assert_eq!(gif_size, get_image_size_from_url(gif_url).await.unwrap()); assert_eq!(gif_size, get_image_size_from_url(gif_url).await.unwrap());
assert!(get_image_size_from_url(mp3_url).await.is_err()); assert!(get_image_size_from_url(mp3_url).await.is_err());
} }
#[tokio::test]
async fn too_many_attempts() {
let url = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/splash.png";
// delete caches in case you run this test multiple times
cache::delete_one(cache::Category::FetchUrl, url).unwrap();
assert!(get_image_size_from_url(url).await.is_ok());
assert!(get_image_size_from_url(url).await.is_err());
}
} }

View file

@ -1,4 +1,8 @@
/// TODO: handle name collisions better use serde::{Deserialize, Serialize};
// TODO: handle name collisions in a better way
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, js_name = "NoteLikeForGetNoteSummary")] #[crate::export(object, js_name = "NoteLikeForGetNoteSummary")]
pub struct NoteLike { pub struct NoteLike {
pub file_ids: Vec<String>, pub file_ids: Vec<String>,

View file

@ -0,0 +1,90 @@
use crate::init::hardware_stats::{system, SystemMutexError};
use sysinfo::{Disks, MemoryRefreshKind};
// TODO: i64 -> u64 (we can't export u64 to Node.js)
/// Static CPU information exposed to Node.js.
#[crate::export(object)]
pub struct Cpu {
    /// CPU model (brand) string, `"unknown"` if it cannot be determined
    pub model: String,
    // TODO: u16 -> usize (we can't export usize to Node.js)
    /// Number of logical CPU threads
    pub cores: u16,
}
/// RAM usage snapshot, exposed to Node.js.
#[crate::export(object)]
pub struct Memory {
    /// Total memory amount in bytes
    pub total: i64,
    /// Used memory amount in bytes
    pub used: i64,
    /// Available (for (re)use) memory amount in bytes
    pub available: i64,
}
/// Disk usage snapshot, exposed to Node.js.
#[crate::export(object)]
pub struct Storage {
    /// Total storage space in bytes
    pub total: i64,
    /// Used storage space in bytes
    pub used: i64,
}
/// Reports the CPU model and logical thread count.
///
/// Falls back to `"unknown"` (and logs at debug level) when no CPU
/// information is available.
#[crate::export]
pub fn cpu_info() -> Result<Cpu, SystemMutexError> {
    let system_info = system()?;
    let cpus = system_info.cpus();

    let model = match cpus.first() {
        Some(cpu) => cpu.brand().to_string(),
        None => {
            tracing::debug!("failed to get CPU info");
            "unknown".to_string()
        }
    };

    Ok(Cpu {
        model,
        cores: cpus.len() as u16,
    })
}
/// Returns the average CPU usage percentage across all logical threads.
///
/// NOTE(review): if `cpus()` is empty this divides by zero and yields NaN —
/// confirm whether callers handle that.
#[crate::export]
pub fn cpu_usage() -> Result<f32, SystemMutexError> {
    let mut system_info = system()?;
    system_info.refresh_cpu_usage();
    // sum of per-thread usage, averaged over the thread count
    let total_cpu_usage: f32 = system_info.cpus().iter().map(|cpu| cpu.cpu_usage()).sum();
    let cpu_threads = system_info.cpus().len();
    Ok(total_cpu_usage / (cpu_threads as f32))
}
/// Returns a fresh RAM usage snapshot (total/used/available, in bytes).
#[crate::export]
pub fn memory_usage() -> Result<Memory, SystemMutexError> {
    let mut system_info = system()?;
    // refresh RAM figures only (skip swap) before reading them
    system_info.refresh_memory_specifics(MemoryRefreshKind::new().with_ram());
    Ok(Memory {
        total: system_info.total_memory() as i64,
        used: system_info.used_memory() as i64,
        available: system_info.available_memory() as i64,
    })
}
/// Reports total/used space (in bytes) of the first disk that appears to
/// actually be in use; returns `None` when no such disk is found.
#[crate::export]
pub fn storage_usage() -> Option<Storage> {
    let disks = Disks::new_with_refreshed_list();

    // Pick the first disk that is actually used:
    // it has free space left and is not entirely empty.
    let used_disk = disks
        .iter()
        .find(|disk| disk.available_space() > 0 && disk.total_space() > disk.available_space());

    match used_disk {
        Some(disk) => {
            let total = disk.total_space() as i64;
            let available = disk.available_space() as i64;
            Some(Storage {
                total,
                used: total - available,
            })
        }
        None => {
            tracing::debug!("failed to get stats");
            None
        }
    }
}

View file

@ -0,0 +1,34 @@
/// Returns whether `url` is considered safe to fetch:
/// it must parse as a URL, use the `http` or `https` scheme,
/// not target the pseudo-host `unix`, and use the default port
/// or an explicit port 80/443.
#[crate::export]
pub fn is_safe_url(url: &str) -> bool {
    match url.parse::<url::Url>() {
        Err(_) => false,
        Ok(parsed) => {
            let host_ok = parsed.host_str().unwrap_or_default() != "unix";
            let scheme_ok = matches!(parsed.scheme(), "http" | "https");
            let port_ok = matches!(parsed.port(), None | Some(80) | Some(443));
            host_ok && scheme_ok && port_ok
        }
    }
}
#[cfg(test)]
mod unit_test {
    use super::is_safe_url;

    /// http/https on default or 80/443 ports pass; other schemes,
    /// other ports, the `unix` host, and unparsable input are rejected.
    #[test]
    fn safe_url() {
        assert!(is_safe_url("http://firefish.dev/firefish/firefish"));
        assert!(is_safe_url("https://firefish.dev/firefish/firefish"));
        assert!(is_safe_url("http://firefish.dev:80/firefish/firefish"));
        assert!(is_safe_url("https://firefish.dev:80/firefish/firefish"));
        assert!(is_safe_url("http://firefish.dev:443/firefish/firefish"));
        assert!(is_safe_url("https://firefish.dev:443/firefish/firefish"));
        assert!(!is_safe_url("https://unix/firefish/firefish"));
        assert!(!is_safe_url("https://firefish.dev:35/firefish/firefish"));
        assert!(!is_safe_url("ftp://firefish.dev/firefish/firefish"));
        assert!(!is_safe_url("nyaa"));
        assert!(!is_safe_url(""));
    }
}

View file

@ -0,0 +1,108 @@
use crate::database::cache;
use crate::util::http_client;
use isahc::ReadResponseExt;
use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Cache error: {0}")]
CacheErr(#[from] cache::Error),
#[error("Isahc error: {0}")]
IsahcErr(#[from] isahc::Error),
#[error("HTTP client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
#[error("HTTP error: {0}")]
HttpErr(String),
#[error("Response parsing error: {0}")]
IoErr(#[from] std::io::Error),
#[error("Failed to deserialize JSON: {0}")]
JsonErr(#[from] serde_json::Error),
}
/// Upstream `package.json` on the main branch; its `version` field is
/// treated as the latest released Firefish version.
const UPSTREAM_PACKAGE_JSON_URL: &str =
    "https://firefish.dev/firefish/firefish/-/raw/main/package.json";
/// Fetches the latest Firefish version string from the upstream
/// `package.json` on firefish.dev.
///
/// NOTE(review): `http_client::client()?.get(..)` looks like a blocking
/// isahc call even though this function is `async` — confirm whether the
/// async isahc API should be used to avoid blocking the executor.
async fn get_latest_version() -> Result<String, Error> {
    // only the `version` field of package.json is of interest
    #[derive(Debug, Deserialize, Serialize)]
    struct Response {
        version: String,
    }
    let mut response = http_client::client()?.get(UPSTREAM_PACKAGE_JSON_URL)?;
    if !response.status().is_success() {
        tracing::info!("status: {}", response.status());
        tracing::debug!("response body: {:#?}", response.body());
        return Err(Error::HttpErr(
            "Failed to fetch version from Firefish GitLab".to_string(),
        ));
    }
    let res_parsed: Response = serde_json::from_str(&response.text()?)?;
    Ok(res_parsed.version)
}
/// Returns the latest Firefish version, preferring the cached value and
/// fetching from upstream (then caching for 3 hours) on a cache miss.
#[crate::export]
pub async fn latest_version() -> Result<String, Error> {
    let cached: Option<String> =
        cache::get_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL)?;

    // cache hit: no network round-trip needed
    if let Some(v) = cached {
        tracing::trace!("use cached value: {}", v);
        return Ok(v);
    }

    tracing::trace!("cache is expired, fetching the latest version");
    let fetched_version = get_latest_version().await?;
    tracing::trace!("fetched value: {}", fetched_version);

    cache::set_one(
        cache::Category::FetchUrl,
        UPSTREAM_PACKAGE_JSON_URL,
        &fetched_version,
        3 * 60 * 60,
    )?;

    Ok(fetched_version)
}
#[cfg(test)]
mod unit_test {
    use super::{latest_version, UPSTREAM_PACKAGE_JSON_URL};
    use crate::database::cache;

    /// Checks that a version string matches the calendar-versioning shape
    /// `YYYYMMDD` or `YYYYMMDD-X` used by Firefish releases.
    fn validate_version(version: String) {
        // version: YYYYMMDD or YYYYMMDD-X
        assert!(version.len() >= 8);
        assert!(version[..8].chars().all(|c| c.is_ascii_digit()));
        // YYYY (string comparison works: both sides are fixed-width ASCII digits)
        assert!(&version[..4] >= "2024");
        // MM
        assert!(&version[4..6] >= "01");
        assert!(&version[4..6] <= "12");
        // DD
        assert!(&version[6..8] >= "01");
        assert!(&version[6..8] <= "31");
        // -X (optional numeric patch suffix)
        if version.len() > 8 {
            assert!(version.chars().nth(8).unwrap() == '-');
            assert!(version[9..].chars().all(|c| c.is_ascii_digit()));
        }
    }

    /// Requires network access (firefish.dev) and a reachable cache backend.
    #[tokio::test]
    async fn check_version() {
        // delete caches in case you run this test multiple times
        cache::delete_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL).unwrap();
        // fetch from firefish.dev
        validate_version(latest_version().await.unwrap());
        // use cache
        validate_version(latest_version().await.unwrap());
    }
}

View file

@ -1,4 +1,3 @@
pub mod acct;
pub mod add_note_to_antenna; pub mod add_note_to_antenna;
pub mod check_server_block; pub mod check_server_block;
pub mod check_word_mute; pub mod check_word_mute;
@ -8,10 +7,12 @@ pub mod escape_sql;
pub mod format_milliseconds; pub mod format_milliseconds;
pub mod get_image_size; pub mod get_image_size;
pub mod get_note_summary; pub mod get_note_summary;
pub mod hardware_stats;
pub mod is_safe_url;
pub mod latest_version;
pub mod mastodon_id; pub mod mastodon_id;
pub mod meta; pub mod meta;
pub mod nyaify; pub mod nyaify;
pub mod password; pub mod password;
pub mod reaction; pub mod reaction;
pub mod redis_cache;
pub mod remove_old_attestation_challenges; pub mod remove_old_attestation_challenges;

View file

@ -1,94 +0,0 @@
use crate::database::{redis_conn, redis_key};
use redis::{Commands, RedisError};
use serde::{Deserialize, Serialize};
/// Errors that can occur while reading from or writing to the Redis-backed cache.
#[derive(thiserror::Error, Debug)]
pub enum CacheError {
    #[error("Redis error: {0}")]
    RedisError(#[from] RedisError),
    // Values are stored as MessagePack, so both encoding and decoding can fail.
    #[error("Data serialization error: {0}")]
    SerializeError(#[from] rmp_serde::encode::Error),
    #[error("Data deserialization error: {0}")]
    DeserializeError(#[from] rmp_serde::decode::Error),
}
/// Namespaces a cache key (as `cache:{key}`) so it cannot collide with
/// other Redis entries used by this server.
fn prefix_key(key: &str) -> String {
    redis_key(["cache:", key].concat())
}
/// Serializes `value` as MessagePack and stores it in Redis under the
/// namespaced `key`, expiring after `expire_seconds`.
pub fn set_cache<V: for<'a> Deserialize<'a> + Serialize>(
    key: &str,
    value: &V,
    expire_seconds: u64,
) -> Result<(), CacheError> {
    // SETEX: value and TTL are applied atomically in one command.
    redis_conn()?.set_ex(
        prefix_key(key),
        rmp_serde::encode::to_vec(&value)?,
        expire_seconds,
    )?;
    Ok(())
}
pub fn get_cache<V: for<'a> Deserialize<'a> + Serialize>(
key: &str,
) -> Result<Option<V>, CacheError> {
let serialized_value: Option<Vec<u8>> = redis_conn()?.get(prefix_key(key))?;
Ok(match serialized_value {
Some(v) => Some(rmp_serde::from_slice::<V>(v.as_ref())?),
None => None,
})
}
/// Removes the value stored under the namespaced `key` (no-op when absent).
pub fn delete_cache(key: &str) -> Result<(), CacheError> {
    let mut conn = redis_conn()?;
    conn.del::<_, ()>(prefix_key(key))?;
    Ok(())
}
#[cfg(test)]
mod unit_test {
    use super::{get_cache, set_cache};
    use pretty_assertions::assert_eq;

    /// Round-trips values of several shapes through the cache, then waits
    /// for the 1-second TTL to elapse and checks they are gone.
    /// Requires a running Redis instance.
    #[test]
    fn set_get_expire() {
        #[derive(serde::Deserialize, serde::Serialize, PartialEq, Debug)]
        struct Data {
            id: u32,
            kind: String,
        }

        let key_1 = "CARGO_TEST_CACHE_KEY_1";
        let value_1: Vec<i32> = vec![1, 2, 3, 4, 5];

        let key_2 = "CARGO_TEST_CACHE_KEY_2";
        let value_2 = "Hello fedizens".to_string();

        let key_3 = "CARGO_TEST_CACHE_KEY_3";
        let value_3 = Data {
            id: 1000000007,
            kind: "prime number".to_string(),
        };

        // Store with a 1-second TTL each.
        set_cache(key_1, &value_1, 1).unwrap();
        set_cache(key_2, &value_2, 1).unwrap();
        set_cache(key_3, &value_3, 1).unwrap();

        let cached_value_1: Vec<i32> = get_cache(key_1).unwrap().unwrap();
        let cached_value_2: String = get_cache(key_2).unwrap().unwrap();
        let cached_value_3: Data = get_cache(key_3).unwrap().unwrap();

        assert_eq!(value_1, cached_value_1);
        assert_eq!(value_2, cached_value_2);
        assert_eq!(value_3, cached_value_3);

        // wait for the cache to expire
        std::thread::sleep(std::time::Duration::from_millis(1100));

        let expired_value_1: Option<Vec<i32>> = get_cache(key_1).unwrap();
        let expired_value_2: Option<Vec<i32>> = get_cache(key_2).unwrap();
        let expired_value_3: Option<Vec<i32>> = get_cache(key_3).unwrap();

        assert!(expired_value_1.is_none());
        assert!(expired_value_2.is_none());
        assert!(expired_value_3.is_none());
    }
}

View file

@ -78,6 +78,7 @@ pub struct Model {
pub is_indexable: bool, pub is_indexable: bool,
#[sea_orm(column_name = "mutedPatterns")] #[sea_orm(column_name = "mutedPatterns")]
pub muted_patterns: Vec<String>, pub muted_patterns: Vec<String>,
pub lang: Option<String>,
} }
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View file

@ -1,3 +1,4 @@
pub mod log; pub mod nodeinfo;
pub mod note; pub mod note;
pub mod push_notification;
pub mod stream; pub mod stream;

View file

@ -0,0 +1,161 @@
use crate::service::nodeinfo::schema::*;
use crate::util::http_client;
use isahc::AsyncReadResponseExt;
use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Http client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
#[error("Http error: {0}")]
HttpErr(#[from] isahc::Error),
#[error("Bad status: {0}")]
BadStatus(String),
#[error("Failed to parse response body as text: {0}")]
ResponseErr(#[from] std::io::Error),
#[error("Failed to parse response body as json: {0}")]
JsonErr(#[from] serde_json::Error),
#[error("No nodeinfo provided")]
MissingNodeinfo,
}
/// Shape of a `/.well-known/nodeinfo` discovery document.
#[derive(Deserialize, Serialize, Debug)]
pub struct NodeinfoLinks {
    links: Vec<NodeinfoLink>,
}

/// One entry of the discovery document: `rel` identifies the schema
/// version, `href` points at the actual nodeinfo document.
#[derive(Deserialize, Serialize, Debug)]
pub struct NodeinfoLink {
    rel: String,
    href: String,
}
/// Builds the well-known nodeinfo discovery URL for `host`.
#[inline]
fn wellknown_nodeinfo_url(host: &str) -> String {
    ["https://", host, "/.well-known/nodeinfo"].concat()
}
/// Downloads `https://{host}/.well-known/nodeinfo` and parses the list of
/// nodeinfo documents the remote server advertises.
async fn fetch_nodeinfo_links(host: &str) -> Result<NodeinfoLinks, Error> {
    let client = http_client::client()?;
    let wellknown_url = wellknown_nodeinfo_url(host);
    let mut wellknown_response = client.get_async(&wellknown_url).await?;

    if !wellknown_response.status().is_success() {
        tracing::debug!("{:#?}", wellknown_response.body());
        return Err(Error::BadStatus(format!(
            "{} returned {}",
            wellknown_url,
            wellknown_response.status()
        )));
    }

    Ok(serde_json::from_str(&wellknown_response.text().await?)?)
}
fn check_nodeinfo_link(links: NodeinfoLinks) -> Result<String, Error> {
for link in links.links {
if link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.1"
|| link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.0"
{
return Ok(link.href);
}
}
Err(Error::MissingNodeinfo)
}
/// Downloads and parses the nodeinfo document at `nodeinfo_link`.
/// NOTE(review): the body is always parsed as schema 2.0, even when the
/// 2.1 link was selected — unknown 2.1-only fields are silently dropped
/// by serde; confirm this is intentional.
async fn fetch_nodeinfo_impl(nodeinfo_link: &str) -> Result<Nodeinfo20, Error> {
    let client = http_client::client()?;
    let mut response = client.get_async(nodeinfo_link).await?;

    if !response.status().is_success() {
        tracing::debug!("{:#?}", response.body());
        return Err(Error::BadStatus(format!(
            "{} returned {}",
            nodeinfo_link,
            response.status()
        )));
    }

    Ok(serde_json::from_str(&response.text().await?)?)
}
// for napi export
type Nodeinfo = Nodeinfo20;
/// Fetches the nodeinfo of a remote `host`: resolves the well-known
/// discovery document first, then downloads the referenced nodeinfo.
#[crate::export]
pub async fn fetch_nodeinfo(host: &str) -> Result<Nodeinfo, Error> {
    tracing::info!("fetching from {}", host);
    let nodeinfo_url = check_nodeinfo_link(fetch_nodeinfo_links(host).await?)?;
    fetch_nodeinfo_impl(&nodeinfo_url).await
}
#[cfg(test)]
mod unit_test {
    use super::{check_nodeinfo_link, fetch_nodeinfo, NodeinfoLink, NodeinfoLinks};
    use pretty_assertions::assert_eq;

    #[test]
    fn test_check_nodeinfo_link() {
        // A supported 2.0 rel mixed with an unrelated one: the 2.0 href wins.
        let links_1 = NodeinfoLinks {
            links: vec![
                NodeinfoLink {
                    rel: "https://example.com/incorrect/schema/2.0".to_string(),
                    href: "https://example.com/dummy".to_string(),
                },
                NodeinfoLink {
                    rel: "http://nodeinfo.diaspora.software/ns/schema/2.0".to_string(),
                    href: "https://example.com/real".to_string(),
                },
            ],
        };
        assert_eq!(
            check_nodeinfo_link(links_1).unwrap(),
            "https://example.com/real"
        );

        // Same, but with the supported 2.1 rel.
        let links_2 = NodeinfoLinks {
            links: vec![
                NodeinfoLink {
                    rel: "https://example.com/incorrect/schema/2.0".to_string(),
                    href: "https://example.com/dummy".to_string(),
                },
                NodeinfoLink {
                    rel: "http://nodeinfo.diaspora.software/ns/schema/2.1".to_string(),
                    href: "https://example.com/real".to_string(),
                },
            ],
        };
        assert_eq!(
            check_nodeinfo_link(links_2).unwrap(),
            "https://example.com/real"
        );

        // No supported rel at all: must fail with MissingNodeinfo.
        let links_3 = NodeinfoLinks {
            links: vec![
                NodeinfoLink {
                    rel: "https://example.com/incorrect/schema/2.0".to_string(),
                    href: "https://example.com/dummy/2.0".to_string(),
                },
                NodeinfoLink {
                    rel: "https://example.com/incorrect/schema/2.1".to_string(),
                    href: "https://example.com/dummy/2.1".to_string(),
                },
            ],
        };
        check_nodeinfo_link(links_3).expect_err("No nodeinfo");
    }

    /// Requires network access to info.firefish.dev.
    #[tokio::test]
    async fn test_fetch_nodeinfo() {
        assert_eq!(
            fetch_nodeinfo("info.firefish.dev")
                .await
                .unwrap()
                .software
                .name,
            "firefish"
        );
    }
}

View file

@ -0,0 +1,142 @@
use crate::config::CONFIG;
use crate::database::cache;
use crate::database::db_conn;
use crate::misc::meta::fetch_meta;
use crate::model::entity::{note, user};
use crate::service::nodeinfo::schema::*;
use sea_orm::{ColumnTrait, DbErr, EntityTrait, PaginatorTrait, QueryFilter};
use serde_json::json;
use std::collections::HashMap;
/// Errors that can occur while generating this server's own nodeinfo.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error("Database error: {0}")]
    DbErr(#[from] DbErr),
    #[error("Cache error: {0}")]
    CacheErr(#[from] cache::Error),
    #[error("Failed to serialize nodeinfo to JSON: {0}")]
    JsonErr(#[from] serde_json::Error),
}
/// Returns `(total local users, local users active in the last half year,
/// local users active in the last month, total local posts)`, with the four
/// independent count queries executed concurrently.
async fn statistics() -> Result<(u64, u64, u64, u64), DbErr> {
    let db = db_conn().await?;
    let now = chrono::Local::now().naive_local();
    // Bug fix: these constants were built with `Duration::seconds(2592000000)`
    // and `Duration::seconds(15552000000)` — millisecond magnitudes passed as
    // seconds (~82 and ~493 years), so the "active" windows matched everyone.
    // 30 days and 180 days were intended.
    const MONTH: chrono::TimeDelta = chrono::Duration::milliseconds(2592000000);
    const HALF_YEAR: chrono::TimeDelta = chrono::Duration::milliseconds(15552000000);

    let local_users = user::Entity::find()
        .filter(user::Column::Host.is_null())
        .count(db);
    let local_active_halfyear = user::Entity::find()
        .filter(user::Column::Host.is_null())
        .filter(user::Column::LastActiveDate.gt(now - HALF_YEAR))
        .count(db);
    let local_active_month = user::Entity::find()
        .filter(user::Column::Host.is_null())
        .filter(user::Column::LastActiveDate.gt(now - MONTH))
        .count(db);
    let local_posts = note::Entity::find()
        .filter(note::Column::UserHost.is_null())
        .count(db);

    // Run all four counts in parallel; fail fast on the first DbErr.
    tokio::try_join!(
        local_users,
        local_active_halfyear,
        local_active_month,
        local_posts
    )
}
/// Builds a fresh nodeinfo 2.1 document for this server from its
/// configuration, the admin-managed meta record, and live DB statistics.
async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
    let (local_users, local_active_halfyear, local_active_month, local_posts) =
        statistics().await?;
    let meta = fetch_meta(true).await?;

    // Firefish-specific extension fields published under `metadata`.
    let metadata = HashMap::from([
        (
            "nodeName".to_string(),
            // Fall back to the configured hostname when no display name is set.
            json!(meta.name.unwrap_or(CONFIG.host.clone())),
        ),
        ("nodeDescription".to_string(), json!(meta.description)),
        ("repositoryUrl".to_string(), json!(meta.repository_url)),
        // Timelines are stored as "disable*" flags but advertised as "enable*".
        (
            "enableLocalTimeline".to_string(),
            json!(!meta.disable_local_timeline),
        ),
        (
            "enableRecommendedTimeline".to_string(),
            json!(!meta.disable_recommended_timeline),
        ),
        (
            "enableGlobalTimeline".to_string(),
            json!(!meta.disable_global_timeline),
        ),
        (
            "enableGuestTimeline".to_string(),
            json!(meta.enable_guest_timeline),
        ),
        (
            "maintainer".to_string(),
            json!({"name":meta.maintainer_name,"email":meta.maintainer_email}),
        ),
        // NOTE(review): this publishes the proxy account *id* under a key
        // called proxyAccountName — confirm which value is intended.
        ("proxyAccountName".to_string(), json!(meta.proxy_account_id)),
        (
            "themeColor".to_string(),
            json!(meta.theme_color.unwrap_or("#31748f".to_string())),
        ),
    ]);

    Ok(Nodeinfo21 {
        version: "2.1".to_string(),
        software: Software21 {
            name: "firefish".to_string(),
            version: CONFIG.version.clone(),
            repository: Some(meta.repository_url),
            homepage: Some("https://firefish.dev/firefish/firefish".to_string()),
        },
        protocols: vec![Protocol::Activitypub],
        services: Services {
            inbound: vec![],
            outbound: vec![Outbound::Atom1, Outbound::Rss2],
        },
        open_registrations: !meta.disable_registration,
        usage: Usage {
            users: Users {
                total: Some(local_users as u32),
                active_halfyear: Some(local_active_halfyear as u32),
                active_month: Some(local_active_month as u32),
            },
            local_posts: Some(local_posts as u32),
            local_comments: None,
        },
        metadata,
    })
}
/// Returns this server's nodeinfo 2.1 document, regenerating it at most
/// once per hour (the result is cached).
pub async fn nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
    const NODEINFO_2_1_CACHE_KEY: &str = "nodeinfo_2_1";
    match cache::get::<Nodeinfo21>(NODEINFO_2_1_CACHE_KEY)? {
        Some(hit) => Ok(hit),
        None => {
            let fresh = generate_nodeinfo_2_1().await?;
            cache::set(NODEINFO_2_1_CACHE_KEY, &fresh, 60 * 60)?;
            Ok(fresh)
        }
    }
}
pub async fn nodeinfo_2_0() -> Result<Nodeinfo20, Error> {
Ok(nodeinfo_2_1().await?.into())
}
/// JSON form of nodeinfo 2.1, exported to the TypeScript side as `nodeinfo_2_1`.
#[crate::export(js_name = "nodeinfo_2_1")]
pub async fn nodeinfo_2_1_as_json() -> Result<serde_json::Value, Error> {
    let nodeinfo = nodeinfo_2_1().await?;
    Ok(serde_json::to_value(nodeinfo)?)
}
/// JSON form of nodeinfo 2.0, exported to the TypeScript side as `nodeinfo_2_0`.
#[crate::export(js_name = "nodeinfo_2_0")]
pub async fn nodeinfo_2_0_as_json() -> Result<serde_json::Value, Error> {
    let nodeinfo = nodeinfo_2_0().await?;
    Ok(serde_json::to_value(nodeinfo)?)
}

View file

@ -0,0 +1,3 @@
pub mod fetch;
pub mod generate;
pub mod schema;

View file

@ -0,0 +1,263 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// TODO: I want to use these macros but they don't work with rmp_serde
// - #[serde(skip_serializing_if = "Option::is_none")] (https://github.com/3Hren/msgpack-rust/issues/86)
// - #[serde(tag = "version", rename = "2.1")] (https://github.com/3Hren/msgpack-rust/issues/318)
/// NodeInfo schema version 2.1. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Nodeinfo21 {
/// The schema version, must be 2.1.
pub version: String,
/// Metadata about server software in use.
pub software: Software21,
/// The protocols supported on this server.
pub protocols: Vec<Protocol>,
/// The third party sites this server can connect to via their application API.
pub services: Services,
/// Whether this server allows open self-registration.
pub open_registrations: bool,
/// Usage statistics for this server.
pub usage: Usage,
/// Free form key value pairs for software specific values. Clients should not rely on any specific key present.
pub metadata: HashMap<String, serde_json::Value>,
}
/// NodeInfo schema version 2.0. https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, js_name = "Nodeinfo")]
pub struct Nodeinfo20 {
/// The schema version, must be 2.0.
pub version: String,
/// Metadata about server software in use.
pub software: Software20,
/// The protocols supported on this server.
pub protocols: Vec<Protocol>,
/// The third party sites this server can connect to via their application API.
pub services: Services,
/// Whether this server allows open self-registration.
pub open_registrations: bool,
/// Usage statistics for this server.
pub usage: Usage,
/// Free form key value pairs for software specific values. Clients should not rely on any specific key present.
pub metadata: HashMap<String, serde_json::Value>,
}
/// Metadata about server software in use (version 2.1).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Software21 {
/// The canonical name of this server software.
pub name: String,
/// The version of this server software.
pub version: String,
/// The url of the source code repository of this server software.
pub repository: Option<String>,
/// The url of the homepage of this server software.
pub homepage: Option<String>,
}
/// Metadata about server software in use (version 2.0).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Software20 {
/// The canonical name of this server software.
pub name: String,
/// The version of this server software.
pub version: String,
}
/// A federation protocol, as listed in the nodeinfo `protocols` array
/// (serialized in lowercase per the nodeinfo schema).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Protocol {
    Activitypub,
    Buddycloud,
    Dfrn,
    Diaspora,
    Libertree,
    Ostatus,
    Pumpio,
    Tent,
    Xmpp,
    Zot,
}
/// The third party sites this server can connect to via their application API.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Services {
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
pub inbound: Vec<Inbound>,
/// The third party sites this server can publish messages to on the behalf of a user.
pub outbound: Vec<Outbound>,
}
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Inbound {
#[serde(rename = "atom1.0")]
Atom1,
Gnusocial,
Imap,
Pnut,
#[serde(rename = "pop3")]
Pop3,
Pumpio,
#[serde(rename = "rss2.0")]
Rss2,
Twitter,
}
/// The third party sites this server can publish messages to on the behalf of a user.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
pub enum Outbound {
#[serde(rename = "atom1.0")]
Atom1,
Blogger,
Buddycloud,
Diaspora,
Dreamwidth,
Drupal,
Facebook,
Friendica,
Gnusocial,
Google,
Insanejournal,
Libertree,
Linkedin,
Livejournal,
Mediagoblin,
Myspace,
Pinterest,
Pnut,
Posterous,
Pumpio,
Redmatrix,
#[serde(rename = "rss2.0")]
Rss2,
Smtp,
Tent,
Tumblr,
Twitter,
Wordpress,
Xmpp,
}
/// Usage statistics for this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Usage {
    /// Statistics about the users of this server.
    pub users: Users,
    /// The total amount of posts made on this server.
    pub local_posts: Option<u32>,
    /// The total amount of comments made on this server.
    pub local_comments: Option<u32>,
}
/// statistics about the users of this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct Users {
    /// The total amount of users on this server.
    pub total: Option<u32>,
    /// The amount of users that were active within the last half year.
    pub active_halfyear: Option<u32>,
    /// The amount of users that were active within the last month.
    pub active_month: Option<u32>,
}
impl From<Software21> for Software20 {
fn from(software: Software21) -> Self {
Self {
name: software.name,
version: software.version,
}
}
}
impl From<Nodeinfo21> for Nodeinfo20 {
fn from(nodeinfo: Nodeinfo21) -> Self {
Self {
version: "2.0".to_string(),
software: nodeinfo.software.into(),
protocols: nodeinfo.protocols,
services: nodeinfo.services,
open_registrations: nodeinfo.open_registrations,
usage: nodeinfo.usage,
metadata: nodeinfo.metadata,
}
}
}
#[cfg(test)]
mod unit_test {
use super::{Nodeinfo20, Nodeinfo21};
use pretty_assertions::assert_eq;
#[test]
fn parse_nodeinfo_2_0() {
let json_str_1 = r#"{"version":"2.0","software":{"name":"mastodon","version":"4.3.0-nightly.2024-04-30"},"protocols":["activitypub"],"services":{"outbound":[],"inbound":[]},"usage":{"users":{"total":1935016,"activeMonth":238223,"activeHalfyear":618795},"localPosts":90175135},"openRegistrations":true,"metadata":{"nodeName":"Mastodon","nodeDescription":"The original server operated by the Mastodon gGmbH non-profit"}}"#;
let parsed_1: Nodeinfo20 = serde_json::from_str(json_str_1).unwrap();
let serialized_1 = serde_json::to_string(&parsed_1).unwrap();
let reparsed_1: Nodeinfo20 = serde_json::from_str(&serialized_1).unwrap();
assert_eq!(parsed_1, reparsed_1);
assert_eq!(parsed_1.software.name, "mastodon");
assert_eq!(parsed_1.software.version, "4.3.0-nightly.2024-04-30");
let json_str_2 = r#"{"version":"2.0","software":{"name":"peertube","version":"5.0.0"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":false,"usage":{"users":{"total":5,"activeMonth":0,"activeHalfyear":2},"localPosts":1018,"localComments":1},"metadata":{"taxonomy":{"postsName":"Videos"},"nodeName":"Blender Video","nodeDescription":"Blender Foundation PeerTube instance.","nodeConfig":{"search":{"remoteUri":{"users":true,"anonymous":false}},"plugin":{"registered":[]},"theme":{"registered":[],"default":"default"},"email":{"enabled":false},"contactForm":{"enabled":true},"transcoding":{"hls":{"enabled":true},"webtorrent":{"enabled":true},"enabledResolutions":[1080]},"live":{"enabled":false,"transcoding":{"enabled":true,"enabledResolutions":[]}},"import":{"videos":{"http":{"enabled":true},"torrent":{"enabled":false}}},"autoBlacklist":{"videos":{"ofUsers":{"enabled":false}}},"avatar":{"file":{"size":{"max":4194304},"extensions":[".png",".jpeg",".jpg",".gif",".webp"]}},"video":{"image":{"extensions":[".png",".jpg",".jpeg",".webp"],"size":{"max":4194304}},"file":{"extensions":[".webm",".ogv",".ogg",".mp4",".mkv",".mov",".qt",".mqv",".m4v",".flv",".f4v",".wmv",".avi",".3gp",".3gpp",".3g2",".3gpp2",".nut",".mts",".m2ts",".mpv",".m2v",".m1v",".mpg",".mpe",".mpeg",".vob",".mxf",".mp3",".wma",".wav",".flac",".aac",".m4a",".ac3"]}},"videoCaption":{"file":{"size":{"max":20971520},"extensions":[".vtt",".srt"]}},"user":{"videoQuota":5368709120,"videoQuotaDaily":-1},"trending":{"videos":{"intervalDays":7}},"tracker":{"enabled":true}}}}"#;
let parsed_2: Nodeinfo20 = serde_json::from_str(json_str_2).unwrap();
let serialized_2 = serde_json::to_string(&parsed_2).unwrap();
let reparsed_2: Nodeinfo20 = serde_json::from_str(&serialized_2).unwrap();
assert_eq!(parsed_2, reparsed_2);
assert_eq!(parsed_2.software.name, "peertube");
assert_eq!(parsed_2.software.version, "5.0.0");
let json_str_3 = r#"{"metadata":{"nodeName":"pixelfed","software":{"homepage":"https://pixelfed.org","repo":"https://github.com/pixelfed/pixelfed"},"config":{"features":{"timelines":{"local":true,"network":true},"mobile_apis":true,"stories":true,"video":true,"import":{"instagram":false,"mastodon":false,"pixelfed":false},"label":{"covid":{"enabled":false,"org":"visit the WHO website","url":"https://www.who.int/emergencies/diseases/novel-coronavirus-2019/advice-for-public"}},"hls":{"enabled":false}}}},"protocols":["activitypub"],"services":{"inbound":[],"outbound":[]},"software":{"name":"pixelfed","version":"0.12.0"},"usage":{"localPosts":24059868,"localComments":0,"users":{"total":112832,"activeHalfyear":24366,"activeMonth":8921}},"version":"2.0","openRegistrations":true}"#;
let parsed_3: Nodeinfo20 = serde_json::from_str(json_str_3).unwrap();
let serialized_3 = serde_json::to_string(&parsed_3).unwrap();
let reparsed_3: Nodeinfo20 = serde_json::from_str(&serialized_3).unwrap();
assert_eq!(parsed_3, reparsed_3);
assert_eq!(parsed_3.software.name, "pixelfed");
assert_eq!(parsed_3.software.version, "0.12.0");
}
/// Round-trip tests against real-world nodeinfo **2.1** documents.
#[test]
fn parse_nodeinfo_2_1() {
    let json_str_1 = r##"{"version":"2.1","software":{"name":"catodon","version":"24.04-dev.2","repository":"https://codeberg.org/catodon/catodon","homepage":"https://codeberg.org/catodon/catodon"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":true,"usage":{"users":{"total":294,"activeHalfyear":292,"activeMonth":139},"localPosts":22616,"localComments":0},"metadata":{"nodeName":"Catodon Social","nodeDescription":"🌎 Home of Catodon, a new platform for fedi communities, initially based on Iceshrimp/Firefish/Misskey. Be aware that our first release is not out yet, so things are still experimental.","maintainer":{"name":"admin","email":"redacted@example.com"},"langs":[],"tosUrl":"https://example.com/redacted","repositoryUrl":"https://codeberg.org/catodon/catodon","feedbackUrl":"https://codeberg.org/catodon/catodon/issues","disableRegistration":false,"disableLocalTimeline":false,"disableRecommendedTimeline":true,"disableGlobalTimeline":false,"emailRequiredForSignup":true,"postEditing":true,"postImports":false,"enableHcaptcha":true,"enableRecaptcha":false,"maxNoteTextLength":8000,"maxCaptionTextLength":1500,"enableGithubIntegration":false,"enableDiscordIntegration":false,"enableEmail":true,"themeColor":"#31748f"}}"##;
    let parsed_1: Nodeinfo21 = serde_json::from_str(json_str_1).unwrap();
    let serialized_1 = serde_json::to_string(&parsed_1).unwrap();
    let reparsed_1: Nodeinfo21 = serde_json::from_str(&serialized_1).unwrap();
    assert_eq!(parsed_1, reparsed_1);
    assert_eq!(parsed_1.software.name, "catodon");
    assert_eq!(parsed_1.software.version, "24.04-dev.2");

    let json_str_2 = r#"{"version":"2.1","software":{"name":"meisskey","version":"10.102.699-m544","repository":"https://github.com/mei23/misskey"},"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"openRegistrations":true,"usage":{"users":{"total":1123,"activeHalfyear":305,"activeMonth":89},"localPosts":268739,"localComments":0},"metadata":{"nodeName":"meisskey.one","nodeDescription":"ローカルタイムラインのないインスタンスなのだわ\n\n\n[通報・報告 (Report)](https://example.com/redacted)","name":"meisskey.one","description":"ローカルタイムラインのないインスタンスなのだわ\n\n\n[通報・報告 (Report)](https://example.com/redacted)","maintainer":{"name":"redacted","email":"redacted"},"langs":[],"announcements":[{"title":"問題・要望など","text":"問題・要望などは <a href=\"https://example.com/redacted\">#meisskeyone要望</a> で投稿してなのだわ"}],"relayActor":"https://example.com/redacted","relays":[],"disableRegistration":false,"disableLocalTimeline":true,"enableRecaptcha":true,"maxNoteTextLength":5000,"enableTwitterIntegration":false,"enableGithubIntegration":false,"enableDiscordIntegration":false,"enableServiceWorker":true,"proxyAccountName":"ghost"}}"#;
    let parsed_2: Nodeinfo21 = serde_json::from_str(json_str_2).unwrap();
    let serialized_2 = serde_json::to_string(&parsed_2).unwrap();
    let reparsed_2: Nodeinfo21 = serde_json::from_str(&serialized_2).unwrap();
    assert_eq!(parsed_2, reparsed_2);
    assert_eq!(parsed_2.software.name, "meisskey");
    assert_eq!(parsed_2.software.version, "10.102.699-m544");

    let json_str_3 = r##"{"metadata":{"enableGlobalTimeline":true,"enableGuestTimeline":false,"enableLocalTimeline":true,"enableRecommendedTimeline":false,"maintainer":{"name":"Firefish dev team"},"nodeDescription":"","nodeName":"Firefish","repositoryUrl":"https://firefish.dev/firefish/firefish","themeColor":"#F25A85"},"openRegistrations":false,"protocols":["activitypub"],"services":{"inbound":[],"outbound":["atom1.0","rss2.0"]},"software":{"homepage":"https://firefish.dev/firefish/firefish","name":"firefish","repository":"https://firefish.dev/firefish/firefish","version":"20240504"},"usage":{"localPosts":23857,"users":{"activeHalfyear":7,"activeMonth":7,"total":9}},"version":"2.1"}"##;
    // Bug fix: this 2.1 document was previously parsed as `Nodeinfo20`,
    // which silently dropped the 2.1-only software fields instead of
    // exercising the 2.1 schema this test is named after.
    let parsed_3: Nodeinfo21 = serde_json::from_str(json_str_3).unwrap();
    let serialized_3 = serde_json::to_string(&parsed_3).unwrap();
    let reparsed_3: Nodeinfo21 = serde_json::from_str(&serialized_3).unwrap();
    assert_eq!(parsed_3, reparsed_3);
    assert_eq!(parsed_3.software.name, "firefish");
    assert_eq!(parsed_3.software.version, "20240504");
}
}

View file

@ -0,0 +1,232 @@
use crate::database::db_conn;
use crate::misc::get_note_summary::{get_note_summary, NoteLike};
use crate::misc::meta::fetch_meta;
use crate::model::entity::sw_subscription;
use crate::util::http_client;
use once_cell::sync::OnceCell;
use sea_orm::{prelude::*, DbErr};
use web_push::{
ContentEncoding, IsahcWebPushClient, SubscriptionInfo, SubscriptionKeys, VapidSignatureBuilder,
WebPushClient, WebPushError, WebPushMessageBuilder,
};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Database error: {0}")]
DbErr(#[from] DbErr),
#[error("Web Push error: {0}")]
WebPushErr(#[from] WebPushError),
#[error("Failed to (de)serialize an object: {0}")]
SerializeErr(#[from] serde_json::Error),
#[error("Invalid content: {0}")]
InvalidContentErr(String),
#[error("HTTP client aquisition error: {0}")]
HttpClientErr(#[from] http_client::Error),
}
// Process-wide Web Push client, built once from the shared HTTP client
// (so its proxy settings are inherited) and reused thereafter.
static CLIENT: OnceCell<IsahcWebPushClient> = OnceCell::new();

/// Returns a clone of the lazily-initialized Web Push client.
fn get_client() -> Result<IsahcWebPushClient, Error> {
    Ok(CLIENT
        .get_or_try_init(|| http_client::client().map(IsahcWebPushClient::from))
        .cloned()?)
}
/// Type tag embedded in each push payload; the `strum` serializations are
/// the exact `type` strings written into the payload JSON.
#[derive(strum::Display, PartialEq)]
#[crate::export(string_enum = "camelCase")]
pub enum PushNotificationKind {
    #[strum(serialize = "notification")]
    Generic,
    #[strum(serialize = "unreadMessagingMessage")]
    Chat,
    #[strum(serialize = "readAllMessagingMessages")]
    ReadAllChats,
    #[strum(serialize = "readAllMessagingMessagesOfARoom")]
    ReadAllChatsInTheRoom,
    #[strum(serialize = "readNotifications")]
    ReadNotifications,
    #[strum(serialize = "readAllNotifications")]
    ReadAllNotifications,
}
/// Trims the `note` object embedded in a `Generic` notification payload:
/// the nested `reply`/`renote`/`user` objects are removed and a
/// pre-rendered summary is inserted as `text`. All other notification
/// kinds, and payloads without a `note`, pass through unchanged.
fn compact_content(
    kind: &PushNotificationKind,
    mut content: serde_json::Value,
) -> Result<serde_json::Value, Error> {
    // Only generic notifications embed a note object.
    if kind != &PushNotificationKind::Generic {
        return Ok(content);
    }
    if !content.is_object() {
        return Err(Error::InvalidContentErr("not a JSON object".to_string()));
    }
    let object = content.as_object_mut().unwrap();
    if !object.contains_key("note") {
        return Ok(content);
    }

    // For renotes, summarize the renoted note rather than the wrapper note.
    let mut note = if object.contains_key("type") && object.get("type").unwrap() == "renote" {
        object
            .get("note")
            .unwrap()
            .get("renote")
            .ok_or(Error::InvalidContentErr(
                "renote object is missing".to_string(),
            ))?
    } else {
        object.get("note").unwrap()
    }
    .clone();

    if !note.is_object() {
        return Err(Error::InvalidContentErr(
            "(re)note is not an object".to_string(),
        ));
    }

    let note_like: NoteLike = serde_json::from_value(note.clone())?;
    let text = get_note_summary(note_like);

    let note_object = note.as_object_mut().unwrap();
    note_object.remove("reply");
    note_object.remove("renote");
    note_object.remove("user");
    note_object.insert("text".to_string(), text.into());
    object.insert("note".to_string(), note);

    // `object` is a mutable borrow into `content`, so `content` already holds
    // the compacted note. The previous `serde_json::from_value(Json::Object(
    // object.clone()))?` was an identity round-trip that cloned the whole map.
    Ok(content)
}
/// Reacts to a failed Web Push delivery: error kinds treated as permanent
/// cause the subscription row to be deleted; anything else is only logged
/// so transient failures don't drop subscribers.
async fn handle_web_push_failure(
    db: &DatabaseConnection,
    err: WebPushError,
    subscription_id: &str,
    error_message: &str,
) -> Result<(), DbErr> {
    match err {
        // These variants are treated as unrecoverable for this endpoint,
        // so the subscription is removed from the database.
        WebPushError::BadRequest(_)
        | WebPushError::ServerError(_)
        | WebPushError::InvalidUri
        | WebPushError::EndpointNotValid
        | WebPushError::EndpointNotFound
        | WebPushError::TlsError
        | WebPushError::SslError
        | WebPushError::InvalidPackageName
        | WebPushError::MissingCryptoKeys
        | WebPushError::InvalidCryptoKeys
        | WebPushError::InvalidResponse => {
            sw_subscription::Entity::delete_by_id(subscription_id)
                .exec(db)
                .await?;
            tracing::info!("{}; {} was unsubscribed", error_message, subscription_id);
            tracing::debug!("reason: {:#?}", err);
        }
        _ => {
            tracing::warn!("{}; subscription id: {}", error_message, subscription_id);
            tracing::info!("reason: {:#?}", err);
        }
    };

    Ok(())
}
/// Sends a Web Push message of the given `kind` to every push subscription
/// registered by `receiver_user_id`.
///
/// Does nothing when the service worker feature is disabled or the VAPID
/// key pair is not configured. Per-subscription failures never abort the
/// loop: permanently broken subscriptions are deleted via
/// `handle_web_push_failure`, transient errors are only logged.
#[crate::export]
pub async fn send_push_notification(
    receiver_user_id: &str,
    kind: PushNotificationKind,
    content: &serde_json::Value,
) -> Result<(), Error> {
    let meta = fetch_meta(true).await?;

    if !meta.enable_service_worker || meta.sw_public_key.is_none() || meta.sw_private_key.is_none()
    {
        return Ok(());
    }

    let db = db_conn().await?;

    let signature_builder = VapidSignatureBuilder::from_base64_no_sub(
        meta.sw_private_key.unwrap().as_str(),
        web_push::URL_SAFE_NO_PAD,
    )?;

    let subscriptions = sw_subscription::Entity::find()
        .filter(sw_subscription::Column::UserId.eq(receiver_user_id))
        .all(db)
        .await?;

    // The envelope is assembled by hand so the already-serialized `body`
    // can be spliced in as raw JSON.
    let payload = format!(
        "{{\"type\":\"{}\",\"userId\":\"{}\",\"dateTime\":{},\"body\":{}}}",
        kind,
        receiver_user_id,
        chrono::Utc::now().timestamp_millis(),
        serde_json::to_string(&compact_content(&kind, content.clone())?)?
    );
    tracing::trace!("payload: {:#?}", payload);

    for subscription in subscriptions.iter() {
        // Respect the subscriber's opt-out for read/seen events.
        if !subscription.send_read_message
            && [
                PushNotificationKind::ReadAllChats,
                PushNotificationKind::ReadAllChatsInTheRoom,
                PushNotificationKind::ReadAllNotifications,
                PushNotificationKind::ReadNotifications,
            ]
            .contains(&kind)
        {
            continue;
        }

        let subscription_info = SubscriptionInfo {
            endpoint: subscription.endpoint.to_owned(),
            keys: SubscriptionKeys {
                // convert standard base64 into base64url
                // https://en.wikipedia.org/wiki/Base64#Variants_summary_table
                p256dh: subscription
                    .publickey
                    .replace('+', "-")
                    .replace('/', "_")
                    .to_owned(),
                auth: subscription
                    .auth
                    .replace('+', "-")
                    .replace('/', "_")
                    .to_owned(),
            },
        };

        let signature = signature_builder
            .clone()
            .add_sub_info(&subscription_info)
            .build();
        if let Err(err) = signature {
            handle_web_push_failure(db, err, &subscription.id, "failed to build a signature")
                .await?;
            continue;
        }

        let mut message_builder = WebPushMessageBuilder::new(&subscription_info);
        message_builder.set_ttl(1000);
        message_builder.set_payload(ContentEncoding::Aes128Gcm, payload.as_bytes());
        message_builder.set_vapid_signature(signature.unwrap());

        let message = message_builder.build();
        if let Err(err) = message {
            handle_web_push_failure(db, err, &subscription.id, "failed to build a payload").await?;
            continue;
        }

        if let Err(err) = get_client()?.send(message.unwrap()).await {
            handle_web_push_failure(db, err, &subscription.id, "failed to send").await?;
            continue;
        }

        tracing::debug!("success; subscription id: {}", subscription.id);
    }

    Ok(())
}

View file

@ -1,24 +1,34 @@
use crate::config::CONFIG; use crate::config::CONFIG;
use isahc::{config::*, HttpClient};
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use reqwest::{Client, Error, NoProxy, Proxy};
use std::time::Duration; use std::time::Duration;
static CLIENT: OnceCell<Client> = OnceCell::new(); #[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Isahc error: {0}")]
IsahcErr(#[from] isahc::Error),
#[error("Url parse error: {0}")]
UrlParseErr(#[from] isahc::http::uri::InvalidUri),
}
pub fn http_client() -> Result<Client, Error> { static CLIENT: OnceCell<HttpClient> = OnceCell::new();
pub fn client() -> Result<HttpClient, Error> {
CLIENT CLIENT
.get_or_try_init(|| { .get_or_try_init(|| {
let mut builder = Client::builder().timeout(Duration::from_secs(5)); let mut builder = HttpClient::builder()
.timeout(Duration::from_secs(10))
.default_header("user-agent", &CONFIG.user_agent)
.dns_cache(DnsCache::Timeout(Duration::from_secs(60 * 60)));
if let Some(proxy_url) = &CONFIG.proxy { if let Some(proxy_url) = &CONFIG.proxy {
let mut proxy = Proxy::all(proxy_url)?; builder = builder.proxy(Some(proxy_url.parse()?));
if let Some(proxy_bypass_hosts) = &CONFIG.proxy_bypass_hosts { if let Some(proxy_bypass_hosts) = &CONFIG.proxy_bypass_hosts {
proxy = proxy.no_proxy(NoProxy::from_string(&proxy_bypass_hosts.join(","))); builder = builder.proxy_blacklist(proxy_bypass_hosts);
} }
builder = builder.proxy(proxy);
} }
builder.build() Ok(builder.build()?)
}) })
.cloned() .cloned()
} }

View file

@ -1,5 +1,3 @@
pub use http_client::http_client;
pub mod http_client; pub mod http_client;
pub mod id; pub mod id;
pub mod random; pub mod random;

View file

@ -1,7 +1,8 @@
use rand::{distributions::Alphanumeric, thread_rng, Rng}; use rand::{distributions::Alphanumeric, thread_rng, Rng};
/// Generate random string based on [thread_rng] and [Alphanumeric]. /// Generate random string based on [thread_rng] and [Alphanumeric].
pub fn gen_string(length: u16) -> String { #[crate::export]
pub fn generate_secure_random_string(length: u16) -> String {
thread_rng() thread_rng()
.sample_iter(Alphanumeric) .sample_iter(Alphanumeric)
.take(length.into()) .take(length.into())
@ -9,9 +10,9 @@ pub fn gen_string(length: u16) -> String {
.collect() .collect()
} }
#[crate::export(js_name = "secureRndstr")] #[crate::export]
pub fn native_random_str(length: Option<u16>) -> String { pub fn generate_user_token() -> String {
gen_string(length.unwrap_or(32)) generate_secure_random_string(16)
} }
#[cfg(test)] #[cfg(test)]
@ -19,14 +20,17 @@ mod unit_test {
use pretty_assertions::{assert_eq, assert_ne}; use pretty_assertions::{assert_eq, assert_ne};
use std::thread; use std::thread;
use super::gen_string; use super::generate_secure_random_string;
#[test] #[test]
fn can_generate_unique_strings() { fn can_generate_unique_strings() {
assert_eq!(gen_string(16).len(), 16); assert_eq!(generate_secure_random_string(16).len(), 16);
assert_ne!(gen_string(16), gen_string(16)); assert_ne!(
let s1 = thread::spawn(|| gen_string(16)); generate_secure_random_string(16),
let s2 = thread::spawn(|| gen_string(16)); generate_secure_random_string(16)
);
let s1 = thread::spawn(|| generate_secure_random_string(16));
let s2 = thread::spawn(|| generate_secure_random_string(16));
assert_ne!(s1.join().unwrap(), s2.join().unwrap()); assert_ne!(s1.join().unwrap(), s2.join().unwrap());
} }
} }

View file

@ -22,55 +22,55 @@
"@swc/core-android-arm64": "1.3.11" "@swc/core-android-arm64": "1.3.11"
}, },
"dependencies": { "dependencies": {
"@bull-board/api": "5.16.0", "@bull-board/api": "5.17.1",
"@bull-board/koa": "5.16.0", "@bull-board/koa": "5.17.1",
"@bull-board/ui": "5.16.0", "@bull-board/ui": "5.17.1",
"@discordapp/twemoji": "^15.0.3", "@discordapp/twemoji": "15.0.3",
"@koa/cors": "5.0.0", "@koa/cors": "5.0.0",
"@koa/multer": "3.0.2", "@koa/multer": "3.0.2",
"@koa/router": "12.0.1", "@koa/router": "12.0.1",
"@ladjs/koa-views": "9.0.0", "@ladjs/koa-views": "9.0.0",
"@peertube/http-signature": "1.7.0", "@peertube/http-signature": "1.7.0",
"@redocly/openapi-core": "1.12.0", "@redocly/openapi-core": "1.12.2",
"@sinonjs/fake-timers": "11.2.2", "@sinonjs/fake-timers": "11.2.2",
"adm-zip": "0.5.10", "adm-zip": "0.5.10",
"ajv": "8.12.0", "ajv": "8.13.0",
"archiver": "7.0.1", "archiver": "7.0.1",
"aws-sdk": "2.1608.0", "aws-sdk": "2.1621.0",
"axios": "^1.6.8", "axios": "1.6.8",
"backend-rs": "workspace:*", "backend-rs": "workspace:*",
"firefish-js": "workspace:*", "firefish-js": "workspace:*",
"blurhash": "2.0.5", "blurhash": "2.0.5",
"bull": "4.12.2", "bull": "4.12.4",
"cacheable-lookup": "TheEssem/cacheable-lookup", "cacheable-lookup": "TheEssem/cacheable-lookup",
"cbor-x": "^1.5.9", "cbor-x": "1.5.9",
"chalk": "5.3.0", "chalk": "5.3.0",
"chalk-template": "1.1.0", "chalk-template": "1.1.0",
"cli-highlight": "2.1.11", "cli-highlight": "2.1.11",
"color-convert": "2.0.1", "color-convert": "2.0.1",
"content-disposition": "0.5.4", "content-disposition": "0.5.4",
"date-fns": "3.6.0", "date-fns": "3.6.0",
"decompress": "^4.2.1", "decompress": "4.2.1",
"deep-email-validator": "0.1.21", "deep-email-validator": "0.1.21",
"deepl-node": "1.13.0", "deepl-node": "1.13.0",
"escape-regexp": "0.0.1", "escape-regexp": "0.0.1",
"feed": "4.2.2", "feed": "4.2.2",
"file-type": "19.0.0", "file-type": "19.0.0",
"fluent-ffmpeg": "2.1.2", "fluent-ffmpeg": "2.1.2",
"form-data": "^4.0.0", "form-data": "4.0.0",
"got": "14.2.1", "got": "14.2.1",
"gunzip-maybe": "^1.4.2", "gunzip-maybe": "1.4.2",
"happy-dom": "^14.7.1",
"hpagent": "1.2.0", "hpagent": "1.2.0",
"ioredis": "5.4.1", "ioredis": "5.4.1",
"ip-cidr": "4.0.0", "ip-cidr": "4.0.0",
"is-svg": "5.0.0", "is-svg": "5.0.1",
"jsdom": "24.0.0",
"json5": "2.2.3", "json5": "2.2.3",
"jsonld": "8.3.2", "jsonld": "8.3.2",
"jsrsasign": "11.1.0", "jsrsasign": "11.1.0",
"katex": "0.16.10", "katex": "0.16.10",
"koa": "2.15.3", "koa": "2.15.3",
"koa-body": "^6.0.1", "koa-body": "6.0.1",
"koa-bodyparser": "4.4.1", "koa-bodyparser": "4.4.1",
"koa-favicon": "2.1.0", "koa-favicon": "2.1.0",
"koa-json-body": "5.3.0", "koa-json-body": "5.3.0",
@ -82,14 +82,13 @@
"megalodon": "workspace:*", "megalodon": "workspace:*",
"mfm-js": "0.24.0", "mfm-js": "0.24.0",
"mime-types": "2.1.35", "mime-types": "2.1.35",
"msgpackr": "^1.10.1", "msgpackr": "1.10.2",
"multer": "1.4.5-lts.1", "multer": "1.4.5-lts.1",
"nested-property": "4.0.0", "nested-property": "4.0.0",
"node-fetch": "3.3.2", "node-fetch": "3.3.2",
"nodemailer": "6.9.13", "nodemailer": "6.9.13",
"opencc-js": "^1.0.5", "opencc-js": "1.0.5",
"os-utils": "0.0.14", "otpauth": "9.2.4",
"otpauth": "^9.2.3",
"parse5": "7.1.2", "parse5": "7.1.2",
"pg": "8.11.5", "pg": "8.11.5",
"private-ip": "3.0.2", "private-ip": "3.0.2",
@ -107,33 +106,32 @@
"rndstr": "1.0.0", "rndstr": "1.0.0",
"rss-parser": "3.13.0", "rss-parser": "3.13.0",
"sanitize-html": "2.13.0", "sanitize-html": "2.13.0",
"semver": "7.6.0", "semver": "7.6.2",
"sharp": "0.33.3", "sharp": "0.33.3",
"stringz": "2.1.0", "stringz": "2.1.0",
"summaly": "2.7.0", "summaly": "2.7.0",
"syslog-pro": "1.0.0", "syslog-pro": "1.0.0",
"systeminformation": "5.22.7", "tar-stream": "3.1.7",
"tar-stream": "^3.1.7", "tesseract.js": "5.1.0",
"tesseract.js": "^5.0.5",
"tinycolor2": "1.6.0", "tinycolor2": "1.6.0",
"tmp": "0.2.3", "tmp": "0.2.3",
"typeorm": "0.3.20", "typeorm": "0.3.20",
"ulid": "2.3.0", "ulid": "2.3.0",
"uuid": "9.0.1", "uuid": "9.0.1",
"web-push": "3.6.7", "websocket": "1.0.35",
"websocket": "1.0.34",
"xev": "3.0.2" "xev": "3.0.2"
}, },
"devDependencies": { "devDependencies": {
"@swc/cli": "0.3.12", "@swc/cli": "0.3.12",
"@swc/core": "1.5.0", "@swc/core": "1.5.7",
"@types/adm-zip": "^0.5.5", "@types/adm-zip": "0.5.5",
"@types/color-convert": "^2.0.3", "@types/color-convert": "2.0.3",
"@types/content-disposition": "^0.5.8", "@types/content-disposition": "0.5.8",
"@types/escape-regexp": "0.0.3", "@types/escape-regexp": "0.0.3",
"@types/fluent-ffmpeg": "2.1.24", "@types/fluent-ffmpeg": "2.1.24",
"@types/jsdom": "21.1.6",
"@types/jsonld": "1.5.13", "@types/jsonld": "1.5.13",
"@types/jsrsasign": "10.5.13", "@types/jsrsasign": "10.5.14",
"@types/katex": "0.16.7", "@types/katex": "0.16.7",
"@types/koa": "2.15.0", "@types/koa": "2.15.0",
"@types/koa-bodyparser": "4.3.12", "@types/koa-bodyparser": "4.3.12",
@ -146,13 +144,13 @@
"@types/koa__multer": "2.0.7", "@types/koa__multer": "2.0.7",
"@types/koa__router": "12.0.4", "@types/koa__router": "12.0.4",
"@types/mocha": "10.0.6", "@types/mocha": "10.0.6",
"@types/node": "20.12.7", "@types/node": "20.12.12",
"@types/node-fetch": "2.6.11", "@types/node-fetch": "2.6.11",
"@types/nodemailer": "6.4.14", "@types/nodemailer": "6.4.15",
"@types/oauth": "0.9.4", "@types/oauth": "0.9.4",
"@types/opencc-js": "^1.0.3", "@types/opencc-js": "1.0.3",
"@types/pg": "^8.11.5", "@types/pg": "8.11.6",
"@types/probe-image-size": "^7.2.4", "@types/probe-image-size": "7.2.4",
"@types/pug": "2.0.10", "@types/pug": "2.0.10",
"@types/punycode": "2.1.4", "@types/punycode": "2.1.4",
"@types/qrcode": "1.5.5", "@types/qrcode": "1.5.5",
@ -163,7 +161,7 @@
"@types/sanitize-html": "2.11.0", "@types/sanitize-html": "2.11.0",
"@types/semver": "7.5.8", "@types/semver": "7.5.8",
"@types/sinonjs__fake-timers": "8.1.5", "@types/sinonjs__fake-timers": "8.1.5",
"@types/syslog-pro": "^1.0.3", "@types/syslog-pro": "1.0.3",
"@types/tinycolor2": "1.4.6", "@types/tinycolor2": "1.4.6",
"@types/tmp": "0.2.6", "@types/tmp": "0.2.6",
"@types/uuid": "9.0.8", "@types/uuid": "9.0.8",
@ -171,17 +169,17 @@
"@types/websocket": "1.0.10", "@types/websocket": "1.0.10",
"@types/ws": "8.5.10", "@types/ws": "8.5.10",
"cross-env": "7.0.3", "cross-env": "7.0.3",
"eslint": "^9.1.1", "eslint": "9.2.0",
"mocha": "10.4.0", "mocha": "10.4.0",
"pug": "3.0.2", "pug": "3.0.2",
"strict-event-emitter-types": "2.0.0", "strict-event-emitter-types": "2.0.0",
"swc-loader": "^0.2.6", "swc-loader": "0.2.6",
"ts-loader": "9.5.1", "ts-loader": "9.5.1",
"ts-node": "10.9.2", "ts-node": "10.9.2",
"tsconfig-paths": "4.2.0", "tsconfig-paths": "4.2.0",
"type-fest": "4.17.0", "type-fest": "4.18.2",
"typescript": "5.4.5", "typescript": "5.4.5",
"webpack": "^5.91.0", "webpack": "5.91.0",
"ws": "8.16.0" "ws": "8.17.0"
} }
} }

View file

@ -1,33 +0,0 @@
// Hand-written type declarations for the "os-utils" npm package,
// which ships no types of its own.
declare module "os-utils" {
	// Receives the used-memory figure reported by the `free` command.
	type FreeCommandCallback = (usedmem: number) => void;

	// Receives total / free / used disk space.
	type HarddriveCallback = (total: number, free: number, used: number) => void;

	// Receives the raw process-listing output as a single string.
	type GetProcessesCallback = (result: string) => void;

	// Receives a CPU usage or idle fraction (presumably 0–1; confirm
	// against the os-utils documentation).
	type CPUCallback = (perc: number) => void;

	export function platform(): NodeJS.Platform;
	export function cpuCount(): number;

	export function sysUptime(): number;
	export function processUptime(): number;

	export function freemem(): number;
	export function totalmem(): number;
	export function freememPercentage(): number;
	export function freeCommand(callback: FreeCommandCallback): void;

	export function harddrive(callback: HarddriveCallback): void;

	export function getProcesses(callback: GetProcessesCallback): void;
	// Overload: limit the listing to `nProcess` processes.
	export function getProcesses(
		nProcess: number,
		callback: GetProcessesCallback,
	): void;

	export function allLoadavg(): string;
	export function loadavg(_time?: number): number;

	export function cpuFree(callback: CPUCallback): void;
	export function cpuUsage(callback: CPUCallback): void;
}

View file

@ -8,11 +8,14 @@ import chalkTemplate from "chalk-template";
import semver from "semver"; import semver from "semver";
import Logger from "@/services/logger.js"; import Logger from "@/services/logger.js";
import type { Config } from "backend-rs"; import {
import { initializeRustLogger } from "backend-rs"; fetchMeta,
import { fetchMeta, removeOldAttestationChallenges } from "backend-rs"; initializeRustLogger,
removeOldAttestationChallenges,
showServerInfo,
type Config,
} from "backend-rs";
import { config, envOption } from "@/config.js"; import { config, envOption } from "@/config.js";
import { showMachineInfo } from "@/misc/show-machine-info.js";
import { db, initDb } from "@/db/postgre.js"; import { db, initDb } from "@/db/postgre.js";
import { inspect } from "node:util"; import { inspect } from "node:util";
@ -90,12 +93,12 @@ function greet() {
export async function masterMain() { export async function masterMain() {
// initialize app // initialize app
try { try {
initializeRustLogger();
greet(); greet();
showEnvironment(); showEnvironment();
await showMachineInfo(bootLogger); showServerInfo();
showNodejsVersion(); showNodejsVersion();
await connectDb(); await connectDb();
initializeRustLogger();
} catch (e) { } catch (e) {
bootLogger.error( bootLogger.error(
`Fatal error occurred during initialization:\n${inspect(e)}`, `Fatal error occurred during initialization:\n${inspect(e)}`,

View file

@ -1,15 +1,8 @@
import si from "systeminformation";
import Xev from "xev"; import Xev from "xev";
import * as osUtils from "os-utils"; import { fetchMeta, cpuUsage, memoryUsage } from "backend-rs";
import { fetchMeta } from "backend-rs";
const ev = new Xev(); const ev = new Xev();
const interval = 2000;
const roundCpu = (num: number) => Math.round(num * 1000) / 1000;
const round = (num: number) => Math.round(num * 10) / 10;
/** /**
* Report server stats regularly * Report server stats regularly
*/ */
@ -24,26 +17,9 @@ export default async function () {
if (!meta.enableServerMachineStats) return; if (!meta.enableServerMachineStats) return;
async function tick() { async function tick() {
const cpu = await cpuUsage();
const memStats = await mem();
const netStats = await net();
const fsStats = await fs();
const stats = { const stats = {
cpu: roundCpu(cpu), cpu: cpuUsage(),
mem: { mem: memoryUsage(),
used: round(memStats.used - memStats.buffers - memStats.cached),
active: round(memStats.active),
total: round(memStats.total),
},
net: {
rx: round(Math.max(0, netStats.rx_sec)),
tx: round(Math.max(0, netStats.tx_sec)),
},
fs: {
r: round(Math.max(0, fsStats.rIO_sec ?? 0)),
w: round(Math.max(0, fsStats.wIO_sec ?? 0)),
},
}; };
ev.emit("serverStats", stats); ev.emit("serverStats", stats);
log.unshift(stats); log.unshift(stats);
@ -52,33 +28,5 @@ export default async function () {
tick(); tick();
setInterval(tick, interval); setInterval(tick, 3000);
}
// CPU STAT
function cpuUsage(): Promise<number> {
return new Promise((res, rej) => {
osUtils.cpuUsage((cpuUsage) => {
res(cpuUsage);
});
});
}
// MEMORY STAT
async function mem() {
const data = await si.mem();
return data;
}
// NETWORK STAT
async function net() {
const iface = await si.networkInterfaceDefault();
const data = await si.networkStats(iface);
return data[0];
}
// FS STAT
async function fs() {
const data = await si.disksIO().catch(() => ({ rIO_sec: 0, wIO_sec: 0 }));
return data || { rIO_sec: 0, wIO_sec: 0 };
} }

View file

@ -19,6 +19,13 @@ export function fromHtml(html: string, hashtagNames?: string[]): string {
return appendChildren(childNodes, background).join("").trim(); return appendChildren(childNodes, background).join("").trim();
} }
/**
* We only exclude text containing asterisks, since the other marks can almost be considered intentionally used.
*/
function escapeAmbiguousMfmMarks(text: string) {
return text.includes("*") ? `<plain>${text}</plain>` : text;
}
/** /**
* Get only the text, ignoring all formatting inside * Get only the text, ignoring all formatting inside
* @param node * @param node
@ -62,7 +69,7 @@ export function fromHtml(html: string, hashtagNames?: string[]): string {
background = "", background = "",
): (string | string[])[] { ): (string | string[])[] {
if (treeAdapter.isTextNode(node)) { if (treeAdapter.isTextNode(node)) {
return [node.value]; return [escapeAmbiguousMfmMarks(node.value)];
} }
// Skip comment or document type node // Skip comment or document type node

View file

@ -1,21 +1,17 @@
import { type HTMLElement, Window } from "happy-dom"; import { JSDOM } from "jsdom";
import type * as mfm from "mfm-js"; import type * as mfm from "mfm-js";
import katex from "katex"; import katex from "katex";
import { config } from "@/config.js"; import { config } from "@/config.js";
import { intersperse } from "@/prelude/array.js"; import { intersperse } from "@/prelude/array.js";
import type { IMentionedRemoteUsers } from "@/models/entities/note.js"; import type { IMentionedRemoteUsers } from "@/models/entities/note.js";
function toMathMl(code: string, displayMode: boolean): HTMLElement | null { function toMathMl(code: string, displayMode: boolean): MathMLElement | null {
const { window } = new Window(); const rendered = katex.renderToString(code, {
const document = window.document;
document.body.innerHTML = katex.renderToString(code, {
throwOnError: false, throwOnError: false,
output: "mathml", output: "mathml",
displayMode, displayMode,
}); });
return JSDOM.fragment(rendered).querySelector("math");
return document.querySelector("math");
} }
export function toHtml( export function toHtml(
@ -26,7 +22,7 @@ export function toHtml(
return null; return null;
} }
const { window } = new Window(); const { window } = new JSDOM("");
const doc = window.document; const doc = window.document;

View file

@ -0,0 +1,65 @@
import type { MigrationInterface, QueryRunner } from "typeorm";
export class DropUnusedIndexes1714643926317 implements MigrationInterface {
	// [index name, table, indexed column] for every unused index — enough
	// information both to drop the index and to recreate it on revert.
	private static readonly unusedIndexes: readonly [string, string, string][] = [
		["IDX_01f4581f114e0ebd2bbb876f0b", "note_reaction", "createdAt"],
		["IDX_0610ebcfcfb4a18441a9bcdab2", "poll", "userId"],
		["IDX_25dfc71b0369b003a4cd434d0b", "note", "attachedFileTypes"],
		["IDX_2710a55f826ee236ea1a62698f", "hashtag", "mentionedUsersCount"],
		["IDX_4c02d38a976c3ae132228c6fce", "hashtag", "mentionedRemoteUsersCount"],
		["IDX_51c063b6a133a9cb87145450f5", "note", "fileIds"],
		["IDX_54ebcb6d27222913b908d56fd8", "note", "mentions"],
		["IDX_7fa20a12319c7f6dc3aed98c0a", "poll", "userHost"],
		["IDX_88937d94d7443d9a99a76fa5c0", "note", "tags"],
		["IDX_b11a5e627c41d4dc3170f1d370", "notification", "createdAt"],
		["IDX_c8dfad3b72196dd1d6b5db168a", "drive_file", "createdAt"],
		["IDX_d57f9030cd3af7f63ffb1c267c", "hashtag", "attachedUsersCount"],
		["IDX_e5848eac4940934e23dbc17581", "drive_file", "uri"],
		["IDX_fa99d777623947a5b05f394cae", "user", "tags"],
	];

	/** Drops each unused index, in the order listed above. */
	public async up(queryRunner: QueryRunner): Promise<void> {
		for (const [name] of DropUnusedIndexes1714643926317.unusedIndexes) {
			await queryRunner.query(`DROP INDEX "${name}"`);
		}
	}

	/** Recreates every dropped index so the migration can be reverted. */
	public async down(queryRunner: QueryRunner): Promise<void> {
		for (const [
			name,
			table,
			column,
		] of DropUnusedIndexes1714643926317.unusedIndexes) {
			await queryRunner.query(
				`CREATE INDEX "${name}" ON "${table}" ("${column}")`,
			);
		}
	}
}

View file

@ -0,0 +1,13 @@
import type { MigrationInterface, QueryRunner } from "typeorm";
export class AddUserProfileLanguage1714888400293 implements MigrationInterface {
	/** Adds a nullable varchar(32) "lang" column to the "user_profile" table. */
	async up(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(
			`ALTER TABLE "user_profile" ADD COLUMN "lang" character varying(32)`,
		);
	}

	/** Reverts the migration by dropping the "lang" column. */
	async down(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(`ALTER TABLE "user_profile" DROP COLUMN "lang"`);
	}
}

View file

@ -7,10 +7,10 @@ import chalk from "chalk";
import Logger from "@/services/logger.js"; import Logger from "@/services/logger.js";
import IPCIDR from "ip-cidr"; import IPCIDR from "ip-cidr";
import PrivateIp from "private-ip"; import PrivateIp from "private-ip";
import { isValidUrl } from "./is-valid-url.js"; import { isSafeUrl } from "backend-rs";
export async function downloadUrl(url: string, path: string): Promise<void> { export async function downloadUrl(url: string, path: string): Promise<void> {
if (!isValidUrl(url)) { if (!isSafeUrl(url)) {
throw new StatusError("Invalid URL", 400); throw new StatusError("Invalid URL", 400);
} }
@ -43,8 +43,8 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
limit: 0, limit: 0,
}, },
}) })
.on("redirect", (res: Got.Response, opts: Got.NormalizedOptions) => { .on("redirect", (_res: Got.Response, opts: Got.NormalizedOptions) => {
if (!isValidUrl(opts.url)) { if (!isSafeUrl(opts.url)) {
downloadLogger.warn(`Invalid URL: ${opts.url}`); downloadLogger.warn(`Invalid URL: ${opts.url}`);
req.destroy(); req.destroy();
} }

View file

@ -5,7 +5,7 @@ import CacheableLookup from "cacheable-lookup";
import fetch, { type RequestRedirect } from "node-fetch"; import fetch, { type RequestRedirect } from "node-fetch";
import { HttpProxyAgent, HttpsProxyAgent } from "hpagent"; import { HttpProxyAgent, HttpsProxyAgent } from "hpagent";
import { config } from "@/config.js"; import { config } from "@/config.js";
import { isValidUrl } from "./is-valid-url.js"; import { isSafeUrl } from "backend-rs";
export async function getJson( export async function getJson(
url: string, url: string,
@ -60,7 +60,7 @@ export async function getResponse(args: {
size?: number; size?: number;
redirect?: RequestRedirect; redirect?: RequestRedirect;
}) { }) {
if (!isValidUrl(args.url)) { if (!isSafeUrl(args.url)) {
throw new StatusError("Invalid URL", 400); throw new StatusError("Invalid URL", 400);
} }
@ -83,7 +83,7 @@ export async function getResponse(args: {
}); });
if (args.redirect === "manual" && [301, 302, 307, 308].includes(res.status)) { if (args.redirect === "manual" && [301, 302, 307, 308].includes(res.status)) {
if (!isValidUrl(res.url)) { if (!isSafeUrl(res.url)) {
throw new StatusError("Invalid URL", 400); throw new StatusError("Invalid URL", 400);
} }
return res; return res;

View file

@ -7,20 +7,16 @@ import probeImageSize from "probe-image-size";
import isSvg from "is-svg"; import isSvg from "is-svg";
import sharp from "sharp"; import sharp from "sharp";
import { encode } from "blurhash"; import { encode } from "blurhash";
import { inspect } from "node:util";
export type FileInfo = { type FileInfo = {
size: number; size: number;
md5: string; md5: string;
type: {
mime: string; mime: string;
ext: string | null; fileExtension: string | null;
};
width?: number; width?: number;
height?: number; height?: number;
orientation?: number; orientation?: number;
blurhash?: string; blurhash?: string;
warnings: string[];
}; };
const TYPE_OCTET_STREAM = { const TYPE_OCTET_STREAM = {
@ -37,8 +33,6 @@ const TYPE_SVG = {
* Get file information * Get file information
*/ */
export async function getFileInfo(path: string): Promise<FileInfo> { export async function getFileInfo(path: string): Promise<FileInfo> {
const warnings = [] as string[];
const size = await getFileSize(path); const size = await getFileSize(path);
const md5 = await calcHash(path); const md5 = await calcHash(path);
@ -63,14 +57,12 @@ export async function getFileInfo(path: string): Promise<FileInfo> {
"image/avif", "image/avif",
].includes(type.mime) ].includes(type.mime)
) { ) {
const imageSize = await detectImageSize(path).catch((e) => { const imageSize = await detectImageSize(path).catch((_) => {
warnings.push(`detectImageSize failed:\n${inspect(e)}`);
return undefined; return undefined;
}); });
// うまく判定できない画像は octet-stream にする // うまく判定できない画像は octet-stream にする
if (!imageSize) { if (!imageSize) {
warnings.push("cannot detect image dimensions");
type = TYPE_OCTET_STREAM; type = TYPE_OCTET_STREAM;
} else if (imageSize.wUnits === "px") { } else if (imageSize.wUnits === "px") {
width = imageSize.width; width = imageSize.width;
@ -79,11 +71,8 @@ export async function getFileInfo(path: string): Promise<FileInfo> {
// 制限を超えている画像は octet-stream にする // 制限を超えている画像は octet-stream にする
if (imageSize.width > 16383 || imageSize.height > 16383) { if (imageSize.width > 16383 || imageSize.height > 16383) {
warnings.push("image dimensions exceeds limits");
type = TYPE_OCTET_STREAM; type = TYPE_OCTET_STREAM;
} }
} else {
warnings.push(`unsupported unit type: ${imageSize.wUnits}`);
} }
} }
@ -100,8 +89,7 @@ export async function getFileInfo(path: string): Promise<FileInfo> {
"image/avif", "image/avif",
].includes(type.mime) ].includes(type.mime)
) { ) {
blurhash = await getBlurhash(path).catch((e) => { blurhash = await getBlurhash(path).catch((_) => {
warnings.push(`getBlurhash failed:\n${inspect(e)}`);
return undefined; return undefined;
}); });
} }
@ -109,12 +97,12 @@ export async function getFileInfo(path: string): Promise<FileInfo> {
return { return {
size, size,
md5, md5,
type, mime: type.mime,
fileExtension: type.ext,
width, width,
height, height,
orientation, orientation,
blurhash, blurhash,
warnings,
}; };
} }

View file

@ -1,20 +0,0 @@
/**
 * Checks whether a URL is acceptable for outbound requests:
 * http(s) scheme only, hostname must not be "unix", and only the
 * default ports 80/443 (or no explicit port) are allowed.
 *
 * Outside production the check is skipped and every URL is accepted.
 */
export function isValidUrl(url: string | URL | undefined): boolean {
	// Development/test environments accept everything.
	if (process.env.NODE_ENV !== "production") return true;
	if (url == null) return false;

	let parsed: URL;
	try {
		parsed = url instanceof URL ? url : new URL(url);
	} catch {
		// Unparsable string → not a valid URL.
		return false;
	}

	const isHttpScheme =
		parsed.protocol === "http:" || parsed.protocol === "https:";
	if (!isHttpScheme || parsed.hostname === "unix") {
		return false;
	}

	// An empty port means the scheme's default; otherwise only 80/443 pass.
	return parsed.port === "" || parsed.port === "80" || parsed.port === "443";
}

View file

@ -1,17 +0,0 @@
import * as os from "node:os";
import sysUtils from "systeminformation";
import type Logger from "@/services/logger.js";
/**
 * Logs the host machine's information (hostname, platform/architecture,
 * CPU core count, and total/available memory) at debug level.
 *
 * @param parentLogger logger under which a "machine" sub-logger is created
 */
export async function showMachineInfo(parentLogger: Logger) {
	const logger = parentLogger.createSubLogger("machine");
	logger.debug(`Hostname: ${os.hostname()}`);
	logger.debug(`Platform: ${process.platform} Arch: ${process.arch}`);
	const mem = await sysUtils.mem();
	// Convert bytes to GiB with one decimal place (labelled "GB" in the log).
	const totalmem = (mem.total / 1024 / 1024 / 1024).toFixed(1);
	const availmem = (mem.available / 1024 / 1024 / 1024).toFixed(1);
	logger.debug(
		`CPU: ${
			os.cpus().length
		} core MEM: ${totalmem}GB (available: ${availmem}GB)`,
	);
}

View file

@ -1,8 +1,7 @@
import { Brackets } from "typeorm"; import { Brackets } from "typeorm";
import { isBlockedServer } from "backend-rs"; import { isBlockedServer, DAY } from "backend-rs";
import { Instances } from "@/models/index.js"; import { Instances } from "@/models/index.js";
import type { Instance } from "@/models/entities/instance.js"; import type { Instance } from "@/models/entities/instance.js";
import { DAY } from "backend-rs";
// Threshold from last contact after which an instance will be considered // Threshold from last contact after which an instance will be considered
// "dead" and should no longer get activities delivered to it. // "dead" and should no longer get activities delivered to it.

View file

@ -23,7 +23,6 @@ export class DriveFile {
@PrimaryColumn(id()) @PrimaryColumn(id())
public id: string; public id: string;
@Index()
@Column("timestamp without time zone", { @Column("timestamp without time zone", {
comment: "The created date of the DriveFile.", comment: "The created date of the DriveFile.",
}) })
@ -147,7 +146,6 @@ export class DriveFile {
}) })
public webpublicAccessKey: string | null; public webpublicAccessKey: string | null;
@Index()
@Column("varchar", { @Column("varchar", {
length: 512, length: 512,
nullable: true, nullable: true,

View file

@ -19,7 +19,6 @@ export class Hashtag {
}) })
public mentionedUserIds: User["id"][]; public mentionedUserIds: User["id"][];
@Index()
@Column("integer", { @Column("integer", {
default: 0, default: 0,
}) })
@ -43,7 +42,6 @@ export class Hashtag {
}) })
public mentionedRemoteUserIds: User["id"][]; public mentionedRemoteUserIds: User["id"][];
@Index()
@Column("integer", { @Column("integer", {
default: 0, default: 0,
}) })
@ -55,7 +53,6 @@ export class Hashtag {
}) })
public attachedUserIds: User["id"][]; public attachedUserIds: User["id"][];
@Index()
@Column("integer", { @Column("integer", {
default: 0, default: 0,
}) })

View file

@ -17,7 +17,6 @@ export class NoteReaction {
@PrimaryColumn(id()) @PrimaryColumn(id())
public id: string; public id: string;
@Index()
@Column("timestamp without time zone", { @Column("timestamp without time zone", {
comment: "The created date of the NoteReaction.", comment: "The created date of the NoteReaction.",
}) })

View file

@ -139,7 +139,6 @@ export class Note {
// FIXME: file id is not removed from this array even if the file is deleted // FIXME: file id is not removed from this array even if the file is deleted
// TODO: drop this column and use note_files // TODO: drop this column and use note_files
@Index()
@Column({ @Column({
...id(), ...id(),
array: true, array: true,
@ -147,7 +146,6 @@ export class Note {
}) })
public fileIds: DriveFile["id"][]; public fileIds: DriveFile["id"][];
@Index()
@Column("varchar", { @Column("varchar", {
length: 256, length: 256,
array: true, array: true,
@ -163,7 +161,6 @@ export class Note {
}) })
public visibleUserIds: User["id"][]; public visibleUserIds: User["id"][];
@Index()
@Column({ @Column({
...id(), ...id(),
array: true, array: true,
@ -184,7 +181,6 @@ export class Note {
}) })
public emojis: string[]; public emojis: string[];
@Index()
@Column("varchar", { @Column("varchar", {
length: 128, length: 128,
array: true, array: true,

View file

@ -20,7 +20,6 @@ export class Notification {
@PrimaryColumn(id()) @PrimaryColumn(id())
public id: string; public id: string;
@Index()
@Column("timestamp without time zone", { @Column("timestamp without time zone", {
comment: "The created date of the Notification.", comment: "The created date of the Notification.",
}) })

View file

@ -44,14 +44,12 @@ export class Poll {
}) })
public noteVisibility: (typeof noteVisibilities)[number]; public noteVisibility: (typeof noteVisibilities)[number];
@Index()
@Column({ @Column({
...id(), ...id(),
comment: "[Denormalized]", comment: "[Denormalized]",
}) })
public userId: User["id"]; public userId: User["id"];
@Index()
@Column("varchar", { @Column("varchar", {
length: 512, length: 512,
nullable: true, nullable: true,

View file

@ -50,6 +50,12 @@ export class UserProfile {
verified?: boolean; verified?: boolean;
}[]; }[];
@Column("varchar", {
length: 32,
nullable: true,
})
public lang: string | null;
@Column("varchar", { @Column("varchar", {
length: 512, length: 512,
nullable: true, nullable: true,

View file

@ -116,7 +116,6 @@ export class User {
}) })
public bannerId: DriveFile["id"] | null; public bannerId: DriveFile["id"] | null;
@Index()
@Column("varchar", { @Column("varchar", {
length: 128, length: 128,
array: true, array: true,

View file

@ -512,6 +512,7 @@ export const UserRepository = db.getRepository(User).extend({
description: profile!.description, description: profile!.description,
location: profile!.location, location: profile!.location,
birthday: profile!.birthday, birthday: profile!.birthday,
lang: profile!.lang,
fields: profile!.fields, fields: profile!.fields,
followersCount: followersCount ?? null, followersCount: followersCount ?? null,
followingCount: followingCount ?? null, followingCount: followingCount ?? null,

View file

@ -1,7 +1,7 @@
// https://gist.github.com/tkrotoff/a6baf96eb6b61b445a9142e5555511a0 // https://gist.github.com/tkrotoff/a6baf96eb6b61b445a9142e5555511a0
import type { Primitive } from "type-fest"; import type { Primitive } from "type-fest";
type NullToUndefined<T> = T extends null export type NullToUndefined<T> = T extends null
? undefined ? undefined
: T extends Primitive | Function | Date | RegExp : T extends Primitive | Function | Date | RegExp
? T ? T
@ -15,7 +15,7 @@ type NullToUndefined<T> = T extends null
? { [K in keyof T]: NullToUndefined<T[K]> } ? { [K in keyof T]: NullToUndefined<T[K]> }
: unknown; : unknown;
type UndefinedToNull<T> = T extends undefined export type UndefinedToNull<T> = T extends undefined
? null ? null
: T extends Primitive | Function | Date | RegExp : T extends Primitive | Function | Date | RegExp
? T ? T
@ -47,6 +47,16 @@ function _nullToUndefined<T>(obj: T): NullToUndefined<T> {
return obj as any; return obj as any;
} }
/**
* Recursively converts all null values to undefined.
*
* @param obj object to convert
* @returns a copy of the object with all its null values converted to undefined
*/
export function fromRustObject<T>(obj: T) {
return _nullToUndefined(structuredClone(obj));
}
function _undefinedToNull<T>(obj: T): UndefinedToNull<T> { function _undefinedToNull<T>(obj: T): UndefinedToNull<T> {
if (obj === undefined) { if (obj === undefined) {
return null as any; return null as any;
@ -71,6 +81,6 @@ function _undefinedToNull<T>(obj: T): UndefinedToNull<T> {
* @param obj object to convert * @param obj object to convert
* @returns a copy of the object with all its undefined values converted to null * @returns a copy of the object with all its undefined values converted to null
*/ */
export function undefinedToNull<T>(obj: T) { export function toRustObject<T>(obj: T) {
return _undefinedToNull(structuredClone(obj)); return _undefinedToNull(structuredClone(obj));
} }

View file

@ -5,8 +5,12 @@ import perform from "@/remote/activitypub/perform.js";
import Logger from "@/services/logger.js"; import Logger from "@/services/logger.js";
import { registerOrFetchInstanceDoc } from "@/services/register-or-fetch-instance-doc.js"; import { registerOrFetchInstanceDoc } from "@/services/register-or-fetch-instance-doc.js";
import { Instances } from "@/models/index.js"; import { Instances } from "@/models/index.js";
import { isAllowedServer, isBlockedServer } from "backend-rs"; import {
import { toPuny, extractHost } from "backend-rs"; extractHost,
isAllowedServer,
isBlockedServer,
toPuny,
} from "backend-rs";
import { getApId } from "@/remote/activitypub/type.js"; import { getApId } from "@/remote/activitypub/type.js";
import { fetchInstanceMetadata } from "@/services/fetch-instance-metadata.js"; import { fetchInstanceMetadata } from "@/services/fetch-instance-metadata.js";
import type { InboxJobData } from "../types.js"; import type { InboxJobData } from "../types.js";
@ -24,7 +28,7 @@ const logger = new Logger("inbox");
// Processing when an activity arrives in the user's inbox // Processing when an activity arrives in the user's inbox
export default async (job: Bull.Job<InboxJobData>): Promise<string> => { export default async (job: Bull.Job<InboxJobData>): Promise<string> => {
const signature = job.data.signature; // HTTP-signature const signature = job.data.signature; // HTTP-signature
const activity = job.data.activity; let activity = job.data.activity;
//#region Log //#region Log
const info = Object.assign({}, activity) as any; const info = Object.assign({}, activity) as any;
@ -149,6 +153,8 @@ export default async (job: Bull.Job<InboxJobData>): Promise<string> => {
return "skip: LD-Signatureの検証に失敗しました"; return "skip: LD-Signatureの検証に失敗しました";
} }
activity = await ldSignature.compactToWellKnown(activity);
// もう一度actorチェック // もう一度actorチェック
if (authUser.user.uri !== activity.actor) { if (authUser.user.uri !== activity.actor) {
return `skip: LD-Signature user(${authUser.user.uri}) !== activity.actor(${activity.actor})`; return `skip: LD-Signature user(${authUser.user.uri}) !== activity.actor(${activity.actor})`;

View file

@ -1,11 +1,15 @@
import { URL } from "url"; import { URL } from "node:url";
import httpSignature, { type IParsedSignature } from "@peertube/http-signature"; import httpSignature, { type IParsedSignature } from "@peertube/http-signature";
import { config } from "@/config.js"; import { config } from "@/config.js";
import { fetchMeta, isAllowedServer, isBlockedServer } from "backend-rs"; import {
import { toPuny } from "backend-rs"; fetchMeta,
isAllowedServer,
isBlockedServer,
toPuny,
} from "backend-rs";
import DbResolver from "@/remote/activitypub/db-resolver.js"; import DbResolver from "@/remote/activitypub/db-resolver.js";
import { getApId } from "@/remote/activitypub/type.js"; import { getApId } from "@/remote/activitypub/type.js";
import type { IncomingMessage } from "http"; import type { IncomingMessage } from "node:http";
import type { CacheableRemoteUser } from "@/models/entities/user.js"; import type { CacheableRemoteUser } from "@/models/entities/user.js";
import type { UserPublickey } from "@/models/entities/user-publickey.js"; import type { UserPublickey } from "@/models/entities/user-publickey.js";
import { verify } from "node:crypto"; import { verify } from "node:crypto";

View file

@ -5,12 +5,11 @@ import type { IAnnounce } from "../../type.js";
import { getApId } from "../../type.js"; import { getApId } from "../../type.js";
import { fetchNote, resolveNote } from "../../models/note.js"; import { fetchNote, resolveNote } from "../../models/note.js";
import { apLogger } from "../../logger.js"; import { apLogger } from "../../logger.js";
import { extractHost } from "backend-rs"; import { extractHost, isBlockedServer } from "backend-rs";
import { getApLock } from "@/misc/app-lock.js"; import { getApLock } from "@/misc/app-lock.js";
import { parseAudience } from "../../audience.js"; import { parseAudience } from "../../audience.js";
import { StatusError } from "@/misc/fetch.js"; import { StatusError } from "@/misc/fetch.js";
import { Notes } from "@/models/index.js"; import { Notes } from "@/models/index.js";
import { isBlockedServer } from "backend-rs";
import { inspect } from "node:util"; import { inspect } from "node:util";
/** /**

View file

@ -518,6 +518,54 @@ const activitystreams = {
}, },
}; };
export const WellKnownContext = {
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
// as non-standards
manuallyApprovesFollowers: "as:manuallyApprovesFollowers",
movedTo: {
"@id": "https://www.w3.org/ns/activitystreams#movedTo",
"@type": "@id",
},
movedToUri: "as:movedTo",
sensitive: "as:sensitive",
Hashtag: "as:Hashtag",
quoteUri: "fedibird:quoteUri",
quoteUrl: "as:quoteUrl",
// Mastodon
toot: "http://joinmastodon.org/ns#",
Emoji: "toot:Emoji",
featured: "toot:featured",
discoverable: "toot:discoverable",
indexable: "toot:indexable",
// schema
schema: "http://schema.org#",
PropertyValue: "schema:PropertyValue",
value: "schema:value",
// Firefish
firefish: "https://firefish.dev/ns#",
speakAsCat: "firefish:speakAsCat",
// Misskey
misskey: "https://misskey-hub.net/ns#",
_misskey_talk: "misskey:_misskey_talk",
_misskey_reaction: "misskey:_misskey_reaction",
_misskey_votes: "misskey:_misskey_votes",
_misskey_summary: "misskey:_misskey_summary",
isCat: "misskey:isCat",
// Fedibird
fedibird: "http://fedibird.com/ns#",
// vcard
vcard: "http://www.w3.org/2006/vcard/ns#",
// litepub
litepub: "http://litepub.social/ns#",
ChatMessage: "litepub:ChatMessage",
directMessage: "litepub:directMessage",
},
],
};
export const CONTEXTS: Record<string, unknown> = { export const CONTEXTS: Record<string, unknown> = {
"https://w3id.org/identity/v1": id_v1, "https://w3id.org/identity/v1": id_v1,
"https://w3id.org/security/v1": security_v1, "https://w3id.org/security/v1": security_v1,

View file

@ -1,6 +1,6 @@
import * as crypto from "node:crypto"; import * as crypto from "node:crypto";
import jsonld from "jsonld"; import jsonld from "jsonld";
import { CONTEXTS } from "./contexts.js"; import { CONTEXTS, WellKnownContext } from "./contexts.js";
import fetch from "node-fetch"; import fetch from "node-fetch";
import { httpAgent, httpsAgent } from "@/misc/fetch.js"; import { httpAgent, httpsAgent } from "@/misc/fetch.js";
@ -89,6 +89,13 @@ export class LdSignature {
}); });
} }
public async compactToWellKnown(data: any): Promise<any> {
const options = { documentLoader: this.getLoader() };
const context = WellKnownContext as any;
delete data["signature"];
return await jsonld.compact(data, context, options);
}
private getLoader() { private getLoader() {
return async (url: string): Promise<any> => { return async (url: string): Promise<any> => {
if (!url.match("^https?://")) throw new Error(`Invalid URL ${url}`); if (!url.match("^https?://")) throw new Error(`Invalid URL ${url}`);

View file

@ -16,7 +16,9 @@ import type { DriveFile } from "@/models/entities/drive-file.js";
import { import {
type ImageSize, type ImageSize,
extractHost, extractHost,
genId,
getImageSizeFromUrl, getImageSizeFromUrl,
isBlockedServer,
isSameOrigin, isSameOrigin,
toPuny, toPuny,
} from "backend-rs"; } from "backend-rs";
@ -39,7 +41,6 @@ import {
getApType, getApType,
} from "../type.js"; } from "../type.js";
import type { Emoji } from "@/models/entities/emoji.js"; import type { Emoji } from "@/models/entities/emoji.js";
import { genId, isBlockedServer } from "backend-rs";
import { getApLock } from "@/misc/app-lock.js"; import { getApLock } from "@/misc/app-lock.js";
import { createMessage } from "@/services/messages/create.js"; import { createMessage } from "@/services/messages/create.js";
import { parseAudience } from "../audience.js"; import { parseAudience } from "../audience.js";

View file

@ -16,10 +16,9 @@ import type { IRemoteUser, CacheableUser } from "@/models/entities/user.js";
import { User } from "@/models/entities/user.js"; import { User } from "@/models/entities/user.js";
import type { Emoji } from "@/models/entities/emoji.js"; import type { Emoji } from "@/models/entities/emoji.js";
import { UserNotePining } from "@/models/entities/user-note-pining.js"; import { UserNotePining } from "@/models/entities/user-note-pining.js";
import { genId } from "backend-rs"; import { genId, isSameOrigin, toPuny } from "backend-rs";
import { UserPublickey } from "@/models/entities/user-publickey.js"; import { UserPublickey } from "@/models/entities/user-publickey.js";
import { isDuplicateKeyValueError } from "@/misc/is-duplicate-key-value-error.js"; import { isDuplicateKeyValueError } from "@/misc/is-duplicate-key-value-error.js";
import { isSameOrigin, toPuny } from "backend-rs";
import { UserProfile } from "@/models/entities/user-profile.js"; import { UserProfile } from "@/models/entities/user-profile.js";
import { toArray } from "@/prelude/array.js"; import { toArray } from "@/prelude/array.js";
import { fetchInstanceMetadata } from "@/services/fetch-instance-metadata.js"; import { fetchInstanceMetadata } from "@/services/fetch-instance-metadata.js";

View file

@ -4,6 +4,7 @@ import { getUserKeypair } from "@/misc/keypair-store.js";
import type { User } from "@/models/entities/user.js"; import type { User } from "@/models/entities/user.js";
import { LdSignature } from "../misc/ld-signature.js"; import { LdSignature } from "../misc/ld-signature.js";
import type { IActivity } from "../type.js"; import type { IActivity } from "../type.js";
import { WellKnownContext } from "@/remote/activitypub/misc/contexts.js";
export const renderActivity = (x: any): IActivity | null => { export const renderActivity = (x: any): IActivity | null => {
if (x == null) return null; if (x == null) return null;
@ -12,52 +13,7 @@ export const renderActivity = (x: any): IActivity | null => {
x.id = `${config.url}/${uuid()}`; x.id = `${config.url}/${uuid()}`;
} }
return Object.assign( return Object.assign({}, WellKnownContext, x);
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
// as non-standards
manuallyApprovesFollowers: "as:manuallyApprovesFollowers",
movedToUri: "as:movedTo",
sensitive: "as:sensitive",
Hashtag: "as:Hashtag",
quoteUri: "fedibird:quoteUri",
quoteUrl: "as:quoteUrl",
// Mastodon
toot: "http://joinmastodon.org/ns#",
Emoji: "toot:Emoji",
featured: "toot:featured",
discoverable: "toot:discoverable",
indexable: "toot:indexable",
// schema
schema: "http://schema.org#",
PropertyValue: "schema:PropertyValue",
value: "schema:value",
// Firefish
firefish: "https://firefish.dev/ns#",
speakAsCat: "firefish:speakAsCat",
// Misskey
misskey: "https://misskey-hub.net/ns#",
_misskey_talk: "misskey:_misskey_talk",
_misskey_reaction: "misskey:_misskey_reaction",
_misskey_votes: "misskey:_misskey_votes",
_misskey_summary: "misskey:_misskey_summary",
isCat: "misskey:isCat",
// Fedibird
fedibird: "http://fedibird.com/ns#",
// vcard
vcard: "http://www.w3.org/2006/vcard/ns#",
// ChatMessage
litepub: "http://litepub.social/ns#",
ChatMessage: "litepub:ChatMessage",
directMessage: "litepub:directMessage",
},
],
},
x,
);
}; };
export const attachLdSignature = async ( export const attachLdSignature = async (

View file

@ -5,8 +5,8 @@ import { StatusError, getResponse } from "@/misc/fetch.js";
import { createSignedPost, createSignedGet } from "./ap-request.js"; import { createSignedPost, createSignedGet } from "./ap-request.js";
import type { Response } from "node-fetch"; import type { Response } from "node-fetch";
import type { IObject } from "./type.js"; import type { IObject } from "./type.js";
import { isValidUrl } from "@/misc/is-valid-url.js";
import { apLogger } from "@/remote/activitypub/logger.js"; import { apLogger } from "@/remote/activitypub/logger.js";
import { isSafeUrl } from "backend-rs";
export default async (user: { id: User["id"] }, url: string, object: any) => { export default async (user: { id: User["id"] }, url: string, object: any) => {
const body = JSON.stringify(object); const body = JSON.stringify(object);
@ -44,7 +44,7 @@ export async function apGet(
user?: ILocalUser, user?: ILocalUser,
redirects: boolean = true, redirects: boolean = true,
): Promise<{ finalUrl: string; content: IObject }> { ): Promise<{ finalUrl: string; content: IObject }> {
if (!isValidUrl(url)) { if (!isSafeUrl(url)) {
throw new StatusError("Invalid URL", 400); throw new StatusError("Invalid URL", 400);
} }

View file

@ -1,3 +0,0 @@
import { secureRndstr } from "backend-rs";
export default () => secureRndstr(16);

View file

@ -3,10 +3,11 @@ import {
publishToChatStream, publishToChatStream,
publishToGroupChatStream, publishToGroupChatStream,
publishToChatIndexStream, publishToChatIndexStream,
sendPushNotification,
ChatEvent, ChatEvent,
ChatIndexEvent, ChatIndexEvent,
PushNotificationKind,
} from "backend-rs"; } from "backend-rs";
import { pushNotification } from "@/services/push-notification.js";
import type { User, IRemoteUser } from "@/models/entities/user.js"; import type { User, IRemoteUser } from "@/models/entities/user.js";
import type { MessagingMessage } from "@/models/entities/messaging-message.js"; import type { MessagingMessage } from "@/models/entities/messaging-message.js";
import { MessagingMessages, UserGroupJoinings, Users } from "@/models/index.js"; import { MessagingMessages, UserGroupJoinings, Users } from "@/models/index.js";
@ -62,20 +63,19 @@ export async function readUserMessagingMessage(
if (!(await Users.getHasUnreadMessagingMessage(userId))) { if (!(await Users.getHasUnreadMessagingMessage(userId))) {
// 全ての(いままで未読だった)自分宛てのメッセージを(これで)読みましたよというイベントを発行 // 全ての(いままで未読だった)自分宛てのメッセージを(これで)読みましたよというイベントを発行
publishMainStream(userId, "readAllMessagingMessages"); publishMainStream(userId, "readAllMessagingMessages");
pushNotification(userId, "readAllMessagingMessages", undefined); sendPushNotification(userId, PushNotificationKind.ReadAllChats, {});
} else { } else {
// そのユーザーとのメッセージで未読がなければイベント発行 // そのユーザーとのメッセージで未読がなければイベント発行
const count = await MessagingMessages.count({ const hasUnread = await MessagingMessages.exists({
where: { where: {
userId: otherpartyId, userId: otherpartyId,
recipientId: userId, recipientId: userId,
isRead: false, isRead: false,
}, },
take: 1,
}); });
if (!count) { if (!hasUnread) {
pushNotification(userId, "readAllMessagingMessagesOfARoom", { sendPushNotification(userId, PushNotificationKind.ReadAllChatsInTheRoom, {
userId: otherpartyId, userId: otherpartyId,
}); });
} }
@ -137,10 +137,10 @@ export async function readGroupMessagingMessage(
if (!(await Users.getHasUnreadMessagingMessage(userId))) { if (!(await Users.getHasUnreadMessagingMessage(userId))) {
// 全ての(いままで未読だった)自分宛てのメッセージを(これで)読みましたよというイベントを発行 // 全ての(いままで未読だった)自分宛てのメッセージを(これで)読みましたよというイベントを発行
publishMainStream(userId, "readAllMessagingMessages"); publishMainStream(userId, "readAllMessagingMessages");
pushNotification(userId, "readAllMessagingMessages", undefined); sendPushNotification(userId, PushNotificationKind.ReadAllChats, {});
} else { } else {
// そのグループにおいて未読がなければイベント発行 // そのグループにおいて未読がなければイベント発行
const unreadExist = await MessagingMessages.createQueryBuilder("message") const hasUnread = await MessagingMessages.createQueryBuilder("message")
.where("message.groupId = :groupId", { groupId: groupId }) .where("message.groupId = :groupId", { groupId: groupId })
.andWhere("message.userId != :userId", { userId: userId }) .andWhere("message.userId != :userId", { userId: userId })
.andWhere("NOT (:userId = ANY(message.reads))", { userId: userId }) .andWhere("NOT (:userId = ANY(message.reads))", { userId: userId })
@ -150,8 +150,10 @@ export async function readGroupMessagingMessage(
.getOne() .getOne()
.then((x) => x != null); .then((x) => x != null);
if (!unreadExist) { if (!hasUnread) {
pushNotification(userId, "readAllMessagingMessagesOfARoom", { groupId }); sendPushNotification(userId, PushNotificationKind.ReadAllChatsInTheRoom, {
groupId,
});
} }
} }
} }

View file

@ -1,6 +1,6 @@
import { In } from "typeorm"; import { In } from "typeorm";
import { publishMainStream } from "@/services/stream.js"; import { publishMainStream } from "@/services/stream.js";
import { pushNotification } from "@/services/push-notification.js"; import { sendPushNotification, PushNotificationKind } from "backend-rs";
import type { User } from "@/models/entities/user.js"; import type { User } from "@/models/entities/user.js";
import type { Notification } from "@/models/entities/notification.js"; import type { Notification } from "@/models/entities/notification.js";
import { Notifications, Users } from "@/models/index.js"; import { Notifications, Users } from "@/models/index.js";
@ -47,7 +47,11 @@ export async function readNotificationByQuery(
function postReadAllNotifications(userId: User["id"]) { function postReadAllNotifications(userId: User["id"]) {
publishMainStream(userId, "readAllNotifications"); publishMainStream(userId, "readAllNotifications");
return pushNotification(userId, "readAllNotifications", undefined); return sendPushNotification(
userId,
PushNotificationKind.ReadAllNotifications,
{},
);
} }
function postReadNotifications( function postReadNotifications(
@ -55,5 +59,7 @@ function postReadNotifications(
notificationIds: Notification["id"][], notificationIds: Notification["id"][],
) { ) {
publishMainStream(userId, "readNotifications", notificationIds); publishMainStream(userId, "readNotifications", notificationIds);
return pushNotification(userId, "readNotifications", { notificationIds }); return sendPushNotification(userId, PushNotificationKind.ReadNotifications, {
notificationIds,
});
} }

Some files were not shown because too many files have changed in this diff Show more