Merge branch 'develop' into fix/post_import_if

This commit is contained in:
naskya 2024-07-04 02:25:39 +09:00
commit f6601aef5e
No known key found for this signature in database
GPG key ID: 712D413B3A9FED5C
453 changed files with 8816 additions and 9796 deletions

View file

@ -40,14 +40,11 @@ packages/backend/assets/instance.css
.gitattributes
.weblate
animated.svg
cliff.toml
docker-compose.yml
docker-compose.example.yml
firefish.apache.conf
firefish.nginx.conf
title.svg
/.gitlab
/chart
/ci
/dev
/docs
/scripts
@ -56,6 +53,8 @@ biome.json
CODE_OF_CONDUCT.md
CONTRIBUTING.md
Dockerfile
Procfile
Makefile
README.md
SECURITY.md
patrons.json
renovate.json

1
.gitignore vendored
View file

@ -15,6 +15,7 @@ node_modules
report.*.json
# Cargo
/.cargo
/target
# Cypress

View file

@ -25,10 +25,11 @@ workflow:
- when: never
stages:
- dependency
- test
- doc
- build
- dependency
- clean
variables:
POSTGRES_DB: 'firefish_db'
@ -36,6 +37,7 @@ variables:
POSTGRES_PASSWORD: 'password'
POSTGRES_HOST_AUTH_METHOD: 'trust'
DEBIAN_FRONTEND: 'noninteractive'
NODE_OPTIONS: '--max_old_space_size=3072'
CARGO_PROFILE_DEV_OPT_LEVEL: '0'
CARGO_PROFILE_DEV_LTO: 'off'
CARGO_PROFILE_DEV_DEBUG: 'none'
@ -119,6 +121,7 @@ test:build:backend_ts:
- cp packages/backend-rs/index.js packages/backend-rs/built/index.js
- cp packages/backend-rs/index.d.ts packages/backend-rs/built/index.d.ts
- cp ci/cargo/config.toml /usr/local/cargo/config.toml
- test -f packages/backend-rs/built/backend-rs.linux-x64-gnu.node || pnpm install --frozen-lockfile
- test -f packages/backend-rs/built/backend-rs.linux-x64-gnu.node || pnpm --filter 'backend-rs' run build:debug
- cp .config/ci.yml .config/default.yml
- export PGPASSWORD="${POSTGRES_PASSWORD}"
@ -199,6 +202,8 @@ build:container:
STORAGE_DRIVER: overlay
before_script:
- apt-get update && apt-get -y upgrade
- |-
sed -i -r 's/"version": "([-0-9]+)",/"version": "\1-dev",/' package.json
- apt-get install -y --no-install-recommends ca-certificates fuse-overlayfs buildah
- echo "${CI_REGISTRY_PASSWORD}" | buildah login --username "${CI_REGISTRY_USER}" --password-stdin "${CI_REGISTRY}"
- export IMAGE_TAG="${CI_REGISTRY}/${CI_PROJECT_PATH}/develop:not-for-production"
@ -221,6 +226,31 @@ build:container:
- buildah inspect "${IMAGE_TAG}"
- buildah push "${IMAGE_TAG}"
cargo:check:msrv:
stage: test
image: docker.io/rust:1.74-slim-bookworm
rules:
- if: $TEST == 'true'
when: always
- if: $TEST == 'false'
when: never
- if: $CI_COMMIT_BRANCH == 'develop' || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'develop'
changes:
paths:
- packages/backend-rs/**/*
- packages/macro-rs/**/*
- Cargo.toml
- Cargo.lock
when: always
services: []
before_script:
- apt-get update && apt-get -y upgrade
- apt-get install -y --no-install-recommends build-essential clang mold python3 perl nodejs postgresql-client
- cp ci/cargo/config.toml /usr/local/cargo/config.toml
script:
- cargo fetch --locked --manifest-path Cargo.toml
- cargo check --locked --frozen --all-features
cargo:test:
stage: test
rules:
@ -235,7 +265,6 @@ cargo:test:
- packages/macro-rs/**/*
- Cargo.toml
- Cargo.lock
- package.json
when: always
script:
- curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C /usr/local/cargo/bin
@ -287,7 +316,7 @@ cargo:doc:
- cp ci/cargo/config.toml /usr/local/cargo/config.toml
script:
- cargo doc --document-private-items
- printf "window.ALL_CRATES = ['backend_rs', 'macro_rs'];" > target/doc/crates.js
- printf 'window.ALL_CRATES = ["backend_rs", "macros", "macros_impl"];' > target/doc/crates.js
- printf '<meta http-equiv="refresh" content="0; url=%s">' 'backend_rs' > target/doc/index.html
- cd target/doc
- npx --yes netlify-cli deploy --prod --site="${CARGO_DOC_SITE_ID}" --dir=.
@ -303,3 +332,19 @@ renovate:
before_script: []
script:
- renovate --platform gitlab --token "${API_TOKEN}" --endpoint "${CI_SERVER_URL}/api/v4" "${CI_PROJECT_PATH}"
clean:
stage: clean
rules:
- if: $CLEAN && $CI_PIPELINE_SOURCE == 'schedule'
services: []
before_script:
- apt-get update && apt-get -y upgrade
- apt-get -y --no-install-recommends install curl
- curl -fsSL 'https://deb.nodesource.com/setup_18.x' | bash -
- apt-get install -y --no-install-recommends nodejs
- corepack enable
- corepack prepare pnpm@latest --activate
- pnpm install --frozen-lockfile
script:
- pnpm run clean-all

View file

@ -1,49 +0,0 @@
<!-- 💖 Thanks for taking the time to fill out this bug report!
💁 Having trouble with deployment? [Ask the support chat.](https://matrix.to/#/#firefish-community:nitro.chat)
🔒 Found a security vulnerability? [Please disclose it responsibly.](https://firefish.dev/firefish/firefish/-/blob/develop/SECURITY.md)
🤝 By submitting this issue, you agree to follow our [Contribution Guidelines.](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md) -->
## What happened? <!-- Please give us a brief description of what happened. -->
## What did you expect to happen? <!-- Please give us a brief description of what you expected to happen. -->
## Version <!-- What version of firefish is your instance running? You can find this by clicking your instance's logo at the bottom left and then clicking instance information. -->
## What type of issue is this? <!-- If this happens on your device and has to do with the user interface, it's client-side. If this happens on either with the API or the backend, or you got a server-side error in the client, it's server-side. -->
- [ ] server-side
- [ ] client-side
- [ ] not sure
<details>
### Instance <!-- What instance of firefish are you using? -->
### What browser are you using? (client-side issues only)
### What operating system are you using? (client-side issues only)
### How do you deploy Firefish on your server? (server-side issues only)
### What operating system are you using? (Server-side issues only)
### Relevant log output <!-- Please copy and paste any relevant log output. You can find your log by inspecting the page, and going to the "console" tab. -->
</details>
## Contribution Guidelines
By submitting this issue, you agree to follow our [Contribution Guidelines](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md)
- [ ] I agree to follow this project's Contribution Guidelines
- [ ] I have searched the issue tracker for similar issues, and this is not a duplicate.
## Are you willing to fix this bug? (optional)
- [ ] Yes. I will fix this bug and open a merge request if the change is agreed upon.

View file

@ -0,0 +1,99 @@
<!--
This issue template is for bug reports.
There are other issue templates for feature requests and refactor proposals,
so please use them if this is not a bug report.
Also, you don't need to prefix the issue title with "Bug:", because it's
managed by issue labels.
-->
<!-- 💖 Thanks for taking the time to fill out this bug report!
💁 Having trouble with deployment? [Ask the support chat.](https://matrix.to/#/#firefish-community:nitro.chat)
🔒 Found a security vulnerability? [Please disclose it responsibly.](https://firefish.dev/firefish/firefish/-/blob/develop/SECURITY.md)
🤝 By submitting this issue, you agree to follow our [Contribution Guidelines.](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md) -->
## What type of issue is this?
<!-- If this happens on your device and has to do with the user interface, it's client-side. If this happens on either with the API or the backend, or you got a server-side error in the client, it's server-side. -->
<!-- Uncomment (remove surrounding arrow signs) the following line(s) to specify the category of this issue. -->
<!-- /label Server -->
<!-- /label Client -->
<!-- /label Mobile -->
<!-- /label Third-party-client -->
<!-- /label Docs -->
<!-- /label "Build from source" -->
<!-- /label Container -->
<!-- /label "Firefish API" -->
<!-- /label "Mastodon API" -->
## What happened?
<!-- Please give us a brief description of what happened. -->
## What did you expect to happen?
<!-- Please give us a brief description of what you expected to happen. -->
## Steps to reproduce the issue
<!-- Please describe how to reproduce this issue (preferably, in a ordered list) -->
## Reproduces how often
<!-- Is it always reproducible, or is it conditional/probabilistic ? -->
## What did you try to solve the issue
<!-- Not to repeat the same thing, let us share what you have tried so far. -->
## Version
<!-- What version of firefish is your instance running? You can find this by the instance information page. -->
<details>
### Instance
<!-- What instance of firefish are you using? -->
### What browser are you using? (client-side issues only)
### What operating system are you using? (client-side issues only)
### How do you deploy Firefish on your server? (server-side issues only)
### What operating system are you using? (Server-side issues only)
### Relevant log output
<!-- Please copy and paste any relevant log output. -->
</details>
## Contribution Guidelines
By submitting this issue, you agree to follow our [Contribution Guidelines](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md)
- [ ] I agree to follow this project's Contribution Guidelines
- [ ] I have searched the issue tracker for similar issues, and this is not a duplicate.
## Are you willing to fix this bug? (optional)
<!-- Please uncomment the following line if you want to fix this bug -->
<!-- /assign me -->
<!--
Please tell us how to fix this bug.
As noted in the contribution guidelines, there is a good chance that your
merge request will not be merged if there is no agreement with the project maintainers.
However, we are currently so understaffed that it is virtually impossible to
respond to every single proposal. So, feel free to implement it if there is no response
for more than a week or there is a thumbs-up emoji reaction from the project maintainer(s).
-->
<!-- Do not edit the following line -->
/label Bug?

View file

@ -1,18 +1,45 @@
<!-- 💖 Thanks for taking the time to fill out this bug report!
<!--
This issue template is for feature requests.
There are other issue templates for bug reports and refactor proposals,
so please use them if this is not a feature request.
Also, you don't need to prefix the issue title with "Feature:", because it's
managed by issue labels.
-->
<!-- 💖 Thanks for taking the time to fill out this feature request!
💁 Having trouble with deployment? [Ask the support chat.](https://matrix.to/#/#firefish-community:nitro.chat)
🔒 Found a security vulnerability? [Please disclose it responsibly.](https://firefish.dev/firefish/firefish/-/blob/develop/SECURITY.md)
🤝 By submitting this feature request, you agree to follow our [Contribution Guidelines.](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md) -->
🤝 By submitting this refactor proposal, you agree to follow our [Contribution Guidelines.](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md) -->
## What feature would you like implemented? <!-- Please give us a brief description of what you'd like. -->
## What type of refactoring is this?
<!-- If this happens on your device and has to do with the user interface, it's client-side. If this happens on either with the API or the backend, or you got a server-side error in the client, it's server-side. -->
<!-- Uncomment (remove surrounding arrow signs) the following line(s) to specify the category of this issue. -->
<!-- /label Server -->
<!-- /label Client -->
<!-- /label Mobile -->
<!-- /label Third-party-client -->
<!-- /label Docs -->
<!-- /label "Build from source" -->
<!-- /label Container -->
<!-- /label "Firefish API" -->
<!-- /label "Mastodon API" -->
## What feature would you like implemented?
<!-- Please give us a brief description of what you'd like to be refactored. -->
## Why should we add this feature? <!-- Please give us a brief description of why your feature is important. -->
## Why should we add this feature?
<!-- Please give us a brief description of why your feature is important. -->
## Version <!-- What version of firefish is your instance running? You can find this by clicking your instance's logo at the bottom left and then clicking instance information. -->
## Version
<!-- What version of firefish is your instance running? You can find this by clicking your instance's logo at the bottom left and then clicking instance information. -->
## Instance <!-- What instance of firefish are you using? -->
## Instance
<!-- What instance of Firefish are you using? -->
## Contribution Guidelines
@ -21,4 +48,20 @@ By submitting this issue, you agree to follow our [Contribution Guidelines](http
- [ ] I have searched the issue tracker for similar requests, and this is not a duplicate.
## Are you willing to implement this feature? (optional)
- [ ] Yes. I will implement this feature and open a merge request if the change is agreed upon.
<!-- Please uncomment the following line if you want to implement this feature -->
<!-- /assign me -->
<!--
Please tell us how to implement this feature.
As noted in the contribution guidelines, there is a good chance that your
merge request will not be merged if there is no agreement with the project maintainers.
However, we are currently so understaffed that it is virtually impossible to
respond to every single proposal. So, feel free to implement it if there is no response
for more than a week or there is a thumbs-up emoji reaction from the project maintainer(s).
-->
<!-- Do not edit the following line -->
/label Feature

View file

@ -0,0 +1,67 @@
<!--
This issue template is for refactor proposals.
There are other issue templates for bug reports and feature requests,
so please use them if this is not a refactor proposal.
Also, you don't need to prefix the issue title with "Refactor:", because it's
managed by issue labels.
-->
<!-- 💖 Thanks for taking the time to fill out this report!
💁 Having trouble with deployment? [Ask the support chat.](https://matrix.to/#/#firefish-community:nitro.chat)
🔒 Found a security vulnerability? [Please disclose it responsibly.](https://firefish.dev/firefish/firefish/-/blob/develop/SECURITY.md)
🤝 By submitting this feature request, you agree to follow our [Contribution Guidelines.](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md) -->
## What type of feature is this?
<!-- If this happens on your device and has to do with the user interface, it's client-side. If this happens on either with the API or the backend, or you got a server-side error in the client, it's server-side. -->
<!-- Uncomment (remove surrounding arrow signs) the following line(s) to specify the category of this issue. -->
<!-- /label Server -->
<!-- /label Client -->
<!-- /label Mobile -->
<!-- /label Third-party-client -->
<!-- /label Docs -->
<!-- /label "Build from source" -->
<!-- /label Container -->
<!-- /label "Firefish API" -->
<!-- /label "Mastodon API" -->
## What parts of the code do you think should be refactored?
<!-- Please give us a brief description of what you'd like. -->
## Why should the code be refactored that way?
<!-- Please give us a brief description of the reason of your proposal. -->
## Version
<!-- What version of firefish is your instance running? You can find this by clicking your instance's logo at the bottom left and then clicking instance information. -->
## Instance
<!-- What instance of Firefish are you using? -->
## Contribution Guidelines
By submitting this issue, you agree to follow our [Contribution Guidelines](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md)
- [ ] I agree to follow this project's Contribution Guidelines
- [ ] I have searched the issue tracker for similar requests, and this is not a duplicate.
## Are you willing to refactor the code? (optional)
<!-- Please uncomment the following line if you want to implement it -->
<!-- /assign me -->
<!--
Please tell us how to refactor the code.
As noted in the contribution guidelines, there is a good chance that your
merge request will not be merged if there is no agreement with the project maintainers.
However, we are currently so understaffed that it is virtually impossible to
respond to every single proposal. So, feel free to implement it if there is no response
for more than a week or there is a thumbs-up emoji reaction from the project maintainer(s).
-->
<!-- Do not edit the following line -->
/label Refactor

View file

@ -1,16 +1,19 @@
<!-- Thanks for taking the time to make Firefish better! It's not required, but please consider using [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) when making your commits. If you use VSCode, please use the [Conventional Commits extension](https://marketplace.visualstudio.com/items?itemName=vivaxy.vscode-conventional-commits). -->
<!-- Thanks for taking the time to make Firefish better! -->
## What does this PR do? <!-- Please give us a brief description of what this PR does. -->
## What does this merge request do?
<!-- Please give us a brief description of what this merge request does. -->
## Contribution Guidelines
By submitting this merge request, you agree to follow our [Contribution Guidelines](https://firefish.dev/firefish/firefish/-/blob/develop/CONTRIBUTING.md)
- [ ] This change is reviewed in an issue / This is a minor bug fix
- [ ] I agree to follow this project's Contribution Guidelines
- [ ] I have made sure to test this pull request
- [ ] I have made sure to run `pnpm run format` before submitting this pull request
- [ ] This closes issue #0000 (please substitute the number)
- [ ] This is a minor bug fix or refactoring
If this merge request makes changes to the Firefish API, please update `docs/api-change.md`
- [ ] I agree to follow this project's Contribution Guidelines
- [ ] I have made sure to test this merge request
- [ ] I have made sure to run `pnpm run format` before submitting this merge request
If this merge request makes changes to API, please update `docs/api-change.md`
- [ ] I updated the document / This merge request doesn't include API changes
<!-- Uncomment if your merge request has multiple authors -->

View file

@ -1,4 +1,5 @@
<!-- This template is used only when merging the develop branch into the main branch. Please don't use this for other merge requests. -->
/label Release
## Checklist
@ -13,7 +14,5 @@ I have updated...
- [ ] `packages/backend-rs/index.js`
- [ ] OCI container image
<!-- TODO: Add automated tests (task runners are currently down) -->
## Remarks

565
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,51 +1,58 @@
[workspace]
members = ["packages/backend-rs", "packages/macro-rs"]
members = ["packages/backend-rs", "packages/macro-rs/macros", "packages/macro-rs/macros-impl"]
resolver = "2"
[workspace.dependencies]
macro-rs = { path = "packages/macro-rs" }
macros = { path = "packages/macro-rs/macros" }
macros-impl = { path = "packages/macro-rs/macros-impl" }
napi = { git = "https://github.com/napi-rs/napi-rs.git", rev = "ca2cd5c35a0c39ec4a94e93c6c5695b681046df2", default-features = false }
napi-derive = { version = "2.16.5", default-features = false }
napi-build = { version = "2.1.3", default-features = false }
napi = "2.16.8"
napi-derive = "2.16.8"
napi-build = "2.1.3"
argon2 = { version = "0.5.3", default-features = false }
async-trait = { version = "0.1.80", default-features = false }
basen = { version = "0.1.0", default-features = false }
bb8 = { version = "0.8.3", default-features = false }
bb8 = { version = "0.8.5", default-features = false }
bcrypt = { version = "0.15.1", default-features = false }
chrono = { version = "0.4.38", default-features = false }
convert_case = { version = "0.6.0", default-features = false }
cuid2 = { version = "0.1.2", default-features = false }
emojis = { version = "0.6.2", default-features = false }
idna = { version = "0.5.0", default-features = false }
idna = { version = "1.0.2", default-features = false }
image = { version = "0.25.1", default-features = false }
isahc = { version = "1.7.2", default-features = false }
nom-exif = { version = "1.2.0", default-features = false }
once_cell = { version = "1.19.0", default-features = false }
openssl = { version = "0.10.64", default-features = false }
pretty_assertions = { version = "1.4.0", default-features = false }
proc-macro2 = { version = "1.0.84", default-features = false }
proc-macro2 = { version = "1.0.86", default-features = false }
quote = { version = "1.0.36", default-features = false }
rand = { version = "0.8.5", default-features = false }
redis = { version = "0.25.4", default-features = false }
regex = { version = "1.10.4", default-features = false }
regex = { version = "1.10.5", default-features = false }
rmp-serde = { version = "1.3.0", default-features = false }
sea-orm = { version = "0.12.15", default-features = false }
serde = { version = "1.0.203", default-features = false }
serde_json = { version = "1.0.117", default-features = false }
serde_json = { version = "1.0.120", default-features = false }
serde_yaml = { version = "0.9.34", default-features = false }
strum = { version = "0.26.2", default-features = false }
syn = { version = "2.0.66", default-features = false }
syn = { version = "2.0.68", default-features = false }
sysinfo = { version = "0.30.12", default-features = false }
thiserror = { version = "1.0.61", default-features = false }
tokio = { version = "1.37.0", default-features = false }
tokio = { version = "1.38.0", default-features = false }
tokio-test = { version = "0.4.4", default-features = false }
tracing = { version = "0.1.40", default-features = false }
tracing-subscriber = { version = "0.3.18", default-features = false }
url = { version = "2.5.0", default-features = false }
url = { version = "2.5.2", default-features = false }
urlencoding = { version = "2.1.3", default-features = false }
web-push = { git = "https://github.com/pimeys/rust-web-push.git", rev = "40febe4085e3cef9cdfd539c315e3e945aba0656", default-features = false }
# subdependencies
## explicitly list OpenSSL to use the vendored version
openssl = "0.10.64"
## some subdependencies require higher Rust version than 1.74 (our MSRV)
## cargo update && cargo update ravif --precise 0.11.5 && cargo update bitstream-io --precise 2.3.0
## to pin their versions if needed
[profile.release]
lto = true

View file

@ -2,23 +2,20 @@
FROM docker.io/node:20-alpine as build
WORKDIR /firefish
# Copy only backend-rs pnpm-related files first, to cache efficiently
COPY package.json pnpm-workspace.yaml ./
COPY packages/backend-rs/package.json packages/backend-rs/package.json
# Install compilation dependencies
RUN apk update && apk add --no-cache build-base linux-headers curl ca-certificates python3 perl
RUN curl --proto '=https' --tlsv1.2 --silent --show-error --fail https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
# Copy only backend-rs dependency-related files first, to cache efficiently
COPY package.json pnpm-workspace.yaml ./
COPY packages/backend-rs/package.json packages/backend-rs/package.json
COPY packages/backend-rs/npm/linux-x64-musl/package.json packages/backend-rs/npm/linux-x64-musl/package.json
COPY packages/backend-rs/npm/linux-arm64-musl/package.json packages/backend-rs/npm/linux-arm64-musl/package.json
COPY Cargo.toml Cargo.toml
COPY Cargo.lock Cargo.lock
COPY packages/backend-rs/Cargo.toml packages/backend-rs/Cargo.toml
COPY packages/macro-rs packages/macro-rs/
COPY packages/backend-rs/src/lib.rs packages/backend-rs/src/
COPY packages/macro-rs/Cargo.toml packages/macro-rs/Cargo.toml
COPY packages/macro-rs/src/lib.rs packages/macro-rs/src/
COPY packages/backend-rs/Cargo.toml packages/backend-rs/Cargo.toml
COPY Cargo.toml Cargo.lock ./
# Configure pnpm, and install backend-rs dependencies
RUN corepack enable && corepack prepare pnpm@latest --activate && pnpm --filter backend-rs install
@ -26,10 +23,10 @@ RUN cargo fetch --locked --manifest-path Cargo.toml
# Copy in the rest of the rust files
COPY packages/backend-rs packages/backend-rs/
# COPY packages/macro-rs packages/macro-rs/
# Compile backend-rs
RUN NODE_ENV='production' pnpm run --filter backend-rs build
RUN ln -s $(which gcc) /usr/bin/aarch64-linux-musl-gcc
RUN NODE_ENV='production' NODE_OPTIONS='--max_old_space_size=3072' pnpm run --filter backend-rs build
# Copy/Overwrite index.js to mitigate the bug in napi-rs codegen
COPY packages/backend-rs/index.js packages/backend-rs/built/index.js
@ -49,7 +46,7 @@ RUN pnpm install --frozen-lockfile
COPY . ./
# Build other workspaces
RUN NODE_ENV='production' pnpm run --recursive --filter '!backend-rs' build && pnpm run build:assets
RUN NODE_ENV='production' NODE_OPTIONS='--max_old_space_size=3072' pnpm run --recursive --filter '!backend-rs' build && pnpm run build:assets
# Trim down the dependencies to only those for production
RUN find . -path '*/node_modules/*' -delete && pnpm install --prod --frozen-lockfile

View file

@ -1 +0,0 @@
web: NODE_ENV=production npm start

View file

@ -1,28 +1,400 @@
{
"$schema": "https://biomejs.dev/schemas/1.6.4/schema.json",
"organizeImports": {
"enabled": false
},
"$schema": "https://biomejs.dev/schemas/1.8.2/schema.json",
"organizeImports": { "enabled": false },
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"recommended": false,
"complexity": {
"noBannedTypes": "error",
"noExtraBooleanCast": "error",
"noMultipleSpacesInRegularExpressionLiterals": "error",
"noUselessCatch": "error",
"noUselessConstructor": "off",
"noUselessLoneBlockStatements": "error",
"noUselessRename": "error",
"noUselessTernary": "error",
"noUselessThisAlias": "error",
"noUselessTypeConstraint": "error",
"noVoid": "error",
"noWith": "error",
"useLiteralKeys": "error",
"useRegexLiterals": "error"
},
"correctness": {
"noConstAssign": "error",
"noConstantCondition": "error",
"noEmptyCharacterClassInRegex": "error",
"noEmptyPattern": "error",
"noGlobalObjectCalls": "error",
"noInvalidConstructorSuper": "error",
"noInvalidUseBeforeDeclaration": "error",
"noNewSymbol": "error",
"noPrecisionLoss": "error",
"noSelfAssign": "error",
"noSwitchDeclarations": "error",
"noUndeclaredVariables": "error",
"noUnreachable": "error",
"noUnreachableSuper": "error",
"noUnsafeFinally": "error",
"noUnusedVariables": "off",
"useArrayLiterals": "off",
"useIsNan": "error"
},
"security": { "noGlobalEval": "error" },
"style": {
"noUselessElse": "off"
}
"noCommaOperator": "error",
"noInferrableTypes": "error",
"noNamespace": "error",
"noNonNullAssertion": "warn",
"noUselessElse": "off",
"noVar": "error",
"useAsConstAssertion": "error",
"useBlockStatements": "off",
"useConst": "error",
"useImportType": "error",
"useSingleVarDeclarator": "warn"
},
"suspicious": {
"noAssignInExpressions": "error",
"noAsyncPromiseExecutor": "error",
"noCatchAssign": "error",
"noClassAssign": "error",
"noCompareNegZero": "error",
"noConfusingLabels": "off",
"noConsoleLog": "warn",
"noControlCharactersInRegex": "error",
"noDebugger": "warn",
"noDoubleEquals": "error",
"noDuplicateCase": "error",
"noDuplicateClassMembers": "error",
"noDuplicateObjectKeys": "error",
"noDuplicateParameters": "error",
"noEmptyBlockStatements": "error",
"noEmptyInterface": "error",
"noExplicitAny": "warn",
"noExtraNonNullAssertion": "error",
"noFallthroughSwitchClause": "error",
"noFunctionAssign": "error",
"noGlobalAssign": "error",
"noImportAssign": "error",
"noMisleadingCharacterClass": "error",
"noMisleadingInstantiator": "error",
"noPrototypeBuiltins": "off",
"noRedeclare": "error",
"noSelfCompare": "error",
"noShadowRestrictedNames": "error",
"noUnsafeNegation": "error",
"useAwait": "off",
"useDefaultSwitchClauseLast": "error",
"useNamespaceKeyword": "error",
"useValidTypeof": "error"
}
},
"ignore": [
"**/*.json5",
"**/*.min.*",
"**/dist",
"**/LICENSE*",
"**/output",
"**/coverage",
"**/public",
"**/temp",
"**/packages-lock.json",
"**/pnpm-lock.yaml",
"**/yarn.lock",
"**/__snapshots__"
]
},
"javascript": {
"globals": [
"jest",
"withDefaults",
"$computed",
"$shallowRef",
"defineExpose",
"$toRef",
"h",
"$customRef",
"navigator",
"window",
"defineEmits",
"$ref",
"defineProps",
"document"
]
},
"overrides": [
{
"include": ["*.vue", "packages/client/*.ts"],
"include": ["**/__tests__/*.{j,t}s?(x)", "**/*.spec.{j,t}s?(x)"],
"linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } }
},
{
"include": ["*.vue"],
"linter": { "rules": { "correctness": { "noUnusedVariables": "off" } } }
},
{
"include": ["**/__tests__/*.{j,t}s?(x)", "**/*.spec.{j,t}s?(x)"],
"linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } }
},
{ "include": ["*.vue"], "linter": { "rules": {} } },
{ "include": ["*.json", "*.json5"], "linter": { "rules": {} } },
{ "include": ["*.yaml", "*.yml"], "linter": { "rules": {} } },
{ "include": ["package.json"], "linter": { "rules": {} } },
{ "include": ["*.d.ts"], "linter": { "rules": {} } },
{ "include": ["*.js"] },
{
"include": ["scripts/**/*.*", "cli.*"],
"linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } }
},
{
"include": ["*.test.ts", "*.test.js", "*.spec.ts", "*.spec.js"],
"linter": { "rules": {} }
},
{
"include": ["**/*.md/*.*"],
"linter": {
"rules": {
"correctness": {
"noUndeclaredVariables": "off",
"noUnusedVariables": "off"
},
"suspicious": { "noConsoleLog": "off" }
}
}
},
{ "include": ["*.js"], "linter": { "rules": {} } },
{
"include": ["**/*.md/*.*"],
"linter": {
"rules": {
"correctness": {
"noInvalidUseBeforeDeclaration": "off",
"noUnusedVariables": "off"
},
"suspicious": { "noRedeclare": "off" }
}
}
},
{ "include": ["*.json", "*.json5"], "linter": { "rules": {} } },
{ "include": ["*.yaml", "*.yml"], "linter": { "rules": {} } },
{ "include": ["package.json"], "linter": { "rules": {} } },
{ "include": ["*.d.ts"], "linter": { "rules": {} } },
{ "include": ["*.js"] },
{
"include": ["scripts/**/*.*", "cli.*"],
"linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } }
},
{
"include": ["*.test.ts", "*.test.js", "*.spec.ts", "*.spec.js"],
"linter": { "rules": {} }
},
{
"include": ["**/*.md/*.*"],
"linter": {
"rules": {
"correctness": {
"noUndeclaredVariables": "off",
"noUnusedVariables": "off"
},
"suspicious": { "noConsoleLog": "off" }
}
}
},
{ "include": ["*.js"], "linter": { "rules": {} } },
{
"include": ["**/*.md/*.*"],
"linter": {
"rules": {
"correctness": {
"noInvalidUseBeforeDeclaration": "off",
"noUnusedVariables": "off"
},
"suspicious": { "noRedeclare": "off" }
}
}
},
{
"include": ["*.ts", "*.tsx", "*.mts", "*.cts"],
"linter": {
"rules": {
"correctness": {
"noConstAssign": "off",
"noGlobalObjectCalls": "off",
"noInvalidConstructorSuper": "off",
"noNewSymbol": "off",
"noSetterReturn": "off",
"noUndeclaredVariables": "off",
"noUnreachable": "off",
"noUnreachableSuper": "off"
},
"style": {
"useImportType": "warn",
"useShorthandFunctionType": "warn",
"useTemplate": "warn",
"noNonNullAssertion": "off",
"useNodejsImportProtocol": "off"
"noArguments": "error",
"noVar": "error",
"useConst": "error"
},
"suspicious": {
"noDuplicateClassMembers": "off",
"noDuplicateObjectKeys": "off",
"noDuplicateParameters": "off",
"noFunctionAssign": "off",
"noImportAssign": "off",
"noRedeclare": "off",
"noUnsafeNegation": "off",
"useGetterReturn": "off",
"useValidTypeof": "off"
}
}
}
},
{ "include": ["*.json", "*.json5"], "linter": { "rules": {} } },
{ "include": ["*.yaml", "*.yml"], "linter": { "rules": {} } },
{ "include": ["package.json"], "linter": { "rules": {} } },
{ "include": ["*.d.ts"], "linter": { "rules": {} } },
{ "include": ["*.js"], "linter": { "rules": {} } },
{
"include": ["*.ts", "*.tsx", "*.mts", "*.cts"],
"linter": { "rules": { "complexity": { "noVoid": "error" } } }
},
{
"include": ["script/**/*.*", "scripts/**/*.*", "cli.*"],
"linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } }
},
{
"include": ["*.test.ts", "*.test.js", "*.spec.ts", "*.spec.js"],
"linter": { "rules": {} }
},
{
"include": ["**/*.md/*.*"],
"linter": {
"rules": {
"correctness": {
"noInvalidUseBeforeDeclaration": "off",
"noUndeclaredVariables": "off",
"noUnusedVariables": "off"
},
"style": { "useImportType": "off" },
"suspicious": { "noConsoleLog": "off", "noRedeclare": "off" }
}
}
},
{ "include": ["*.json", "*.json5"], "linter": { "rules": {} } },
{ "include": ["*.yaml", "*.yml"], "linter": { "rules": {} } },
{ "include": ["package.json"], "linter": { "rules": {} } },
{ "include": ["*.d.ts"], "linter": { "rules": {} } },
{ "include": ["*.js"], "linter": { "rules": {} } },
{
"include": ["*.ts", "*.tsx", "*.mts", "*.cts"],
"linter": { "rules": { "complexity": { "noVoid": "error" } } }
},
{
"include": ["script/**/*.*", "scripts/**/*.*", "cli.*"],
"linter": { "rules": { "suspicious": { "noConsoleLog": "off" } } }
},
{
"include": ["*.test.ts", "*.test.js", "*.spec.ts", "*.spec.js"],
"linter": { "rules": {} }
},
{
"include": ["**/*.md/*.*"],
"linter": {
"rules": {
"correctness": {
"noInvalidUseBeforeDeclaration": "off",
"noUndeclaredVariables": "off",
"noUnusedVariables": "off"
},
"style": { "useImportType": "off" },
"suspicious": { "noConsoleLog": "off", "noRedeclare": "off" }
}
}
},
{ "include": ["*.md"] },
{
"include": ["**/*.md/**"],
"linter": {
"rules": {
"correctness": {
"noUndeclaredVariables": "off",
"noUnusedVariables": "off"
}
}
}
},
{
"include": ["*.ts", "*.tsx", "*.mts", "*.cts"],
"linter": {
"rules": {
"correctness": {
"noConstAssign": "off",
"noGlobalObjectCalls": "off",
"noInvalidConstructorSuper": "off",
"noNewSymbol": "off",
"noSetterReturn": "off",
"noUndeclaredVariables": "off",
"noUnreachable": "off",
"noUnreachableSuper": "off"
},
"style": {
"noArguments": "error",
"noVar": "error",
"useConst": "error"
},
"suspicious": {
"noDuplicateClassMembers": "off",
"noDuplicateObjectKeys": "off",
"noDuplicateParameters": "off",
"noFunctionAssign": "off",
"noImportAssign": "off",
"noRedeclare": "off",
"noUnsafeNegation": "off",
"useGetterReturn": "off",
"useValidTypeof": "off"
}
}
}
},
{ "include": ["*.md"] },
{
"include": ["**/*.md/**"],
"linter": {
"rules": {
"correctness": {
"noUndeclaredVariables": "off",
"noUnusedVariables": "off"
}
}
}
},
{
"include": ["*.ts", "*.tsx", "*.mts", "*.cts"],
"linter": {
"rules": {
"correctness": {
"noConstAssign": "off",
"noGlobalObjectCalls": "off",
"noInvalidConstructorSuper": "off",
"noNewSymbol": "off",
"noSetterReturn": "off",
"noUndeclaredVariables": "off",
"noUnreachable": "off",
"noUnreachableSuper": "off"
},
"style": {
"noArguments": "error",
"noVar": "error",
"useConst": "error"
},
"suspicious": {
"noDuplicateClassMembers": "off",
"noDuplicateObjectKeys": "off",
"noDuplicateParameters": "off",
"noFunctionAssign": "off",
"noImportAssign": "off",
"noRedeclare": "off",
"noUnsafeNegation": "off",
"useGetterReturn": "off",
"useValidTypeof": "off"
}
}
}

View file

@ -1,98 +0,0 @@
# configuration file for git-cliff (0.1.0)
[changelog]
# changelog header
header = """
# Changelog\n
"""
# template for the changelog body
# https://tera.netlify.app/docs/#introduction
body = """
{% if version %}\
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
{% endfor %}
{% endfor %}\n
"""
# remove the leading and trailing whitespace from the template
trim = true
# changelog footer
footer = """
<!-- generated by git-cliff -->
"""
[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = false
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for parsing and grouping commits
commit_parsers = [
{ message = "^feat", group = "Features"},
{ message = "^add", group = "Features"},
{ message = "^fix", group = "Bug Fixes"},
{ message = "^prevent", group = "Bug Fixes"},
{ message = "^doc", group = "Documentation"},
{ message = "^perf", group = "Performance"},
{ message = "^🎨", group = "Refactor"},
{ message = "^enhance", group = "Refactor"},
{ message = "^⚡️", group = "Refactor"},
{ message = "^🔥", group = "Features"},
{ message = "^🐛", group = "Bug Fixes"},
{ message = "^🚑️", group = "Bug Fixes"},
{ message = "^block", group = "Bug Fixes"},
{ message = "^✨", group = "Features"},
{ message = "^📝", group = "Documentation"},
{ message = "^🚀", group = "Features"},
{ message = "^💄", group = "Styling"},
{ message = "^✅", group = "Testing"},
{ message = "^🔒️", group = "Security"},
{ message = "^🚨", group = "Testing"},
{ message = "^💚", group = "CI"},
{ message = "^👷", group = "CI"},
{ message = "^⬇️", group = "Miscellaneous Tasks"},
{ message = "^⬆️", group = "Miscellaneous Tasks"},
{ message = "^📌", group = "Miscellaneous Tasks"},
{ message = "^", group = "Miscellaneous Tasks"},
{ message = "^", group = "Miscellaneous Tasks"},
{ message = "^♻️", group = "Refactor"},
{ message = "^🔧", group = "CI"},
{ message = "^🔨", group = "CI"},
{ message = "^🌐", group = "Localization"},
{ message = "^✏️", group = "Localization"},
{ message = "^👽️", group = "Bug Fixes"},
{ message = "^🍱", group = "Styling"},
{ message = "^♿️", group = "Styling"},
{ message = "^🩹", group = "Bug Fixes"},
{ message = "^refactor", group = "Refactor"},
{ message = "^style", group = "Styling"},
{ message = "^test", group = "Testing"},
{ message = "^chore\\(release\\): prepare for", skip = true},
{ message = "^chore", group = "Miscellaneous Tasks"},
{ message = "^update", group = "Miscellaneous Tasks"},
{ body = ".*security", group = "Security"},
]
# protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
# filter out the commits that are not matched by commit parsers
filter_commits = false
# glob pattern for matching git tags
tag_pattern = "v[0-9]*"
# regex for skipping tags
skip_tags = "v0.1.0-beta.1"
# regex for ignoring tags
ignore_tags = ""
# sort the tags chronologically
date_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# limit the number of commits included in the changelog.
# limit_commits = 42

View file

@ -27,10 +27,10 @@ services:
interval: 5s
timeout: 5s
retries: 5
deploy:
resources:
limits:
memory: 4096M
# deploy:
# resources:
# limits:
# memory: 2048M
redis:
restart: unless-stopped
@ -48,8 +48,7 @@ services:
# deploy:
# resources:
# limits:
# memory: 200M
# memory: 256M
db:
restart: unless-stopped
@ -69,7 +68,7 @@ services:
# deploy:
# resources:
# limits:
# memory: 200M
# memory: 2048M
networks:
calcnet:

View file

@ -2,6 +2,10 @@
Breaking changes are indicated by the :warning: icon.
## v20240607
- `GET` request is now allowed for the `latest-version` endpoint.
## v20240523
- Added `scheduledAt` optional parameter to `notes/create` (!10789)

View file

@ -2,9 +2,37 @@
Critical security updates are indicated by the :warning: icon.
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
- Server administrators must check [notice-for-admins.md](./notice-for-admins.md) as well.
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
## Unreleased
- Fix bugs
## [v20240630](https://firefish.dev/firefish/firefish/-/merge_requests/11072/commits)
- Add ability to automatically append #Alt4Me hashtag when posting a file without an alt text ([What is #Alt4Me?](https://social.growyourown.services/@FediTips/112055775451305236))
- Fix a build issue on some environments
- Fix bugs
## [v20240623](https://firefish.dev/firefish/firefish/-/merge_requests/11049/commits)
- Fix bugs
## [v20240613](https://firefish.dev/firefish/firefish/-/merge_requests/11003/commits)
This update contains code refactoring and dependency updates, with no major user-facing changes.
## [v20240607](https://firefish.dev/firefish/firefish/-/merge_requests/10978/commits)
- Add the ability to share posts via QR code
- Update the API document page (`/api-doc`)
- Fix bugs
## [v20240601](https://firefish.dev/firefish/firefish/-/merge_requests/10943/commits)
- Fix bugs
## [v20240523](https://firefish.dev/firefish/firefish/-/merge_requests/10898/commits)
- Add scheduled posts

View file

@ -1,6 +1,7 @@
BEGIN;
DELETE FROM "migrations" WHERE name IN (
'RefactorScheduledPosts1716804636187',
'RemoveEnumTypenameSuffix1716462794927',
'CreateScheduledNote1714728200194',
'AddBackTimezone1715351290096',
@ -33,6 +34,38 @@ DELETE FROM "migrations" WHERE name IN (
'RemoveNativeUtilsMigration1705877093218'
);
-- refactor-scheduled-post
CREATE TABLE "scheduled_note" (
"id" character varying(32) NOT NULL PRIMARY KEY,
"noteId" character varying(32) NOT NULL,
"userId" character varying(32) NOT NULL,
"scheduledAt" TIMESTAMP WITH TIME ZONE NOT NULL
);
COMMENT ON COLUMN "scheduled_note"."noteId" IS 'The ID of the temporarily created note that corresponds to the schedule.';
CREATE EXTENSION pgcrypto;
-- Generates a random lowercase-alphanumeric ID of the given length, using
-- pgcrypto's gen_random_bytes() as the entropy source. It is created (and
-- dropped again below) solely to backfill "scheduled_note"."id" for the
-- rows migrated from "note"."scheduledAt".
CREATE FUNCTION generate_scheduled_note_id(size int) RETURNS text AS $$ DECLARE
-- the 36-character alphabet the ID is drawn from
characters text := 'abcdefghijklmnopqrstuvwxyz0123456789';
-- one random byte per output character
bytes bytea := gen_random_bytes(size);
l int := length(characters);
i int := 0;
output text := '';
BEGIN
-- Map each random byte onto the alphabet; substr() is 1-indexed, hence "+ 1".
-- NOTE(review): 256 is not a multiple of 36, so the distribution is slightly
-- biased toward the start of the alphabet — acceptable for opaque IDs.
WHILE i < size LOOP
output := output || substr(characters, get_byte(bytes, i) % l + 1, 1);
i := i + 1;
END LOOP;
RETURN output;
END;
$$ LANGUAGE plpgsql VOLATILE;
INSERT INTO "scheduled_note" ("id", "noteId", "userId", "scheduledAt") (SELECT generate_scheduled_note_id(16), "id", "userId", "scheduledAt" FROM "note" WHERE "note"."scheduledAt" IS NOT NULL);
DROP EXTENSION pgcrypto;
DROP FUNCTION "generate_scheduled_note_id";
CREATE INDEX "IDX_noteId_ScheduledNote" ON "scheduled_note" ("noteId");
CREATE INDEX "IDX_userId_ScheduledNote" ON "scheduled_note" ("userId");
ALTER TABLE "scheduled_note" ADD FOREIGN KEY ("noteId") REFERENCES "note"("id") ON DELETE CASCADE ON UPDATE NO ACTION;
ALTER TABLE "scheduled_note" ADD FOREIGN KEY ("userId") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION;
ALTER TABLE "note" DROP COLUMN "scheduledAt";
-- remove-enum-typename-suffix
ALTER TYPE "antenna_src" RENAME TO "antenna_src_enum";
ALTER TYPE "drive_file_usage_hint" RENAME TO "drive_file_usage_hint_enum";

View file

@ -4,9 +4,9 @@ Firefish depends on the following software.
## Runtime dependencies
- At least [NodeJS](https://nodejs.org/en/) v18.19.0 (v20/v21 recommended)
- At least [NodeJS](https://nodejs.org/en/) v18.19.0 (v20/v22 recommended)
- At least [PostgreSQL](https://www.postgresql.org/) v12 (v16 recommended) with [PGroonga](https://pgroonga.github.io/) extension
- At least [Redis](https://redis.io/) v7
- At least [Redis](https://redis.io/) v7 or [Valkey](https://valkey.io/) v7
- Web Proxy (one of the following)
- Caddy (recommended)
- Nginx (recommended)
@ -15,27 +15,78 @@ Firefish depends on the following software.
- Caching server (**optional**, one of the following)
- [DragonflyDB](https://www.dragonflydb.io/)
- [KeyDB](https://keydb.dev/)
- Another [Redis](https://redis.io/) server
- Another [Redis](https://redis.io/) / [Valkey](https://valkey.io/) server
## Build dependencies
- At least [Rust](https://www.rust-lang.org/) v1.74
- C/C++ compiler & build tools
- C/C++ compiler & build tools (like [GNU Make](https://www.gnu.org/software/make/))
- `build-essential` on Debian/Ubuntu Linux
- `base-devel` on Arch Linux
- `"Development Tools"` on Fedora/Red Hat Linux
- [Python 3](https://www.python.org/)
- [Perl](https://www.perl.org/)
This document shows an example procedure for installing these dependencies and Firefish on Debian 12. Note that there is much room for customizing the server setup; this document merely demonstrates a simple installation.
### Install on non-Linux systems
We don't test Firefish on non-Linux systems, so please install Firefish on such an environment **only if you can address any problems yourself**. There is absolutely no support. That said, it is possible to install Firefish on some non-Linux systems.
<details>
<summary>Possible setup on FreeBSD (as of version `20240630`)</summary>
You can install Firefish on FreeBSD by adding these extra steps to the standard instructions:
1. Install `vips` package
2. Add the following block to [`package.json`](../package.json)
```json
"pnpm": {
"overrides": {
"rollup": "npm:@rollup/wasm-node@4.17.2"
}
}
```
3. Create an rc script for Firefish
```sh
#!/bin/sh
# PROVIDE: firefish
# REQUIRE: DAEMON redis caddy postgresql
# KEYWORD: shutdown
. /etc/rc.subr
name=firefish
rcvar=firefish_enable
desc="Firefish daemon"
load_rc_config ${name}
: ${firefish_chdir:="/path/to/firefish/local/repository"}
: ${firefish_env:="npm_config_cache=/tmp NODE_ENV=production NODE_OPTIONS=--max-old-space-size=3072"}
pidfile="/var/run/${name}.pid"
command=/usr/sbin/daemon
command_args="-f -S -u firefish -P ${pidfile} /usr/local/bin/pnpm run start"
run_rc_command "$1"
```
</details>
Please let us know if you deployed Firefish on an unusual environment :smile:
### Use Docker/Podman containers
If you want to use the pre-built container image, please refer to [`install-container.md`](./install-container.md).
If you do not prepare your environment as documented, be sure to meet the minimum dependencies given at the bottom of the page.
## 1. Install dependencies
Make sure that you can use the `sudo` command before proceeding.
## 1. Install dependencies
### Utilities
```sh
@ -215,7 +266,7 @@ sudo ufw status
### 2. Set up a reverse proxy
In this instruction, we use [Caddy](https://caddyserver.com/) to make the Firefish server accessible from the internet. However, you can also use [Nginx](https://nginx.org/en/) if you want ([example Nginx config file](../firefish.nginx.conf)).
In this instruction, we use [Caddy](https://caddyserver.com/) to make the Firefish server accessible from the internet. However, you can also use [Nginx](https://nginx.org/en/) if you want ([example Nginx config file](./firefish.nginx.conf)).
1. Install Caddy
```sh
@ -310,7 +361,9 @@ In this instruction, we use [Caddy](https://caddyserver.com/) to make the Firefi
sudo systemctl enable --now firefish
```
## Upgrading
# Maintain the server
## Upgrade Firefish version
Please refer to the [upgrade instruction](./upgrade.md). Be sure to switch to `firefish` user and go to the Firefish directory before executing the `git` command:
@ -319,6 +372,85 @@ sudo su --login firefish
cd ~/firefish
```
## Rotate logs
As the server runs longer and longer, the size of the log files increases, filling up the disk space. To prevent this, you should set up a log rotation (removing old logs automatically).
You can edit the `SystemMaxUse` value in the `[journal]` section of `/etc/systemd/journald.conf` to do it:
```conf
[journal]
... (omitted)
SystemMaxUse=500M
...
```
Make sure to remove the leading `#` to uncomment the line. After editing the config file, you need to restart `systemd-journald` service.
```sh
sudo systemctl restart systemd-journald
```
It is also recommended that you change the [PGroonga log level](https://pgroonga.github.io/reference/parameters/log-level.html). The default level is `notice`, but this is too verbose for daily use.
To control the log level, add this line to your `postgresql.conf`:
```conf
pgroonga.log_level = error
```
You can check the `postgresql.conf` location by this command:
```sh
sudo --user=postgres psql --command='SHOW config_file'
```
The PGroonga log file (`pgroonga.log`) is located under this directory:
```sh
sudo --user=postgres psql --command='SHOW data_directory'
```
## Tune database configuration
The default PostgreSQL configuration is not suitable for running a Firefish server. Thus, it is highly recommended that you use [PGTune](https://pgtune.leopard.in.ua/) to tweak the configuration.
Here is an example set of parameters you can provide to PGTune:
| Parameter | Value |
|----------------------:|---------------------------------------------------------|
| DB version | 16 (your PostgreSQL major version) |
| OS Type | Linux |
| DB Type | Data warehouse |
| Total Memory | [total physical memory] minus 700 MB |
| Number of CPUs | number of CPU threads (or lower value if you have many) |
| Number of connections | 200 |
| Data storage | SSD storage |
Since this is not a dedicated database server, be sure to leave some memory space for other software such as Firefish and Redis.
Once you have entered the appropriate values for your environment, click the "Generate" button to generate a configuration and replace the values in `postgresql.conf` with the suggested values.
After that, you need to restart the PostgreSQL service.
```sh
sudo systemctl stop firefish
sudo systemctl restart postgresql
sudo systemctl start firefish
```
## VACUUM your database
If the database runs for a long time, accumulated "garbage" can degrade its performance or cause problems. To prevent this, you should `VACUUM` your database regularly.
```sh
sudo systemctl stop firefish
sudo --user=postgres psql --dbname=firefish_db --command='VACUUM FULL VERBOSE ANALYZE'
sudo systemctl start firefish
```
Note that this operation takes some time.
## Customize
- To add custom CSS for all users, edit `./custom/assets/instance.css`.

View file

@ -2,8 +2,29 @@
You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md).
## Upcoming breaking change (unreleased)
Please take a look at #10947.
## Unreleased
### For all users
This is not related to the recent changes, but we have added a new section called "[Maintain the server](https://firefish.dev/firefish/firefish/-/blob/develop/docs/install.md#maintain-the-server)" in the installation guide. We suggest that you take a look at it. (and we welcome your docs contributions!)
## v20240607
The following environment variables are deprecated and no longer have any effect:
- `MK_ONLY_QUEUE`
- `MK_ONLY_SERVER`
- `MK_NO_DAEMONS`
- `MK_DISABLE_CLUSTERING`
- `MK_VERBOSE`
- `MK_WITH_LOG_TIME`
- `MK_SLOW`
## v20240601
### For systemd/pm2 users
Required Node.js version has been bumped from v18.17.0 to v18.19.0. Also, as written in the [v20240430 note](https://firefish.dev/firefish/firefish/-/blob/d3394b97f021dea323ec3ae36e39930680242482/docs/notice-for-admins.md#v20240430), it is highly recommended that you use an even newer version since v18.19.0 has known vulnerabilities.
@ -32,7 +53,7 @@ Therefore, we have contributed to napi-rs to add support for `DateTime<FixedOffs
### For systemd/pm2 users
There is a bug where `pnpm install --frozen-lockfile` may fail on Linux 6.9.x ([GitHub issue](<https://github.com/nodejs/node/issues/53051>)).
There is a bug where `pnpm install --frozen-lockfile` may fail on Linux 6.9.0, 6.9.1, and 6.9.2 ([GitHub issue](<https://github.com/nodejs/node/issues/53051>)).
To check your Linux kernel version, run:

View file

@ -446,7 +446,7 @@ tooShort: "قصير جدًا"
tooLong: "طويل جدًا"
weakPassword: "الكلمة السرية ضعيفة"
normalPassword: "الكلمة السرية جيدة"
strongPassword: "الكلمة السرية قوية"
veryStrongPassword: "الكلمة السرية قوية"
passwordMatched: "التطابق صحيح!"
passwordNotMatched: "غير متطابقتان"
signinWith: "الولوج عبر {x}"

View file

@ -586,7 +586,7 @@ unavailable: Не е свободно
tooShort: Твърде кратко
tooLong: Твърде дълго
weakPassword: Слаба парола
strongPassword: Силна парола
veryStrongPassword: Силна парола
passwordMatched: Съвпада
passwordNotMatched: Не съвпада
signinWith: Вход с {x}

View file

@ -462,7 +462,7 @@ tooShort: "খুব ছোট"
tooLong: "খুব বড়"
weakPassword: "দুর্বল পাসওয়ার্ড"
normalPassword: "সাধারণ পাসওয়ার্ড"
strongPassword: "শক্তিশালী পাসওয়ার্ড"
veryStrongPassword: "শক্তিশালী পাসওয়ার্ড"
passwordMatched: "মিলেছে"
passwordNotMatched: "মিলেনি"
signinWith: "{x} এর সাহায্যে সাইন ইন করুন"

View file

@ -395,7 +395,7 @@ _profile:
metadataLabel: Etiqueta
metadataContent: Contingut
changeAvatar: Canvia l'avatar
changeBanner: Canvia el banner
changeBanner: Canvia el bàner
locationDescription: Si primer introduïu la vostra ciutat, es mostrarà l'hora local
a altres usuaris.
name: Nom
@ -1022,7 +1022,7 @@ yearsOld: '{age} anys'
copyUrl: Copia l'adreça URL
rename: Renombra
unwatch: Deixa de veure
accept: Accepta
accept: Acceptar
reject: Rebutja
yearX: '{year}'
pages: Pàgines
@ -1091,7 +1091,7 @@ usernameInvalidFormat: Pots fer servir lletres en majúscules o minúscules, nom
tooShort: Massa curt
tooLong: Massa llarg
weakPassword: Contrasenya amb seguretat feble
strongPassword: Contrasenya amb seguretat forta
veryStrongPassword: Contrasenya amb seguretat forta
passwordMatched: Coincidències
signinWith: Inicia sessió com {x}
signinFailed: No es pot iniciar sessió. El nom d'usuari o la contrasenya són incorrectes.
@ -1114,7 +1114,7 @@ createAccount: Crea un compte
fontSize: Mida del text
noFollowRequests: No tens cap sol·licitud de seguiment per aprovar
openImageInNewTab: Obre les imatges a una pestanya nova
dashboard: Tauler
dashboard: Taulell
local: Local
remote: Remot
total: Total
@ -1551,7 +1551,7 @@ itsOn: Activat
itsOff: Desactivat
emailRequiredForSignup: Requereix una adreça de correu electrònic per registrar-te
unread: Sense llegir
controlPanel: Tauler de control
controlPanel: Taulell de control
manageAccounts: Gestionar comptes
makeReactionsPublic: Estableix l'historial de reaccions com a públic
classic: Centrat
@ -2233,7 +2233,7 @@ enablePullToRefresh: Activa "Baixa per actualitzar"
pullDownToReload: Baixa per actualitzar
pullToRefreshThreshold: Distancia de baixada per actualitzar
searchWords: Paraules / ID o adreça URL que vols cercar
noSentFollowRequests: No tens cap sol·licitud de seguiment enviada
noSentFollowRequests: No has enviat cap sol·licitud de seguiment
sentFollowRequests: Enviar sol·licituds de seguiment
replyMute: Silencia les respostes a les línies de temps
replyUnmute: Treu el silencia de les respostes a les línies de temps
@ -2311,7 +2311,7 @@ _later:
future: futur
justNow: ara mateix
secondsAgo: en {n}s
minutesAgo: en {n}m
minutesAgo: en {n}min
daysAgo: en {n}d
weeksAgo: en {n}s
monthsAgo: en {n}me
@ -2321,3 +2321,6 @@ scheduledDate: Publica el
scheduledPost: Programa aquesta publicació
scheduledPostAt: Aquesta publicació s'enviarà {time}
cancelScheduledPost: Elimina la planificació
addAlt4MeTag: "Afegeix automàticament l'etiqueta #Alt4Me a les teves publicacions
que tinguin un fitxer adjunt sense descripció"
strongPassword: Bona contrasenya

View file

@ -401,7 +401,7 @@ tooShort: "Příliš krátké"
tooLong: "Příliš dlouhé"
weakPassword: "Slabé heslo"
normalPassword: "Dobré heslo"
strongPassword: "Silné heslo"
veryStrongPassword: "Silné heslo"
passwordMatched: "Hesla se schodují"
passwordNotMatched: "Hesla se neschodují"
signinWith: "Přihlásit se s {x}"

View file

@ -77,7 +77,7 @@ lists: "Listen"
noLists: "Du hast keine Listen angelegt"
note: "Beitrag"
notes: "Beiträge"
following: "Folgen"
following: "Folgend"
followers: "Folgen mir"
followsYou: "Folgt dir"
createList: "Liste erstellen"
@ -95,7 +95,7 @@ youShouldUpgradeClient: "Bitte aktualisiere diese Seite, um eine neuere Version
Clients zu verwenden."
enterListName: "Gib einen Namen für die Liste ein"
privacy: "Privatsphäre"
makeFollowManuallyApprove: "Folgeanfragen bedürfen der Genehmigung"
makeFollowManuallyApprove: "Folgeanfragen müssen akzeptiert werden"
defaultNoteVisibility: "Standard-Sichtbarkeit"
follow: "Folgen"
followRequest: "Follow anfragen"
@ -430,7 +430,7 @@ securityKeyName: "Schlüsselname"
registerSecurityKey: "Sicherheitsschlüssel registrieren"
lastUsed: "Zuletzt benutzt"
unregister: "Deaktivieren"
passwordLessLogin: "Passwortloses Anmelden einrichten"
passwordLessLogin: "Passwortloses Anmelden"
resetPassword: "Passwort zurücksetzen"
newPasswordIs: "Das neue Passwort ist „{password}“"
reduceUiAnimation: "Animationen der Benutzeroberfläche reduzieren"
@ -480,7 +480,7 @@ tooShort: "Zu kurz"
tooLong: "Zu lang"
weakPassword: "Schwaches Passwort"
normalPassword: "Durchschnittliches Passwort"
strongPassword: "Starkes Passwort"
veryStrongPassword: "Starkes Passwort"
passwordMatched: "Stimmt überein"
passwordNotMatched: "Stimmt nicht überein"
signinWith: "Mit {x} anmelden"
@ -865,7 +865,7 @@ customCss: "Benutzerdefiniertes CSS"
customCssWarn: "Verwende diese Einstellung nur, wenn du weißt, was sie tut. Ungültige
Eingaben können dazu führen, dass der Client nicht mehr normal funktioniert."
global: "Global"
squareAvatars: "Profilbilder quadratisch anzeigen"
squareAvatars: "Profilbilder für Accounts ohne Katzenohren quadratisch anzeigen"
sent: "Gesendet"
received: "Erhalten"
searchResult: "Suchergebnisse"
@ -948,7 +948,7 @@ check: "Überprüfe"
driveCapOverrideLabel: "Die Cloud-Drive-Kapazität dieses Nutzers verändern"
driveCapOverrideCaption: "Gib einen Wert von 0 oder weniger ein, um die Kapazität
auf den Standard zurückzusetzen."
requireAdminForView: "Du musst dich mit einem Administratorkonto anmelden um dies
requireAdminForView: "Du musst dich mit einem Administratorkonto anmelden, um dies
zu sehen."
isSystemAccount: "Ein Nutzerkonto, dass durch das System erstellt und automatisch
kontrolliert wird. Jede Anpassung, Veränderung oder Löschung dieses Nutzerkontos,
@ -1241,6 +1241,7 @@ _wordMute:
muteLangsDescription2: Sprachcode verwenden, z.B. en, fr, ja, zh..
lang: Sprache
langDescription: Beiträge in der angegebenen Sprache in der Timeline ausblenden.
mutePatterns: Gedämpfte Muster
_instanceMute:
instanceMuteDescription: "Schaltet alle Beiträge/Boosts stumm, die von den gelisteten
Servern stammen, inklusive Antworten von Nutzern an einen Nutzer eines stummgeschalteten
@ -1332,7 +1333,7 @@ _sfx:
channel: "Kanalbenachrichtigung"
_ago:
future: "Zukunft"
justNow: "Gerade eben"
justNow: "gerade eben"
secondsAgo: "vor {n} s"
minutesAgo: "vor {n} min"
hoursAgo: "vor {n} h"
@ -1931,6 +1932,7 @@ _notification:
voted: haben bei deiner Umfrage abgestimmt
reacted: hat auf deinen Beitrag reagiert
renoted: hat deinen Beitrag geteilt
andCountUsers: und {count} mehr Nutzer {acted}
_deck:
alwaysShowMainColumn: "Hauptspalte immer zeigen"
columnAlign: "Spaltenausrichtung"
@ -1979,8 +1981,8 @@ flagSpeakAsCatDescription: Deine Beiträge werden im Katzenmodus nyanisiert
hiddenTags: Versteckte Hashtags
antennaInstancesDescription: Geben sie einen Server-Namen pro Zeile ein
secureModeInfo: Bei Anfragen an andere Server nicht ohne Nachweis zurücksenden.
renoteMute: Boosts stummschalten
renoteUnmute: Stummschaltung von Boosts aufheben
renoteMute: Boosts in Timelines stummschalten
renoteUnmute: Stummschaltung von Boosts in der Timeline aufheben
noInstances: Keine Server gefunden
privateModeInfo: Wenn diese Option aktiviert ist, können nur als vertrauenswürdig
eingestufte Server mit diesem Server föderieren. Alle Beiträge werden für die Öffentlichkeit
@ -2019,9 +2021,8 @@ moveAccountDescription: 'Dieser Vorgang kann nicht rückgängig gemacht werden!
wie folgt ein: @name@server.xyz'
sendPushNotificationReadMessage: Löschung der Push-Benachrichtigungen sobald die entsprechenden
Benachrichtigungen oder Nachrichten gelesen wurden
signupsDisabled: Derzeit sind keine Anmeldungen auf diesem Server möglich! Anmeldungen
auf anderen Servern sind jedoch möglich! Wenn Sie einen Einladungscode für diesen
Server haben, geben Sie ihn bitte unten ein.
signupsDisabled: Derzeit sind keine Anmeldungen auf diesem Server möglich. Wenn Sie
einen Einladungscode für diesen Server haben, geben Sie ihn bitte unten ein.
swipeOnDesktop: Am Desktop PC das Wischen wie bei mobilen Geräten zulassen
enterSendsMessage: Drücken sie zum Senden des Beitrages die Eingabetaste (Strg-Taste
ausgeschaltet)
@ -2212,3 +2213,131 @@ quotes: Zitate
moreUrlsDescription: "Die Seiten, welche angepinnt werde sollen, im Hilfe-Menü in
der unteren linken Ecke in folgender Notation angeben:\n\"Anzeigename\": https://example.com/"
toQuote: Zitat
releaseToReload: Loslassen, um neu zu laden
pullDownToReload: Herunterziehen zum Aktualisieren
antennaLimit: Die maximale Anzahl von Antennen, die jeder Nutzer erstellen kann
toEdit: Bearbeiten
squareCatAvatars: Profilbilder für Accounts mit Katzenohren quadratisch anzeigen
moderationNote: Moderationsnotiz
ipFirstAcknowledged: Das Datum des ersten Erwerbs der IP Adresse
driveCapacityOverride: Benutzerdefinierte Speicherkapazität
searchWordsDescription: "Hier den Suchbegriff für Beiträge eingeben. Mit einem Leerzeichen
getrennte Begriffe werden in einer UND Suche gesucht, um eine ODER Suche auszuführen
'OR' (ohne Anführungszeichen) zwischen die Begriffe schreiben.\nZum Beispiel findet
die Suche nach \"Morgen Nacht\" Beiträge, die sowohl \"Morgen, als auch \"Nacht\"\
\ enthalten. Die Suchanfrage \"Morgen OR Nacht\" findet Beiträge, die entweder \"\
Morgen\" oder \"Nacht\" (oder beides) enthalten.\nDie AND und OR Suche ist zudem
kombinierbar, z.B. so: \"(Morgen OR Nacht) Eule)\".\nUm nach einer Sequenz von Wörtern
(z.B. einem Satz) zu suchen, muss die gesamte Wortsequenz in Anführungszeichen stehen.
Beispiel: \"Nachrichten von heute\"\n\nUm zu einem bestimmten Profil oder Beitrag
zu gelangen, muss die ID oder URL (Webadresse) eingegeben und der Suchknopf gedrückt
werden. Ansonsten wird nach Beiträgen gesucht, die die ID oder URL wörtlich enthalten."
useCdnDescription: Einige statische Ressourcen, wie einen Twemoji, vom JSDelivr CDN
anstatt von diesem Firefish server laden.
suggested: Vorgeschlagen
preventMisclick: Schutz vor versehentlichen Clicks
replaceWidgetsButtonWithReloadButton: Widget-Knopf durch Aktualisierungs-Knopf ersetzen
hideFollowButtons: Folgen-Knopf in einer versehentlich clickbaren Position verstecken
forMobile: Mobil
privateDescription: Nur für Sie sichtbar machen
makePrivate: Als privat markieren
searchUsers: Erstellt von (optional)
searchWords: Suchbegriffe / ID oder URL als Suchanfrage
searchCwAndAlt: Inhaltswarnungen und Beschreibungen von Dateien einbeziehen
searchUsersDescription: "Um nach Beiträgen eines bestimmten Nutzers/ Servers zu suchen,
einfach die ID (@Benutzer@beispiel.de, or @Benutzer für einen lokalen Benutzer)
oder Webadresse (beispiel.de) eingeben.\n\nDie Suche \"me\" (ohne Anführungszeichen)
findet alle Ihre Beiträge (auch nicht-gelistete, direkte, geheime Beiträge und Beiträge,
die nur für Follower sichtbar sind).\n\nDie Suche \"local\" (ohne Anführungszeichen)
sorgt dafür, dass nur Beiträge von diesem Server angezeigt werden."
publishTimelines: Timelines für Besucher veröffentlichen
publishTimelinesDescription: Falls konfiguriert, werden die lokale und globale Timeline
auf {url} auch ohne Anmeldung angezeigt.
showNoAltTextWarning: Eine Warnung beim Hochladen von Dateien ohne Beschreibung anzeigen
_emojiModPerm:
add: Hinzufügen
full: Alles erlauben
unauthorized: Kein(e)
mod: Hinzufügen und bearbeiten
messagingUnencryptedInfo: Unterhaltungen auf Firefish sind nicht Ende-zu-Ende verschlüsselt.
Teilen Sie keine sensiblen Informationen über Firefish.
autocorrectNoteLanguage: Eine Warnung anzeigen, wenn die Beitragssprache nicht mit
der automatisch ermittelten Sprache übereinstimmt
emojiModPerm: Berechtigung, personalisierte Emojis zu verwalten
emojiModPermDescription: "Hinzufügen: Erlauben Sie diesem Benutzer, neue benutzerdefinierte
Emojis hinzuzufügen und Tag/Kategorie/Lizenz für neu hinzugefügte benutzerdefinierte
Emojis einzustellen.\nHinzufügen und Bearbeiten: \"Hinzufügen\" Berechtigung + Erlauben
Sie diesem Benutzer, den Namen/die Kategorie/Tag/die Lizenz der vorhandenen benutzerdefinierten
Emojis zu bearbeiten.\nAlles erlauben: \"Hinzufügen und Bearbeiten\" Berechtigung
+ Erlauben Sie diesem Benutzer, bestehende benutzerdefinierte Emojis zu löschen."
reloading: Aktualisiert
markLocalFilesNsfwByDefault: Standardmäßig alle neuen lokalen Dateien als sensibel
markieren
markLocalFilesNsfwByDefaultDescription: Unabhäning von dieser Einstellung lässt sich
eine NSFW-Markierung entfernen. Bereits existierende Dateien sind nicht betroffen.
noLanguage: Keine Sprache
showBigPostButton: Anzeigen eines großen Knopfes zum Teilen des Beitrags im Beitragsformular
private: Privat
searchRange: Veröffentlicht zwischen (optional)
searchPostsWithFiles: Nur Beiträge mit Dateien
noAltTextWarning: Einige der angehängten Dateien haben keine Beschreibung. Haben Sie
vergessen, diese zu schreiben?
toReply: Antworten
toPost: Teilen
sentFollowRequests: Gesendete Follow-Anfragen
replyMute: Antworten in Timelines stummschalten
replyUnmute: Stummschaltung von Antworten in Timelines aufheben
noSentFollowRequests: Keine gesendeten Follow-Anfragen
postSearch: Beitragssuche auf diesem Server
enablePullToRefresh: '"Herunterziehen um zu aktualisieren" aktivieren'
pullToRefreshThreshold: Benötigte heruntergezogene Distanz, um zu Aktualisieren
showAddFileDescriptionAtFirstPost: Öffne automatisch ein Eingabefeld, um fehlende
Dateibeschreibungen beim Hochladen zu ergänßen
searchRangeDescription: "Um eine Zeitspanne zu filtern, geben Sie diese in diesem
Format an: 20220615-20231031 (YYYYMMTT)\n\nDas Auslassen der Jahreszahl (z.B.: 0615-1031
oder 20220615-1031) wird automatisch wie die aktuelle Jahreszahl interpretiert.\n
\nZudem können das Anfangs- oder Enddatum ausgelassen werden. Zum Beispiel gibt
-1031 an, nach Beiträgen vor dem 31.10 dieses Jahres zu suchen. Umgekehrt führt
20220615- zu einer Suche nach allen Beiträgen nach dem 15.6.2022."
incorrectLanguageWarning: "Es sieht so aus, als wäre ihr Beitrag auf {detected}, aber
Sie haben {current} ausgewählt.\nMöchten Sie stattdessen die Sprache zu {detected}
ändern?"
noteEditHistory: Bearbeitungsgeschichte des Beitrags
_later:
justNow: gerade eben
secondsAgo: in {n}s
minutesAgo: in {n}min
hoursAgo: in {n}h
daysAgo: in {n}d
weeksAgo: in {n} Woche(n)
monthsAgo: in {n} Monat(en)
yearsAgo: in {n} Jahr(en)
future: zukünftig
scheduledPost: Veröffentlichungszeit manuell festlegen
scheduledDate: Geplantes Datum
mergeRenotesInTimeline: Mehrere Boosts eines Beitrags gruppieren
mergeThreadInTimeline: In der Timeline mehrere Beiträge im gleichen Thread zusammenlegen
cannotEditVisibility: Die Sichtbarkeit lässt sich nicht einstellen
useThisAccountConfirm: Mit diesem Benutzerkonto fortfahren?
inputAccountId: Bitte gib dein Benutzerkonto an (z.B. @firefish@info.firefish.dev)
remoteFollow: Folgen (fremde Instanz)
foldNotification: Ähnliche Benachrichtigungen gruppieren
i18nServerInfo: Neue Clients nutzen standardmäßig {language}.
i18nServerChange: Stattdessen {language} benutzen.
i18nServerSet: Für neue Clients {language} benutzen.
getQrCode: QR Code anzeigen
useCdn: Ressourcen von einem CDN laden
copyRemoteFollowUrl: URL zum Folgen auf einer fremden Instanz kopieren
showPreviewByDefault: Standardmäßig Vorschau in Beitragsform anzeigen
replaceChatButtonWithAccountButton: Unterhaltungen-Knopf durch Knopf zum Wechseln
des Benutzerkontos ersetzen
searchEngine: Verwendete Suchmaschine in der Suchleiste MFM
makePrivateConfirm: Diese Operation sendet eine Löschungsanfrage an fremde Server
und ändert die Sichtbarkeit zu 'privat'. Fortfahren?
enableTimelineStreaming: Timelines automatisch aktualisieren
scheduledPostAt: Der Beitrag wird {time} gesendet
cancelScheduledPost: Zeitplan entfernen
media: Medien
slashQuote: Kettenzitat
addAlt4MeTag: 'Automatisch den Hashtag #Alt4Me am Ende deines Beitrags einfügen, wenn
eine angehängte Datei keine Beschreibung hat'

View file

@ -677,7 +677,7 @@ checking: Έλεγχος...
invitationCode: Κωδικός πρόσκλησης
normalPassword: Μέτριος κωδικός
weakPassword: Αδύναμος κωδικός
strongPassword: Δυνατός κωδικός
veryStrongPassword: Δυνατός κωδικός
signinWith: Συνδεθείτε με {x}
tapSecurityKey: Βάλτε το κλειδί ασφάλειας
signinFailed: Αδυναμία σύνδεσης. Το όνομα μέλους ή ο κωδικός είναι λάθος.

View file

@ -514,8 +514,9 @@ usernameInvalidFormat: "You can use upper- and lowercase letters, numbers, and u
tooShort: "Too short"
tooLong: "Too long"
weakPassword: "Weak password"
normalPassword: "Average password"
strongPassword: "Strong password"
normalPassword: "Medium password"
strongPassword: "Good password"
veryStrongPassword: "Great password"
passwordMatched: "Matches"
passwordNotMatched: "Does not match"
signinWith: "Sign in with {x}"
@ -542,7 +543,7 @@ existingAccount: "Existing account"
regenerate: "Regenerate"
fontSize: "Font size"
noFollowRequests: "You don't have any pending follow requests"
noSentFollowRequests: "You don't have any sent follow requests"
noSentFollowRequests: "You haven't sent any follow requests"
openImageInNewTab: "Open images in new tab"
dashboard: "Dashboard"
local: "Local"
@ -1236,8 +1237,9 @@ publishTimelinesDescription: "If enabled, the Local and Global timelines will be
on {url} even when signed out."
noAltTextWarning: "Some attached file(s) have no description. Did you forget to write?"
showNoAltTextWarning: "Show a warning if you attempt to post files without a description"
showAddFileDescriptionAtFirstPost: "Automatically open a form to write a description when you
attempt to post files without a description"
showAddFileDescriptionAtFirstPost: "Automatically open a form to write a description
when you attempt to post files without a description"
addAlt4MeTag: "Automatically append #Alt4Me hashtag to your post if attached file has no description"
_emojiModPerm:
unauthorized: "None"
@ -1590,7 +1592,7 @@ _later:
future: "future"
justNow: "right now"
secondsAgo: "in {n}s"
minutesAgo: "in {n}m"
minutesAgo: "in {n}min"
hoursAgo: "in {n}h"
daysAgo: "in {n}d"
weeksAgo: "in {n}w"

View file

@ -465,7 +465,7 @@ tooShort: "Demasiado corto"
tooLong: "Demasiado largo"
weakPassword: "Contraseña débil"
normalPassword: "Buena contraseña"
strongPassword: "Muy buena contraseña"
veryStrongPassword: "Muy buena contraseña"
passwordMatched: "Correcto"
passwordNotMatched: "Las contraseñas no son las mismas"
signinWith: "Inicie sesión con {x}"
@ -1073,7 +1073,7 @@ _aboutFirefish:
source: "Código fuente"
translation: "Traducir Firefish"
donate: "Donar a Firefish"
pleaseDonateToFirefish: Por favor considera donar a Firefish para apollar su desarrollo.
pleaseDonateToFirefish: Por favor considera donar a Firefish para apoyar su desarrollo.
donateHost: Dona a {host}
donateTitle: ¿Te gusta Firefish?
pleaseDonateToHost: También considera donar a tu propio servidor , {host}, para

View file

@ -569,7 +569,7 @@ tooShort: Liian lyhyt
tooLong: Liian pitkä
weakPassword: Heikko salasana
normalPassword: Kohtalainen salasana
strongPassword: Vahva salasana
veryStrongPassword: Vahva salasana
passwordMatched: Vastaa
signinWith: Kirjaudu sisään {x}
signinFailed: Ei voitu kirjautua sisään. Annettu käyttäjänimi tai salasana virheellinen.

View file

@ -468,7 +468,7 @@ tooShort: "Trop court"
tooLong: "Trop long"
weakPassword: "Mot de passe faible"
normalPassword: "Mot de passe acceptable"
strongPassword: "Mot de passe fort"
veryStrongPassword: "Mot de passe fort"
passwordMatched: "Les mots de passe correspondent"
passwordNotMatched: "Les mots de passe ne correspondent pas"
signinWith: "Se connecter avec {x}"

View file

@ -464,7 +464,7 @@ tooShort: "Terlalu pendek"
tooLong: "Terlalu panjang"
weakPassword: "Kata sandi lemah"
normalPassword: "Kata sandi baik"
strongPassword: "Kata sandi kuat"
veryStrongPassword: "Kata sandi kuat"
passwordMatched: "Kata sandi sama"
passwordNotMatched: "Kata sandi tidak sama"
signinWith: "Masuk dengan {x}"

View file

@ -454,7 +454,7 @@ tooShort: "Troppo breve"
tooLong: "Troppo lungo"
weakPassword: "Password debole"
normalPassword: "Password buona"
strongPassword: "Password forte"
veryStrongPassword: "Password forte"
passwordMatched: "Corretta"
passwordNotMatched: "Le password non corrispondono"
signinWith: "Accedi con {x}"

View file

@ -465,7 +465,7 @@ tooShort: "短すぎます"
tooLong: "長すぎます"
weakPassword: "弱いパスワード"
normalPassword: "普通のパスワード"
strongPassword: "強いパスワード"
veryStrongPassword: "とても強いパスワード"
passwordMatched: "一致しました"
passwordNotMatched: "一致していません"
signinWith: "{x}でログイン"
@ -2087,3 +2087,5 @@ scheduledPost: 予約投稿
scheduledDate: 予定日
cancelScheduledPost: 予約を解除する
scheduledPostAt: '{time}に投稿されます'
strongPassword: 強いパスワード
addAlt4MeTag: '説明の無いファイルを投稿する際に自動で #Alt4Me のハッシュタグをつける'

View file

@ -435,7 +435,7 @@ tooShort: "短すぎやろ!"
tooLong: "長すぎやろ!"
weakPassword: "へぼいパスワード"
normalPassword: "普通のパスワード"
strongPassword: "ええ感じのパスワード"
veryStrongPassword: "ええ感じのパスワード"
passwordMatched: "よし!一致や!"
passwordNotMatched: "一致しとらんで?"
signinWith: "{x}でログイン"

View file

@ -442,7 +442,7 @@ tooShort: "너무 짧습니다"
tooLong: "너무 깁니다"
weakPassword: "약한 비밀번호"
normalPassword: "좋은 비밀번호"
strongPassword: "강한 비밀번호"
veryStrongPassword: "강한 비밀번호"
passwordMatched: "일치합니다"
passwordNotMatched: "일치하지 않습니다"
signinWith: "{x}로 로그인"

View file

@ -576,7 +576,7 @@ quoteAttached: Quote
noMessagesYet: Nog geen berichten
weakPassword: Zwak wachtwoord
normalPassword: Middelmatig wachtwoord
strongPassword: Sterk wachtwoord
veryStrongPassword: Sterk wachtwoord
onlyOneFileCanBeAttached: Je kan maar één bestand toevoegen aan je bericht
invitationCode: Uitnodigingscode
checking: Controleren...

View file

@ -571,7 +571,7 @@ youHaveNoGroups: Du har ingen grupper
noHistory: Ingen historikk er tilgjengelig
aboutX: Om {x}
signinHistory: Innloggings-historikk
strongPassword: Sterkt passord
veryStrongPassword: Sterkt passord
noFollowRequests: Du har ingen utestående følgeforespørsler
openImageInNewTab: Åpne bilder i ny fane
dashboard: Dashbord

View file

@ -454,7 +454,7 @@ tooShort: "Zbyt krótka"
tooLong: "Zbyt długa"
weakPassword: "Słabe hasło"
normalPassword: "Dobre hasło"
strongPassword: "Silne hasło"
veryStrongPassword: "Silne hasło"
passwordMatched: "Pasuje"
passwordNotMatched: "Hasła nie pasują do siebie"
signinWith: "Zaloguj się z {x}"

View file

@ -464,7 +464,7 @@ tooShort: "Prea scurt"
tooLong: "Prea lung"
weakPassword: "Parolă slabă"
normalPassword: "Parolă medie"
strongPassword: "Parolă puternică"
veryStrongPassword: "Parolă puternică"
passwordMatched: "Se potrivește!"
passwordNotMatched: "Nu se potrivește"
signinWith: "Autentifică-te cu {x}"

View file

@ -460,7 +460,7 @@ tooShort: "Слишком короткий"
tooLong: "Слишком длинный"
weakPassword: "Слабый пароль"
normalPassword: "Годный пароль"
strongPassword: "Надёжный пароль"
veryStrongPassword: "Надёжный пароль"
passwordMatched: "Совпали"
passwordNotMatched: "Не совпадают"
signinWith: "Использовать {x} для входа"
@ -2137,3 +2137,15 @@ replies: Ответы
quotes: Цитаты
clickToShowPatterns: Нажмите, чтобы показать модуль шаблонов
renotes: Репосты
markLocalFilesNsfwByDefaultDescription: Независимо от данной настройки, пользователи
могут самостоятельно удалять метку NSFW. Не применяется на существующие файлы.
toEdit: Редактировать
attachedToNotes: Посты с этим файлом
showAttachedNotes: Показывать посты с этим файлом
strongPassword: Хороший пароль
toReply: Ответить
toPost: Выложить
sentFollowRequests: Отправленные запросы на подписку
toQuote: Цитировать
cannotEditVisibility: Вы не можете изменить видимость
noSentFollowRequests: Вы не отправляли никаких запросов на подписку

View file

@ -461,7 +461,7 @@ tooShort: "Príliš krátke"
tooLong: "Príliš dlhé"
weakPassword: "Slabé heslo"
normalPassword: "Dobré heslo"
strongPassword: "Silné heslo"
veryStrongPassword: "Silné heslo"
passwordMatched: "Heslá sú rovnaké"
passwordNotMatched: "Heslá nie sú rovnaké"
signinWith: "Prihlásiť sa použitím {x}"

View file

@ -381,7 +381,7 @@ noMessagesYet: Inga meddelande ännu
newMessageExists: Det finns nya meddelanden
weakPassword: Svagt lösenord
normalPassword: Dugligt lösenord
strongPassword: Starkt lösenord
veryStrongPassword: Starkt lösenord
passwordMatched: Matchar
passwordNotMatched: Matchar inte
signinWith: Logga in med {x}

View file

@ -452,7 +452,7 @@ tooShort: "สั้นเกินไปนะ"
tooLong: "ยาวเกินไปนะ"
weakPassword: "รหัสผ่าน แย่มาก"
normalPassword: "รหัสผ่านปกติ"
strongPassword: "รหัสผ่านรัดกุมมาก"
veryStrongPassword: "รหัสผ่านรัดกุมมาก"
passwordMatched: "ถูกต้อง!"
passwordNotMatched: "ไม่ถูกต้อง"
signinWith: "ลงชื่อเข้าใช้ด้วย {x}"

View file

@ -521,7 +521,7 @@ newMessageExists: Yeni mesaj yok
invitations: Davetler
invitationCode: Davet kodu
signinWith: '{x} ile giriş yap'
strongPassword: Güçlü şifre
veryStrongPassword: Güçlü şifre
passwordNotMatched: Uyuşmuyor
signinFailed: Giriş yapılamadı. Şifre veya kullanıcı adı yanlış.
tapSecurityKey: Güvenlik anahtarınıza dokunun

View file

@ -460,7 +460,7 @@ tooShort: "Занадто короткий"
tooLong: "Занадто довгий"
weakPassword: "Слабкий пароль"
normalPassword: "Достатній пароль"
strongPassword: "Міцний пароль"
veryStrongPassword: "Міцний пароль"
passwordMatched: "Все вірно"
passwordNotMatched: "Паролі не співпадають"
signinWith: "Увійти за допомогою {x}"

View file

@ -462,7 +462,7 @@ tooShort: "Quá ngắn"
tooLong: "Quá dài"
weakPassword: "Mật khẩu yếu"
normalPassword: "Mật khẩu tạm được"
strongPassword: "Mật khẩu mạnh"
veryStrongPassword: "Mật khẩu mạnh"
passwordMatched: "Trùng khớp"
passwordNotMatched: "Không trùng khớp"
signinWith: "Đăng nhập bằng {x}"

View file

@ -450,7 +450,7 @@ tooShort: "太短"
tooLong: "太长"
weakPassword: "密码强度:弱"
normalPassword: "密码强度:中等"
strongPassword: "密码强度:强"
veryStrongPassword: "密码强度:强"
passwordMatched: "密码一致"
passwordNotMatched: "密码不一致"
signinWith: "以 {x} 登录"
@ -1950,7 +1950,7 @@ noteId: 帖子 ID
moveFrom: 从旧账号迁移至此账号
defaultReaction: 发出和收到帖子的默认表情符号反应
sendModMail: 发送管理通知
moderationNote: "管理笔记"
moderationNote: "管理员备注"
ipFirstAcknowledged: "首次获取此 IP 地址的日期"
driveCapacityOverride: "网盘容量变更"
isLocked: 该账号设置了关注请求

View file

@ -448,7 +448,7 @@ tooShort: "過短"
tooLong: "過長"
weakPassword: "密碼強度過弱"
normalPassword: "密碼強度普通"
strongPassword: "密碼強度高"
veryStrongPassword: "密碼強度高"
passwordMatched: "密碼一致"
passwordNotMatched: "密碼不一致"
signinWith: "以{x}登錄"

View file

@ -1,11 +1,11 @@
{
"name": "firefish",
"version": "20240523",
"version": "20240630",
"repository": {
"type": "git",
"url": "https://firefish.dev/firefish/firefish.git"
},
"packageManager": "pnpm@9.1.4",
"packageManager": "pnpm@9.4.0",
"private": true,
"scripts": {
"rebuild": "pnpm run clean && pnpm run build",
@ -22,13 +22,14 @@
"dev": "pnpm node ./scripts/dev.mjs",
"dev:staging": "NODE_OPTIONS=--max_old_space_size=3072 NODE_ENV=development pnpm run build && pnpm run start",
"lint": "pnpm run lint:ts; pnpm run lint:rs",
"lint:ts": "pnpm --filter !firefish-js -r --parallel run lint",
"lint:ts": "pnpm --filter !firefish-js --recursive run lint ; pnpm run format:ts",
"lint:rs": "cargo clippy --fix --allow-dirty --allow-staged && cargo fmt --all --",
"debug": "pnpm run build:debug && pnpm run start",
"mocha": "pnpm --filter backend run mocha",
"test": "pnpm run test:ts && pnpm run test:rs",
"test": "pnpm run test:rs && pnpm run test:rs:miri && pnpm run test:ts",
"test:ts": "pnpm run mocha",
"test:rs": "cargo test --doc && cargo nextest run",
"test:rs:miri": "MIRIFLAGS='-Zmiri-disable-isolation' cargo +nightly miri nextest run -j$(nproc --all)",
"format": "pnpm run format:ts; pnpm run format:rs",
"format:ts": "pnpm -r --parallel run format",
"format:rs": "cargo fmt --all --",
@ -41,14 +42,13 @@
"js-yaml": "4.1.0"
},
"devDependencies": {
"@biomejs/biome": "1.7.3",
"@biomejs/cli-darwin-arm64": "1.7.3",
"@biomejs/cli-darwin-x64": "1.7.3",
"@biomejs/cli-linux-arm64": "1.7.3",
"@biomejs/cli-linux-x64": "1.7.3",
"@types/node": "20.12.13",
"execa": "9.1.0",
"pnpm": "9.1.4",
"typescript": "5.4.5"
"@biomejs/biome": "1.8.3",
"@biomejs/cli-darwin-arm64": "1.8.3",
"@biomejs/cli-darwin-x64": "1.8.3",
"@biomejs/cli-linux-arm64": "1.8.3",
"@biomejs/cli-linux-x64": "1.8.3",
"@types/node": "20.14.9",
"execa": "9.3.0",
"pnpm": "9.4.0"
}
}

View file

@ -6,13 +6,13 @@ rust-version = "1.74"
[features]
default = []
napi = ["dep:napi", "dep:napi-derive"]
napi = ["dep:napi", "dep:napi-derive", "dep:napi-build"]
[lib]
crate-type = ["cdylib", "lib"]
[dependencies]
macro-rs = { workspace = true }
macros = { workspace = true }
napi = { workspace = true, optional = true, features = ["chrono_date", "napi4", "serde-json", "tokio_rt"] }
napi-derive = { workspace = true, optional = true }
@ -25,7 +25,7 @@ bcrypt = { workspace = true, features = ["std"] }
chrono = { workspace = true }
cuid2 = { workspace = true }
emojis = { workspace = true }
idna = { workspace = true }
idna = { workspace = true, features = ["std", "compiled_data"] }
image = { workspace = true, features = ["avif", "bmp", "gif", "ico", "jpeg", "png", "tiff", "webp"] }
isahc = { workspace = true, features = ["http2", "text-decoding"] }
nom-exif = { workspace = true }
@ -39,7 +39,6 @@ sea-orm = { workspace = true, features = ["macros", "runtime-tokio-rustls", "sql
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
strum = { workspace = true, features = ["derive"] }
sysinfo = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["fs", "io-std", "io-util", "macros", "process", "rt-multi-thread", "signal", "sync", "time"] }
@ -54,4 +53,4 @@ pretty_assertions = { workspace = true, features = ["std"] }
tokio-test = { workspace = true }
[build-dependencies]
napi-build = { workspace = true }
napi-build = { workspace = true, optional = true }

View file

@ -11,25 +11,25 @@ regenerate-entities:
--output-dir='src/model/entity' \
--database-url='postgres://$(POSTGRES_USER):$(POSTGRES_PASSWORD)@localhost:25432/$(POSTGRES_DB)' \
--date-time-crate='chrono' \
--with-serde='both' \
--model-extra-attributes='NAPI_EXTRA_ATTR_PLACEHOLDER' && \
for file in src/model/entity/*; do \
base=$$(basename -- "$${file}"); \
jsname=$$(printf '%s\n' "$${base%.*}" | perl -pe 's/(^|_)./uc($$&)/ge;s/_//g'); \
attribute=$$(printf 'cfg_attr(feature = "napi", napi_derive::napi(object, js_name = "%s", use_nullable = true))' "$${jsname}"); \
attribute=$$(printf 'macros::export(object, js_name = "%s")' "$${jsname}"); \
sed -i "s/NAPI_EXTRA_ATTR_PLACEHOLDER/$${attribute}/" "$${file}"; \
sed -i 's/#\[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)\]/#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, serde::Serialize, serde::Deserialize)]\n#[serde(rename_all = "camelCase")]/' "$${file}"; \
sed -i 's/#\[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)\]/#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]\n#[serde(rename_all = "camelCase")]/' "$${file}"; \
done
sed -i 's/#\[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum)\]/#[derive(Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, serde::Serialize, serde::Deserialize)]\n#[serde(rename_all = "camelCase")]\n#[cfg_attr(not(feature = "napi"), derive(Clone))]\n#[cfg_attr(feature = "napi", napi_derive::napi(string_enum = "camelCase"))]/' \
sed -i 's/#\[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)\]/#[derive(Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]\n#[serde(rename_all = "camelCase")]\n#[macros::derive_clone_and_export(string_enum = "camelCase")]/' \
src/model/entity/sea_orm_active_enums.rs
cargo fmt --all --
.PHONY: update-index
update-index: index.js index.d.ts
index.js index.d.ts: $(SRC)
index.js index.d.ts: $(SRC) package.json
NODE_OPTIONS='--max_old_space_size=3072' pnpm run build:debug
[ -f built/index.js ] && [ -f built/index.d.ts ]
rm --force index.js index.d.ts
cp built/index.js index.js
cp built/index.d.ts index.d.ts
sed -i 's/^ \*r"/ */g' index.d.ts

View file

@ -1,9 +1,8 @@
extern crate napi_build;
fn main() {
// watch the version in the project root package.json
println!("cargo:rerun-if-changed=../../package.json");
// napi
#[cfg(feature = "napi")]
napi_build::setup();
}

File diff suppressed because it is too large Load diff

View file

@ -1,397 +1,453 @@
/* tslint:disable */
// prettier-ignore
/* eslint-disable */
/* prettier-ignore */
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
const { platform, arch } = process
const { readFileSync } = require('fs')
let nativeBinding = null
let localFileExisted = false
let loadError = null
const loadErrors = []
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim()
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
return true
const isMusl = () => {
let musl = false
if (process.platform === 'linux') {
musl = isMuslFromFilesystem()
if (musl === null) {
musl = isMuslFromReport()
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
if (musl === null) {
musl = isMuslFromChildProcess()
}
}
return musl
}
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
const isMuslFromFilesystem = () => {
try {
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
} catch {
return null
}
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'backend-rs.android-arm64.node'))
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.android-arm64.node')
} else {
nativeBinding = require('backend-rs-android-arm64')
const isMuslFromReport = () => {
const report = typeof process.report.getReport === 'function' ? process.report.getReport() : null
if (!report) {
return null
}
if (report.header && report.header.glibcVersionRuntime) {
return false
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true
}
}
return false
}
const isMuslFromChildProcess = () => {
try {
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
} catch (e) {
loadError = e
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
return false
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'backend-rs.android-arm-eabi.node'))
}
function requireNative() {
if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.android-arm-eabi.node')
} else {
nativeBinding = require('backend-rs-android-arm-eabi')
}
return require('./backend-rs.android-arm64.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'backend-rs.win32-x64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.win32-x64-msvc.node')
} else {
nativeBinding = require('backend-rs-win32-x64-msvc')
}
return require('backend-rs-android-arm64')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'backend-rs.win32-ia32-msvc.node')
)
} else if (process.arch === 'arm') {
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.win32-ia32-msvc.node')
} else {
nativeBinding = require('backend-rs-win32-ia32-msvc')
}
return require('./backend-rs.android-arm-eabi.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'backend-rs.win32-arm64-msvc.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.win32-arm64-msvc.node')
} else {
nativeBinding = require('backend-rs-win32-arm64-msvc')
}
return require('backend-rs-android-arm-eabi')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'backend-rs.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.darwin-universal.node')
} else {
nativeBinding = require('backend-rs-darwin-universal')
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'backend-rs.darwin-x64.node'))
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.darwin-x64.node')
} else {
nativeBinding = require('backend-rs-darwin-x64')
}
return require('./backend-rs.win32-x64-msvc.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'backend-rs.darwin-arm64.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.darwin-arm64.node')
} else {
nativeBinding = require('backend-rs-darwin-arm64')
}
return require('backend-rs-win32-x64-msvc')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
localFileExisted = existsSync(join(__dirname, 'backend-rs.freebsd-x64.node'))
} else if (process.arch === 'ia32') {
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.freebsd-x64.node')
} else {
nativeBinding = require('backend-rs-freebsd-x64')
}
return require('./backend-rs.win32-ia32-msvc.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
break
case 'linux':
switch (arch) {
case 'x64':
try {
return require('backend-rs-win32-ia32-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./backend-rs.win32-arm64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-win32-arm64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
}
} else if (process.platform === 'darwin') {
try {
return require('./backend-rs.darwin-universal.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-darwin-universal')
} catch (e) {
loadErrors.push(e)
}
if (process.arch === 'x64') {
try {
return require('./backend-rs.darwin-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-darwin-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./backend-rs.darwin-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-darwin-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
}
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./backend-rs.freebsd-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-freebsd-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./backend-rs.freebsd-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-freebsd-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-x64-musl.node')
} else {
nativeBinding = require('backend-rs-linux-x64-musl')
}
return require('./backend-rs.linux-x64-musl.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-x64-gnu.node')
} else {
nativeBinding = require('backend-rs-linux-x64-gnu')
}
return require('backend-rs-linux-x64-musl')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
try {
return require('./backend-rs.linux-x64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
break
case 'arm64':
try {
return require('backend-rs-linux-x64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm64') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-arm64-musl.node')
} else {
nativeBinding = require('backend-rs-linux-arm64-musl')
}
return require('./backend-rs.linux-arm64-musl.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-arm64-gnu.node')
} else {
nativeBinding = require('backend-rs-linux-arm64-gnu')
}
return require('backend-rs-linux-arm64-musl')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
try {
return require('./backend-rs.linux-arm64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
break
case 'arm':
try {
return require('backend-rs-linux-arm64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-arm-musleabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-arm-musleabihf.node')
} else {
nativeBinding = require('backend-rs-linux-arm-musleabihf')
}
return require('./backend-rs.linux-arm-musleabihf.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('backend-rs-linux-arm-gnueabihf')
}
return require('backend-rs-linux-arm-musleabihf')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
try {
return require('./backend-rs.linux-arm-gnueabihf.node')
} catch (e) {
loadErrors.push(e)
}
break
case 'riscv64':
try {
return require('backend-rs-linux-arm-gnueabihf')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'riscv64') {
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-riscv64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-riscv64-musl.node')
} else {
nativeBinding = require('backend-rs-linux-riscv64-musl')
}
return require('./backend-rs.linux-riscv64-musl.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
} else {
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-riscv64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-riscv64-gnu.node')
} else {
nativeBinding = require('backend-rs-linux-riscv64-gnu')
}
return require('backend-rs-linux-riscv64-musl')
} catch (e) {
loadError = e
loadErrors.push(e)
}
}
break
case 's390x':
localFileExisted = existsSync(
join(__dirname, 'backend-rs.linux-s390x-gnu.node')
)
} else {
try {
if (localFileExisted) {
nativeBinding = require('./backend-rs.linux-s390x-gnu.node')
} else {
nativeBinding = require('backend-rs-linux-s390x-gnu')
}
return require('./backend-rs.linux-riscv64-gnu.node')
} catch (e) {
loadError = e
loadErrors.push(e)
}
try {
return require('backend-rs-linux-riscv64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'ppc64') {
try {
return require('./backend-rs.linux-ppc64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-linux-ppc64-gnu')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 's390x') {
try {
return require('./backend-rs.linux-s390x-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('backend-rs-linux-s390x-gnu')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
}
} else {
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
}
}
nativeBinding = requireNative()
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
try {
nativeBinding = require('./backend-rs.wasi.cjs')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
console.error(err)
}
}
if (!nativeBinding) {
try {
nativeBinding = require('backend-rs-wasm32-wasi')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
console.error(err)
}
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
throw loadError
if (loadErrors.length > 0) {
// TODO Link to documentation with potential fixes
// - The package owner could build/publish bindings for this arch
// - The user may need to bundle the correct files
// - The user may need to re-install node_modules to get new packages
throw new Error('Failed to load native binding', { cause: loadErrors })
}
throw new Error(`Failed to load native binding`)
}
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, fetchNodeinfo, nodeinfo_2_1, nodeinfo_2_0, Protocol, Inbound, Outbound, greet, initializeRustLogger, showServerInfo, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, isQuote, isSafeUrl, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, cpuInfo, cpuUsage, memoryUsage, storageUsage, AntennaSrc, DriveFileUsageHint, MutedNoteReason, NoteVisibility, NotificationType, PageVisibility, PollNoteVisibility, RelayStatus, UserEmojiModPerm, UserProfileFfvisibility, UserProfileMutingNotificationTypes, updateAntennasOnNewNote, watchNote, unwatchNote, PushNotificationKind, sendPushNotification, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, generateSecureRandomString, generateUserToken } = nativeBinding
module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE
module.exports.HOUR = HOUR
module.exports.DAY = DAY
module.exports.USER_ONLINE_THRESHOLD = USER_ONLINE_THRESHOLD
module.exports.USER_ACTIVE_THRESHOLD = USER_ACTIVE_THRESHOLD
module.exports.FILE_TYPE_BROWSERSAFE = FILE_TYPE_BROWSERSAFE
module.exports.loadEnv = loadEnv
module.exports.loadConfig = loadConfig
module.exports.stringToAcct = stringToAcct
module.exports.acctToString = acctToString
module.exports.fetchNodeinfo = fetchNodeinfo
module.exports.nodeinfo_2_1 = nodeinfo_2_1
module.exports.nodeinfo_2_0 = nodeinfo_2_0
module.exports.Protocol = Protocol
module.exports.Inbound = Inbound
module.exports.Outbound = Outbound
module.exports.greet = greet
module.exports.initializeRustLogger = initializeRustLogger
module.exports.showServerInfo = showServerInfo
module.exports.isBlockedServer = isBlockedServer
module.exports.isSilencedServer = isSilencedServer
module.exports.isAllowedServer = isAllowedServer
module.exports.checkWordMute = checkWordMute
module.exports.getFullApAccount = getFullApAccount
module.exports.isSelfHost = isSelfHost
module.exports.isSameOrigin = isSameOrigin
module.exports.extractHost = extractHost
module.exports.toPuny = toPuny
module.exports.isUnicodeEmoji = isUnicodeEmoji
module.exports.sqlLikeEscape = sqlLikeEscape
module.exports.safeForSql = safeForSql
module.exports.formatMilliseconds = formatMilliseconds
module.exports.getImageSizeFromUrl = getImageSizeFromUrl
module.exports.getNoteSummary = getNoteSummary
module.exports.isQuote = isQuote
module.exports.isSafeUrl = isSafeUrl
module.exports.latestVersion = latestVersion
module.exports.toMastodonId = toMastodonId
module.exports.fromMastodonId = fromMastodonId
module.exports.fetchMeta = fetchMeta
module.exports.metaToPugArgs = metaToPugArgs
module.exports.nyaify = nyaify
module.exports.hashPassword = hashPassword
module.exports.verifyPassword = verifyPassword
module.exports.isOldPasswordAlgorithm = isOldPasswordAlgorithm
module.exports.decodeReaction = decodeReaction
module.exports.countReactions = countReactions
module.exports.toDbReaction = toDbReaction
module.exports.removeOldAttestationChallenges = removeOldAttestationChallenges
module.exports.cpuInfo = cpuInfo
module.exports.cpuUsage = cpuUsage
module.exports.memoryUsage = memoryUsage
module.exports.storageUsage = storageUsage
module.exports.AntennaSrc = AntennaSrc
module.exports.DriveFileUsageHint = DriveFileUsageHint
module.exports.MutedNoteReason = MutedNoteReason
module.exports.NoteVisibility = NoteVisibility
module.exports.NotificationType = NotificationType
module.exports.PageVisibility = PageVisibility
module.exports.PollNoteVisibility = PollNoteVisibility
module.exports.RelayStatus = RelayStatus
module.exports.UserEmojiModPerm = UserEmojiModPerm
module.exports.UserProfileFfvisibility = UserProfileFfvisibility
module.exports.UserProfileMutingNotificationTypes = UserProfileMutingNotificationTypes
module.exports.updateAntennasOnNewNote = updateAntennasOnNewNote
module.exports.watchNote = watchNote
module.exports.unwatchNote = unwatchNote
module.exports.PushNotificationKind = PushNotificationKind
module.exports.sendPushNotification = sendPushNotification
module.exports.publishToChannelStream = publishToChannelStream
module.exports.ChatEvent = ChatEvent
module.exports.publishToChatStream = publishToChatStream
module.exports.ChatIndexEvent = ChatIndexEvent
module.exports.publishToChatIndexStream = publishToChatIndexStream
module.exports.publishToBroadcastStream = publishToBroadcastStream
module.exports.publishToGroupChatStream = publishToGroupChatStream
module.exports.publishToModerationStream = publishToModerationStream
module.exports.getTimestamp = getTimestamp
module.exports.genId = genId
module.exports.genIdAt = genIdAt
module.exports.generateSecureRandomString = generateSecureRandomString
module.exports.generateUserToken = generateUserToken
module.exports.acctToString = nativeBinding.acctToString
module.exports.AntennaSrc = nativeBinding.AntennaSrc
module.exports.ChatEvent = nativeBinding.ChatEvent
module.exports.ChatIndexEvent = nativeBinding.ChatIndexEvent
module.exports.checkWordMute = nativeBinding.checkWordMute
module.exports.countReactions = nativeBinding.countReactions
module.exports.cpuInfo = nativeBinding.cpuInfo
module.exports.cpuUsage = nativeBinding.cpuUsage
module.exports.DAY = nativeBinding.DAY
module.exports.decodeReaction = nativeBinding.decodeReaction
module.exports.DriveFileEvent = nativeBinding.DriveFileEvent
module.exports.DriveFileUsageHint = nativeBinding.DriveFileUsageHint
module.exports.DriveFolderEvent = nativeBinding.DriveFolderEvent
module.exports.extractHost = nativeBinding.extractHost
module.exports.fetchMeta = nativeBinding.fetchMeta
module.exports.fetchNodeinfo = nativeBinding.fetchNodeinfo
module.exports.FILE_TYPE_BROWSERSAFE = nativeBinding.FILE_TYPE_BROWSERSAFE
module.exports.formatMilliseconds = nativeBinding.formatMilliseconds
module.exports.fromMastodonId = nativeBinding.fromMastodonId
module.exports.generateSecureRandomString = nativeBinding.generateSecureRandomString
module.exports.generateUserToken = nativeBinding.generateUserToken
module.exports.genId = nativeBinding.genId
module.exports.genIdAt = nativeBinding.genIdAt
module.exports.getFullApAccount = nativeBinding.getFullApAccount
module.exports.getImageSizeFromUrl = nativeBinding.getImageSizeFromUrl
module.exports.getNoteSummary = nativeBinding.getNoteSummary
module.exports.getTimestamp = nativeBinding.getTimestamp
module.exports.greet = nativeBinding.greet
module.exports.hashPassword = nativeBinding.hashPassword
module.exports.HOUR = nativeBinding.HOUR
module.exports.Inbound = nativeBinding.Inbound
module.exports.initializeRustLogger = nativeBinding.initializeRustLogger
module.exports.isAllowedServer = nativeBinding.isAllowedServer
module.exports.isBlockedServer = nativeBinding.isBlockedServer
module.exports.isOldPasswordAlgorithm = nativeBinding.isOldPasswordAlgorithm
module.exports.isQuote = nativeBinding.isQuote
module.exports.isSafeUrl = nativeBinding.isSafeUrl
module.exports.isSameOrigin = nativeBinding.isSameOrigin
module.exports.isSelfHost = nativeBinding.isSelfHost
module.exports.isSilencedServer = nativeBinding.isSilencedServer
module.exports.isUnicodeEmoji = nativeBinding.isUnicodeEmoji
module.exports.latestVersion = nativeBinding.latestVersion
module.exports.loadConfig = nativeBinding.loadConfig
module.exports.memoryUsage = nativeBinding.memoryUsage
module.exports.metaToPugArgs = nativeBinding.metaToPugArgs
module.exports.MINUTE = nativeBinding.MINUTE
module.exports.MutedNoteReason = nativeBinding.MutedNoteReason
module.exports.nodeinfo_2_0 = nativeBinding.nodeinfo_2_0
module.exports.nodeinfo_2_1 = nativeBinding.nodeinfo_2_1
module.exports.NoteVisibility = nativeBinding.NoteVisibility
module.exports.NotificationType = nativeBinding.NotificationType
module.exports.nyaify = nativeBinding.nyaify
module.exports.Outbound = nativeBinding.Outbound
module.exports.PageVisibility = nativeBinding.PageVisibility
module.exports.PollNoteVisibility = nativeBinding.PollNoteVisibility
module.exports.Protocol = nativeBinding.Protocol
module.exports.publishToBroadcastStream = nativeBinding.publishToBroadcastStream
module.exports.publishToChannelStream = nativeBinding.publishToChannelStream
module.exports.publishToChatIndexStream = nativeBinding.publishToChatIndexStream
module.exports.publishToChatStream = nativeBinding.publishToChatStream
module.exports.publishToDriveFileStream = nativeBinding.publishToDriveFileStream
module.exports.publishToDriveFolderStream = nativeBinding.publishToDriveFolderStream
module.exports.publishToGroupChatStream = nativeBinding.publishToGroupChatStream
module.exports.publishToModerationStream = nativeBinding.publishToModerationStream
module.exports.publishToNotesStream = nativeBinding.publishToNotesStream
module.exports.PushNotificationKind = nativeBinding.PushNotificationKind
module.exports.RelayStatus = nativeBinding.RelayStatus
module.exports.removeOldAttestationChallenges = nativeBinding.removeOldAttestationChallenges
module.exports.safeForSql = nativeBinding.safeForSql
module.exports.SECOND = nativeBinding.SECOND
module.exports.sendPushNotification = nativeBinding.sendPushNotification
module.exports.showServerInfo = nativeBinding.showServerInfo
module.exports.sqlLikeEscape = nativeBinding.sqlLikeEscape
module.exports.storageUsage = nativeBinding.storageUsage
module.exports.stringToAcct = nativeBinding.stringToAcct
module.exports.toDbReaction = nativeBinding.toDbReaction
module.exports.toMastodonId = nativeBinding.toMastodonId
module.exports.toPuny = nativeBinding.toPuny
module.exports.unwatchNote = nativeBinding.unwatchNote
module.exports.updateAntennaCache = nativeBinding.updateAntennaCache
module.exports.updateAntennasOnNewNote = nativeBinding.updateAntennasOnNewNote
module.exports.updateMetaCache = nativeBinding.updateMetaCache
module.exports.updateNodeinfoCache = nativeBinding.updateNodeinfoCache
module.exports.USER_ACTIVE_THRESHOLD = nativeBinding.USER_ACTIVE_THRESHOLD
module.exports.USER_ONLINE_THRESHOLD = nativeBinding.USER_ONLINE_THRESHOLD
module.exports.UserEmojiModPerm = nativeBinding.UserEmojiModPerm
module.exports.UserProfileFfvisibility = nativeBinding.UserProfileFfvisibility
module.exports.UserProfileMutingNotificationTypes = nativeBinding.UserProfileMutingNotificationTypes
module.exports.verifyPassword = nativeBinding.verifyPassword
module.exports.watchNote = nativeBinding.watchNote

View file

@ -1,3 +0,0 @@
# `backend-rs-android-arm-eabi`
This is the **armv7-linux-androideabi** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-android-arm-eabi",
"version": "0.0.0",
"os": [
"android"
],
"cpu": [
"arm"
],
"main": "backend-rs.android-arm-eabi.node",
"files": [
"backend-rs.android-arm-eabi.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-android-arm64`
This is the **aarch64-linux-android** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-android-arm64",
"version": "0.0.0",
"os": [
"android"
],
"cpu": [
"arm64"
],
"main": "backend-rs.android-arm64.node",
"files": [
"backend-rs.android-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-darwin-arm64`
This is the **aarch64-apple-darwin** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-darwin-arm64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"arm64"
],
"main": "backend-rs.darwin-arm64.node",
"files": [
"backend-rs.darwin-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-darwin-universal`
This is the **universal-apple-darwin** binary for `backend-rs`

View file

@ -1,15 +0,0 @@
{
"name": "backend-rs-darwin-universal",
"version": "0.0.0",
"os": [
"darwin"
],
"main": "backend-rs.darwin-universal.node",
"files": [
"backend-rs.darwin-universal.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-darwin-x64`
This is the **x86_64-apple-darwin** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-darwin-x64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"x64"
],
"main": "backend-rs.darwin-x64.node",
"files": [
"backend-rs.darwin-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-freebsd-x64`
This is the **x86_64-unknown-freebsd** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-freebsd-x64",
"version": "0.0.0",
"os": [
"freebsd"
],
"cpu": [
"x64"
],
"main": "backend-rs.freebsd-x64.node",
"files": [
"backend-rs.freebsd-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-linux-arm-gnueabihf`
This is the **armv7-unknown-linux-gnueabihf** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-linux-arm-gnueabihf",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm"
],
"main": "backend-rs.linux-arm-gnueabihf.node",
"files": [
"backend-rs.linux-arm-gnueabihf.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-linux-arm64-gnu`
This is the **aarch64-unknown-linux-gnu** binary for `backend-rs`

View file

@ -1,21 +0,0 @@
{
"name": "backend-rs-linux-arm64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "backend-rs.linux-arm64-gnu.node",
"files": [
"backend-rs.linux-arm64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}

View file

@ -1,3 +0,0 @@
# `backend-rs-linux-arm64-musl`
This is the **aarch64-unknown-linux-musl** binary for `backend-rs`

View file

@ -1,21 +0,0 @@
{
"name": "backend-rs-linux-arm64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "backend-rs.linux-arm64-musl.node",
"files": [
"backend-rs.linux-arm64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}

View file

@ -1,3 +0,0 @@
# `backend-rs-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `backend-rs`

View file

@ -1,21 +0,0 @@
{
"name": "backend-rs-linux-x64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "backend-rs.linux-x64-gnu.node",
"files": [
"backend-rs.linux-x64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}

View file

@ -1,3 +0,0 @@
# `backend-rs-linux-x64-musl`
This is the **x86_64-unknown-linux-musl** binary for `backend-rs`

View file

@ -1,21 +0,0 @@
{
"name": "backend-rs-linux-x64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "backend-rs.linux-x64-musl.node",
"files": [
"backend-rs.linux-x64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}

View file

@ -1,3 +0,0 @@
# `backend-rs-win32-arm64-msvc`
This is the **aarch64-pc-windows-msvc** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-win32-arm64-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"arm64"
],
"main": "backend-rs.win32-arm64-msvc.node",
"files": [
"backend-rs.win32-arm64-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-win32-ia32-msvc`
This is the **i686-pc-windows-msvc** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-win32-ia32-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"ia32"
],
"main": "backend-rs.win32-ia32-msvc.node",
"files": [
"backend-rs.win32-ia32-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -1,3 +0,0 @@
# `backend-rs-win32-x64-msvc`
This is the **x86_64-pc-windows-msvc** binary for `backend-rs`

View file

@ -1,18 +0,0 @@
{
"name": "backend-rs-win32-x64-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"x64"
],
"main": "backend-rs.win32-x64-msvc.node",
"files": [
"backend-rs.win32-x64-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View file

@ -4,32 +4,13 @@
"main": "built/index.js",
"types": "built/index.d.ts",
"napi": {
"name": "backend-rs",
"triples": {
"additional": [
"aarch64-apple-darwin",
"aarch64-linux-android",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc",
"armv7-unknown-linux-gnueabihf",
"x86_64-unknown-linux-musl",
"x86_64-unknown-freebsd",
"i686-pc-windows-msvc",
"armv7-linux-androideabi",
"universal-apple-darwin"
]
}
"binaryName": "backend-rs"
},
"devDependencies": {
"@napi-rs/cli": "2.18.3"
"@napi-rs/cli": "3.0.0-alpha.55"
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --features napi --no-const-enum --platform --release ./built/",
"build:debug": "napi build --features napi --no-const-enum --platform ./built/",
"prepublishOnly": "napi prepublish -t npm",
"universal": "napi universal",
"version": "napi version"
"build": "napi build --features napi --no-const-enum --platform --release --output-dir ./built/",
"build:debug": "napi build --features napi --no-const-enum --platform --output-dir ./built/ --dts-header '/* auto-generated by NAPI-RS */\n/* Do NOT edit this file manually */\n\ntype DateTimeWithTimeZone = Date;\n\ntype Json = any;\n\n'"
}
}

View file

@ -1,15 +1,17 @@
#[crate::ts_export]
//! This module is used in the TypeScript backend only.
#[macros::ts_export]
pub const SECOND: i32 = 1000;
#[crate::ts_export]
#[macros::ts_export]
pub const MINUTE: i32 = 60 * SECOND;
#[crate::ts_export]
#[macros::ts_export]
pub const HOUR: i32 = 60 * MINUTE;
#[crate::ts_export]
#[macros::ts_export]
pub const DAY: i32 = 24 * HOUR;
#[crate::ts_export]
#[macros::ts_export]
pub const USER_ONLINE_THRESHOLD: i32 = 10 * MINUTE;
#[crate::ts_export]
#[macros::ts_export]
pub const USER_ACTIVE_THRESHOLD: i32 = 3 * DAY;
/// List of file types allowed to be viewed directly in the browser
@ -19,7 +21,7 @@ pub const USER_ACTIVE_THRESHOLD: i32 = 3 * DAY;
/// * <https://github.com/sindresorhus/file-type/blob/main/supported.js>
/// * <https://github.com/sindresorhus/file-type/blob/main/core.js>
/// * <https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers>
#[crate::ts_export]
#[macros::ts_export]
pub const FILE_TYPE_BROWSERSAFE: [&str; 41] = [
// Images
"image/png",

View file

@ -1,27 +0,0 @@
// FIXME: Are these options used?
#[crate::export(object)]
pub struct EnvConfig {
pub only_queue: bool,
pub only_server: bool,
pub no_daemons: bool,
pub disable_clustering: bool,
pub verbose: bool,
pub with_log_time: bool,
pub slow: bool,
}
#[crate::export]
pub fn load_env() -> EnvConfig {
let node_env = std::env::var("NODE_ENV").unwrap_or_default().to_lowercase();
let is_testing = node_env == "test";
EnvConfig {
only_queue: std::env::var("MK_ONLY_QUEUE").is_ok(),
only_server: std::env::var("MK_ONLY_SERVER").is_ok(),
no_daemons: is_testing || std::env::var("MK_NO_DAEMONS").is_ok(),
disable_clustering: is_testing || std::env::var("MK_DISABLE_CLUSTERING").is_ok(),
verbose: std::env::var("MK_VERBOSE").is_ok(),
with_log_time: std::env::var("MK_WITH_LOG_TIME").is_ok(),
slow: std::env::var("MK_SLOW").is_ok(),
}
}

View file

@ -1,18 +1,28 @@
use crate::database::db_conn;
use crate::model::entity::meta;
use rand::prelude::*;
//! Server information
use crate::{database::db_conn, model::entity::meta};
use sea_orm::{prelude::*, ActiveValue};
use std::sync::Mutex;
type Meta = meta::Model;
static CACHE: Mutex<Option<Meta>> = Mutex::new(None);
fn update_cache(meta: &Meta) {
fn set_cache(meta: &Meta) {
let _ = CACHE.lock().map(|mut cache| *cache = Some(meta.clone()));
}
#[crate::export]
pub async fn fetch_meta(use_cache: bool) -> Result<Meta, DbErr> {
#[macros::export(js_name = "fetchMeta")]
pub async fn local_server_info() -> Result<Meta, DbErr> {
local_server_info_impl(true).await
}
#[macros::export(js_name = "updateMetaCache")]
pub async fn update() -> Result<(), DbErr> {
local_server_info_impl(false).await?;
Ok(())
}
async fn local_server_info_impl(use_cache: bool) -> Result<Meta, DbErr> {
// try using cache
if use_cache {
if let Some(cache) = CACHE.lock().ok().and_then(|cache| cache.clone()) {
@ -24,7 +34,7 @@ pub async fn fetch_meta(use_cache: bool) -> Result<Meta, DbErr> {
let db = db_conn().await?;
let meta = meta::Entity::find().one(db).await?;
if let Some(meta) = meta {
update_cache(&meta);
set_cache(&meta);
return Ok(meta);
}
@ -35,11 +45,11 @@ pub async fn fetch_meta(use_cache: bool) -> Result<Meta, DbErr> {
})
.exec_with_returning(db)
.await?;
update_cache(&meta);
set_cache(&meta);
Ok(meta)
}
#[crate::export(object)]
#[macros::export(object)]
pub struct PugArgs {
pub img: Option<String>,
pub title: String,
@ -52,8 +62,9 @@ pub struct PugArgs {
pub private_mode: Option<bool>,
}
#[crate::export]
#[macros::ts_export]
pub fn meta_to_pug_args(meta: Meta) -> PugArgs {
use rand::prelude::*;
let mut rng = rand::thread_rng();
let splash_icon = meta

View file

@ -1,7 +1,8 @@
//! Server configurations and environment variables
pub use meta::local_server_info;
pub use server::CONFIG;
pub mod constant;
pub mod environment;
pub mod meta;
pub mod server;

View file

@ -1,13 +1,14 @@
//! Server configuration
use once_cell::sync::Lazy;
use serde::Deserialize;
use std::env;
use std::fs;
use std::{env, fs};
pub const VERSION: &str = macro_rs::read_version_from_package_json!();
pub const VERSION: &str = macros::read_version_from_package_json!();
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
struct ServerConfig {
pub url: String,
pub port: u16,
@ -70,9 +71,9 @@ struct ServerConfig {
pub object_storage: Option<ObjectStorageConfig>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct DbConfig {
pub host: String,
pub port: u16,
@ -83,9 +84,9 @@ pub struct DbConfig {
pub extra: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct RedisConfig {
pub host: String,
pub port: u16,
@ -98,65 +99,65 @@ pub struct RedisConfig {
pub prefix: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct TlsConfig {
pub host: String,
pub reject_unauthorized: bool,
}
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct WorkerConfig {
pub web: u32,
pub queue: u32,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct WorkerConfigInternal {
pub web: Option<u32>,
pub queue: Option<u32>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct IdConfig {
pub length: Option<u8>,
pub fingerprint: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct SysLogConfig {
pub host: String,
pub port: u16,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct DeepLConfig {
pub managed: Option<bool>,
pub auth_key: Option<String>,
pub is_pro: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct LibreTranslateConfig {
pub managed: Option<bool>,
pub api_url: Option<String>,
pub api_key: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct EmailConfig {
pub managed: Option<bool>,
pub address: Option<String>,
@ -167,9 +168,9 @@ pub struct EmailConfig {
pub use_implicit_ssl_tls: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct ObjectStorageConfig {
pub managed: Option<bool>,
pub base_url: Option<String>,
@ -185,7 +186,7 @@ pub struct ObjectStorageConfig {
pub s3_force_path_style: Option<bool>,
}
#[crate::export(object, use_nullable = false)]
#[macros::export(object, use_nullable = false)]
pub struct Config {
// ServerConfig (from default.yml)
pub url: String,
@ -262,7 +263,7 @@ fn read_config_file() -> ServerConfig {
data
}
#[crate::export]
#[macros::export]
pub fn load_config() -> Config {
let server_config = read_config_file();
let version = VERSION.to_owned();
@ -276,7 +277,10 @@ pub fn load_config() -> Config {
None => hostname.clone(),
};
let scheme = url.scheme().to_owned();
let ws_scheme = scheme.replace("http", "ws");
let ws_scheme = match scheme.as_str() {
"http" => "ws",
_ => "wss",
};
let cluster_limits = match server_config.cluster_limits {
Some(cl) => WorkerConfig {
@ -291,7 +295,7 @@ pub fn load_config() -> Config {
} else {
server_config.redis.prefix.clone()
}
.unwrap_or(hostname.clone());
.unwrap_or_else(|| hostname.clone());
Config {
url: server_config.url,
@ -342,7 +346,7 @@ pub fn load_config() -> Config {
hostname,
redis_key_prefix,
scheme,
ws_scheme,
ws_scheme: ws_scheme.to_string(),
}
}

View file

@ -4,29 +4,23 @@ use crate::database::{redis_conn, redis_key, RedisConnError};
use redis::{AsyncCommands, RedisError};
use serde::{Deserialize, Serialize};
#[derive(strum::Display, Debug)]
#[cfg_attr(test, derive(Debug))]
pub enum Category {
#[strum(serialize = "fetchUrl")]
FetchUrl,
#[strum(serialize = "blocking")]
Block,
#[strum(serialize = "following")]
Follow,
#[cfg(test)]
#[strum(serialize = "usedOnlyForTesting")]
Test,
}
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Redis error: {0}")]
#[error("failed to execute Redis command")]
Redis(#[from] RedisError),
#[error("Redis connection error: {0}")]
#[error("bad Redis connection")]
RedisConn(#[from] RedisConnError),
#[error("Data serialization error: {0}")]
Serialize(#[from] rmp_serde::encode::Error),
#[error("Data deserialization error: {0}")]
Deserialize(#[from] rmp_serde::decode::Error),
#[error("failed to encode data for Redis")]
Encode(#[from] rmp_serde::encode::Error),
}
#[inline]
@ -34,9 +28,15 @@ fn prefix_key(key: &str) -> String {
redis_key(format!("cache:{}", key))
}
#[inline]
fn categorize(category: Category, key: &str) -> String {
format!("{}:{}", category, key)
let prefix = match category {
Category::FetchUrl => "fetchUrl",
Category::Block => "blocking",
Category::Follow => "following",
#[cfg(test)]
Category::Test => "usedOnlyForTesting",
};
format!("{}:{}", prefix, key)
}
#[inline]
@ -50,9 +50,9 @@ fn wildcard(category: Category) -> String {
///
/// # Arguments
///
/// - `key` : key (prefixed automatically)
/// - `value` : (de)serializable value
/// - `expire_seconds` : TTL
/// * `key` : key (prefixed automatically)
/// * `value` : (de)serializable value
/// * `expire_seconds` : TTL
///
/// # Example
///
@ -96,7 +96,7 @@ pub async fn set<V: for<'a> Deserialize<'a> + Serialize>(
///
/// # Argument
///
/// - `key` : key (will be prefixed automatically)
/// * `key` : key (will be prefixed automatically)
///
/// # Example
///
@ -123,7 +123,7 @@ pub async fn set<V: for<'a> Deserialize<'a> + Serialize>(
pub async fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V>, Error> {
let serialized_value: Option<Vec<u8>> = redis_conn().await?.get(prefix_key(key)).await?;
Ok(match serialized_value {
Some(v) => Some(rmp_serde::from_slice::<V>(v.as_ref())?),
Some(v) => rmp_serde::from_slice::<V>(v.as_ref()).ok(),
None => None,
})
}
@ -135,9 +135,9 @@ pub async fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Op
///
/// # Argument
///
/// - `key` : key (prefixed automatically)
/// * `key` : key (prefixed automatically)
///
/// ## Example
/// # Example
///
/// ```
/// # use backend_rs::database::cache;
@ -169,10 +169,10 @@ pub async fn delete(key: &str) -> Result<(), Error> {
///
/// # Arguments
///
/// - `category` : one of [Category]
/// - `key` : key (prefixed automatically)
/// - `value` : (de)serializable value
/// - `expire_seconds` : TTL
/// * `category` : one of [Category]
/// * `key` : key (prefixed automatically)
/// * `value` : (de)serializable value
/// * `expire_seconds` : TTL
pub async fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
@ -188,8 +188,8 @@ pub async fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
///
/// # Arguments
///
/// - `category` : one of [Category]
/// - `key` : key (prefixed automatically)
/// * `category` : one of [Category]
/// * `key` : key (prefixed automatically)
pub async fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
@ -213,7 +213,7 @@ pub async fn delete_one(category: Category, key: &str) -> Result<(), Error> {
///
/// # Argument
///
/// - `category` : one of [Category]
/// * `category` : one of [Category]
pub async fn delete_all(category: Category) -> Result<(), Error> {
let mut redis = redis_conn().await?;
let keys: Vec<Vec<u8>> = redis.keys(wildcard(category)).await?;
@ -234,6 +234,7 @@ mod unit_test {
use pretty_assertions::assert_eq;
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `getaddrinfo` on OS `linux`
async fn set_get_expire() {
#[derive(serde::Deserialize, serde::Serialize, PartialEq, Debug)]
struct Data {
@ -278,6 +279,7 @@ mod unit_test {
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `getaddrinfo` on OS `linux`
async fn use_category() {
let key_1 = "fire";
let key_2 = "fish";

View file

@ -1,8 +1,9 @@
//! Interfaces for accessing PostgreSQL and Redis
pub use postgresql::db_conn;
pub use postgresql::get_conn as db_conn;
pub use redis::get_conn as redis_conn;
pub use redis::key as redis_key;
pub use redis::redis_conn;
pub use redis::RedisConnError;
pub mod cache;

View file

@ -3,6 +3,7 @@
use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use sea_orm::{ConnectOptions, Database, DbConn, DbErr};
use std::time::Duration;
use tracing::log::LevelFilter;
static DB_CONN: OnceCell<DbConn> = OnceCell::new();
@ -18,6 +19,7 @@ async fn init_conn() -> Result<&'static DbConn, DbErr> {
);
let option: ConnectOptions = ConnectOptions::new(database_uri)
.sqlx_logging_level(LevelFilter::Trace)
.sqlx_slow_statements_logging_settings(LevelFilter::Warn, Duration::from_secs(3))
.to_owned();
tracing::info!("initializing connection");
@ -27,7 +29,7 @@ async fn init_conn() -> Result<&'static DbConn, DbErr> {
}
/// Returns an async PostgreSQL connection that can be used with [sea_orm] utilities.
pub async fn db_conn() -> Result<&'static DbConn, DbErr> {
pub async fn get_conn() -> Result<&'static DbConn, DbErr> {
match DB_CONN.get() {
Some(conn) => Ok(conn),
None => init_conn().await,
@ -36,11 +38,52 @@ pub async fn db_conn() -> Result<&'static DbConn, DbErr> {
#[cfg(test)]
mod unit_test {
use super::db_conn;
use super::get_conn;
use sea_orm::{prelude::*, DbBackend, Statement};
#[tokio::test]
async fn connect() {
assert!(db_conn().await.is_ok());
assert!(db_conn().await.is_ok());
#[cfg_attr(miri, ignore)] // can't call foreign function `geteuid` on OS `linux`
async fn connect_sequential() {
get_conn().await.unwrap();
get_conn().await.unwrap();
get_conn().await.unwrap();
get_conn().await.unwrap();
get_conn().await.unwrap();
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `geteuid` on OS `linux`
async fn connect_concurrent() {
let [c1, c2, c3, c4, c5] = [get_conn(), get_conn(), get_conn(), get_conn(), get_conn()];
let _ = tokio::try_join!(c1, c2, c3, c4, c5).unwrap();
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `geteuid` on OS `linux`
async fn connect_spawn() {
let mut tasks = Vec::new();
for _ in 0..5 {
tasks.push(tokio::spawn(get_conn()));
}
for task in tasks {
task.await.unwrap().unwrap();
}
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `geteuid` on OS `linux`
async fn access() {
// DO NOT write any raw SQL query in the actual program
// (with the exception of PGroonga features)
get_conn()
.await
.unwrap()
.execute(Statement::from_string(
DbBackend::Postgres,
"SELECT version()",
))
.await
.unwrap();
}
}

View file

@ -81,15 +81,15 @@ async fn init_conn_pool() -> Result<(), RedisError> {
#[derive(thiserror::Error, Debug)]
pub enum RedisConnError {
#[error("Failed to initialize Redis connection pool: {0}")]
#[error("failed to initialize Redis connection pool")]
Redis(RedisError),
#[error("Redis connection pool error: {0}")]
#[error("bad Redis connection pool")]
Bb8Pool(RunError<RedisError>),
}
/// Returns an async [redis] connection managed by a [bb8] connection pool.
pub async fn redis_conn(
) -> Result<PooledConnection<'static, RedisConnectionManager>, RedisConnError> {
pub async fn get_conn() -> Result<PooledConnection<'static, RedisConnectionManager>, RedisConnError>
{
if !CONN_POOL.initialized() {
let init_res = init_conn_pool().await;
@ -114,19 +114,44 @@ pub fn key(key: impl ToString) -> String {
#[cfg(test)]
mod unit_test {
use super::redis_conn;
use super::get_conn;
use pretty_assertions::assert_eq;
use redis::AsyncCommands;
#[tokio::test]
async fn connect() {
assert!(redis_conn().await.is_ok());
assert!(redis_conn().await.is_ok());
#[cfg_attr(miri, ignore)] // can't call foreign function `getaddrinfo` on OS `linux`
async fn connect_sequential() {
get_conn().await.unwrap();
get_conn().await.unwrap();
get_conn().await.unwrap();
get_conn().await.unwrap();
get_conn().await.unwrap();
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `getaddrinfo` on OS `linux`
async fn connect_concurrent() {
let [c1, c2, c3, c4, c5] = [get_conn(), get_conn(), get_conn(), get_conn(), get_conn()];
let _ = tokio::try_join!(c1, c2, c3, c4, c5).unwrap();
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `getaddrinfo` on OS `linux`
async fn connect_spawn() {
let mut tasks = Vec::new();
for _ in 0..5 {
tasks.push(tokio::spawn(get_conn()));
}
for task in tasks {
task.await.unwrap().unwrap();
}
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // can't call foreign function `getaddrinfo` on OS `linux`
async fn access() {
let mut redis = redis_conn().await.unwrap();
let mut redis = get_conn().await.unwrap();
let key = "CARGO_UNIT_TEST_KEY";
let value = "CARGO_UNIT_TEST_VALUE";

View file

@ -1,17 +1,20 @@
use std::fmt;
use std::str::FromStr;
use std::{fmt, str::FromStr};
#[derive(Debug, PartialEq)]
#[crate::export(object)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[macros::export(object)]
pub struct Acct {
pub username: String,
pub host: Option<String>,
}
impl FromStr for Acct {
type Err = ();
#[derive(thiserror::Error, Debug)]
#[doc = "Error type to indicate a string-to-[`Acct`] conversion failure"]
#[error("failed to convert string '{0}' into acct")]
pub struct InvalidAcctString(String);
impl FromStr for Acct {
type Err = InvalidAcctString;
/// This never throw errors. Feel free to `.unwrap()` the result.
fn from_str(value: &str) -> Result<Self, Self::Err> {
let split: Vec<&str> = if let Some(stripped) = value.strip_prefix('@') {
stripped
@ -48,12 +51,12 @@ impl From<Acct> for String {
}
}
#[crate::ts_export]
#[macros::ts_export]
pub fn string_to_acct(acct: &str) -> Acct {
Acct::from_str(acct).unwrap()
}
#[crate::ts_export]
#[macros::ts_export]
pub fn acct_to_string(acct: &Acct) -> String {
acct.to_string()
}
@ -65,7 +68,7 @@ mod unit_test {
use std::str::FromStr;
#[test]
fn test_acct_to_string() {
fn acct_to_string() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),
@ -82,7 +85,7 @@ mod unit_test {
}
#[test]
fn test_string_to_acct() {
fn string_to_acct() {
let remote_acct = Acct {
username: "firefish".to_string(),
host: Some("example.com".to_string()),

View file

@ -1,45 +1,46 @@
//! NodeInfo fetcher
//!
//! ref: <https://nodeinfo.diaspora.software/protocol.html>
use crate::federation::nodeinfo::schema::*;
use crate::util::http_client;
use crate::{federation::nodeinfo::schema::*, util::http_client};
use isahc::AsyncReadResponseExt;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
/// Errors that can occur while fetching NodeInfo from a remote server
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("HTTP client aquisition error: {0}")]
#[error("failed to acquire an HTTP client")]
HttpClient(#[from] http_client::Error),
#[error("HTTP error: {0}")]
#[error("HTTP request failed")]
Http(#[from] isahc::Error),
#[error("Bad status: {0}")]
#[doc = "bad HTTP status"]
#[error("bad HTTP status ({0})")]
BadStatus(String),
#[error("Failed to parse response body as text: {0}")]
#[error("failed to parse HTTP response body as text")]
Response(#[from] std::io::Error),
#[error("Failed to parse response body as json: {0}")]
#[error("failed to parse HTTP response body as json")]
Json(#[from] serde_json::Error),
#[error("No nodeinfo provided")]
#[error("nodeinfo is missing")]
MissingNodeinfo,
}
#[derive(Deserialize, Serialize, Debug)]
/// Represents the schema of `/.well-known/nodeinfo`.
#[derive(Deserialize)]
pub struct NodeinfoLinks {
links: Vec<NodeinfoLink>,
}
#[derive(Deserialize, Serialize, Debug)]
/// Represents one entry of `/.well-known/nodeinfo`.
#[derive(Deserialize)]
pub struct NodeinfoLink {
rel: String,
href: String,
}
#[inline]
fn wellknown_nodeinfo_url(host: &str) -> String {
format!("https://{}/.well-known/nodeinfo", host)
}
/// Fetches `/.well-known/nodeinfo` and parses the result.
async fn fetch_nodeinfo_links(host: &str) -> Result<NodeinfoLinks, Error> {
let client = http_client::client()?;
let wellknown_url = wellknown_nodeinfo_url(host);
let wellknown_url = format!("https://{}/.well-known/nodeinfo", host);
let mut wellknown_response = client.get_async(&wellknown_url).await?;
if !wellknown_response.status().is_success() {
@ -54,6 +55,9 @@ async fn fetch_nodeinfo_links(host: &str) -> Result<NodeinfoLinks, Error> {
Ok(serde_json::from_str(&wellknown_response.text().await?)?)
}
/// Check if any of the following relations is present in the given [NodeinfoLinks].
/// * <http://nodeinfo.diaspora.software/ns/schema/2.0>
/// * <http://nodeinfo.diaspora.software/ns/schema/2.1>
fn check_nodeinfo_link(links: NodeinfoLinks) -> Result<String, Error> {
for link in links.links {
if link.rel == "http://nodeinfo.diaspora.software/ns/schema/2.1"
@ -66,6 +70,7 @@ fn check_nodeinfo_link(links: NodeinfoLinks) -> Result<String, Error> {
Err(Error::MissingNodeinfo)
}
/// Fetches the nodeinfo from the given URL and parses the result.
async fn fetch_nodeinfo_impl(nodeinfo_link: &str) -> Result<Nodeinfo20, Error> {
let client = http_client::client()?;
let mut response = client.get_async(nodeinfo_link).await?;
@ -85,8 +90,8 @@ async fn fetch_nodeinfo_impl(nodeinfo_link: &str) -> Result<Nodeinfo20, Error> {
// for napi export
type Nodeinfo = Nodeinfo20;
/// Fetches and returns the NodeInfo of a remote server.
#[crate::export]
/// Fetches and returns the NodeInfo (version 2.0) of a remote server.
#[macros::export]
pub async fn fetch_nodeinfo(host: &str) -> Result<Nodeinfo, Error> {
tracing::info!("fetching from {}", host);
let links = fetch_nodeinfo_links(host).await?;
@ -96,11 +101,11 @@ pub async fn fetch_nodeinfo(host: &str) -> Result<Nodeinfo, Error> {
#[cfg(test)]
mod unit_test {
use super::{check_nodeinfo_link, fetch_nodeinfo, NodeinfoLink, NodeinfoLinks};
use super::{NodeinfoLink, NodeinfoLinks};
use pretty_assertions::assert_eq;
#[test]
fn test_check_nodeinfo_link() {
fn check_nodeinfo_link() {
let links_1 = NodeinfoLinks {
links: vec![
NodeinfoLink {
@ -114,7 +119,7 @@ mod unit_test {
],
};
assert_eq!(
check_nodeinfo_link(links_1).unwrap(),
super::check_nodeinfo_link(links_1).unwrap(),
"https://example.com/real"
);
@ -131,7 +136,7 @@ mod unit_test {
],
};
assert_eq!(
check_nodeinfo_link(links_2).unwrap(),
super::check_nodeinfo_link(links_2).unwrap(),
"https://example.com/real"
);
@ -147,13 +152,14 @@ mod unit_test {
},
],
};
check_nodeinfo_link(links_3).expect_err("No nodeinfo");
super::check_nodeinfo_link(links_3).expect_err("No nodeinfo");
}
#[tokio::test]
async fn test_fetch_nodeinfo() {
#[cfg_attr(miri, ignore)] // can't call foreign function `curl_global_init` on OS `linux`
async fn fetch_nodeinfo() {
assert_eq!(
fetch_nodeinfo("info.firefish.dev")
super::fetch_nodeinfo("info.firefish.dev")
.await
.unwrap()
.software

View file

@ -1,24 +1,31 @@
//! NodeInfo generator
use crate::config::CONFIG;
use crate::database::{cache, db_conn};
use crate::federation::nodeinfo::schema::*;
use crate::misc::meta::fetch_meta;
use crate::model::entity::{note, user};
use sea_orm::{ColumnTrait, DbErr, EntityTrait, PaginatorTrait, QueryFilter};
use crate::{
config::{local_server_info, CONFIG},
database::db_conn,
federation::nodeinfo::schema::*,
model::entity::{note, user},
};
use sea_orm::prelude::*;
use serde_json::json;
use std::collections::HashMap;
use std::{collections::HashMap, sync::Mutex};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Database error: {0}")]
Db(#[from] DbErr),
#[error("Cache error: {0}")]
Cache(#[from] cache::Error),
#[error("Failed to serialize nodeinfo to JSON: {0}")]
Json(#[from] serde_json::Error),
static CACHE: Mutex<Option<Nodeinfo21>> = Mutex::new(None);
fn set_cache(nodeinfo: &Nodeinfo21) {
let _ = CACHE
.lock()
.map(|mut cache| *cache = Some(nodeinfo.to_owned()));
}
/// Fetches the number of total/active local users and local posts.
///
/// # Return value
/// A tuple containing the following information in this order:
/// * the total number of local users
/// * the total number of local users active in the last 6 months
/// * the total number of local users active in the last month (MAU)
/// * the total number of posts from local users
async fn statistics() -> Result<(u64, u64, u64, u64), DbErr> {
let db = db_conn().await?;
@ -49,14 +56,18 @@ async fn statistics() -> Result<(u64, u64, u64, u64), DbErr> {
)
}
async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
/// Generates NodeInfo (version 2.1) of the local server.
/// This function doesn't use caches and returns the latest information.
async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, DbErr> {
tracing::info!("generating NodeInfo");
let (local_users, local_active_halfyear, local_active_month, local_posts) =
statistics().await?;
let meta = fetch_meta(true).await?;
let metadata = HashMap::from([
let meta = local_server_info().await?;
let mut metadata = HashMap::from([
(
"nodeName".to_string(),
json!(meta.name.unwrap_or(CONFIG.host.clone())),
json!(meta.name.unwrap_or_else(|| CONFIG.host.clone())),
),
("nodeDescription".to_string(), json!(meta.description)),
("repositoryUrl".to_string(), json!(meta.repository_url)),
@ -83,12 +94,12 @@ async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
("proxyAccountName".to_string(), json!(meta.proxy_account_id)),
(
"themeColor".to_string(),
json!(meta.theme_color.unwrap_or("#31748f".to_string())),
json!(meta.theme_color.unwrap_or_else(|| "#31748f".to_string())),
),
]);
metadata.shrink_to_fit();
Ok(Nodeinfo21 {
version: "2.1".to_string(),
software: Software21 {
name: "firefish".to_string(),
version: CONFIG.version.clone(),
@ -114,32 +125,53 @@ async fn generate_nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
})
}
/// Returns NodeInfo (version 2.1) of the local server.
pub async fn nodeinfo_2_1() -> Result<Nodeinfo21, Error> {
const NODEINFO_2_1_CACHE_KEY: &str = "nodeinfo_2_1";
let cached = cache::get::<Nodeinfo21>(NODEINFO_2_1_CACHE_KEY).await?;
if let Some(nodeinfo) = cached {
Ok(nodeinfo)
} else {
let nodeinfo = generate_nodeinfo_2_1().await?;
cache::set(NODEINFO_2_1_CACHE_KEY, &nodeinfo, 60 * 60).await?;
Ok(nodeinfo)
async fn nodeinfo_2_1_impl(use_cache: bool) -> Result<Nodeinfo21, DbErr> {
if use_cache {
if let Some(nodeinfo) = CACHE.lock().ok().and_then(|cache| cache.to_owned()) {
return Ok(nodeinfo);
}
}
let nodeinfo = generate_nodeinfo_2_1().await?;
tracing::info!("updating cache");
set_cache(&nodeinfo);
Ok(nodeinfo)
}
/// Returns NodeInfo (version 2.1) of the local server.
pub async fn nodeinfo_2_1() -> Result<Nodeinfo21, DbErr> {
nodeinfo_2_1_impl(true).await
}
/// Returns NodeInfo (version 2.0) of the local server.
pub async fn nodeinfo_2_0() -> Result<Nodeinfo20, Error> {
pub async fn nodeinfo_2_0() -> Result<Nodeinfo20, DbErr> {
Ok(nodeinfo_2_1().await?.into())
}
#[crate::ts_export(js_name = "nodeinfo_2_1")]
#[cfg(feature = "napi")]
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[doc = "database error"]
#[error(transparent)]
Db(#[from] DbErr),
#[error("failed to serialize nodeinfo into JSON")]
Json(#[from] serde_json::Error),
}
#[macros::ts_export(js_name = "nodeinfo_2_1")]
pub async fn nodeinfo_2_1_as_json() -> Result<serde_json::Value, Error> {
Ok(serde_json::to_value(nodeinfo_2_1().await?)?)
}
#[crate::ts_export(js_name = "nodeinfo_2_0")]
#[macros::ts_export(js_name = "nodeinfo_2_0")]
pub async fn nodeinfo_2_0_as_json() -> Result<serde_json::Value, Error> {
Ok(serde_json::to_value(nodeinfo_2_0().await?)?)
}
#[macros::ts_export(js_name = "updateNodeinfoCache")]
pub async fn update_cache() -> Result<(), DbErr> {
nodeinfo_2_1_impl(false).await?;
Ok(())
}

View file

@ -1,4 +1,6 @@
//! NodeInfo handler
//!
//! ref: <https://nodeinfo.diaspora.software/>
pub mod fetch;
pub mod generate;

View file

@ -1,18 +1,16 @@
//! Schema definitions of NodeInfo version 2.0 and 2.1
//!
//! ref: <https://nodeinfo.diaspora.software/schema.html>
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// TODO: I want to use these macros but they don't work with rmp_serde
// * #[serde(skip_serializing_if = "Option::is_none")] (https://github.com/3Hren/msgpack-rust/issues/86)
// * #[serde(tag = "version", rename = "2.1")] (https://github.com/3Hren/msgpack-rust/issues/318)
/// NodeInfo schema version 2.1. <https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1>
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq, Deserialize))]
#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "version", rename = "2.1")]
pub struct Nodeinfo21 {
/// The schema version, must be 2.1.
pub version: String,
/// Metadata about server software in use.
pub software: Software21,
/// The protocols supported on this server.
@ -28,12 +26,12 @@ pub struct Nodeinfo21 {
}
/// NodeInfo schema version 2.0. <https://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.0>
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object, js_name = "Nodeinfo")]
#[macros::export(object, js_name = "Nodeinfo")]
#[serde(tag = "version", rename = "2.0")]
pub struct Nodeinfo20 {
/// The schema version, must be 2.0.
pub version: String,
/// Metadata about server software in use.
pub software: Software20,
/// The protocols supported on this server.
@ -49,7 +47,8 @@ pub struct Nodeinfo20 {
}
/// Metadata about server software in use (version 2.1).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq, Deserialize))]
#[derive(Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Software21 {
/// The canonical name of this server software.
@ -57,15 +56,18 @@ pub struct Software21 {
/// The version of this server software.
pub version: String,
/// The url of the source code repository of this server software.
#[serde(skip_serializing_if = "Option::is_none")]
pub repository: Option<String>,
/// The url of the homepage of this server software.
#[serde(skip_serializing_if = "Option::is_none")]
pub homepage: Option<String>,
}
/// Metadata about server software in use (version 2.0).
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
#[macros::export(object)]
pub struct Software20 {
/// The canonical name of this server software.
pub name: String,
@ -73,9 +75,10 @@ pub struct Software20 {
pub version: String,
}
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
#[macros::derive_clone_and_export]
pub enum Protocol {
Activitypub,
Buddycloud,
@ -90,9 +93,10 @@ pub enum Protocol {
}
/// The third party sites this server can connect to via their application API.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
#[macros::export(object)]
pub struct Services {
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
pub inbound: Vec<Inbound>,
@ -101,9 +105,10 @@ pub struct Services {
}
/// The third party sites this server can retrieve messages from for combined display with regular traffic.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
#[macros::derive_clone_and_export]
pub enum Inbound {
#[serde(rename = "atom1.0")]
Atom1,
@ -119,9 +124,10 @@ pub enum Inbound {
}
/// The third party sites this server can publish messages to on the behalf of a user.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
#[crate::export(string_enum = "lowercase")]
#[macros::derive_clone_and_export]
pub enum Outbound {
#[serde(rename = "atom1.0")]
Atom1,
@ -156,22 +162,29 @@ pub enum Outbound {
}
/// Usage statistics for this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
#[macros::export(object)]
pub struct Usage {
pub users: Users,
#[serde(skip_serializing_if = "Option::is_none")]
pub local_posts: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub local_comments: Option<u32>,
}
/// statistics about the users of this server.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
#[cfg_attr(test, derive(Debug, PartialEq))]
#[derive(Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
#[macros::export(object)]
pub struct Users {
#[serde(skip_serializing_if = "Option::is_none")]
pub total: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub active_halfyear: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub active_month: Option<u32>,
}
@ -187,7 +200,6 @@ impl From<Software21> for Software20 {
impl From<Nodeinfo21> for Nodeinfo20 {
fn from(nodeinfo: Nodeinfo21) -> Self {
Self {
version: "2.0".to_string(),
software: nodeinfo.software.into(),
protocols: nodeinfo.protocols,
services: nodeinfo.services,

Some files were not shown because too many files have changed in this diff Show more