Merge branch 'develop' into feat/slash-quote

This commit is contained in:
naskya 2024-04-27 05:20:02 +09:00
commit cd3c6a52dd
No known key found for this signature in database
GPG key ID: 712D413B3A9FED5C
237 changed files with 4016 additions and 1850 deletions

View file

@ -13,16 +13,8 @@ redis:
host: firefish_redis host: firefish_redis
port: 6379 port: 6379
id: 'aid'
#allowedPrivateNetworks: [ #allowedPrivateNetworks: [
# '10.69.1.0/24' # '10.69.1.0/24'
#] #]
logLevel: [ maxLogLevel: 'debug'
'error',
'success',
'warning',
'debug',
'info'
]

View file

@ -145,16 +145,11 @@ reservedUsernames: [
# IP address family used for outgoing request (ipv4, ipv6 or dual) # IP address family used for outgoing request (ipv4, ipv6 or dual)
#outgoingAddressFamily: ipv4 #outgoingAddressFamily: ipv4
# Log Option # Log level (error, warning, info, debug, trace)
# Production env: ['error', 'success', 'warning', 'info'] # Production env: info
# Debug/Test env or Troubleshooting: ['error', 'success', 'warning', 'debug' ,'info'] # Production env whose storage space or IO is tight: warning
# Production env which storage space or IO is tight: ['error', 'warning'] # Debug/Test env or Troubleshooting: debug (or trace)
logLevel: [ maxLogLevel: info
'error',
'success',
'warning',
'info'
]
# Syslog option # Syslog option
#syslog: #syslog:

1240
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -10,25 +10,25 @@ napi-derive = "2.16.2"
napi-build = "2.1.3" napi-build = "2.1.3"
argon2 = "0.5.3" argon2 = "0.5.3"
async-trait = "0.1.80"
basen = "0.1.0" basen = "0.1.0"
bcrypt = "0.15.1" bcrypt = "0.15.1"
cfg-if = "1.0.0"
chrono = "0.4.37" chrono = "0.4.37"
convert_case = "0.6.0" convert_case = "0.6.0"
cuid2 = "0.1.2" cuid2 = "0.1.2"
emojis = "0.6.1" emojis = "0.6.1"
idna = "0.5.0" idna = "0.5.0"
jsonschema = "0.17.1" image = "0.25.1"
nom-exif = "1.2.0"
once_cell = "1.19.0" once_cell = "1.19.0"
parse-display = "0.9.0" openssl = "0.10.64"
pretty_assertions = "1.4.0" pretty_assertions = "1.4.0"
proc-macro2 = "1.0.79" proc-macro2 = "1.0.79"
quote = "1.0.36" quote = "1.0.36"
rand = "0.8.5" rand = "0.8.5"
redis = "0.25.3" redis = "0.25.3"
regex = "1.10.4" regex = "1.10.4"
schemars = "0.8.16" reqwest = "0.12.4"
rmp-serde = "1.2.0"
sea-orm = "0.12.15" sea-orm = "0.12.15"
serde = "1.0.197" serde = "1.0.197"
serde_json = "1.0.115" serde_json = "1.0.115"
@ -37,6 +37,8 @@ strum = "0.26.2"
syn = "2.0.58" syn = "2.0.58"
thiserror = "1.0.58" thiserror = "1.0.58"
tokio = "1.37.0" tokio = "1.37.0"
tracing = "0.1.40"
tracing-subscriber = "0.3.1"
url = "2.5.0" url = "2.5.0"
urlencoding = "2.1.3" urlencoding = "2.1.3"

View file

@ -14,7 +14,7 @@
}, },
"overrides": [ "overrides": [
{ {
"include": ["*.vue"], "include": ["*.vue", "packages/client/*.ts"],
"linter": { "linter": {
"rules": { "rules": {
"style": { "style": {

View file

@ -2,9 +2,16 @@
Breaking changes are indicated by the :warning: icon. Breaking changes are indicated by the :warning: icon.
## Unreleased ## v20240424
- Added `antennaLimit` field to the response of `meta` and `admin/meta`, and the request of `admin/update-meta` (optional). - Added `antennaLimit` field to the response of `meta` and `admin/meta`, and the request of `admin/update-meta` (optional).
- Added `filter` optional parameter to `notes/renotes` endpoint to filter the types of renotes. It can take the following values:
- `all` (default)
- `renote`
- `quote`
- :warning: Removed the following optional parameters in `notes/reactions`, as they were never taken into account due to a bug:
- `sinceId`
- `untilId`
## v20240413 ## v20240413

View file

@ -5,6 +5,12 @@ Critical security updates are indicated by the :warning: icon.
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well. - Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well. - Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
## [v20240424](https://firefish.dev/firefish/firefish/-/merge_requests/10765/commits)
- Improve the usability of the feature to prevent forgetting to write alt texts
- Add a server-wide setting for the maximum number of antennas each user can create
- Fix bugs
## [v20240421](https://firefish.dev/firefish/firefish/-/merge_requests/10756/commits) ## [v20240421](https://firefish.dev/firefish/firefish/-/merge_requests/10756/commits)
- Fix bugs - Fix bugs

View file

@ -1,6 +1,7 @@
BEGIN; BEGIN;
DELETE FROM "migrations" WHERE name IN ( DELETE FROM "migrations" WHERE name IN (
'AlterAkaType1714099399879',
'AddDriveFileUsage1713451569342', 'AddDriveFileUsage1713451569342',
'ConvertCwVarcharToText1713225866247', 'ConvertCwVarcharToText1713225866247',
'FixChatFileConstraint1712855579316', 'FixChatFileConstraint1712855579316',
@ -24,6 +25,13 @@ DELETE FROM "migrations" WHERE name IN (
'RemoveNativeUtilsMigration1705877093218' 'RemoveNativeUtilsMigration1705877093218'
); );
-- alter-aka-type
ALTER TABLE "user" RENAME COLUMN "alsoKnownAs" TO "alsoKnownAsOld";
ALTER TABLE "user" ADD COLUMN "alsoKnownAs" text;
UPDATE "user" SET "alsoKnownAs" = array_to_string("alsoKnownAsOld", ',');
COMMENT ON COLUMN "user"."alsoKnownAs" IS 'URIs the user is known as too';
ALTER TABLE "user" DROP COLUMN "alsoKnownAsOld";
-- AddDriveFileUsage -- AddDriveFileUsage
ALTER TABLE "drive_file" DROP COLUMN "usageHint"; ALTER TABLE "drive_file" DROP COLUMN "usageHint";
DROP TYPE "drive_file_usage_hint_enum"; DROP TYPE "drive_file_usage_hint_enum";

View file

@ -2,6 +2,12 @@
You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md). You can skip intermediate versions when upgrading from an old version, but please read the notices and follow the instructions for each intermediate version before [upgrading](./upgrade.md).
## Unreleased
### For all users
You can control the verbosity of the server log by adding `maxLogLevel` in `.config/default.yml`. `logLevels` has been deprecated in favor of this setting. (see also: <https://firefish.dev/firefish/firefish/-/blob/eac0c1c47cd23789dcc395ab08b074934409fd96/.config/example.yml#L152>)
## v20240413 ## v20240413
### For all users ### For all users

View file

@ -19,7 +19,7 @@ deleteAndEditConfirm: Сигурни ли сте, че искате да изт
copyUsername: Копиране на потребителското име copyUsername: Копиране на потребителското име
searchUser: Търсене на потребител searchUser: Търсене на потребител
reply: Отговор reply: Отговор
showMore: Покажи още showMore: Показване на повече
loadMore: Зареди още loadMore: Зареди още
followRequestAccepted: Заявката за последване е приета followRequestAccepted: Заявката за последване е приета
importAndExport: Импорт/експорт на данни importAndExport: Импорт/експорт на данни
@ -69,7 +69,7 @@ renameFile: Преименуване на файла
_widgets: _widgets:
activity: Дейност activity: Дейност
notifications: Известия notifications: Известия
timeline: Инфопоток timeline: Хронология
clock: Часовник clock: Часовник
trends: Актуални trends: Актуални
photos: Снимки photos: Снимки
@ -187,7 +187,7 @@ notesAndReplies: Публикации и отговори
noSuchUser: Потребителят не е намерен noSuchUser: Потребителят не е намерен
pinnedPages: Закачени страници pinnedPages: Закачени страници
pinLimitExceeded: Не може да закачаш повече публикации pinLimitExceeded: Не може да закачаш повече публикации
flagShowTimelineReplies: Показване на отговори в инфопотока flagShowTimelineReplies: Показване на отговори в хронологията
followersCount: Брой последователи followersCount: Брой последователи
receivedReactionsCount: Брой получени реакции receivedReactionsCount: Брой получени реакции
federation: Федерация federation: Федерация
@ -336,11 +336,15 @@ _pages:
title: Заглавие title: Заглавие
my: Моите страници my: Моите страници
pageSetting: Настройки на страницата pageSetting: Настройки на страницата
url: Адрес на страницата
summary: Кратко обобщение
alignCenter: Центриране на елементите
variables: Променливи
_deck: _deck:
_columns: _columns:
notifications: Известия notifications: Известия
mentions: Споменавания mentions: Споменавания
tl: Инфопоток tl: Хронология
direct: Директни съобщения direct: Директни съобщения
list: Списък list: Списък
antenna: Антена antenna: Антена
@ -375,7 +379,7 @@ basicSettings: Основни настройки
otherSettings: Други настройки otherSettings: Други настройки
openInWindow: Отваряне в прозорец openInWindow: Отваряне в прозорец
profile: Профил profile: Профил
timeline: Инфопоток timeline: Хронология
noAccountDescription: Този потребител все още не е написал своята биография. noAccountDescription: Този потребител все още не е написал своята биография.
login: Вход login: Вход
loggingIn: Вписване loggingIn: Вписване
@ -398,7 +402,7 @@ sendMessage: Изпращане на съобщение
jumpToPrevious: Премини към предишно jumpToPrevious: Премини към предишно
newer: по-ново newer: по-ново
older: по-старо older: по-старо
showLess: Покажи по-малко showLess: Показване на по-малко
youGotNewFollower: те последва youGotNewFollower: те последва
receiveFollowRequest: Заявка за последване получена receiveFollowRequest: Заявка за последване получена
mention: Споменаване mention: Споменаване
@ -558,12 +562,12 @@ _visibility:
specified: Директна specified: Директна
localOnly: Само местни localOnly: Само местни
public: Общодостъпна public: Общодостъпна
publicDescription: Публикацията ще бъде видима във всички публични инфопотоци publicDescription: Публикацията ще бъде видима във всички публични хронологии
home: Скрита home: Скрита
localOnlyDescription: Не е видима за отдалечени потребители localOnlyDescription: Не е видима за отдалечени потребители
specifiedDescription: Видима само за определени потребители specifiedDescription: Видима само за определени потребители
followersDescription: Видима само за последователите ти и споменатите потребители followersDescription: Видима само за последователите ти и споменатите потребители
homeDescription: Публикуване само в началния инфопоток homeDescription: Публикуване само в началната хронология
explore: Разглеждане explore: Разглеждане
theme: Теми theme: Теми
wallpaper: Тапет wallpaper: Тапет
@ -594,21 +598,21 @@ _tutorial:
да разберат дали искат да видят вашите публикации или да ви следват. да разберат дали искат да видят вашите публикации или да ви следват.
title: Как се използва Firefish title: Как се използва Firefish
step1_1: Добре дошли! step1_1: Добре дошли!
step5_1: Инфопотоци, инфопотоци навсякъде! step5_1: Хронологии, хронологии навсякъде!
step3_1: Сега е време да последвате няколко хора! step3_1: Сега е време да последвате няколко хора!
step1_2: Нека да ви настроим. Ще бъдете готови за нула време! step1_2: Нека да ви настроим. Ще бъдете готови за нула време!
step5_3: Началният {icon} инфопоток е мястото, където можете да видите публикации step5_3: Началната {icon} хронология е мястото, където можете да видите публикации
от акаунтите, които следвате. от акаунтите, които следвате.
step6_1: И така, какво е това място? step6_1: И така, какво е това място?
step5_7: Глобалният {icon} инфопоток е мястото, където можете да видите публикации step5_7: Глобалната {icon} хронология е мястото, където можете да видите публикации
от всеки друг свързан сървър. от всеки друг свързан сървър.
step4_2: За първата си публикация някои хора обичат да правят публикация {introduction} step4_2: За първата си публикация някои хора обичат да правят публикация {introduction}
или просто „Здравей свят!“ или просто „Здравей свят!“
step5_2: Вашият сървър има активирани {timelines} различни инфопотоци. step5_2: Вашият сървър има активирани {timelines} различни хронологии.
step5_4: Местният {icon} инфопоток е мястото, където можете да видите публикации step5_4: Местната {icon} хронология е мястото, където можете да видите публикации
от всички останали на този сървър. от всички останали на този сървър.
step5_5: Социалният {icon} инфопоток е комбинация от Началния и Местния инфопоток. step5_5: Социалната {icon} хронология е комбинация от Началната и Местната хронология.
step5_6: Препоръчаният {icon} инфопоток е мястото, където можете да видите публикации step5_6: Препоръчаната {icon} хронология е мястото, където можете да видите публикации
от сървъри, препоръчани от администраторите. от сървъри, препоръчани от администраторите.
step6_4: Сега отидете, изследвайте и се забавлявайте! step6_4: Сега отидете, изследвайте и се забавлявайте!
step6_3: Всеки сървър работи по различни начини и не всички сървъри работят с Firefish. step6_3: Всеки сървър работи по различни начини и не всички сървъри работят с Firefish.
@ -754,7 +758,7 @@ _feeds:
general: Общи general: Общи
metadata: Метаданни metadata: Метаданни
disk: Диск disk: Диск
featured: Представени featured: Препоръчано
yearsOld: на {age} години yearsOld: на {age} години
reload: Опресняване reload: Опресняване
invites: Покани invites: Покани
@ -778,8 +782,8 @@ uploadFromUrl: Качване от URL адрес
instanceName: Име на сървъра instanceName: Име на сървъра
instanceDescription: Описание на сървъра instanceDescription: Описание на сървъра
accept: Приемане accept: Приемане
enableLocalTimeline: Включване на местния инфопоток enableLocalTimeline: Включване на местната хронология
enableGlobalTimeline: Включване на глобалния инфопоток enableGlobalTimeline: Включване на глобалната хронология
removeMember: Премахване на член removeMember: Премахване на член
isAdmin: Администратор isAdmin: Администратор
isModerator: Модератор isModerator: Модератор
@ -862,8 +866,8 @@ apply: Прилагане
selectAccount: Избор на акаунт selectAccount: Избор на акаунт
muteThread: Заглушаване на нишката muteThread: Заглушаване на нишката
ffVisibility: Видимост на Последвани/Последователи ffVisibility: Видимост на Последвани/Последователи
renoteMute: Заглушаване на подсилванията в инфопотоците renoteMute: Заглуш. на подсилванията в хронолог.
replyMute: Заглушаване на отговорите в инфопотоците replyMute: Заглуш. на отговорите в хронолог.
blockConfirm: Сигурни ли сте, че искате да блокирате този акаунт? blockConfirm: Сигурни ли сте, че искате да блокирате този акаунт?
appearance: Облик appearance: Облик
fontSize: Размер на шрифта fontSize: Размер на шрифта
@ -893,7 +897,7 @@ charts: Диаграми
disablePagesScript: Изключване на AiScript в Страниците disablePagesScript: Изключване на AiScript в Страниците
updatedAt: Обновено на updatedAt: Обновено на
privateDescription: Видима само за теб privateDescription: Видима само за теб
enableTimelineStreaming: Автоматично обновяване на инфопотоците enableTimelineStreaming: Автоматично обновяване на хронологиите
toEdit: Редактиране toEdit: Редактиране
showEmojisInReactionNotifications: Показване на емоджита в известията за реакции showEmojisInReactionNotifications: Показване на емоджита в известията за реакции
rememberNoteVisibility: Запомняне на настройките за видимост на публикациите rememberNoteVisibility: Запомняне на настройките за видимост на публикациите
@ -932,3 +936,19 @@ clientSettings: Настройки за устройството
behavior: Поведение behavior: Поведение
detectPostLanguage: Автоматично откриване на езика и показване на бутон за превеждане detectPostLanguage: Автоматично откриване на езика и показване на бутон за превеждане
за публикации на чужди езици за публикации на чужди езици
replyUnmute: Отмяна на заглушаването на отговорите
searchWords: Думи за търсене / ID или URL за поглеждане
reloadConfirm: Искате ли да опресните хронологията?
enableRecommendedTimeline: Включване на препоръчаната хронология
showGapBetweenNotesInTimeline: Показване на празнина между публикациите в хронологията
lookup: Поглеждане
media: Мултимедия
welcomeBackWithName: Добре дошли отново, {name}
reduceUiAnimation: Намаляване на UI анимациите
clickToFinishEmailVerification: Моля, натиснете [{ok}], за да завършите потвърждаването
на ел. поща.
_cw:
show: Показване на съдържанието
remoteFollow: Отдалечено последване
messagingUnencryptedInfo: Чатовете във Firefish не са шифровани от край до край. Не
споделяйте чувствителна информация през Firefish.

View file

@ -2289,3 +2289,6 @@ autocorrectNoteLanguage: Mostra un avís si l'idioma de la publicació no coinci
amb el resultat de l'idioma detectat automàticament amb el resultat de l'idioma detectat automàticament
noteEditHistory: Historial d'edicions noteEditHistory: Historial d'edicions
media: Multimèdia media: Multimèdia
antennaLimit: El nombre màxim d'antenes que pot crear un usuari
showAddFileDescriptionAtFirstPost: Obra de forma automàtica un formulari per escriure
una descripció quant intentes publicar un fitxer que no en té

View file

@ -645,6 +645,7 @@ deletedNote: "Deleted post"
invisibleNote: "Invisible post" invisibleNote: "Invisible post"
enableInfiniteScroll: "Automatically load more" enableInfiniteScroll: "Automatically load more"
visibility: "Visiblility" visibility: "Visiblility"
cannotEditVisibility: "You can't edit the visibility"
poll: "Poll" poll: "Poll"
useCw: "Hide content" useCw: "Hide content"
enablePlayer: "Open video player" enablePlayer: "Open video player"
@ -1010,6 +1011,8 @@ isSystemAccount: "This account is created and automatically operated by the syst
Please do not moderate, edit, delete, or otherwise tamper with this account, or Please do not moderate, edit, delete, or otherwise tamper with this account, or
it may break your server." it may break your server."
typeToConfirm: "Please enter {x} to confirm" typeToConfirm: "Please enter {x} to confirm"
useThisAccountConfirm: "Do you want to continue with this account?"
inputAccountId: "Please input your account (e.g., @firefish@info.firefish.dev)"
deleteAccount: "Delete account" deleteAccount: "Delete account"
document: "Documentation" document: "Documentation"
numberOfPageCache: "Number of cached pages" numberOfPageCache: "Number of cached pages"
@ -1156,6 +1159,9 @@ addRe: "Add \"re:\" at the beginning of comment in reply to a post with a conten
confirm: "Confirm" confirm: "Confirm"
importZip: "Import ZIP" importZip: "Import ZIP"
exportZip: "Export ZIP" exportZip: "Export ZIP"
getQrCode: "Show QR code"
remoteFollow: "Remote follow"
copyRemoteFollowUrl: "Copy remote follow URL"
emojiPackCreator: "Emoji pack creator" emojiPackCreator: "Emoji pack creator"
indexable: "Indexable" indexable: "Indexable"
indexableDescription: "Allow built-in search to show your public posts" indexableDescription: "Allow built-in search to show your public posts"
@ -2146,6 +2152,7 @@ _notification:
reacted: "reacted to your post" reacted: "reacted to your post"
renoted: "boosted your post" renoted: "boosted your post"
voted: "voted on your poll" voted: "voted on your poll"
andCountUsers: "and {count} more users {acted}"
_types: _types:
all: "All" all: "All"
follow: "New followers" follow: "New followers"
@ -2233,3 +2240,4 @@ incorrectLanguageWarning: "It looks like your post is in {detected}, but you sel
{current}.\nWould you like to set the language to {detected} instead?" {current}.\nWould you like to set the language to {detected} instead?"
noteEditHistory: "Post edit history" noteEditHistory: "Post edit history"
slashQuote: "Slash quote" slashQuote: "Slash quote"
foldNotification: "Group similar notifications"

View file

@ -928,6 +928,8 @@ colored: "Coloré"
label: "Étiquette" label: "Étiquette"
localOnly: "Local seulement" localOnly: "Local seulement"
account: "Comptes" account: "Comptes"
getQrCode: "Obtenir le code QR"
_emailUnavailable: _emailUnavailable:
used: "Adresse non disponible" used: "Adresse non disponible"
format: "Le format de cette adresse de courriel est invalide" format: "Le format de cette adresse de courriel est invalide"

View file

@ -1825,6 +1825,7 @@ _notification:
reacted: mereaksi postinganmu reacted: mereaksi postinganmu
renoted: memposting ulang postinganmu renoted: memposting ulang postinganmu
voted: memilih di angketmu voted: memilih di angketmu
andCountUsers: dan {count} lebih banyak pengguna {acted}
_deck: _deck:
alwaysShowMainColumn: "Selalu tampilkan kolom utama" alwaysShowMainColumn: "Selalu tampilkan kolom utama"
columnAlign: "Luruskan kolom" columnAlign: "Luruskan kolom"
@ -2267,3 +2268,13 @@ markLocalFilesNsfwByDefaultDescription: Terlepas dari pengaturan ini, pengguna d
menghapus sendiri tanda NSFW. Berkas yang ada tidak berpengaruh. menghapus sendiri tanda NSFW. Berkas yang ada tidak berpengaruh.
noteEditHistory: Riwayat penyuntingan kiriman noteEditHistory: Riwayat penyuntingan kiriman
media: Media media: Media
antennaLimit: Jumlah antena maksimum yang dapat dibuat oleh setiap pengguna
showAddFileDescriptionAtFirstPost: Buka formulir secara otomatis untuk menulis deskripsi
ketika mencoba mengirim berkas tanpa deskripsi
remoteFollow: Ikuti jarak jauh
foldNotification: Kelompokkan notifikasi yang sama
getQrCode: Tampilkan kode QR
cannotEditVisibility: Kamu tidak bisa menyunting keterlihatan
useThisAccountConfirm: Apakah kamu ingin melanjutkan dengan akun ini?
inputAccountId: Silakan memasukkan akunmu (misalnya, @firefish@info.firefish.dev)
copyRemoteFollowUrl: Salin URL ikuti jarak jauh

View file

@ -1902,6 +1902,7 @@ _notification:
reacted: がリアクションしました reacted: がリアクションしました
renoted: がブーストしました renoted: がブーストしました
voted: が投票しました voted: が投票しました
andCountUsers: と{count}人が{acted}しました
_deck: _deck:
alwaysShowMainColumn: "常にメインカラムを表示" alwaysShowMainColumn: "常にメインカラムを表示"
columnAlign: "カラムの寄せ" columnAlign: "カラムの寄せ"
@ -2057,3 +2058,12 @@ incorrectLanguageWarning: "この投稿は{detected}で書かれていると判
markLocalFilesNsfwByDefault: このサーバーの全てのファイルをデフォルトでNSFWに設定する markLocalFilesNsfwByDefault: このサーバーの全てのファイルをデフォルトでNSFWに設定する
markLocalFilesNsfwByDefaultDescription: この設定が有効でも、ユーザーは自分でNSFWのフラグを外すことができます。また、この設定は既存のファイルには影響しません。 markLocalFilesNsfwByDefaultDescription: この設定が有効でも、ユーザーは自分でNSFWのフラグを外すことができます。また、この設定は既存のファイルには影響しません。
noteEditHistory: 編集履歴 noteEditHistory: 編集履歴
showAddFileDescriptionAtFirstPost: 説明の無い添付ファイルを投稿しようとした際に説明を書く画面を自動で開く
antennaLimit: 各ユーザーが作れるアンテナの最大数
inputAccountId: 'あなたのアカウントを入力してください(例: @firefish@info.firefish.dev'
remoteFollow: リモートフォロー
cannotEditVisibility: 公開範囲は変更できません
useThisAccountConfirm: このアカウントで操作を続けますか?
getQrCode: QRコードを表示
copyRemoteFollowUrl: リモートからフォローするURLをコピー
foldNotification: 同じ種類の通知をまとめて表示する

View file

@ -564,6 +564,7 @@ deletedNote: "已删除的帖子"
invisibleNote: "隐藏的帖子" invisibleNote: "隐藏的帖子"
enableInfiniteScroll: "滚动页面以载入更多内容" enableInfiniteScroll: "滚动页面以载入更多内容"
visibility: "可见性" visibility: "可见性"
cannotEditVisibility: "不能编辑帖子的可见性"
poll: "调查问卷" poll: "调查问卷"
useCw: "隐藏内容" useCw: "隐藏内容"
enablePlayer: "打开播放器" enablePlayer: "打开播放器"
@ -878,6 +879,8 @@ driveCapOverrideCaption: "输入 0 或以下的值将容量重置为默认值。
requireAdminForView: "您需要使用管理员账号登录才能查看。" requireAdminForView: "您需要使用管理员账号登录才能查看。"
isSystemAccount: "该账号由系统自动创建。请不要修改、编辑、删除或以其它方式篡改这个账号,否则可能会破坏您的服务器。" isSystemAccount: "该账号由系统自动创建。请不要修改、编辑、删除或以其它方式篡改这个账号,否则可能会破坏您的服务器。"
typeToConfirm: "输入 {x} 以确认操作" typeToConfirm: "输入 {x} 以确认操作"
useThisAccountConfirm: "您想使用此帐户继续执行此操作吗?"
inputAccountId: "请输入您的帐户(例如 @firefish@info.firefish.dev "
deleteAccount: "删除账号" deleteAccount: "删除账号"
document: "文档" document: "文档"
numberOfPageCache: "缓存页数" numberOfPageCache: "缓存页数"
@ -1386,7 +1389,7 @@ _poll:
_visibility: _visibility:
public: "公开" public: "公开"
publicDescription: "您的帖子将出现在公共时间线上" publicDescription: "您的帖子将出现在公共时间线上"
home: "公开" home: "悄悄公开"
homeDescription: "仅发送至首页时间线" homeDescription: "仅发送至首页时间线"
followers: "仅关注者" followers: "仅关注者"
followersDescription: "仅对您的关注者和提及的用户可见" followersDescription: "仅对您的关注者和提及的用户可见"
@ -1787,6 +1790,7 @@ _notification:
reacted: 回应了您的帖子 reacted: 回应了您的帖子
voted: 在您的问卷调查中投了票 voted: 在您的问卷调查中投了票
renoted: 转发了您的帖子 renoted: 转发了您的帖子
andCountUsers: "和其他 {count} 名用户{acted}"
_deck: _deck:
alwaysShowMainColumn: "总是显示主列" alwaysShowMainColumn: "总是显示主列"
columnAlign: "列对齐" columnAlign: "列对齐"
@ -1972,6 +1976,9 @@ origin: 起源
confirm: 确认 confirm: 确认
importZip: 导入 ZIP importZip: 导入 ZIP
exportZip: 导出 ZIP exportZip: 导出 ZIP
getQrCode: "获取二维码"
remoteFollow: "远程关注"
copyRemoteFollowUrl: "复制远程关注 URL"
emojiPackCreator: 表情包创建工具 emojiPackCreator: 表情包创建工具
objectStorageS3ForcePathStyleDesc: 打开此选项可构建格式为 "s3.amazonaws.com/<bucket>/" 而非 "<bucket>.s3.amazonaws.com" objectStorageS3ForcePathStyleDesc: 打开此选项可构建格式为 "s3.amazonaws.com/<bucket>/" 而非 "<bucket>.s3.amazonaws.com"
的端点 URL。 的端点 URL。
@ -2060,3 +2067,4 @@ incorrectLanguageWarning: "看上去您帖子使用的语言是{detected},但
noteEditHistory: "帖子编辑历史" noteEditHistory: "帖子编辑历史"
media: 媒体 media: 媒体
slashQuote: "斜杠引用" slashQuote: "斜杠引用"
foldNotification: "将通知按同类型分组"

View file

@ -1,6 +1,6 @@
{ {
"name": "firefish", "name": "firefish",
"version": "20240421", "version": "20240424",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://firefish.dev/firefish/firefish.git" "url": "https://firefish.dev/firefish/firefish.git"

View file

@ -18,21 +18,21 @@ napi = { workspace = true, optional = true, default-features = false, features =
napi-derive = { workspace = true, optional = true } napi-derive = { workspace = true, optional = true }
argon2 = { workspace = true, features = ["std"] } argon2 = { workspace = true, features = ["std"] }
async-trait = { workspace = true }
basen = { workspace = true } basen = { workspace = true }
bcrypt = { workspace = true } bcrypt = { workspace = true }
cfg-if = { workspace = true }
chrono = { workspace = true } chrono = { workspace = true }
cuid2 = { workspace = true } cuid2 = { workspace = true }
emojis = { workspace = true } emojis = { workspace = true }
idna = { workspace = true } idna = { workspace = true }
jsonschema = { workspace = true } image = { workspace = true }
nom-exif = { workspace = true }
once_cell = { workspace = true } once_cell = { workspace = true }
parse-display = { workspace = true } openssl = { workspace = true, features = ["vendored"] }
rand = { workspace = true } rand = { workspace = true }
redis = { workspace = true } redis = { workspace = true }
regex = { workspace = true } regex = { workspace = true }
schemars = { workspace = true, features = ["chrono"] } reqwest = { workspace = true, features = ["blocking"] }
rmp-serde = { workspace = true }
sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] } sea-orm = { workspace = true, features = ["sqlx-postgres", "runtime-tokio-rustls"] }
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true } serde_json = { workspace = true }
@ -40,6 +40,8 @@ serde_yaml = { workspace = true }
strum = { workspace = true, features = ["derive"] } strum = { workspace = true, features = ["derive"] }
thiserror = { workspace = true } thiserror = { workspace = true }
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
url = { workspace = true } url = { workspace = true }
urlencoding = { workspace = true } urlencoding = { workspace = true }

View file

@ -3,6 +3,21 @@
/* auto-generated by NAPI-RS */ /* auto-generated by NAPI-RS */
export const SECOND: number
export const MINUTE: number
export const HOUR: number
export const DAY: number
export const USER_ONLINE_THRESHOLD: number
export const USER_ACTIVE_THRESHOLD: number
/**
* List of file types allowed to be viewed directly in the browser
* Anything not included here will be responded as application/octet-stream
* SVG is not allowed because it generates XSS <- we need to fix this and later allow it to be viewed directly
* https://github.com/sindresorhus/file-type/blob/main/supported.js
* https://github.com/sindresorhus/file-type/blob/main/core.js
* https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers
*/
export const FILE_TYPE_BROWSERSAFE: string[]
export interface EnvConfig { export interface EnvConfig {
onlyQueue: boolean onlyQueue: boolean
onlyServer: boolean onlyServer: boolean
@ -38,7 +53,9 @@ export interface ServerConfig {
inboxJobPerSec?: number inboxJobPerSec?: number
deliverJobMaxAttempts?: number deliverJobMaxAttempts?: number
inboxJobMaxAttempts?: number inboxJobMaxAttempts?: number
/** deprecated */
logLevel?: Array<string> logLevel?: Array<string>
maxLogLevel?: string
syslog?: SysLogConfig syslog?: SysLogConfig
proxyRemoteFiles?: boolean proxyRemoteFiles?: boolean
mediaProxy?: string mediaProxy?: string
@ -148,7 +165,9 @@ export interface Config {
inboxJobPerSec?: number inboxJobPerSec?: number
deliverJobMaxAttempts?: number deliverJobMaxAttempts?: number
inboxJobMaxAttempts?: number inboxJobMaxAttempts?: number
/** deprecated */
logLevel?: Array<string> logLevel?: Array<string>
maxLogLevel?: string
syslog?: SysLogConfig syslog?: SysLogConfig
proxyRemoteFiles?: boolean proxyRemoteFiles?: boolean
mediaProxy?: string mediaProxy?: string
@ -156,8 +175,8 @@ export interface Config {
reservedUsernames?: Array<string> reservedUsernames?: Array<string>
maxUserSignups?: number maxUserSignups?: number
isManagedHosting?: boolean isManagedHosting?: boolean
maxNoteLength?: number maxNoteLength: number
maxCaptionLength?: number maxCaptionLength: number
deepl?: DeepLConfig deepl?: DeepLConfig
libreTranslate?: LibreTranslateConfig libreTranslate?: LibreTranslateConfig
email?: EmailConfig email?: EmailConfig
@ -193,6 +212,7 @@ export interface Acct {
} }
export function stringToAcct(acct: string): Acct export function stringToAcct(acct: string): Acct
export function acctToString(acct: Acct): string export function acctToString(acct: Acct): string
export function addNoteToAntenna(antennaId: string, note: Note): void
/** /**
* @param host punycoded instance host * @param host punycoded instance host
* @returns whether the given host should be blocked * @returns whether the given host should be blocked
@ -228,6 +248,11 @@ export function sqlLikeEscape(src: string): string
export function safeForSql(src: string): boolean export function safeForSql(src: string): boolean
/** Convert milliseconds to a human readable string */ /** Convert milliseconds to a human readable string */
export function formatMilliseconds(milliseconds: number): string export function formatMilliseconds(milliseconds: number): string
export interface ImageSize {
width: number
height: number
}
export function getImageSizeFromUrl(url: string): Promise<ImageSize>
/** TODO: handle name collisions better */ /** TODO: handle name collisions better */
export interface NoteLikeForGetNoteSummary { export interface NoteLikeForGetNoteSummary {
fileIds: Array<string> fileIds: Array<string>
@ -263,6 +288,8 @@ export interface DecodedReaction {
export function decodeReaction(reaction: string): DecodedReaction export function decodeReaction(reaction: string): DecodedReaction
export function countReactions(reactions: Record<string, number>): Record<string, number> export function countReactions(reactions: Record<string, number>): Record<string, number>
export function toDbReaction(reaction?: string | undefined | null, host?: string | undefined | null): Promise<string> export function toDbReaction(reaction?: string | undefined | null, host?: string | undefined | null): Promise<string>
/** Delete all entries in the "attestation_challenge" table created at more than 5 minutes ago */
export function removeOldAttestationChallenges(): Promise<void>
export interface AbuseUserReport { export interface AbuseUserReport {
id: string id: string
createdAt: Date createdAt: Date
@ -990,10 +1017,10 @@ export interface User {
isDeleted: boolean isDeleted: boolean
driveCapacityOverrideMb: number | null driveCapacityOverrideMb: number | null
movedToUri: string | null movedToUri: string | null
alsoKnownAs: string | null
speakAsCat: boolean speakAsCat: boolean
emojiModPerm: UserEmojimodpermEnum emojiModPerm: UserEmojimodpermEnum
isIndexable: boolean isIndexable: boolean
alsoKnownAs: Array<string> | null
} }
export interface UserGroup { export interface UserGroup {
id: string id: string
@ -1119,9 +1146,41 @@ export interface Webhook {
latestSentAt: Date | null latestSentAt: Date | null
latestStatus: number | null latestStatus: number | null
} }
export function addNoteToAntenna(antennaId: string, note: Note): void export function initializeRustLogger(): void
/** Initializes Cuid2 generator. Must be called before any [create_id]. */ export function watchNote(watcherId: string, noteAuthorId: string, noteId: string): Promise<void>
export function initIdGenerator(length: number, fingerprint: string): void export function unwatchNote(watcherId: string, noteId: string): Promise<void>
export function publishToChannelStream(channelId: string, userId: string): void
export enum ChatEvent {
Message = 'message',
Read = 'read',
Deleted = 'deleted',
Typing = 'typing'
}
export function publishToChatStream(senderUserId: string, receiverUserId: string, kind: ChatEvent, object: any): void
export enum ChatIndexEvent {
Message = 'message',
Read = 'read'
}
export function publishToChatIndexStream(userId: string, kind: ChatIndexEvent, object: any): void
export interface PackedEmoji {
id: string
aliases: Array<string>
name: string
category: string | null
host: string | null
url: string
license: string | null
width: number | null
height: number | null
}
export function publishToBroadcastStream(emoji: PackedEmoji): void
export interface AbuseUserReportLike {
id: string
targetUserId: string
reporterId: string
comment: string
}
export function publishToModerationStream(moderatorId: string, report: AbuseUserReportLike): void
export function getTimestamp(id: string): number export function getTimestamp(id: string): number
/** /**
* The generated ID results in the form of `[8 chars timestamp] + [cuid2]`. * The generated ID results in the form of `[8 chars timestamp] + [cuid2]`.
@ -1131,5 +1190,7 @@ export function getTimestamp(id: string): number
* *
* Ref: https://github.com/paralleldrive/cuid2#parameterized-length * Ref: https://github.com/paralleldrive/cuid2#parameterized-length
*/ */
export function genId(date?: Date | undefined | null): string export function genId(): string
/** Generate an ID using a specific datetime */
export function genIdAt(date: Date): string
export function secureRndstr(length?: number | undefined | null): string export function secureRndstr(length?: number | undefined | null): string

View file

@ -310,12 +310,20 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`) throw new Error(`Failed to load native binding`)
} }
const { loadEnv, loadConfig, stringToAcct, acctToString, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, addNoteToAntenna, initIdGenerator, getTimestamp, genId, secureRndstr } = nativeBinding const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE
module.exports.HOUR = HOUR
module.exports.DAY = DAY
module.exports.USER_ONLINE_THRESHOLD = USER_ONLINE_THRESHOLD
module.exports.USER_ACTIVE_THRESHOLD = USER_ACTIVE_THRESHOLD
module.exports.FILE_TYPE_BROWSERSAFE = FILE_TYPE_BROWSERSAFE
module.exports.loadEnv = loadEnv module.exports.loadEnv = loadEnv
module.exports.loadConfig = loadConfig module.exports.loadConfig = loadConfig
module.exports.stringToAcct = stringToAcct module.exports.stringToAcct = stringToAcct
module.exports.acctToString = acctToString module.exports.acctToString = acctToString
module.exports.addNoteToAntenna = addNoteToAntenna
module.exports.isBlockedServer = isBlockedServer module.exports.isBlockedServer = isBlockedServer
module.exports.isSilencedServer = isSilencedServer module.exports.isSilencedServer = isSilencedServer
module.exports.isAllowedServer = isAllowedServer module.exports.isAllowedServer = isAllowedServer
@ -329,6 +337,7 @@ module.exports.isUnicodeEmoji = isUnicodeEmoji
module.exports.sqlLikeEscape = sqlLikeEscape module.exports.sqlLikeEscape = sqlLikeEscape
module.exports.safeForSql = safeForSql module.exports.safeForSql = safeForSql
module.exports.formatMilliseconds = formatMilliseconds module.exports.formatMilliseconds = formatMilliseconds
module.exports.getImageSizeFromUrl = getImageSizeFromUrl
module.exports.getNoteSummary = getNoteSummary module.exports.getNoteSummary = getNoteSummary
module.exports.toMastodonId = toMastodonId module.exports.toMastodonId = toMastodonId
module.exports.fromMastodonId = fromMastodonId module.exports.fromMastodonId = fromMastodonId
@ -341,6 +350,7 @@ module.exports.isOldPasswordAlgorithm = isOldPasswordAlgorithm
module.exports.decodeReaction = decodeReaction module.exports.decodeReaction = decodeReaction
module.exports.countReactions = countReactions module.exports.countReactions = countReactions
module.exports.toDbReaction = toDbReaction module.exports.toDbReaction = toDbReaction
module.exports.removeOldAttestationChallenges = removeOldAttestationChallenges
module.exports.AntennaSrcEnum = AntennaSrcEnum module.exports.AntennaSrcEnum = AntennaSrcEnum
module.exports.DriveFileUsageHintEnum = DriveFileUsageHintEnum module.exports.DriveFileUsageHintEnum = DriveFileUsageHintEnum
module.exports.MutedNoteReasonEnum = MutedNoteReasonEnum module.exports.MutedNoteReasonEnum = MutedNoteReasonEnum
@ -352,8 +362,17 @@ module.exports.RelayStatusEnum = RelayStatusEnum
module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum module.exports.UserEmojimodpermEnum = UserEmojimodpermEnum
module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum module.exports.UserProfileFfvisibilityEnum = UserProfileFfvisibilityEnum
module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum module.exports.UserProfileMutingnotificationtypesEnum = UserProfileMutingnotificationtypesEnum
module.exports.addNoteToAntenna = addNoteToAntenna module.exports.initializeRustLogger = initializeRustLogger
module.exports.initIdGenerator = initIdGenerator module.exports.watchNote = watchNote
module.exports.unwatchNote = unwatchNote
module.exports.publishToChannelStream = publishToChannelStream
module.exports.ChatEvent = ChatEvent
module.exports.publishToChatStream = publishToChatStream
module.exports.ChatIndexEvent = ChatIndexEvent
module.exports.publishToChatIndexStream = publishToChatIndexStream
module.exports.publishToBroadcastStream = publishToBroadcastStream
module.exports.publishToModerationStream = publishToModerationStream
module.exports.getTimestamp = getTimestamp module.exports.getTimestamp = getTimestamp
module.exports.genId = genId module.exports.genId = genId
module.exports.genIdAt = genIdAt
module.exports.secureRndstr = secureRndstr module.exports.secureRndstr = secureRndstr

View file

@ -0,0 +1,67 @@
/// Number of milliseconds in one second.
#[crate::export]
pub const SECOND: i32 = 1000;
/// Number of milliseconds in one minute.
#[crate::export]
pub const MINUTE: i32 = 60 * SECOND;
/// Number of milliseconds in one hour.
#[crate::export]
pub const HOUR: i32 = 60 * MINUTE;
/// Number of milliseconds in one day.
#[crate::export]
pub const DAY: i32 = 24 * HOUR;
/// Window, in milliseconds, within which a user counts as "online".
/// NOTE(review): presumably compared against the user's last-seen timestamp — confirm at call sites.
#[crate::export]
pub const USER_ONLINE_THRESHOLD: i32 = 10 * MINUTE;
/// Window, in milliseconds, within which a user counts as "active".
#[crate::export]
pub const USER_ACTIVE_THRESHOLD: i32 = 3 * DAY;
/// List of media (MIME) types that are allowed to be viewed directly in the browser.
/// Anything not included here will be served as application/octet-stream instead.
/// SVG is not allowed because it can carry scripts (XSS) <- we need to fix this and later allow it to be viewed directly
/// https://github.com/sindresorhus/file-type/blob/main/supported.js
/// https://github.com/sindresorhus/file-type/blob/main/core.js
/// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers
#[crate::export]
pub const FILE_TYPE_BROWSERSAFE: [&str; 41] = [
    // Images
    "image/png",
    "image/gif", // TODO: deprecated, but still used by old posts; new gifs should be converted to webp in the future
    "image/jpeg",
    "image/webp", // TODO: make this the default image format
    "image/apng",
    "image/bmp",
    "image/tiff",
    "image/x-icon",
    "image/avif", // not widely supported yet, but good to introduce initial support for the future
    // OggS
    "audio/opus",
    "video/ogg",
    "audio/ogg",
    "application/ogg",
    // ISO/IEC base media file format
    "video/quicktime",
    "video/mp4", // TODO: we need to check for av1 later
    "video/vnd.avi", // also av1
    "audio/mp4",
    "video/x-m4v",
    "audio/x-m4a",
    "video/3gpp",
    "video/3gpp2",
    "video/3gp2",
    "audio/3gpp",
    "audio/3gpp2",
    "audio/3gp2",
    "video/mpeg",
    "audio/mpeg",
    "video/webm",
    "audio/webm",
    "audio/aac",
    "audio/x-flac",
    "audio/flac",
    "audio/vnd.wave",
    "audio/mod",
    "audio/x-mod",
    "audio/s3m",
    "audio/x-s3m",
    "audio/xm",
    "audio/x-xm",
    "audio/it",
    "audio/x-it",
];

View file

@ -1,4 +1,5 @@
pub use server::CONFIG; pub use server::CONFIG;
pub mod constant;
pub mod environment; pub mod environment;
pub mod server; pub mod server;

View file

@ -36,8 +36,11 @@ struct ServerConfig {
pub deliver_job_max_attempts: Option<u32>, pub deliver_job_max_attempts: Option<u32>,
pub inbox_job_max_attempts: Option<u32>, pub inbox_job_max_attempts: Option<u32>,
/// deprecated
pub log_level: Option<Vec<String>>, pub log_level: Option<Vec<String>>,
pub max_log_level: Option<String>,
pub syslog: Option<SysLogConfig>, pub syslog: Option<SysLogConfig>,
pub proxy_remote_files: Option<bool>, pub proxy_remote_files: Option<bool>,
@ -197,7 +200,11 @@ pub struct Config {
pub inbox_job_per_sec: Option<u32>, pub inbox_job_per_sec: Option<u32>,
pub deliver_job_max_attempts: Option<u32>, pub deliver_job_max_attempts: Option<u32>,
pub inbox_job_max_attempts: Option<u32>, pub inbox_job_max_attempts: Option<u32>,
/// deprecated
pub log_level: Option<Vec<String>>, pub log_level: Option<Vec<String>>,
pub max_log_level: Option<String>,
pub syslog: Option<SysLogConfig>, pub syslog: Option<SysLogConfig>,
pub proxy_remote_files: Option<bool>, pub proxy_remote_files: Option<bool>,
pub media_proxy: Option<String>, pub media_proxy: Option<String>,
@ -205,8 +212,8 @@ pub struct Config {
pub reserved_usernames: Option<Vec<String>>, pub reserved_usernames: Option<Vec<String>>,
pub max_user_signups: Option<u32>, pub max_user_signups: Option<u32>,
pub is_managed_hosting: Option<bool>, pub is_managed_hosting: Option<bool>,
pub max_note_length: Option<u32>, pub max_note_length: u32,
pub max_caption_length: Option<u32>, pub max_caption_length: u32,
pub deepl: Option<DeepLConfig>, pub deepl: Option<DeepLConfig>,
pub libre_translate: Option<LibreTranslateConfig>, pub libre_translate: Option<LibreTranslateConfig>,
pub email: Option<EmailConfig>, pub email: Option<EmailConfig>,
@ -346,6 +353,7 @@ fn load_config() -> Config {
deliver_job_max_attempts: server_config.deliver_job_max_attempts, deliver_job_max_attempts: server_config.deliver_job_max_attempts,
inbox_job_max_attempts: server_config.inbox_job_max_attempts, inbox_job_max_attempts: server_config.inbox_job_max_attempts,
log_level: server_config.log_level, log_level: server_config.log_level,
max_log_level: server_config.max_log_level,
syslog: server_config.syslog, syslog: server_config.syslog,
proxy_remote_files: server_config.proxy_remote_files, proxy_remote_files: server_config.proxy_remote_files,
media_proxy: server_config.media_proxy, media_proxy: server_config.media_proxy,
@ -353,8 +361,8 @@ fn load_config() -> Config {
reserved_usernames: server_config.reserved_usernames, reserved_usernames: server_config.reserved_usernames,
max_user_signups: server_config.max_user_signups, max_user_signups: server_config.max_user_signups,
is_managed_hosting: server_config.is_managed_hosting, is_managed_hosting: server_config.is_managed_hosting,
max_note_length: server_config.max_note_length, max_note_length: server_config.max_note_length.unwrap_or(3000),
max_caption_length: server_config.max_caption_length, max_caption_length: server_config.max_caption_length.unwrap_or(1500),
deepl: server_config.deepl, deepl: server_config.deepl,
libre_translate: server_config.libre_translate, libre_translate: server_config.libre_translate,
email: server_config.email, email: server_config.email,

View file

@ -1,5 +1,6 @@
use crate::config::CONFIG; use crate::config::CONFIG;
use sea_orm::{Database, DbConn, DbErr}; use sea_orm::{ConnectOptions, Database, DbConn, DbErr};
use tracing::log::LevelFilter;
static DB_CONN: once_cell::sync::OnceCell<DbConn> = once_cell::sync::OnceCell::new(); static DB_CONN: once_cell::sync::OnceCell<DbConn> = once_cell::sync::OnceCell::new();
@ -12,7 +13,13 @@ async fn init_database() -> Result<&'static DbConn, DbErr> {
CONFIG.db.port, CONFIG.db.port,
CONFIG.db.db, CONFIG.db.db,
); );
let conn = Database::connect(database_uri).await?; let option: ConnectOptions = ConnectOptions::new(database_uri)
.sqlx_logging_level(LevelFilter::Trace)
.to_owned();
tracing::info!("Initializing PostgreSQL connection");
let conn = Database::connect(option).await?;
Ok(DB_CONN.get_or_init(move || conn)) Ok(DB_CONN.get_or_init(move || conn))
} }

View file

@ -26,6 +26,8 @@ fn init_redis() -> Result<Client, RedisError> {
params.concat() params.concat()
}; };
tracing::info!("Initializing Redis connection");
Client::open(redis_url) Client::open(redis_url)
} }

View file

@ -0,0 +1,31 @@
use crate::database::{redis_conn, redis_key};
use crate::model::entity::note;
use crate::service::stream;
use crate::util::id::{get_timestamp, InvalidIdErr};
use redis::{streams::StreamMaxlen, Commands, RedisError};
/// Errors that can occur while delivering a note to an antenna.
#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error("Redis error: {0}")]
    RedisErr(#[from] RedisError),
    #[error("Invalid ID: {0}")]
    InvalidIdErr(#[from] InvalidIdErr),
    #[error("Stream error: {0}")]
    StreamErr(#[from] stream::Error),
}

type Note = note::Model;

/// Appends `note` to the antenna's Redis timeline stream (trimmed to roughly
/// 200 entries) and then publishes it on the antenna's streaming channel.
#[crate::export]
pub fn add_note_to_antenna(antenna_id: String, note: &Note) -> Result<(), Error> {
    // for timeline API
    redis_conn()?.xadd_maxlen(
        redis_key(format!("antennaTimeline:{}", antenna_id)),
        StreamMaxlen::Approx(200),
        // Stream entry ID is derived from the note's creation timestamp;
        // the trailing "*" lets Redis assign the sequence part.
        format!("{}-*", get_timestamp(&note.id)?),
        &[("note", &note.id)],
    )?;

    // for streaming API
    Ok(stream::antenna::publish(antenna_id, note)?)
}

View file

@ -39,7 +39,7 @@ async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
.flatten(), .flatten(),
); );
if let Some(renote_id) = note.renote_id { if let Some(renote_id) = &note.renote_id {
if let Some((text, cw)) = note::Entity::find_by_id(renote_id) if let Some((text, cw)) = note::Entity::find_by_id(renote_id)
.select_only() .select_only()
.columns([note::Column::Text, note::Column::Cw]) .columns([note::Column::Text, note::Column::Cw])
@ -53,10 +53,12 @@ async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
if let Some(c) = cw { if let Some(c) = cw {
texts.push(c); texts.push(c);
} }
} else {
tracing::warn!("nonexistent renote id: {:#?}", renote_id);
} }
} }
if let Some(reply_id) = note.reply_id { if let Some(reply_id) = &note.reply_id {
if let Some((text, cw)) = note::Entity::find_by_id(reply_id) if let Some((text, cw)) = note::Entity::find_by_id(reply_id)
.select_only() .select_only()
.columns([note::Column::Text, note::Column::Cw]) .columns([note::Column::Text, note::Column::Cw])
@ -70,6 +72,8 @@ async fn all_texts(note: NoteLike) -> Result<Vec<String>, DbErr> {
if let Some(c) = cw { if let Some(c) = cw {
texts.push(c); texts.push(c);
} }
} else {
tracing::warn!("nonexistent reply id: {:#?}", reply_id);
} }
} }

View file

@ -0,0 +1,200 @@
use crate::misc::redis_cache::{get_cache, set_cache, CacheError};
use crate::util::http_client;
use image::{io::Reader, ImageError, ImageFormat};
use nom_exif::{parse_jpeg_exif, EntryValue, ExifTag};
use std::io::Cursor;
use tokio::sync::Mutex;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Redis cache error: {0}")]
CacheErr(#[from] CacheError),
#[error("Reqwest error: {0}")]
ReqwestErr(#[from] reqwest::Error),
#[error("Image decoding error: {0}")]
ImageErr(#[from] ImageError),
#[error("Image decoding error: {0}")]
IoErr(#[from] std::io::Error),
#[error("Exif extraction error: {0}")]
ExifErr(#[from] nom_exif::Error),
#[error("Emoji meta attempt limit exceeded: {0}")]
TooManyAttempts(String),
#[error("Unsupported image type: {0}")]
UnsupportedImageErr(String),
}
// Subset of the formats the `image` crate can identify that we are willing
// to probe for dimensions (mirrors the browser-safe image list).
const BROWSER_SAFE_IMAGE_TYPES: [ImageFormat; 8] = [
    ImageFormat::Png,
    ImageFormat::Jpeg,
    ImageFormat::Gif,
    ImageFormat::WebP,
    ImageFormat::Tiff,
    ImageFormat::Bmp,
    ImageFormat::Ico,
    ImageFormat::Avif,
];

// Serializes the check-then-set on the per-URL attempt cache in
// get_image_size_from_url.
static MTX_GUARD: Mutex<()> = Mutex::const_new(());

/// Width and height of an image, in pixels.
#[derive(Debug, PartialEq)]
#[crate::export(object)]
pub struct ImageSize {
    pub width: u32,
    pub height: u32,
}
/// Downloads the image at `url` and returns its dimensions in pixels.
///
/// Each URL may be attempted only once per 10 minutes: a Redis flag is set on
/// the first attempt, and a repeated call within that window fails with
/// [`Error::TooManyAttempts`]. For JPEGs, the EXIF orientation tag is honored
/// (orientations 5–8 swap width and height).
///
/// # Errors
/// Fails on cache/network/decoding errors, on the attempt limit, or when the
/// detected format is not in `BROWSER_SAFE_IMAGE_TYPES`.
#[crate::export]
pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
    let attempted: bool;

    {
        // BUG FIX: the guard must be bound to a name. `let _ = MTX_GUARD.lock().await`
        // drops the MutexGuard immediately, leaving the check-then-set below
        // unprotected against concurrent callers.
        let _guard = MTX_GUARD.lock().await;

        let key = format!("fetchImage:{}", url);
        attempted = get_cache::<bool>(&key)?.is_some();

        if !attempted {
            // Mark this URL as attempted for the next 10 minutes.
            set_cache(&key, &true, 10 * 60)?;
        }
    }

    if attempted {
        tracing::warn!("attempt limit exceeded: {}", url);
        return Err(Error::TooManyAttempts(url.to_string()));
    }

    tracing::info!("retrieving image size from {}", url);

    let image_bytes = http_client()?.get(url).send().await?.bytes().await?;

    let reader = Reader::new(Cursor::new(&image_bytes)).with_guessed_format()?;

    let format = reader.format();
    if format.is_none() || !BROWSER_SAFE_IMAGE_TYPES.contains(&format.unwrap()) {
        return Err(Error::UnsupportedImageErr(format!("{:?}", format)));
    }

    let size = reader.into_dimensions()?;

    let res = ImageSize {
        width: size.0,
        height: size.1,
    };

    // Only JPEG carries an EXIF orientation we need to compensate for.
    if format.unwrap() != ImageFormat::Jpeg {
        return Ok(res);
    }

    // handle jpeg orientation
    // https://magnushoff.com/articles/jpeg-orientation/
    let exif = parse_jpeg_exif(&*image_bytes)?;
    if exif.is_none() {
        return Ok(res);
    }

    // Orientation values 5-8 describe a 90°/270° rotation, i.e. the stored
    // width/height are transposed relative to the displayed image.
    let orientation = exif.unwrap().get_value(&ExifTag::Orientation)?;
    let rotated =
        orientation.is_some() && matches!(orientation.unwrap(), EntryValue::U32(v) if v >= 5);

    if !rotated {
        return Ok(res);
    }

    Ok(ImageSize {
        width: size.1,
        height: size.0,
    })
}
#[cfg(test)]
mod unit_test {
    use super::{get_image_size_from_url, ImageSize};
    use crate::misc::redis_cache::delete_cache;
    use pretty_assertions::assert_eq;

    // NOTE(review): this test reaches firefish.dev over the network and needs a
    // live Redis instance, so it is an integration test rather than a pure
    // unit test.
    #[tokio::test]
    async fn test_get_image_size() {
        // Fixture URLs are pinned to specific commits so the expected sizes stay stable.
        let png_url_1 = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/splash.png";
        let png_url_2 = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/notification-badges/at.png";
        let png_url_3 = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/api-doc.png";
        let rotated_jpeg_url = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/test/resources/rotate.jpg";
        let webp_url_1 = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/custom/assets/badges/error.webp";
        let webp_url_2 = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/screenshots/1.webp";
        let ico_url = "https://firefish.dev/firefish/firefish/-/raw/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/favicon.ico";
        let gif_url = "https://firefish.dev/firefish/firefish/-/raw/b9c3dfbd3d473cb2cee20c467eeae780bc401271/packages/backend/test/resources/anime.gif";
        let mp3_url = "https://firefish.dev/firefish/firefish/-/blob/5891a90f71a8b9d5ea99c683ade7e485c685d642/packages/backend/assets/sounds/aisha/1.mp3";

        // Delete caches in case you run this test multiple times
        // (should be disabled in CI tasks)
        delete_cache(&format!("fetchImage:{}", png_url_1)).unwrap();
        delete_cache(&format!("fetchImage:{}", png_url_2)).unwrap();
        delete_cache(&format!("fetchImage:{}", png_url_3)).unwrap();
        delete_cache(&format!("fetchImage:{}", rotated_jpeg_url)).unwrap();
        delete_cache(&format!("fetchImage:{}", webp_url_1)).unwrap();
        delete_cache(&format!("fetchImage:{}", webp_url_2)).unwrap();
        delete_cache(&format!("fetchImage:{}", ico_url)).unwrap();
        delete_cache(&format!("fetchImage:{}", gif_url)).unwrap();
        delete_cache(&format!("fetchImage:{}", mp3_url)).unwrap();

        let png_size_1 = ImageSize {
            width: 1024,
            height: 1024,
        };
        let png_size_2 = ImageSize {
            width: 96,
            height: 96,
        };
        let png_size_3 = ImageSize {
            width: 1024,
            height: 354,
        };
        // The rotated JPEG is expected to come back with width/height swapped
        // relative to the stored pixel data (EXIF orientation handling).
        let rotated_jpeg_size = ImageSize {
            width: 256,
            height: 512,
        };
        let webp_size_1 = ImageSize {
            width: 256,
            height: 256,
        };
        let webp_size_2 = ImageSize {
            width: 1080,
            height: 2340,
        };
        let ico_size = ImageSize {
            width: 256,
            height: 256,
        };
        let gif_size = ImageSize {
            width: 256,
            height: 256,
        };

        assert_eq!(
            png_size_1,
            get_image_size_from_url(png_url_1).await.unwrap()
        );
        assert_eq!(
            png_size_2,
            get_image_size_from_url(png_url_2).await.unwrap()
        );
        assert_eq!(
            png_size_3,
            get_image_size_from_url(png_url_3).await.unwrap()
        );
        assert_eq!(
            rotated_jpeg_size,
            get_image_size_from_url(rotated_jpeg_url).await.unwrap()
        );
        assert_eq!(
            webp_size_1,
            get_image_size_from_url(webp_url_1).await.unwrap()
        );
        assert_eq!(
            webp_size_2,
            get_image_size_from_url(webp_url_2).await.unwrap()
        );
        assert_eq!(ico_size, get_image_size_from_url(ico_url).await.unwrap());
        assert_eq!(gif_size, get_image_size_from_url(gif_url).await.unwrap());
        // A non-image type (mp3) must be rejected, not decoded.
        assert!(get_image_size_from_url(mp3_url).await.is_err());
    }
}

View file

@ -1,6 +1,6 @@
#[crate::export] #[crate::export]
pub fn to_mastodon_id(firefish_id: &str) -> Option<String> { pub fn to_mastodon_id(firefish_id: &str) -> Option<String> {
let decoded: [u8; 16] = basen::BASE36.decode_var_len(&firefish_id.to_ascii_lowercase())?; let decoded: [u8; 16] = basen::BASE36.decode_var_len(firefish_id)?;
Some(basen::BASE10.encode_var_len(&decoded)) Some(basen::BASE10.encode_var_len(&decoded))
} }

View file

@ -1,13 +1,17 @@
pub mod acct; pub mod acct;
pub mod add_note_to_antenna;
pub mod check_server_block; pub mod check_server_block;
pub mod check_word_mute; pub mod check_word_mute;
pub mod convert_host; pub mod convert_host;
pub mod emoji; pub mod emoji;
pub mod escape_sql; pub mod escape_sql;
pub mod format_milliseconds; pub mod format_milliseconds;
pub mod get_image_size;
pub mod get_note_summary; pub mod get_note_summary;
pub mod mastodon_id; pub mod mastodon_id;
pub mod meta; pub mod meta;
pub mod nyaify; pub mod nyaify;
pub mod password; pub mod password;
pub mod reaction; pub mod reaction;
pub mod redis_cache;
pub mod remove_old_attestation_challenges;

View file

@ -97,6 +97,8 @@ pub async fn to_db_reaction(reaction: Option<&str>, host: Option<&str>) -> Resul
{ {
return Ok(format!(":{name}@{ascii_host}:")); return Ok(format!(":{name}@{ascii_host}:"));
} }
tracing::info!("nonexistent remote custom emoji: :{name}@{ascii_host}:");
} else { } else {
// local emoji // local emoji
// TODO: Does SeaORM have the `exists` method? // TODO: Does SeaORM have the `exists` method?
@ -109,6 +111,8 @@ pub async fn to_db_reaction(reaction: Option<&str>, host: Option<&str>) -> Resul
{ {
return Ok(format!(":{name}:")); return Ok(format!(":{name}:"));
} }
tracing::info!("nonexistent local custom emoji: :{name}:");
} }
}; };
}; };

View file

@ -0,0 +1,94 @@
use crate::database::{redis_conn, redis_key};
use redis::{Commands, RedisError};
use serde::{Deserialize, Serialize};
/// Errors that can occur while reading from or writing to the Redis cache.
#[derive(thiserror::Error, Debug)]
pub enum CacheError {
    #[error("Redis error: {0}")]
    RedisError(#[from] RedisError),
    #[error("Data serialization error: {0}")]
    SerializeError(#[from] rmp_serde::encode::Error),
    #[error("Data deserialization error: {0}")]
    DeserializeError(#[from] rmp_serde::decode::Error),
}

// Namespaces a caller-supplied key under "cache:" (on top of the
// instance-wide prefix applied by `redis_key`).
fn prefix_key(key: &str) -> String {
    redis_key(format!("cache:{}", key))
}
/// Stores `value` in Redis under the namespaced key `cache:{key}`,
/// encoded with MessagePack, expiring after `expire_seconds` seconds.
///
/// Relaxed bound: writing a value only requires `Serialize`; the previous
/// extra `for<'a> Deserialize<'a>` bound was unused and needlessly
/// restricted callers. (Backward compatible: every type satisfying the old
/// bounds satisfies the new one.)
pub fn set_cache<V: Serialize>(
    key: &str,
    value: &V,
    expire_seconds: u64,
) -> Result<(), CacheError> {
    redis_conn()?.set_ex(
        prefix_key(key),
        rmp_serde::encode::to_vec(&value)?,
        expire_seconds,
    )?;
    Ok(())
}
/// Fetches the value cached under `cache:{key}`, if any, decoding it from
/// MessagePack into `V`. Returns `Ok(None)` on a cache miss.
///
/// Relaxed bound: reading a value only requires deserialization; the
/// previous extra `Serialize` bound was unused and needlessly restricted
/// callers. (Backward compatible: every type satisfying the old bounds
/// satisfies the new one.)
pub fn get_cache<V: for<'a> Deserialize<'a>>(
    key: &str,
) -> Result<Option<V>, CacheError> {
    let serialized_value: Option<Vec<u8>> = redis_conn()?.get(prefix_key(key))?;
    Ok(match serialized_value {
        Some(v) => Some(rmp_serde::from_slice::<V>(v.as_ref())?),
        None => None,
    })
}
/// Removes the entry cached under `cache:{key}`, if present.
pub fn delete_cache(key: &str) -> Result<(), CacheError> {
    // Turbofish pins the reply type Redis should decode the DEL response into.
    redis_conn()?.del::<_, ()>(prefix_key(key))?;
    Ok(())
}
#[cfg(test)]
mod unit_test {
    use super::{get_cache, set_cache};
    use pretty_assertions::assert_eq;

    // NOTE(review): requires a reachable Redis instance — integration test
    // rather than a pure unit test.
    #[test]
    fn set_get_expire() {
        #[derive(serde::Deserialize, serde::Serialize, PartialEq, Debug)]
        struct Data {
            id: u32,
            kind: String,
        }

        // Round-trip three differently-typed values through the cache.
        let key_1 = "CARGO_TEST_CACHE_KEY_1";
        let value_1: Vec<i32> = vec![1, 2, 3, 4, 5];
        let key_2 = "CARGO_TEST_CACHE_KEY_2";
        let value_2 = "Hello fedizens".to_string();
        let key_3 = "CARGO_TEST_CACHE_KEY_3";
        let value_3 = Data {
            id: 1000000007,
            kind: "prime number".to_string(),
        };

        // 1-second TTL so the expiry branch below can be exercised quickly.
        set_cache(key_1, &value_1, 1).unwrap();
        set_cache(key_2, &value_2, 1).unwrap();
        set_cache(key_3, &value_3, 1).unwrap();

        let cached_value_1: Vec<i32> = get_cache(key_1).unwrap().unwrap();
        let cached_value_2: String = get_cache(key_2).unwrap().unwrap();
        let cached_value_3: Data = get_cache(key_3).unwrap().unwrap();

        assert_eq!(value_1, cached_value_1);
        assert_eq!(value_2, cached_value_2);
        assert_eq!(value_3, cached_value_3);

        // wait for the cache to expire
        std::thread::sleep(std::time::Duration::from_millis(1100));

        let expired_value_1: Option<Vec<i32>> = get_cache(key_1).unwrap();
        let expired_value_2: Option<Vec<i32>> = get_cache(key_2).unwrap();
        let expired_value_3: Option<Vec<i32>> = get_cache(key_3).unwrap();

        assert!(expired_value_1.is_none());
        assert!(expired_value_2.is_none());
        assert!(expired_value_3.is_none());
    }
}

View file

@ -0,0 +1,19 @@
// TODO: We want to get rid of this
use crate::database::db_conn;
use crate::model::entity::attestation_challenge;
use chrono::{Duration, Local};
use sea_orm::{ColumnTrait, DbErr, EntityTrait, QueryFilter};
/// Delete all entries in the "attestation_challenge" table created at more than 5 minutes ago
#[crate::export]
pub async fn remove_old_attestation_challenges() -> Result<(), DbErr> {
    // Anything created before this instant is considered stale.
    let cutoff = Local::now() - Duration::minutes(5);

    let deletion = attestation_challenge::Entity::delete_many()
        .filter(attestation_challenge::Column::CreatedAt.lt(cutoff))
        .exec(db_conn().await?)
        .await?;

    tracing::info!("{} attestation challenges are removed", deletion.rows_affected);

    Ok(())
}

View file

@ -71,14 +71,14 @@ pub struct Model {
pub drive_capacity_override_mb: Option<i32>, pub drive_capacity_override_mb: Option<i32>,
#[sea_orm(column_name = "movedToUri")] #[sea_orm(column_name = "movedToUri")]
pub moved_to_uri: Option<String>, pub moved_to_uri: Option<String>,
#[sea_orm(column_name = "alsoKnownAs", column_type = "Text", nullable)]
pub also_known_as: Option<String>,
#[sea_orm(column_name = "speakAsCat")] #[sea_orm(column_name = "speakAsCat")]
pub speak_as_cat: bool, pub speak_as_cat: bool,
#[sea_orm(column_name = "emojiModPerm")] #[sea_orm(column_name = "emojiModPerm")]
pub emoji_mod_perm: UserEmojimodpermEnum, pub emoji_mod_perm: UserEmojimodpermEnum,
#[sea_orm(column_name = "isIndexable")] #[sea_orm(column_name = "isIndexable")]
pub is_indexable: bool, pub is_indexable: bool,
#[sea_orm(column_name = "alsoKnownAs")]
pub also_known_as: Option<Vec<String>>,
} }
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View file

@ -1,9 +0,0 @@
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
pub enum Error {
#[error("Failed to parse string: {0}")]
ParseError(#[from] parse_display::ParseError),
#[error("Database error: {0}")]
DbError(#[from] sea_orm::DbErr),
#[error("Requested entity not found")]
NotFound,
}

View file

@ -1,4 +1 @@
pub mod entity; pub mod entity;
pub mod error;
// pub mod repository;
pub mod schema;

View file

@ -1,31 +0,0 @@
use async_trait::async_trait;
use schemars::JsonSchema;
use super::error::Error;
/// Repositories have a packer that converts a database model to its
/// corresponding API schema.
#[async_trait]
pub trait Repository<T: JsonSchema> {
async fn pack(self) -> Result<T, Error>;
/// Retrieves one model by its id and pack it.
async fn pack_by_id(id: String) -> Result<T, Error>;
}
mod macros {
/// Provides the default implementation of
/// [crate::model::repository::Repository::pack_by_id].
macro_rules! impl_pack_by_id {
($a:ty, $b:ident) => {
match <$a>::find_by_id($b)
.one(crate::database::get_database()?)
.await?
{
None => Err(Error::NotFound),
Some(m) => m.pack().await,
}
};
}
pub(crate) use impl_pack_by_id;
}

View file

@ -1,18 +0,0 @@
use jsonschema::JSONSchema;
use schemars::{schema_for, JsonSchema};
/// Structs of schema defitions implement this trait in order to
/// provide the JSON Schema validator [`jsonschema::JSONSchema`].
pub trait Schema<T: JsonSchema> {
/// Returns the validator of [JSON Schema Draft
/// 7](https://json-schema.org/specification-links.html#draft-7) with the
/// default settings of [`schemars::gen::SchemaSettings`].
fn validator() -> JSONSchema {
let root = schema_for!(T);
let schema = serde_json::to_value(&root).expect("Schema definition invalid");
JSONSchema::options()
.with_draft(jsonschema::Draft::Draft7)
.compile(&schema)
.expect("Unable to compile schema")
}
}

View file

@ -1,23 +0,0 @@
use crate::database::{redis_conn, redis_key};
use crate::model::entity::note;
use crate::service::stream::{publish_to_stream, Error, Stream};
use crate::util::id::get_timestamp;
use redis::{streams::StreamMaxlen, Commands};
type Note = note::Model;
#[crate::export]
pub fn add_note_to_antenna(antenna_id: String, note: &Note) -> Result<(), Error> {
redis_conn()?.xadd_maxlen(
redis_key(format!("antennaTimeline:{}", antenna_id)),
StreamMaxlen::Approx(200),
format!("{}-*", get_timestamp(&note.id)),
&[("note", &note.id)],
)?;
publish_to_stream(
&Stream::Antenna { antenna_id },
Some("note"),
Some(serde_json::to_string(note)?),
)
}

View file

@ -0,0 +1,51 @@
use crate::config::CONFIG;
use tracing::Level;
use tracing_subscriber::FmtSubscriber;
#[crate::export(js_name = "initializeRustLogger")]
pub fn initialize_logger() {
    // Map a level name from the config onto a tracing level,
    // defaulting to INFO for unrecognized values.
    let parse_level = |name: &str| match name {
        "error" => Level::ERROR,
        "warning" => Level::WARN,
        "info" => Level::INFO,
        "debug" => Level::DEBUG,
        "trace" => Level::TRACE,
        _ => Level::INFO, // Fallback
    };

    // Resolve the maximum level: prefer `maxLogLevel`, then the deprecated
    // `logLevel` list (most verbose entry wins), then fall back to INFO.
    let max_level = if let Some(name) = &CONFIG.max_log_level {
        parse_level(name)
    } else if let Some(levels) = &CONFIG.log_level {
        // `logLevel` config is Deprecated
        ["trace", "debug", "info", "warning", "error"]
            .into_iter()
            .find(|name| levels.contains(&name.to_string()))
            .map(parse_level)
            .unwrap_or(Level::INFO) // Fallback
    } else {
        // Fallback
        Level::INFO
    };

    let subscriber = FmtSubscriber::builder()
        .with_max_level(max_level)
        .without_time()
        .with_level(true)
        .with_ansi(true)
        .with_target(true)
        .with_thread_names(true)
        .with_line_number(true)
        .log_internal_errors(true)
        .compact()
        .finish();

    tracing::subscriber::set_global_default(subscriber).expect("Failed to initialize the logger");
}

View file

@ -1,2 +1,3 @@
pub mod add_note_to_antenna; pub mod log;
pub mod note;
pub mod stream; pub mod stream;

View file

@ -0,0 +1 @@
pub mod watch;

View file

@ -0,0 +1,42 @@
use crate::database::db_conn;
use crate::model::entity::note_watching;
use crate::util::id::gen_id;
use sea_orm::{ActiveValue, ColumnTrait, DbErr, EntityTrait, ModelTrait, QueryFilter};
#[crate::export]
pub async fn watch_note(
    watcher_id: &str,
    note_author_id: &str,
    note_id: &str,
) -> Result<(), DbErr> {
    // Self-watching is skipped: the record would be redundant since the
    // author is `note_author_id` themselves. Otherwise insert a watch entry.
    if watcher_id != note_author_id {
        note_watching::Entity::insert(note_watching::ActiveModel {
            // Consistently use the `ActiveValue::Set` variant (the original
            // mixed `ActiveValue::set(..)` and `ActiveValue::Set(..)`).
            id: ActiveValue::Set(gen_id()),
            created_at: ActiveValue::Set(chrono::Local::now().naive_local()),
            user_id: ActiveValue::Set(watcher_id.to_string()),
            note_user_id: ActiveValue::Set(note_author_id.to_string()),
            note_id: ActiveValue::Set(note_id.to_string()),
        })
        .exec(db_conn().await?)
        .await?;
    }
    Ok(())
}
#[crate::export]
pub async fn unwatch_note(watcher_id: &str, note_id: &str) -> Result<(), DbErr> {
    let db = db_conn().await?;

    // Look up the watch record for this (watcher, note) pair and delete it
    // if present; unwatching a note that is not watched is a no-op.
    if let Some(existing) = note_watching::Entity::find()
        .filter(note_watching::Column::UserId.eq(watcher_id))
        .filter(note_watching::Column::NoteId.eq(note_id))
        .one(db)
        .await?
    {
        existing.delete(db).await?;
    }

    Ok(())
}

View file

@ -1,3 +1,10 @@
pub mod antenna;
pub mod channel;
pub mod chat;
pub mod chat_index;
pub mod custom_emoji;
pub mod moderation;
use crate::config::CONFIG; use crate::config::CONFIG;
use crate::database::redis_conn; use crate::database::redis_conn;
use redis::{Commands, RedisError}; use redis::{Commands, RedisError};
@ -7,9 +14,9 @@ pub enum Stream {
#[strum(serialize = "internal")] #[strum(serialize = "internal")]
Internal, Internal,
#[strum(serialize = "broadcast")] #[strum(serialize = "broadcast")]
Broadcast, CustomEmoji,
#[strum(to_string = "adminStream:{user_id}")] #[strum(to_string = "adminStream:{moderator_id}")]
Admin { user_id: String }, Moderation { moderator_id: String },
#[strum(to_string = "user:{user_id}")] #[strum(to_string = "user:{user_id}")]
User { user_id: String }, User { user_id: String },
#[strum(to_string = "channelStream:{channel_id}")] #[strum(to_string = "channelStream:{channel_id}")]
@ -34,7 +41,7 @@ pub enum Stream {
#[strum(to_string = "messagingStream:{group_id}")] #[strum(to_string = "messagingStream:{group_id}")]
GroupChat { group_id: String }, GroupChat { group_id: String },
#[strum(to_string = "messagingIndexStream:{user_id}")] #[strum(to_string = "messagingIndexStream:{user_id}")]
MessagingIndex { user_id: String }, ChatIndex { user_id: String },
} }
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
@ -49,12 +56,12 @@ pub enum Error {
pub fn publish_to_stream( pub fn publish_to_stream(
stream: &Stream, stream: &Stream,
kind: Option<&str>, kind: Option<String>,
value: Option<String>, value: Option<String>,
) -> Result<(), Error> { ) -> Result<(), Error> {
let message = if let Some(kind) = kind { let message = if let Some(kind) = kind {
format!( format!(
"{{ \"type\": \"{}\", \"body\": {} }}", "{{\"type\":\"{}\",\"body\":{}}}",
kind, kind,
value.unwrap_or("null".to_string()), value.unwrap_or("null".to_string()),
) )
@ -64,10 +71,7 @@ pub fn publish_to_stream(
redis_conn()?.publish( redis_conn()?.publish(
&CONFIG.host, &CONFIG.host,
format!( format!("{{\"channel\":\"{}\",\"message\":{}}}", stream, message),
"{{ \"channel\": \"{}\", \"message\": {} }}",
stream, message,
),
)?; )?;
Ok(()) Ok(())
@ -81,10 +85,10 @@ mod unit_test {
#[test] #[test]
fn channel_to_string() { fn channel_to_string() {
assert_eq!(Stream::Internal.to_string(), "internal"); assert_eq!(Stream::Internal.to_string(), "internal");
assert_eq!(Stream::Broadcast.to_string(), "broadcast"); assert_eq!(Stream::CustomEmoji.to_string(), "broadcast");
assert_eq!( assert_eq!(
Stream::Admin { Stream::Moderation {
user_id: "9tb42br63g5apjcq".to_string() moderator_id: "9tb42br63g5apjcq".to_string()
} }
.to_string(), .to_string(),
"adminStream:9tb42br63g5apjcq" "adminStream:9tb42br63g5apjcq"

View file

@ -0,0 +1,10 @@
use crate::model::entity::note;
use crate::service::stream::{publish_to_stream, Error, Stream};
/// Publishes a note event to the given antenna's stream channel.
pub fn publish(antenna_id: String, note: &note::Model) -> Result<(), Error> {
    let serialized_note = serde_json::to_string(note)?;
    publish_to_stream(
        &Stream::Antenna { antenna_id },
        Some("note".to_string()),
        Some(serialized_note),
    )
}

View file

@ -0,0 +1,10 @@
use crate::service::stream::{publish_to_stream, Error, Stream};
/// Publishes a typing indicator for `user_id` to a channel's stream.
#[crate::export(js_name = "publishToChannelStream")]
pub fn publish(channel_id: String, user_id: String) -> Result<(), Error> {
    // The body is the typing user's ID encoded as a JSON string literal.
    let body = format!("\"{}\"", user_id);
    publish_to_stream(
        &Stream::Channel { channel_id },
        Some("typing".to_string()),
        Some(body),
    )
}

View file

@ -0,0 +1,34 @@
use crate::service::stream::{publish_to_stream, Error, Stream};
/// Kinds of events delivered over a one-to-one chat stream.
#[derive(strum::Display)]
#[crate::export(string_enum = "camelCase")]
pub enum ChatEvent {
    // A new chat message was sent.
    #[strum(serialize = "message")]
    Message,
    // A message was marked as read.
    #[strum(serialize = "read")]
    Read,
    // A message was deleted.
    #[strum(serialize = "deleted")]
    Deleted,
    // The peer is typing.
    #[strum(serialize = "typing")]
    Typing,
}
// We want to merge `kind` and `object` into a single enum
// https://github.com/napi-rs/napi-rs/issues/2036
/// Publishes a chat event to the stream shared by the two participants.
#[crate::export(js_name = "publishToChatStream")]
pub fn publish(
    sender_user_id: String,
    receiver_user_id: String,
    kind: ChatEvent,
    object: &serde_json::Value,
) -> Result<(), Error> {
    let channel = Stream::Chat {
        sender_user_id,
        receiver_user_id,
    };
    let body = serde_json::to_string(object)?;
    publish_to_stream(&channel, Some(kind.to_string()), Some(body))
}

View file

@ -0,0 +1,26 @@
use crate::service::stream::{publish_to_stream, Error, Stream};
/// Kinds of events delivered over a user's chat index (overview) stream.
#[derive(strum::Display)]
#[crate::export(string_enum = "camelCase")]
pub enum ChatIndexEvent {
    // A new chat message arrived.
    #[strum(serialize = "message")]
    Message,
    // Messages were marked as read.
    #[strum(serialize = "read")]
    Read,
}
// We want to merge `kind` and `object` into a single enum
// https://github.com/napi-rs/napi-rs/issues/2036
/// Publishes a chat index event to the given user's stream.
#[crate::export(js_name = "publishToChatIndexStream")]
pub fn publish(
    user_id: String,
    kind: ChatIndexEvent,
    object: &serde_json::Value,
) -> Result<(), Error> {
    let body = serde_json::to_string(object)?;
    publish_to_stream(&Stream::ChatIndex { user_id }, Some(kind.to_string()), Some(body))
}

View file

@ -0,0 +1,27 @@
use crate::service::stream::{publish_to_stream, Error, Stream};
use serde::{Deserialize, Serialize};
// TODO: define schema type in other place
/// Packed (client-facing) representation of a custom emoji.
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct PackedEmoji {
    pub id: String,
    // Alternative names that also resolve to this emoji.
    pub aliases: Vec<String>,
    pub name: String,
    pub category: Option<String>,
    // Presumably the remote instance host, `None` for local emojis — TODO confirm.
    pub host: Option<String>,
    pub url: String,
    pub license: Option<String>,
    pub width: Option<i32>,
    pub height: Option<i32>,
}
/// Broadcasts a newly added custom emoji to all connected clients.
#[crate::export(js_name = "publishToBroadcastStream")]
pub fn publish(emoji: &PackedEmoji) -> Result<(), Error> {
    // Wrap the packed emoji in an `{"emoji": ...}` envelope.
    let body = format!("{{\"emoji\":{}}}", serde_json::to_string(emoji)?);
    publish_to_stream(
        &Stream::CustomEmoji,
        Some("emojiAdded".to_string()),
        Some(body),
    )
}

View file

@ -0,0 +1,21 @@
use crate::service::stream::{publish_to_stream, Error, Stream};
use serde::{Deserialize, Serialize};
/// Subset of an abuse user report carried in moderation stream events.
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[crate::export(object)]
pub struct AbuseUserReportLike {
    pub id: String,
    // The user being reported.
    pub target_user_id: String,
    // The user who filed the report.
    pub reporter_id: String,
    pub comment: String,
}
/// Publishes a new abuse report notification to a moderator's stream.
#[crate::export(js_name = "publishToModerationStream")]
pub fn publish(moderator_id: String, report: &AbuseUserReportLike) -> Result<(), Error> {
    let payload = serde_json::to_string(report)?;
    publish_to_stream(
        &Stream::Moderation { moderator_id },
        Some("newAbuseUserReport".to_string()),
        Some(payload),
    )
}

View file

@ -0,0 +1,24 @@
use crate::config::CONFIG;
use once_cell::sync::OnceCell;
use reqwest::{Client, Error, NoProxy, Proxy};
use std::time::Duration;
static CLIENT: OnceCell<Client> = OnceCell::new();
/// Returns the process-wide HTTP client, building it on first use.
///
/// The client has a 5-second request timeout and honors the proxy
/// settings (including proxy bypass hosts) from the server config.
pub fn http_client() -> Result<Client, Error> {
    let client = CLIENT.get_or_try_init(|| {
        let mut builder = Client::builder().timeout(Duration::from_secs(5));

        // Route outgoing requests through the configured proxy, if any.
        if let Some(proxy_url) = &CONFIG.proxy {
            let mut proxy = Proxy::all(proxy_url)?;
            if let Some(bypass_hosts) = &CONFIG.proxy_bypass_hosts {
                // Hosts listed here connect directly, bypassing the proxy.
                proxy = proxy.no_proxy(NoProxy::from_string(&bypass_hosts.join(",")));
            }
            builder = builder.proxy(proxy);
        }

        builder.build()
    })?;
    Ok(client.clone())
}

View file

@ -1,95 +1,109 @@
//! ID generation utility based on [cuid2] //! ID generation utility based on [cuid2]
use crate::config::CONFIG;
use basen::BASE36; use basen::BASE36;
use cfg_if::cfg_if; use chrono::{DateTime, NaiveDateTime, Utc};
use chrono::NaiveDateTime;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use std::cmp; use std::cmp;
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
#[error("ID generator has not been initialized yet")]
pub struct ErrorUninitialized;
static FINGERPRINT: OnceCell<String> = OnceCell::new(); static FINGERPRINT: OnceCell<String> = OnceCell::new();
static GENERATOR: OnceCell<cuid2::CuidConstructor> = OnceCell::new(); static GENERATOR: OnceCell<cuid2::CuidConstructor> = OnceCell::new();
const TIME_2000: i64 = 946_684_800_000; const TIME_2000: i64 = 946_684_800_000;
const TIMESTAMP_LENGTH: u16 = 8; const TIMESTAMP_LENGTH: u8 = 8;
/// Initializes Cuid2 generator. Must be called before any [create_id]. /// Initializes Cuid2 generator.
#[crate::export] fn init_id_generator(length: u8, fingerprint: &str) {
pub fn init_id_generator(length: u16, fingerprint: &str) {
FINGERPRINT.get_or_init(move || format!("{}{}", fingerprint, cuid2::create_id())); FINGERPRINT.get_or_init(move || format!("{}{}", fingerprint, cuid2::create_id()));
GENERATOR.get_or_init(move || { GENERATOR.get_or_init(move || {
cuid2::CuidConstructor::new() cuid2::CuidConstructor::new()
// length to pass should be greater than or equal to 8. // length to pass should be greater than or equal to 8.
.with_length(cmp::max(length - TIMESTAMP_LENGTH, 8)) .with_length(cmp::max(length - TIMESTAMP_LENGTH, 8).into())
.with_fingerprinter(|| FINGERPRINT.get().unwrap().clone()) .with_fingerprinter(|| FINGERPRINT.get().unwrap().clone())
}); });
} }
/// Returns Cuid2 with the length specified by [init_id]. Must be called after /// Returns Cuid2 with the length specified by [init_id_generator].
/// [init_id], otherwise returns [ErrorUninitialized]. /// It automatically calls [init_id_generator], if the generator has not been initialized.
pub fn create_id(datetime: &NaiveDateTime) -> Result<String, ErrorUninitialized> { fn create_id(datetime: &NaiveDateTime) -> String {
match GENERATOR.get() { if GENERATOR.get().is_none() {
None => Err(ErrorUninitialized), let length = match &CONFIG.cuid {
Some(gen) => { Some(cuid) => cmp::min(cmp::max(cuid.length.unwrap_or(16), 16), 24),
let date_num = cmp::max(0, datetime.and_utc().timestamp_millis() - TIME_2000) as u64; None => 16,
Ok(format!( };
"{:0>8}{}", let fingerprint = match &CONFIG.cuid {
BASE36.encode_var_len(&date_num), Some(cuid) => cuid.fingerprint.as_deref().unwrap_or_default(),
gen.create_id() None => "",
)) };
} init_id_generator(length, fingerprint);
} }
let date_num = cmp::max(0, datetime.and_utc().timestamp_millis() - TIME_2000) as u64;
format!(
"{:0>8}{}",
BASE36.encode_var_len(&date_num),
GENERATOR.get().unwrap().create_id()
)
}
#[derive(thiserror::Error, Debug)]
#[error("Invalid ID: {id}")]
pub struct InvalidIdErr {
id: String,
} }
#[crate::export] #[crate::export]
pub fn get_timestamp(id: &str) -> i64 { pub fn get_timestamp(id: &str) -> Result<i64, InvalidIdErr> {
let n: Option<u64> = BASE36.decode_var_len(&id[0..8]); let n: Option<u64> = BASE36.decode_var_len(&id[0..8]);
match n { if let Some(n) = n {
None => -1, Ok(n as i64 + TIME_2000)
Some(n) => n as i64 + TIME_2000, } else {
Err(InvalidIdErr { id: id.to_string() })
} }
} }
cfg_if! { /// The generated ID results in the form of `[8 chars timestamp] + [cuid2]`.
if #[cfg(feature = "napi")] { /// The minimum and maximum lengths are 16 and 24, respectively.
use chrono::{DateTime, Utc}; /// With the length of 16, namely 8 for cuid2, roughly 1427399 IDs are needed
/// in the same millisecond to reach 50% chance of collision.
///
/// Ref: https://github.com/paralleldrive/cuid2#parameterized-length
#[crate::export]
pub fn gen_id() -> String {
create_id(&Utc::now().naive_utc())
}
/// The generated ID results in the form of `[8 chars timestamp] + [cuid2]`. /// Generate an ID using a specific datetime
/// The minimum and maximum lengths are 16 and 24, respectively. #[crate::export]
/// With the length of 16, namely 8 for cuid2, roughly 1427399 IDs are needed pub fn gen_id_at(date: DateTime<Utc>) -> String {
/// in the same millisecond to reach 50% chance of collision. create_id(&date.naive_utc())
///
/// Ref: https://github.com/paralleldrive/cuid2#parameterized-length
#[napi_derive::napi]
pub fn gen_id(date: Option<DateTime<Utc>>) -> String {
create_id(&date.unwrap_or_else(Utc::now).naive_utc()).unwrap()
}
}
} }
#[cfg(test)] #[cfg(test)]
mod unit_test { mod unit_test {
use crate::util::id; use super::{gen_id, gen_id_at, get_timestamp};
use chrono::Utc; use chrono::{Duration, Utc};
use pretty_assertions::{assert_eq, assert_ne}; use pretty_assertions::{assert_eq, assert_ne};
use std::thread; use std::thread;
#[test] #[test]
fn can_create_and_decode_id() { fn can_create_and_decode_id() {
let now = Utc::now().naive_utc(); let now = Utc::now();
assert_eq!(id::create_id(&now), Err(id::ErrorUninitialized)); assert_eq!(gen_id().len(), 16);
id::init_id_generator(16, ""); assert_ne!(gen_id_at(now), gen_id_at(now));
assert_eq!(id::create_id(&now).unwrap().len(), 16); assert_ne!(gen_id(), gen_id());
assert_ne!(id::create_id(&now).unwrap(), id::create_id(&now).unwrap());
let id1 = thread::spawn(move || id::create_id(&now).unwrap()); let id1 = thread::spawn(move || gen_id_at(now));
let id2 = thread::spawn(move || id::create_id(&now).unwrap()); let id2 = thread::spawn(move || gen_id_at(now));
assert_ne!(id1.join().unwrap(), id2.join().unwrap()); assert_ne!(id1.join().unwrap(), id2.join().unwrap());
let test_id = id::create_id(&now).unwrap(); let test_id = gen_id_at(now);
let timestamp = id::get_timestamp(&test_id); let timestamp = get_timestamp(&test_id).unwrap();
assert_eq!(now.and_utc().timestamp_millis(), timestamp); assert_eq!(now.timestamp_millis(), timestamp);
let now_id = gen_id_at(now);
let old_id = gen_id_at(now - Duration::milliseconds(1));
let future_id = gen_id_at(now + Duration::milliseconds(1));
assert!(old_id < now_id);
assert!(now_id < future_id);
} }
} }

View file

@ -1,2 +1,5 @@
pub use http_client::http_client;
pub mod http_client;
pub mod id; pub mod id;
pub mod random; pub mod random;

View file

@ -9,7 +9,8 @@ import semver from "semver";
import Logger from "@/services/logger.js"; import Logger from "@/services/logger.js";
import type { Config } from "backend-rs"; import type { Config } from "backend-rs";
import { fetchMeta } from "backend-rs"; import { initializeRustLogger } from "backend-rs";
import { fetchMeta, removeOldAttestationChallenges } from "backend-rs";
import { config, envOption } from "@/config.js"; import { config, envOption } from "@/config.js";
import { showMachineInfo } from "@/misc/show-machine-info.js"; import { showMachineInfo } from "@/misc/show-machine-info.js";
import { db, initDb } from "@/db/postgre.js"; import { db, initDb } from "@/db/postgre.js";
@ -94,6 +95,7 @@ export async function masterMain() {
await showMachineInfo(bootLogger); await showMachineInfo(bootLogger);
showNodejsVersion(); showNodejsVersion();
await connectDb(); await connectDb();
initializeRustLogger();
} catch (e) { } catch (e) {
bootLogger.error( bootLogger.error(
`Fatal error occurred during initialization:\n${inspect(e)}`, `Fatal error occurred during initialization:\n${inspect(e)}`,
@ -103,30 +105,26 @@ export async function masterMain() {
process.exit(1); process.exit(1);
} }
bootLogger.succ("Firefish initialized"); bootLogger.info("Firefish initialized");
if (!envOption.disableClustering) { if (!envOption.disableClustering) {
await spawnWorkers(config.clusterLimits); await spawnWorkers(config.clusterLimits);
} }
bootLogger.succ( bootLogger.info(
`Now listening on port ${config.port} on ${config.url}`, `Now listening on port ${config.port} on ${config.url}`,
null, null,
true, true,
); );
if ( if (!envOption.noDaemons) {
!envOption.noDaemons &&
config.clusterLimits?.web &&
config.clusterLimits?.web >= 1
) {
import("../daemons/server-stats.js").then((x) => x.default()); import("../daemons/server-stats.js").then((x) => x.default());
import("../daemons/queue-stats.js").then((x) => x.default()); import("../daemons/queue-stats.js").then((x) => x.default());
import("../daemons/janitor.js").then((x) => x.default()); // Update meta cache every 5 minutes
setInterval(() => fetchMeta(false), 1000 * 60 * 5);
// Remove old attestation challenges
setInterval(() => removeOldAttestationChallenges(), 1000 * 60 * 30);
} }
// Update meta cache every 5 minutes // Update meta cache every 5 minutes
setInterval(() => fetchMeta(false), 1000 * 60 * 5);
} }
function showEnvironment(): void { function showEnvironment(): void {
@ -164,7 +162,7 @@ async function connectDb(): Promise<void> {
const v = await db const v = await db
.query("SHOW server_version") .query("SHOW server_version")
.then((x) => x[0].server_version); .then((x) => x[0].server_version);
dbLogger.succ(`Connected: v${v}`); dbLogger.info(`Connected: v${v}`);
} catch (e) { } catch (e) {
dbLogger.error("Failed to connect to the database", null, true); dbLogger.error("Failed to connect to the database", null, true);
dbLogger.error(inspect(e)); dbLogger.error(inspect(e));
@ -200,7 +198,7 @@ async function spawnWorkers(
`Starting ${clusterLimits.web} web workers and ${clusterLimits.queue} queue workers (total ${total})...`, `Starting ${clusterLimits.web} web workers and ${clusterLimits.queue} queue workers (total ${total})...`,
); );
await Promise.all(workers.map((mode) => spawnWorker(mode))); await Promise.all(workers.map((mode) => spawnWorker(mode)));
bootLogger.succ("All workers started"); bootLogger.info("All workers started");
} }
function spawnWorker(mode: "web" | "queue"): Promise<void> { function spawnWorker(mode: "web" | "queue"): Promise<void> {

View file

@ -1,17 +1,11 @@
import cluster from "node:cluster"; import cluster from "node:cluster";
import { config } from "@/config.js";
import { initDb } from "@/db/postgre.js"; import { initDb } from "@/db/postgre.js";
import { initIdGenerator } from "backend-rs";
import os from "node:os"; import os from "node:os";
/** /**
* Init worker process * Init worker process
*/ */
export async function workerMain() { export async function workerMain() {
const length = Math.min(Math.max(config.cuid?.length ?? 16, 16), 24);
const fingerprint = config.cuid?.fingerprint ?? "";
initIdGenerator(length, fingerprint);
await initDb(); await initDb();
if (!process.env.mode || process.env.mode === "web") { if (!process.env.mode || process.env.mode === "web") {

View file

@ -1,83 +0,0 @@
import { config } from "@/config.js";
import {
DB_MAX_IMAGE_COMMENT_LENGTH,
DB_MAX_NOTE_TEXT_LENGTH,
} from "@/misc/hard-limits.js";
// Maximum note text length, capped by what the DB schema can store.
export const MAX_NOTE_TEXT_LENGTH = Math.min(
	config.maxNoteLength ?? 3000,
	DB_MAX_NOTE_TEXT_LENGTH,
);

// Maximum media caption length, capped by what the DB schema can store.
export const MAX_CAPTION_TEXT_LENGTH = Math.min(
	config.maxCaptionLength ?? 1500,
	DB_MAX_IMAGE_COMMENT_LENGTH,
);

// Time units in milliseconds.
export const SECOND = 1000;
export const MINUTE = 60 * SECOND;
export const HOUR = 60 * MINUTE;
export const DAY = 24 * HOUR;

// A user is considered "online" if seen within the last 10 minutes,
// and "active" if seen within the last 3 days.
export const USER_ONLINE_THRESHOLD = 10 * MINUTE;
export const USER_ACTIVE_THRESHOLD = 3 * DAY;
// List of file types allowed to be viewed directly in the browser
// Anything not included here will be responded as application/octet-stream
// SVG is not allowed because it generates XSS <- we need to fix this and later allow it to be viewed directly
export const FILE_TYPE_BROWSERSAFE = [
// Images
"image/png",
"image/gif", // TODO: deprecated, but still used by old notes, new gifs should be converted to webp in the future
"image/jpeg",
"image/webp", // TODO: make this the default image format
"image/apng",
"image/bmp",
"image/tiff",
"image/x-icon",
"image/avif", // not as good supported now, but its good to introduce initial support for the future
// OggS
"audio/opus",
"video/ogg",
"audio/ogg",
"application/ogg",
// ISO/IEC base media file format
"video/quicktime",
"video/mp4", // TODO: we need to check for av1 later
"video/vnd.avi", // also av1
"audio/mp4",
"video/x-m4v",
"audio/x-m4a",
"video/3gpp",
"video/3gpp2",
"video/3gp2",
"audio/3gpp",
"audio/3gpp2",
"audio/3gp2",
"video/mpeg",
"audio/mpeg",
"video/webm",
"audio/webm",
"audio/aac",
"audio/x-flac",
"audio/flac",
"audio/vnd.wave",
"audio/mod",
"audio/x-mod",
"audio/s3m",
"audio/x-s3m",
"audio/xm",
"audio/x-xm",
"audio/it",
"audio/x-it",
];
/*
https://github.com/sindresorhus/file-type/blob/main/supported.js
https://github.com/sindresorhus/file-type/blob/main/core.js
https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Containers
*/

View file

@ -1,20 +0,0 @@
// TODO: delete this (was: 消したい)
const interval = 30 * 60 * 1000;
import { AttestationChallenges } from "@/models/index.js";
import { LessThan } from "typeorm";
/**
* Clean up database occasionally
*/
export default function () {
	// Delete attestation challenges created more than 5 minutes ago.
	async function tick() {
		await AttestationChallenges.delete({
			createdAt: LessThan(new Date(Date.now() - 5 * 60 * 1000)),
		});
	}

	// Run once at startup, then repeat every `interval` (30 minutes).
	tick();

	setInterval(tick, interval);
}

View file

@ -80,7 +80,7 @@ import { dbLogger } from "./logger.js";
const sqlLogger = dbLogger.createSubLogger("sql", "gray", false); const sqlLogger = dbLogger.createSubLogger("sql", "gray", false);
class MyCustomLogger implements Logger { class DbLogger implements Logger {
private highlight(sql: string) { private highlight(sql: string) {
return highlight.highlight(sql, { return highlight.highlight(sql, {
language: "sql", language: "sql",
@ -89,15 +89,16 @@ class MyCustomLogger implements Logger {
} }
public logQuery(query: string, parameters?: any[]) { public logQuery(query: string, parameters?: any[]) {
sqlLogger.info(this.highlight(query).substring(0, 100)); sqlLogger.trace(this.highlight(query).substring(0, 100));
} }
public logQueryError(error: string, query: string, parameters?: any[]) { public logQueryError(error: string, query: string, parameters?: any[]) {
sqlLogger.error(this.highlight(query)); sqlLogger.error(error);
sqlLogger.trace(this.highlight(query));
} }
public logQuerySlow(time: number, query: string, parameters?: any[]) { public logQuerySlow(time: number, query: string, parameters?: any[]) {
sqlLogger.warn(this.highlight(query)); sqlLogger.trace(this.highlight(query));
} }
public logSchemaBuild(message: string) { public logSchemaBuild(message: string) {
@ -215,7 +216,7 @@ export const db = new DataSource({
} }
: false, : false,
logging: log, logging: log,
logger: log ? new MyCustomLogger() : undefined, logger: log ? new DbLogger() : undefined,
maxQueryExecutionTime: 300, maxQueryExecutionTime: 300,
entities: entities, entities: entities,
migrations: ["../../migration/*.js"], migrations: ["../../migration/*.js"],

View file

@ -0,0 +1,36 @@
import type { MigrationInterface, QueryRunner } from "typeorm";
export class AlterAkaType1714099399879 implements MigrationInterface {
	/**
	 * Changes the type of `user.alsoKnownAs` from comma-separated text
	 * to a `character varying(512)[]` array.
	 *
	 * Done by renaming the old column, adding the new array column,
	 * copying data over with `string_to_array`, then dropping the old column.
	 */
	public async up(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(
			`ALTER TABLE "user" RENAME COLUMN "alsoKnownAs" TO "alsoKnownAsOld"`,
		);
		await queryRunner.query(
			`ALTER TABLE "user" ADD COLUMN "alsoKnownAs" character varying(512)[]`,
		);
		await queryRunner.query(
			`UPDATE "user" SET "alsoKnownAs" = string_to_array("alsoKnownAsOld", ',')::character varying[]`,
		);
		// Normalize empty arrays to NULL.
		await queryRunner.query(
			`UPDATE "user" SET "alsoKnownAs" = NULL WHERE "alsoKnownAs" = '{}'`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "user"."alsoKnownAs" IS 'URIs the user is known as too'`,
		);
		await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "alsoKnownAsOld"`);
	}

	/**
	 * Reverts `user.alsoKnownAs` back to a plain text column by joining
	 * the array elements with commas.
	 */
	public async down(queryRunner: QueryRunner): Promise<void> {
		await queryRunner.query(
			`ALTER TABLE "user" RENAME COLUMN "alsoKnownAs" TO "alsoKnownAsOld"`,
		);
		await queryRunner.query(`ALTER TABLE "user" ADD COLUMN "alsoKnownAs" text`);
		await queryRunner.query(
			`UPDATE "user" SET "alsoKnownAs" = array_to_string("alsoKnownAsOld", ',')`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "user"."alsoKnownAs" IS 'URIs the user is known as too'`,
		);
		await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "alsoKnownAsOld"`);
	}
}

View file

@ -1,5 +1,4 @@
import * as fs from "node:fs"; import * as fs from "node:fs/promises";
import * as util from "node:util";
import Logger from "@/services/logger.js"; import Logger from "@/services/logger.js";
import { createTemp } from "./create-temp.js"; import { createTemp } from "./create-temp.js";
import { downloadUrl } from "./download-url.js"; import { downloadUrl } from "./download-url.js";
@ -16,7 +15,7 @@ export async function downloadTextFile(url: string): Promise<string> {
// write content at URL to temp file // write content at URL to temp file
await downloadUrl(url, path); await downloadUrl(url, path);
const text = await util.promisify(fs.readFile)(path, "utf8"); const text = await fs.readFile(path, "utf-8");
return text; return text;
} finally { } finally {

View file

@ -1,6 +1,5 @@
import * as fs from "node:fs"; import * as fs from "node:fs";
import * as stream from "node:stream"; import * as stream from "node:stream/promises";
import * as util from "node:util";
import got, * as Got from "got"; import got, * as Got from "got";
import { config } from "@/config.js"; import { config } from "@/config.js";
import { getAgentByHostname, StatusError } from "./fetch.js"; import { getAgentByHostname, StatusError } from "./fetch.js";
@ -10,16 +9,14 @@ import IPCIDR from "ip-cidr";
import PrivateIp from "private-ip"; import PrivateIp from "private-ip";
import { isValidUrl } from "./is-valid-url.js"; import { isValidUrl } from "./is-valid-url.js";
const pipeline = util.promisify(stream.pipeline);
export async function downloadUrl(url: string, path: string): Promise<void> { export async function downloadUrl(url: string, path: string): Promise<void> {
if (!isValidUrl(url)) { if (!isValidUrl(url)) {
throw new StatusError("Invalid URL", 400); throw new StatusError("Invalid URL", 400);
} }
const logger = new Logger("download"); const downloadLogger = new Logger("download");
logger.info(`Downloading ${chalk.cyan(url)} ...`); downloadLogger.debug(`Downloading ${chalk.cyan(url)} ...`);
const timeout = 30 * 1000; const timeout = 30 * 1000;
const operationTimeout = 60 * 1000; const operationTimeout = 60 * 1000;
@ -48,7 +45,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
}) })
.on("redirect", (res: Got.Response, opts: Got.NormalizedOptions) => { .on("redirect", (res: Got.Response, opts: Got.NormalizedOptions) => {
if (!isValidUrl(opts.url)) { if (!isValidUrl(opts.url)) {
logger.warn(`Invalid URL: ${opts.url}`); downloadLogger.warn(`Invalid URL: ${opts.url}`);
req.destroy(); req.destroy();
} }
}) })
@ -60,7 +57,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
res.ip res.ip
) { ) {
if (isPrivateIp(res.ip)) { if (isPrivateIp(res.ip)) {
logger.warn(`Blocked address: ${res.ip}`); downloadLogger.warn(`Blocked address: ${res.ip}`);
req.destroy(); req.destroy();
} }
} }
@ -69,14 +66,16 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
if (contentLength != null) { if (contentLength != null) {
const size = Number(contentLength); const size = Number(contentLength);
if (size > maxSize) { if (size > maxSize) {
logger.warn(`maxSize exceeded (${size} > ${maxSize}) on response`); downloadLogger.warn(
`maxSize exceeded (${size} > ${maxSize}) on response`,
);
req.destroy(); req.destroy();
} }
} }
}) })
.on("downloadProgress", (progress: Got.Progress) => { .on("downloadProgress", (progress: Got.Progress) => {
if (progress.transferred > maxSize) { if (progress.transferred > maxSize) {
logger.warn( downloadLogger.warn(
`maxSize exceeded (${progress.transferred} > ${maxSize}) on downloadProgress`, `maxSize exceeded (${progress.transferred} > ${maxSize}) on downloadProgress`,
); );
req.destroy(); req.destroy();
@ -84,7 +83,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
}); });
try { try {
await pipeline(req, fs.createWriteStream(path)); await stream.pipeline(req, fs.createWriteStream(path));
} catch (e) { } catch (e) {
if (e instanceof Got.HTTPError) { if (e instanceof Got.HTTPError) {
throw new StatusError( throw new StatusError(
@ -97,7 +96,7 @@ export async function downloadUrl(url: string, path: string): Promise<void> {
} }
} }
logger.succ(`Download finished: ${chalk.cyan(url)}`); downloadLogger.debug(`Download finished: ${chalk.cyan(url)}`);
} }
export function isPrivateIp(ip: string): boolean { export function isPrivateIp(ip: string): boolean {

View file

@ -1,59 +0,0 @@
import probeImageSize from "probe-image-size";
import { Mutex } from "redis-semaphore";
import { FILE_TYPE_BROWSERSAFE } from "@/const.js";
import Logger from "@/services/logger.js";
import { Cache } from "./cache.js";
import { redisClient } from "@/db/redis.js";
import { inspect } from "node:util";
export type Size = {
width: number;
height: number;
};
const cache = new Cache<boolean>("emojiMeta", 60 * 10); // once every 10 minutes for the same url
const logger = new Logger("emoji");
/**
 * Probes the remote emoji image at `url` and returns its pixel size.
 *
 * Rate-limited: only one probe per URL per cache window is allowed;
 * a repeated attempt within the window throws "Too many attempts".
 *
 * @throws if the attempt limit is exceeded, the MIME type is not a
 *         browser-safe image type, or metadata retrieval fails.
 */
export async function getEmojiSize(url: string): Promise<Size> {
	let attempted = true;

	// Serialize the check-then-set on the attempt cache so concurrent
	// callers for the same URL cannot both pass the check below.
	const lock = new Mutex(redisClient, "getEmojiSize");
	await lock.acquire();

	try {
		attempted = (await cache.get(url)) === true;
		if (!attempted) {
			// Record the attempt before releasing the lock.
			await cache.set(url, true);
		}
	} finally {
		await lock.release();
	}

	if (attempted) {
		logger.warn(`Attempt limit exceeded: ${url}`);
		throw new Error("Too many attempts");
	}

	try {
		logger.debug(`Retrieving emoji size from ${url}`);
		// Probe only the image header (5s timeout) — no full download.
		const { width, height, mime } = await probeImageSize(url, {
			timeout: 5000,
		});
		// Reject anything that is not a browser-safe image type.
		if (!(mime.startsWith("image/") && FILE_TYPE_BROWSERSAFE.includes(mime))) {
			throw new Error("Unsupported image type");
		}
		return { width, height };
	} catch (e) {
		throw new Error(`Unable to retrieve metadata:\n${inspect(e)}`);
	}
}
/**
 * Returns the display size of an image, swapping width and height when
 * the EXIF orientation indicates a 90° rotation (orientations 5-8).
 */
export function getNormalSize(
	{ width, height }: Size,
	orientation?: number,
): Size {
	const isRotated = (orientation || 0) >= 5;
	if (isRotated) {
		return { width: height, height: width };
	}
	return { width, height };
}

View file

@ -1,7 +1,7 @@
import * as fs from "node:fs"; import * as fs from "node:fs/promises";
import { createReadStream } from "node:fs";
import * as crypto from "node:crypto"; import * as crypto from "node:crypto";
import * as stream from "node:stream"; import * as stream from "node:stream/promises";
import * as util from "node:util";
import { fileTypeFromFile } from "file-type"; import { fileTypeFromFile } from "file-type";
import probeImageSize from "probe-image-size"; import probeImageSize from "probe-image-size";
import isSvg from "is-svg"; import isSvg from "is-svg";
@ -9,8 +9,6 @@ import sharp from "sharp";
import { encode } from "blurhash"; import { encode } from "blurhash";
import { inspect } from "node:util"; import { inspect } from "node:util";
const pipeline = util.promisify(stream.pipeline);
export type FileInfo = { export type FileInfo = {
size: number; size: number;
md5: string; md5: string;
@ -163,7 +161,7 @@ export async function checkSvg(path: string) {
try { try {
const size = await getFileSize(path); const size = await getFileSize(path);
if (size > 1 * 1024 * 1024) return false; if (size > 1 * 1024 * 1024) return false;
return isSvg(fs.readFileSync(path)); return isSvg(await fs.readFile(path, "utf-8"));
} catch { } catch {
return false; return false;
} }
@ -173,8 +171,7 @@ export async function checkSvg(path: string) {
* Get file size * Get file size
*/ */
export async function getFileSize(path: string): Promise<number> { export async function getFileSize(path: string): Promise<number> {
const getStat = util.promisify(fs.stat); return (await fs.stat(path)).size;
return (await getStat(path)).size;
} }
/** /**
@ -182,7 +179,7 @@ export async function getFileSize(path: string): Promise<number> {
*/ */
async function calcHash(path: string): Promise<string> { async function calcHash(path: string): Promise<string> {
const hash = crypto.createHash("md5").setEncoding("hex"); const hash = crypto.createHash("md5").setEncoding("hex");
await pipeline(fs.createReadStream(path), hash); await stream.pipeline(createReadStream(path), hash);
return hash.read(); return hash.read();
} }
@ -196,7 +193,7 @@ async function detectImageSize(path: string): Promise<{
hUnits: string; hUnits: string;
orientation?: number; orientation?: number;
}> { }> {
const readable = fs.createReadStream(path); const readable = createReadStream(path);
const imageSize = await probeImageSize(readable); const imageSize = await probeImageSize(readable);
readable.destroy(); readable.destroy();
return imageSize; return imageSize;
@ -214,7 +211,7 @@ function getBlurhash(path: string): Promise<string> {
.toBuffer((err, buffer, { width, height }) => { .toBuffer((err, buffer, { width, height }) => {
if (err) return reject(err); if (err) return reject(err);
let hash; let hash: string;
try { try {
hash = encode(new Uint8ClampedArray(buffer), width, height, 7, 7); hash = encode(new Uint8ClampedArray(buffer), width, height, 7, 7);

View file

@ -1,18 +0,0 @@
// If you change DB_* values, you must also change the DB schema.
/**
* Maximum note text length that can be stored in DB.
* Surrogate pairs count as one
*
* NOTE: this can hypothetically be pushed further
* (up to 250000000), but will likely cause truncations
* and incompatibilities with other servers,
* as well as potential performance issues.
*/
export const DB_MAX_NOTE_TEXT_LENGTH = 100000;
/**
* Maximum image description length that can be stored in DB.
* Surrogate pairs count as one
*/
export const DB_MAX_IMAGE_COMMENT_LENGTH = 8192;

View file

@ -1,4 +1,4 @@
import { FILE_TYPE_BROWSERSAFE } from "@/const.js"; import { FILE_TYPE_BROWSERSAFE } from "backend-rs";
const dictionary = { const dictionary = {
"safe-file": FILE_TYPE_BROWSERSAFE, "safe-file": FILE_TYPE_BROWSERSAFE,

View file

@ -1,10 +0,0 @@
import type { Note } from "@/models/entities/note.js";
export default function (note: Note): boolean {
return (
note.renoteId != null &&
(note.text != null ||
note.hasPoll ||
(note.fileIds != null && note.fileIds.length > 0))
);
}

View file

@ -2,7 +2,7 @@ import { Brackets } from "typeorm";
import { isBlockedServer } from "backend-rs"; import { isBlockedServer } from "backend-rs";
import { Instances } from "@/models/index.js"; import { Instances } from "@/models/index.js";
import type { Instance } from "@/models/entities/instance.js"; import type { Instance } from "@/models/entities/instance.js";
import { DAY } from "@/const.js"; import { DAY } from "backend-rs";
// Threshold from last contact after which an instance will be considered // Threshold from last contact after which an instance will be considered
// "dead" and should no longer get activities delivered to it. // "dead" and should no longer get activities delivered to it.

View file

@ -13,7 +13,6 @@ import { id } from "../id.js";
import { Note } from "./note.js"; import { Note } from "./note.js";
import { User } from "./user.js"; import { User } from "./user.js";
import { DriveFolder } from "./drive-folder.js"; import { DriveFolder } from "./drive-folder.js";
import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
import { NoteFile } from "./note-file.js"; import { NoteFile } from "./note-file.js";
export type DriveFileUsageHint = "userAvatar" | "userBanner" | null; export type DriveFileUsageHint = "userAvatar" | "userBanner" | null;
@ -73,7 +72,7 @@ export class DriveFile {
@Index() // USING pgroonga pgroonga_varchar_full_text_search_ops_v2 @Index() // USING pgroonga pgroonga_varchar_full_text_search_ops_v2
@Column("varchar", { @Column("varchar", {
length: DB_MAX_IMAGE_COMMENT_LENGTH, length: 8192,
nullable: true, nullable: true,
comment: "The comment of the DriveFile.", comment: "The comment of the DriveFile.",
}) })

View file

@ -88,7 +88,9 @@ export class User {
}) })
public movedToUri: string | null; public movedToUri: string | null;
@Column("simple-array", { @Column("varchar", {
length: 512,
array: true,
nullable: true, nullable: true,
comment: "URIs the user is known as too", comment: "URIs the user is known as too",
}) })

View file

@ -1,4 +1,4 @@
import { In } from "typeorm"; import { In, IsNull, Not } from "typeorm";
import * as mfm from "mfm-js"; import * as mfm from "mfm-js";
import { Note } from "@/models/entities/note.js"; import { Note } from "@/models/entities/note.js";
import type { User } from "@/models/entities/user.js"; import type { User } from "@/models/entities/user.js";
@ -10,6 +10,7 @@ import {
Followings, Followings,
Polls, Polls,
Channels, Channels,
Notes,
} from "../index.js"; } from "../index.js";
import type { Packed } from "@/misc/schema.js"; import type { Packed } from "@/misc/schema.js";
import { countReactions, decodeReaction, nyaify } from "backend-rs"; import { countReactions, decodeReaction, nyaify } from "backend-rs";
@ -101,7 +102,7 @@ export const NoteRepository = db.getRepository(Note).extend({
return true; return true;
} else { } else {
// 指定されているかどうか // 指定されているかどうか
return note.visibleUserIds.some((id: any) => meId === id); return note.visibleUserIds.some((id) => meId === id);
} }
} }
@ -211,8 +212,25 @@ export const NoteRepository = db.getRepository(Note).extend({
localOnly: note.localOnly || undefined, localOnly: note.localOnly || undefined,
visibleUserIds: visibleUserIds:
note.visibility === "specified" ? note.visibleUserIds : undefined, note.visibility === "specified" ? note.visibleUserIds : undefined,
// FIXME: Deleting a post does not decrease these two numbers, causing the number to be wrong
renoteCount: note.renoteCount, renoteCount: note.renoteCount,
repliesCount: note.repliesCount, repliesCount: note.repliesCount,
// TODO: add it to database and use note.quoteCount
quoteCount: Notes.count({
where: {
renoteId: note.id,
text: Not(IsNull()),
},
}),
myRenoteCount: me
? Notes.count({
where: {
renoteId: note.id,
text: IsNull(),
userId: me.id,
},
})
: undefined,
reactions: countReactions(note.reactions), reactions: countReactions(note.reactions),
reactionEmojis: reactionEmoji, reactionEmojis: reactionEmoji,
emojis: noteEmoji, emojis: noteEmoji,

View file

@ -7,7 +7,7 @@ import type { Packed } from "@/misc/schema.js";
import type { Promiseable } from "@/prelude/await-all.js"; import type { Promiseable } from "@/prelude/await-all.js";
import { awaitAll } from "@/prelude/await-all.js"; import { awaitAll } from "@/prelude/await-all.js";
import { populateEmojis } from "@/misc/populate-emojis.js"; import { populateEmojis } from "@/misc/populate-emojis.js";
import { USER_ACTIVE_THRESHOLD, USER_ONLINE_THRESHOLD } from "@/const.js"; import { USER_ACTIVE_THRESHOLD, USER_ONLINE_THRESHOLD } from "backend-rs";
import { Cache } from "@/misc/cache.js"; import { Cache } from "@/misc/cache.js";
import { db } from "@/db/postgre.js"; import { db } from "@/db/postgre.js";
import { isActor, getApId } from "@/remote/activitypub/type.js"; import { isActor, getApId } from "@/remote/activitypub/type.js";

View file

@ -208,5 +208,15 @@ export const packedNoteSchema = {
optional: true, optional: true,
nullable: true, nullable: true,
}, },
myRenoteCount: {
type: "number",
optional: true,
nullable: false,
},
quoteCount: {
type: "number",
optional: false,
nullable: false,
},
}, },
} as const; } as const;

View file

@ -70,10 +70,10 @@ deliverQueue
), ),
) )
.on("failed", (job, err) => .on("failed", (job, err) =>
deliverLogger.warn(`failed(${err}) ${getJobInfo(job)} to=${job.data.to}`), deliverLogger.info(`failed(${err}) ${getJobInfo(job)} to=${job.data.to}`),
) )
.on("error", (job: any, err: Error) => .on("error", (job: any, err: Error) =>
deliverLogger.error(`error ${err}`, { job, e: renderError(err) }), deliverLogger.warn(`error ${err}`, { job, e: renderError(err) }),
) )
.on("stalled", (job) => .on("stalled", (job) =>
deliverLogger.warn(`stalled ${getJobInfo(job)} to=${job.data.to}`), deliverLogger.warn(`stalled ${getJobInfo(job)} to=${job.data.to}`),
@ -564,12 +564,12 @@ export default function () {
export function destroy() { export function destroy() {
deliverQueue.once("cleaned", (jobs, status) => { deliverQueue.once("cleaned", (jobs, status) => {
deliverLogger.succ(`Cleaned ${jobs.length} ${status} jobs`); deliverLogger.info(`Cleaned ${jobs.length} ${status} jobs`);
}); });
deliverQueue.clean(0, "delayed"); deliverQueue.clean(0, "delayed");
inboxQueue.once("cleaned", (jobs, status) => { inboxQueue.once("cleaned", (jobs, status) => {
inboxLogger.succ(`Cleaned ${jobs.length} ${status} jobs`); inboxLogger.info(`Cleaned ${jobs.length} ${status} jobs`);
}); });
inboxQueue.clean(0, "delayed"); inboxQueue.clean(0, "delayed");
} }

View file

@ -13,7 +13,7 @@ const logger = queueLogger.createSubLogger("delete-account");
export async function deleteAccount( export async function deleteAccount(
job: Bull.Job<DbUserDeleteJobData>, job: Bull.Job<DbUserDeleteJobData>,
): Promise<string | void> { ): Promise<string | void> {
logger.info(`Deleting account of ${job.data.user.id} ...`); logger.info(`Deleting account ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id }); const user = await Users.findOneBy({ id: job.data.user.id });
if (!user) return; if (!user) return;
@ -43,7 +43,7 @@ export async function deleteAccount(
await Notes.delete(notes.map((note) => note.id)); await Notes.delete(notes.map((note) => note.id));
} }
logger.succ("All of notes deleted"); logger.info(`All posts of user ${job.data.user.id} were deleted`);
} }
{ {
@ -73,7 +73,7 @@ export async function deleteAccount(
} }
} }
logger.succ("All of files deleted"); logger.info(`All files of user ${job.data.user.id} were deleted`);
} }
{ {

View file

@ -54,8 +54,6 @@ export async function deleteDriveFiles(
job.progress(deletedCount / total); job.progress(deletedCount / total);
} }
logger.succ( logger.info(`${deletedCount} drive files of user ${user.id} were deleted.`);
`All drive files (${deletedCount}) of ${user.id} has been deleted.`,
);
done(); done();
} }

View file

@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
import { Users, Blockings } from "@/models/index.js"; import { Users, Blockings } from "@/models/index.js";
import { MoreThan } from "typeorm"; import { MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js"; import type { DbUserJobData } from "@/queue/types.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-blocking"); const logger = queueLogger.createSubLogger("export-blocking");
@ -27,7 +28,7 @@ export async function exportBlocking(
// Create temp file // Create temp file
const [path, cleanup] = await createTemp(); const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`); logger.info(`temp file created: ${path}`);
try { try {
const stream = fs.createWriteStream(path, { flags: "a" }); const stream = fs.createWriteStream(path, { flags: "a" });
@ -63,9 +64,10 @@ export async function exportBlocking(
const content = getFullApAccount(u.username, u.host); const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => { await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => { stream.write(`${content}\n`, (err) => {
if (err) { if (err) {
logger.error(err); logger.warn("failed");
logger.info(inspect(err));
rej(err); rej(err);
} else { } else {
res(); res();
@ -83,7 +85,7 @@ export async function exportBlocking(
} }
stream.end(); stream.end();
logger.succ(`Exported to: ${path}`); logger.info(`Exported to: ${path}`);
const fileName = `blocking-${dateFormat( const fileName = `blocking-${dateFormat(
new Date(), new Date(),
@ -96,7 +98,7 @@ export async function exportBlocking(
force: true, force: true,
}); });
logger.succ(`Exported to: ${driveFile.id}`); logger.info(`Exported to: ${driveFile.id}`);
} finally { } finally {
cleanup(); cleanup();
} }

View file

@ -29,7 +29,7 @@ export async function exportCustomEmojis(
const [path, cleanup] = await createTempDir(); const [path, cleanup] = await createTempDir();
logger.info(`Temp dir is ${path}`); logger.info(`temp dir created: ${path}`);
const metaPath = `${path}/meta.json`; const metaPath = `${path}/meta.json`;
@ -41,7 +41,8 @@ export async function exportCustomEmojis(
return new Promise<void>((res, rej) => { return new Promise<void>((res, rej) => {
metaStream.write(text, (err) => { metaStream.write(text, (err) => {
if (err) { if (err) {
logger.error(err); logger.warn("Failed to export custom emojis");
logger.info(inspect(err));
rej(err); rej(err);
} else { } else {
res(); res();
@ -105,7 +106,7 @@ export async function exportCustomEmojis(
zlib: { level: 0 }, zlib: { level: 0 },
}); });
archiveStream.on("close", async () => { archiveStream.on("close", async () => {
logger.succ(`Exported to: ${archivePath}`); logger.info(`Exported to: ${archivePath}`);
const fileName = `custom-emojis-${dateFormat( const fileName = `custom-emojis-${dateFormat(
new Date(), new Date(),
@ -118,7 +119,7 @@ export async function exportCustomEmojis(
force: true, force: true,
}); });
logger.succ(`Exported to: ${driveFile.id}`); logger.info(`Exported to: ${driveFile.id}`);
cleanup(); cleanup();
archiveCleanup(); archiveCleanup();
done(); done();

View file

@ -10,6 +10,7 @@ import { Users, Followings, Mutings } from "@/models/index.js";
import { In, MoreThan, Not } from "typeorm"; import { In, MoreThan, Not } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js"; import type { DbUserJobData } from "@/queue/types.js";
import type { Following } from "@/models/entities/following.js"; import type { Following } from "@/models/entities/following.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-following"); const logger = queueLogger.createSubLogger("export-following");
@ -28,7 +29,7 @@ export async function exportFollowing(
// Create temp file // Create temp file
const [path, cleanup] = await createTemp(); const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`); logger.info(`temp file created: ${path}`);
try { try {
const stream = fs.createWriteStream(path, { flags: "a" }); const stream = fs.createWriteStream(path, { flags: "a" });
@ -78,9 +79,12 @@ export async function exportFollowing(
const content = getFullApAccount(u.username, u.host); const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => { await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => { stream.write(`${content}\n`, (err) => {
if (err) { if (err) {
logger.error(err); logger.warn(
`failed to export following users of ${job.data.user.id}`,
);
logger.info(inspect(err));
rej(err); rej(err);
} else { } else {
res(); res();
@ -91,7 +95,7 @@ export async function exportFollowing(
} }
stream.end(); stream.end();
logger.succ(`Exported to: ${path}`); logger.info(`Exported to: ${path}`);
const fileName = `following-${dateFormat( const fileName = `following-${dateFormat(
new Date(), new Date(),
@ -104,7 +108,7 @@ export async function exportFollowing(
force: true, force: true,
}); });
logger.succ(`Exported to: ${driveFile.id}`); logger.info(`Exported to: ${driveFile.id}`);
} finally { } finally {
cleanup(); cleanup();
} }

View file

@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
import { Users, Mutings } from "@/models/index.js"; import { Users, Mutings } from "@/models/index.js";
import { IsNull, MoreThan } from "typeorm"; import { IsNull, MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js"; import type { DbUserJobData } from "@/queue/types.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-mute"); const logger = queueLogger.createSubLogger("export-mute");
@ -16,7 +17,7 @@ export async function exportMute(
job: Bull.Job<DbUserJobData>, job: Bull.Job<DbUserJobData>,
done: any, done: any,
): Promise<void> { ): Promise<void> {
logger.info(`Exporting mute of ${job.data.user.id} ...`); logger.info(`Exporting mutes of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id }); const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) { if (user == null) {
@ -27,7 +28,7 @@ export async function exportMute(
// Create temp file // Create temp file
const [path, cleanup] = await createTemp(); const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`); logger.info(`temp file created: ${path}`);
try { try {
const stream = fs.createWriteStream(path, { flags: "a" }); const stream = fs.createWriteStream(path, { flags: "a" });
@ -64,9 +65,10 @@ export async function exportMute(
const content = getFullApAccount(u.username, u.host); const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => { await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => { stream.write(`${content}\n`, (err) => {
if (err) { if (err) {
logger.error(err); logger.warn("failed");
logger.info(inspect(err));
rej(err); rej(err);
} else { } else {
res(); res();
@ -84,7 +86,7 @@ export async function exportMute(
} }
stream.end(); stream.end();
logger.succ(`Exported to: ${path}`); logger.info(`Exported to: ${path}`);
const fileName = `mute-${dateFormat( const fileName = `mute-${dateFormat(
new Date(), new Date(),
@ -97,7 +99,7 @@ export async function exportMute(
force: true, force: true,
}); });
logger.succ(`Exported to: ${driveFile.id}`); logger.info(`Exported to: ${driveFile.id}`);
} finally { } finally {
cleanup(); cleanup();
} }

View file

@ -10,6 +10,7 @@ import type { Note } from "@/models/entities/note.js";
import type { Poll } from "@/models/entities/poll.js"; import type { Poll } from "@/models/entities/poll.js";
import type { DbUserJobData } from "@/queue/types.js"; import type { DbUserJobData } from "@/queue/types.js";
import { createTemp } from "@/misc/create-temp.js"; import { createTemp } from "@/misc/create-temp.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-notes"); const logger = queueLogger.createSubLogger("export-notes");
@ -28,7 +29,7 @@ export async function exportNotes(
// Create temp file // Create temp file
const [path, cleanup] = await createTemp(); const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`); logger.info(`temp file created: ${path}`);
try { try {
const stream = fs.createWriteStream(path, { flags: "a" }); const stream = fs.createWriteStream(path, { flags: "a" });
@ -37,7 +38,8 @@ export async function exportNotes(
return new Promise<void>((res, rej) => { return new Promise<void>((res, rej) => {
stream.write(text, (err) => { stream.write(text, (err) => {
if (err) { if (err) {
logger.error(err); logger.warn(`failed to export posts of ${job.data.user.id}`);
logger.info(inspect(err));
rej(err); rej(err);
} else { } else {
res(); res();
@ -91,7 +93,7 @@ export async function exportNotes(
await write("]"); await write("]");
stream.end(); stream.end();
logger.succ(`Exported to: ${path}`); logger.info(`Exported to: ${path}`);
const fileName = `notes-${dateFormat( const fileName = `notes-${dateFormat(
new Date(), new Date(),
@ -104,7 +106,7 @@ export async function exportNotes(
force: true, force: true,
}); });
logger.succ(`Exported to: ${driveFile.id}`); logger.info(`Exported to: ${driveFile.id}`);
} finally { } finally {
cleanup(); cleanup();
} }

View file

@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
import { Users, UserLists, UserListJoinings } from "@/models/index.js"; import { Users, UserLists, UserListJoinings } from "@/models/index.js";
import { In } from "typeorm"; import { In } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js"; import type { DbUserJobData } from "@/queue/types.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-user-lists"); const logger = queueLogger.createSubLogger("export-user-lists");
@ -31,7 +32,7 @@ export async function exportUserLists(
// Create temp file // Create temp file
const [path, cleanup] = await createTemp(); const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`); logger.info(`temp file created: ${path}`);
try { try {
const stream = fs.createWriteStream(path, { flags: "a" }); const stream = fs.createWriteStream(path, { flags: "a" });
@ -46,9 +47,10 @@ export async function exportUserLists(
const acct = getFullApAccount(u.username, u.host); const acct = getFullApAccount(u.username, u.host);
const content = `${list.name},${acct}`; const content = `${list.name},${acct}`;
await new Promise<void>((res, rej) => { await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => { stream.write(`${content}\n`, (err) => {
if (err) { if (err) {
logger.error(err); logger.warn(`failed to export ${list.id}`);
logger.info(inspect(err));
rej(err); rej(err);
} else { } else {
res(); res();
@ -59,7 +61,7 @@ export async function exportUserLists(
} }
stream.end(); stream.end();
logger.succ(`Exported to: ${path}`); logger.info(`Exported to: ${path}`);
const fileName = `user-lists-${dateFormat( const fileName = `user-lists-${dateFormat(
new Date(), new Date(),
@ -72,7 +74,7 @@ export async function exportUserLists(
force: true, force: true,
}); });
logger.succ(`Exported to: ${driveFile.id}`); logger.info(`Exported to: ${driveFile.id}`);
} finally { } finally {
cleanup(); cleanup();
} }

View file

@ -66,14 +66,15 @@ export async function importBlocking(
// skip myself // skip myself
if (target.id === job.data.user.id) continue; if (target.id === job.data.user.id) continue;
logger.info(`Block[${linenum}] ${target.id} ...`); logger.debug(`Block[${linenum}] ${target.id} ...`);
await block(user, target); await block(user, target);
} catch (e) { } catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`); logger.warn(`failed: error in line ${linenum}`);
logger.info(inspect(e));
} }
} }
logger.succ("Imported"); logger.info("Imported");
done(); done();
} }

View file

@ -11,14 +11,28 @@ import { addFile } from "@/services/drive/add-file.js";
import { genId } from "backend-rs"; import { genId } from "backend-rs";
import { db } from "@/db/postgre.js"; import { db } from "@/db/postgre.js";
import probeImageSize from "probe-image-size"; import probeImageSize from "probe-image-size";
import * as path from "path"; import * as path from "node:path";
const logger = queueLogger.createSubLogger("import-custom-emojis"); const logger = queueLogger.createSubLogger("import-custom-emojis");
// probeImageSize acceptable extensions
// JPG, GIF, PNG, WebP, BMP, TIFF, SVG, PSD.
const acceptableExtensions = [
".jpeg",
".jpg",
".gif",
".png",
".webp",
".bmp",
// ".tiff", // Cannot be used as emoji
// ".svg", // Disable for secure issues
// ".psd", // Cannot be used as emoji
];
// TODO: 名前衝突時の動作を選べるようにする // TODO: 名前衝突時の動作を選べるようにする
export async function importCustomEmojis( export async function importCustomEmojis(
job: Bull.Job<DbUserImportJobData>, job: Bull.Job<DbUserImportJobData>,
done: any, done: () => void,
): Promise<void> { ): Promise<void> {
logger.info("Importing custom emojis ..."); logger.info("Importing custom emojis ...");
@ -32,7 +46,7 @@ export async function importCustomEmojis(
const [tempPath, cleanup] = await createTempDir(); const [tempPath, cleanup] = await createTempDir();
logger.info(`Temp dir is ${tempPath}`); logger.debug(`temp dir created: ${tempPath}`);
const destPath = `${tempPath}/emojis.zip`; const destPath = `${tempPath}/emojis.zip`;
@ -62,6 +76,14 @@ export async function importCustomEmojis(
if (!record.downloaded) continue; if (!record.downloaded) continue;
const emojiInfo = record.emoji; const emojiInfo = record.emoji;
const emojiPath = `${outputPath}/${record.fileName}`; const emojiPath = `${outputPath}/${record.fileName}`;
const extname = path.extname(record.fileName);
// Skip non-support files
if (!acceptableExtensions.includes(extname.toLowerCase())) {
continue;
}
await Emojis.delete({ await Emojis.delete({
name: emojiInfo.name, name: emojiInfo.name,
}); });
@ -92,7 +114,7 @@ export async function importCustomEmojis(
} else { } else {
logger.info("starting emoji import without metadata"); logger.info("starting emoji import without metadata");
// Since we lack metadata, we import into a randomized category name instead // Since we lack metadata, we import into a randomized category name instead
let categoryName = genId(); const categoryName = genId();
let containedEmojis = fs.readdirSync(outputPath); let containedEmojis = fs.readdirSync(outputPath);
@ -103,7 +125,14 @@ export async function importCustomEmojis(
for (const emojiFilename of containedEmojis) { for (const emojiFilename of containedEmojis) {
// strip extension and get filename to use as name // strip extension and get filename to use as name
const name = path.basename(emojiFilename, path.extname(emojiFilename)); const extname = path.extname(emojiFilename);
// Skip non-emoji files, such as LICENSE
if (!acceptableExtensions.includes(extname.toLowerCase())) {
continue;
}
const name = path.basename(emojiFilename, extname);
const emojiPath = `${outputPath}/${emojiFilename}`; const emojiPath = `${outputPath}/${emojiFilename}`;
logger.info(`importing ${name}`); logger.info(`importing ${name}`);
@ -143,8 +172,8 @@ export async function importCustomEmojis(
cleanup(); cleanup();
logger.succ("Imported"); logger.info("Imported");
done(); done();
}); });
logger.succ(`Unzipping to ${outputPath}`); logger.info(`Unzipping to ${outputPath}`);
} }

View file

@ -1,6 +1,6 @@
import * as Post from "@/misc/post.js"; import * as Post from "@/misc/post.js";
import create from "@/services/note/create.js"; import create from "@/services/note/create.js";
import { Users } from "@/models/index.js"; import { NoteFiles, Users } from "@/models/index.js";
import type { DbUserImportMastoPostJobData } from "@/queue/types.js"; import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js"; import { queueLogger } from "../../logger.js";
import { uploadFromUrl } from "@/services/drive/upload-from-url.js"; import { uploadFromUrl } from "@/services/drive/upload-from-url.js";
@ -49,7 +49,7 @@ export async function importCkPost(
}); });
files.push(file); files.push(file);
} catch (e) { } catch (e) {
logger.error(`Skipped adding file to drive: ${url}`); logger.info(`Skipped adding file to drive: ${url}`);
} }
} }
const { text, cw, localOnly, createdAt, visibility } = Post.parse(post); const { text, cw, localOnly, createdAt, visibility } = Post.parse(post);
@ -59,9 +59,18 @@ export async function importCkPost(
userId: user.id, userId: user.id,
}); });
if (note && (note?.fileIds?.length || 0) < files.length) { // FIXME: What is this condition?
if (note != null && (note.fileIds?.length || 0) < files.length) {
const update: Partial<Note> = {}; const update: Partial<Note> = {};
update.fileIds = files.map((x) => x.id); update.fileIds = files.map((x) => x.id);
if (update.fileIds != null) {
await NoteFiles.delete({ noteId: note.id });
await NoteFiles.insert(
update.fileIds.map((fileId) => ({ noteId: note?.id, fileId })),
);
}
await Notes.update(note.id, update); await Notes.update(note.id, update);
await NoteEdits.insert({ await NoteEdits.insert({
id: genId(), id: genId(),
@ -71,12 +80,12 @@ export async function importCkPost(
fileIds: note.fileIds, fileIds: note.fileIds,
updatedAt: new Date(), updatedAt: new Date(),
}); });
logger.info(`Note file updated`); logger.info("Post updated");
} }
if (!note) { if (note == null) {
note = await create(user, { note = await create(user, {
createdAt: createdAt, createdAt: createdAt,
files: files.length == 0 ? undefined : files, files: files.length === 0 ? undefined : files,
poll: undefined, poll: undefined,
text: text || undefined, text: text || undefined,
reply: post.replyId ? job.data.parent : null, reply: post.replyId ? job.data.parent : null,
@ -90,11 +99,11 @@ export async function importCkPost(
apHashtags: undefined, apHashtags: undefined,
apEmojis: undefined, apEmojis: undefined,
}); });
logger.info(`Create new note`); logger.debug("New post has been created");
} else { } else {
logger.info(`Note exist`); logger.info("This post already exists");
} }
logger.succ("Imported"); logger.info("Imported");
if (post.childNotes) { if (post.childNotes) {
for (const child of post.childNotes) { for (const child of post.childNotes) {
createImportCkPostJob( createImportCkPostJob(

View file

@ -64,11 +64,12 @@ export async function importFollowing(
// skip myself // skip myself
if (target.id === job.data.user.id) continue; if (target.id === job.data.user.id) continue;
logger.info(`Follow[${linenum}] ${target.id} ...`); logger.debug(`Follow[${linenum}] ${target.id} ...`);
follow(user, target); follow(user, target);
} catch (e) { } catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`); logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
} }
} }
} else { } else {
@ -102,15 +103,16 @@ export async function importFollowing(
// skip myself // skip myself
if (target.id === job.data.user.id) continue; if (target.id === job.data.user.id) continue;
logger.info(`Follow[${linenum}] ${target.id} ...`); logger.debug(`Follow[${linenum}] ${target.id} ...`);
follow(user, target); follow(user, target);
} catch (e) { } catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`); logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
} }
} }
} }
logger.succ("Imported"); logger.info("Imported");
done(); done();
} }

View file

@ -1,5 +1,5 @@
import create from "@/services/note/create.js"; import create from "@/services/note/create.js";
import { Users } from "@/models/index.js"; import { NoteFiles, Users } from "@/models/index.js";
import type { DbUserImportMastoPostJobData } from "@/queue/types.js"; import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js"; import { queueLogger } from "../../logger.js";
import type Bull from "bull"; import type Bull from "bull";
@ -73,7 +73,7 @@ export async function importMastoPost(
}); });
files.push(file); files.push(file);
} catch (e) { } catch (e) {
logger.error(`Skipped adding file to drive: ${url}`); logger.warn(`Skipped adding file to drive: ${url}`);
} }
} }
} }
@ -85,9 +85,18 @@ export async function importMastoPost(
userId: user.id, userId: user.id,
}); });
if (note && (note?.fileIds?.length || 0) < files.length) { // FIXME: What is this condition?
if (note != null && (note.fileIds?.length || 0) < files.length) {
const update: Partial<Note> = {}; const update: Partial<Note> = {};
update.fileIds = files.map((x) => x.id); update.fileIds = files.map((x) => x.id);
if (update.fileIds != null) {
await NoteFiles.delete({ noteId: note.id });
await NoteFiles.insert(
update.fileIds.map((fileId) => ({ noteId: note?.id, fileId })),
);
}
await Notes.update(note.id, update); await Notes.update(note.id, update);
await NoteEdits.insert({ await NoteEdits.insert({
id: genId(), id: genId(),
@ -97,14 +106,14 @@ export async function importMastoPost(
fileIds: note.fileIds, fileIds: note.fileIds,
updatedAt: new Date(), updatedAt: new Date(),
}); });
logger.info(`Note file updated`); logger.info("Post updated");
} }
if (!note) { if (note == null) {
note = await create(user, { note = await create(user, {
createdAt: isRenote createdAt: isRenote
? new Date(post.published) ? new Date(post.published)
: new Date(post.object.published), : new Date(post.object.published),
files: files.length == 0 ? undefined : files, files: files.length === 0 ? undefined : files,
poll: undefined, poll: undefined,
text: text || undefined, text: text || undefined,
reply, reply,
@ -118,12 +127,12 @@ export async function importMastoPost(
apHashtags: undefined, apHashtags: undefined,
apEmojis: undefined, apEmojis: undefined,
}); });
logger.info(`Create new note`); logger.debug("New post has been created");
} else { } else {
logger.info(`Note exist`); logger.info("This post already exists");
} }
job.progress(100); job.progress(100);
done(); done();
logger.succ("Imported"); logger.info("Imported");
} }

View file

@ -66,15 +66,16 @@ export async function importMuting(
// skip myself // skip myself
if (target.id === job.data.user.id) continue; if (target.id === job.data.user.id) continue;
logger.info(`Mute[${linenum}] ${target.id} ...`); logger.debug(`Mute[${linenum}] ${target.id} ...`);
await mute(user, target); await mute(user, target);
} catch (e) { } catch (e) {
logger.warn(`Error in line ${linenum}: ${inspect(e)}`); logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
} }
} }
logger.succ("Imported"); logger.info("Imported");
done(); done();
} }

View file

@ -45,9 +45,10 @@ export async function importPosts(
} }
} catch (e) { } catch (e) {
// handle error // handle error
logger.warn(`Failed to read Mastodon archive:\n${inspect(e)}`); logger.warn("Failed to read Mastodon archive");
logger.info(inspect(e));
} }
logger.succ("Mastodon archive imported"); logger.info("Mastodon archive imported");
done(); done();
return; return;
} }
@ -56,24 +57,25 @@ export async function importPosts(
try { try {
const parsed = JSON.parse(json); const parsed = JSON.parse(json);
if (parsed instanceof Array) { if (Array.isArray(parsed)) {
logger.info("Parsing key style posts"); logger.info("Parsing *key posts");
const arr = recreateChain(parsed); const arr = recreateChain(parsed);
for (const post of arr) { for (const post of arr) {
createImportCkPostJob(job.data.user, post, job.data.signatureCheck); createImportCkPostJob(job.data.user, post, job.data.signatureCheck);
} }
} else if (parsed instanceof Object) { } else if (parsed instanceof Object) {
logger.info("Parsing animal style posts"); logger.info("Parsing Mastodon posts");
for (const post of parsed.orderedItems) { for (const post of parsed.orderedItems) {
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck); createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
} }
} }
} catch (e) { } catch (e) {
// handle error // handle error
logger.warn(`Error occured while reading:\n${inspect(e)}`); logger.warn("an error occured while reading");
logger.info(inspect(e));
} }
logger.succ("Imported"); logger.info("Imported");
done(); done();
} }

View file

@ -86,10 +86,11 @@ export async function importUserLists(
pushUserToUserList(target, list!); pushUserToUserList(target, list!);
} catch (e) { } catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`); logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
} }
} }
logger.succ("Imported"); logger.info("Imported");
done(); done();
} }

View file

@ -48,6 +48,6 @@ export default async function cleanRemoteFiles(
job.progress(deletedCount / total); job.progress(deletedCount / total);
} }
logger.succ("All cached remote files has been deleted."); logger.info("All cached remote files are deleted.");
done(); done();
} }

View file

@ -28,6 +28,6 @@ export async function checkExpiredMutings(
} }
} }
logger.succ("All expired mutings checked."); logger.info("All expired mutings checked.");
done(); done();
} }

View file

@ -11,6 +11,6 @@ export async function cleanCharts(
): Promise<void> { ): Promise<void> {
logger.info("Cleaning active users chart..."); logger.info("Cleaning active users chart...");
await activeUsersChart.clean(); await activeUsersChart.clean();
logger.succ("Active users chart has been cleaned."); logger.info("Active users chart has been cleaned.");
done(); done();
} }

View file

@ -4,7 +4,7 @@ import { UserIps } from "@/models/index.js";
import { queueLogger } from "../../logger.js"; import { queueLogger } from "../../logger.js";
const logger = queueLogger.createSubLogger("clean"); const logger = queueLogger.createSubLogger("clean-user-ip-log");
export async function clean( export async function clean(
job: Bull.Job<Record<string, unknown>>, job: Bull.Job<Record<string, unknown>>,
@ -16,6 +16,6 @@ export async function clean(
createdAt: LessThan(new Date(Date.now() - 1000 * 60 * 60 * 24 * 90)), createdAt: LessThan(new Date(Date.now() - 1000 * 60 * 60 * 24 * 90)),
}); });
logger.succ("Cleaned."); logger.info("Cleaned.");
done(); done();
} }

View file

@ -3,7 +3,7 @@ import { IsNull } from "typeorm";
import { Emojis } from "@/models/index.js"; import { Emojis } from "@/models/index.js";
import { queueLogger } from "../../logger.js"; import { queueLogger } from "../../logger.js";
import { getEmojiSize } from "@/misc/emoji-meta.js"; import { getImageSizeFromUrl } from "backend-rs";
import { inspect } from "node:util"; import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("local-emoji-size"); const logger = queueLogger.createSubLogger("local-emoji-size");
@ -21,23 +21,22 @@ export async function setLocalEmojiSizes(
for (let i = 0; i < emojis.length; i++) { for (let i = 0; i < emojis.length; i++) {
try { try {
const size = await getEmojiSize(emojis[i].publicUrl); const size = await getImageSizeFromUrl(emojis[i].publicUrl);
await Emojis.update(emojis[i].id, { await Emojis.update(emojis[i].id, {
width: size.width || null, width: size.width || null,
height: size.height || null, height: size.height || null,
}); });
} catch (e) { } catch (e) {
logger.error( logger.warn(`Unable to set emoji size (${i + 1}/${emojis.length})`);
`Unable to set emoji size (${i + 1}/${emojis.length}):\n${inspect(e)}`, logger.info(inspect(e));
);
/* skip if any error happens */ /* skip if any error happens */
} finally { } finally {
// wait for 1sec so that this would not overwhelm the object storage. // wait for 1sec so that this would not overwhelm the object storage.
await new Promise((resolve) => setTimeout(resolve, 1000)); await new Promise((resolve) => setTimeout(resolve, 1000));
if (i % 10 === 9) logger.succ(`fetched ${i + 1}/${emojis.length} emojis`); if (i % 10 === 9) logger.info(`fetched ${i + 1}/${emojis.length} emojis`);
} }
} }
logger.succ("Done."); logger.info("Done.");
done(); done();
} }

View file

@ -33,12 +33,13 @@ export async function verifyLinks(
fields: user.fields, fields: user.fields,
}); });
} catch (e) { } catch (e) {
logger.error(`Failed to update user ${user.userId}:\n${inspect(e)}`); logger.error(`Failed to update user ${user.userId}`);
logger.info(inspect(e));
done(e); done(e);
} }
} }
} }
logger.succ("All links successfully verified."); logger.info("All links successfully verified.");
done(); done();
} }

View file

@ -133,7 +133,8 @@ export default class DeliverManager {
host: new URL(inbox).host, host: new URL(inbox).host,
}); });
} catch (error) { } catch (error) {
apLogger.error(`Invalid Inbox ${inbox}:\n${inspect(error)}`); apLogger.info(`Invalid Inbox ${inbox}`);
apLogger.debug(inspect(error));
} }
} }

View file

@ -6,20 +6,19 @@ import { isFollow, getApType } from "../../type.js";
import { apLogger } from "../../logger.js"; import { apLogger } from "../../logger.js";
import { inspect } from "node:util"; import { inspect } from "node:util";
const logger = apLogger;
export default async ( export default async (
actor: CacheableRemoteUser, actor: CacheableRemoteUser,
activity: IAccept, activity: IAccept,
): Promise<string> => { ): Promise<string> => {
const uri = activity.id || activity; const uri = activity.id || activity;
logger.info(`Accept: ${uri}`); apLogger.info(`Accept: ${uri}`);
const resolver = new Resolver(); const resolver = new Resolver();
const object = await resolver.resolve(activity.object).catch((e) => { const object = await resolver.resolve(activity.object).catch((e) => {
logger.error(`Resolution failed:\n${inspect(e)}`); apLogger.info(`Failed to resolve AP object: ${e}`);
apLogger.debug(inspect(e));
throw e; throw e;
}); });

View file

@ -5,15 +5,13 @@ import type { IAnnounce } from "../../type.js";
import { getApId } from "../../type.js"; import { getApId } from "../../type.js";
import { apLogger } from "../../logger.js"; import { apLogger } from "../../logger.js";
const logger = apLogger;
export default async ( export default async (
actor: CacheableRemoteUser, actor: CacheableRemoteUser,
activity: IAnnounce, activity: IAnnounce,
): Promise<void> => { ): Promise<void> => {
const uri = getApId(activity); const uri = getApId(activity);
logger.info(`Announce: ${uri}`); apLogger.info(`Announce: ${uri}`);
const resolver = new Resolver(); const resolver = new Resolver();

View file

@ -13,8 +13,6 @@ import { Notes } from "@/models/index.js";
import { isBlockedServer } from "backend-rs"; import { isBlockedServer } from "backend-rs";
import { inspect } from "node:util"; import { inspect } from "node:util";
const logger = apLogger;
/** /**
* Handle announcement activities * Handle announcement activities
*/ */
@ -50,11 +48,14 @@ export default async function (
// Skip if target is 4xx // Skip if target is 4xx
if (e instanceof StatusError) { if (e instanceof StatusError) {
if (e.isClientError) { if (e.isClientError) {
logger.warn(`Ignored announce target ${targetUri} - ${e.statusCode}`); apLogger.info(
`Ignored announce target ${targetUri} - ${e.statusCode}`,
);
return; return;
} }
logger.warn(`Error in announce target ${targetUri}:\n${inspect(e)}`); apLogger.warn(`Error in announce target ${targetUri}`);
apLogger.debug(inspect(e));
} }
throw e; throw e;
} }
@ -63,7 +64,7 @@ export default async function (
console.log("skip: invalid actor for this activity"); console.log("skip: invalid actor for this activity");
return; return;
} }
logger.info(`Creating the (Re)Note: ${uri}`); apLogger.info(`Creating (re)note: ${uri}`);
const activityAudience = await parseAudience( const activityAudience = await parseAudience(
actor, actor,

View file

@ -7,15 +7,13 @@ import { apLogger } from "../../logger.js";
import { toArray, concat, unique } from "@/prelude/array.js"; import { toArray, concat, unique } from "@/prelude/array.js";
import { inspect } from "node:util"; import { inspect } from "node:util";
const logger = apLogger;
export default async ( export default async (
actor: CacheableRemoteUser, actor: CacheableRemoteUser,
activity: ICreate, activity: ICreate,
): Promise<void> => { ): Promise<void> => {
const uri = getApId(activity); const uri = getApId(activity);
logger.info(`Create: ${uri}`); apLogger.info(`Create: ${uri}`);
// copy audiences between activity <=> object. // copy audiences between activity <=> object.
if (typeof activity.object === "object") { if (typeof activity.object === "object") {
@ -40,13 +38,14 @@ export default async (
const resolver = new Resolver(); const resolver = new Resolver();
const object = await resolver.resolve(activity.object).catch((e) => { const object = await resolver.resolve(activity.object).catch((e) => {
logger.error(`Resolution failed:\n${inspect(e)}`); apLogger.info(`Failed to resolve AP object: ${e}`);
apLogger.debug(inspect(e));
throw e; throw e;
}); });
if (isPost(object)) { if (isPost(object)) {
createNote(resolver, actor, object, false, activity); createNote(resolver, actor, object, false, activity);
} else { } else {
logger.warn(`Unknown type: ${getApType(object)}`); apLogger.info(`Unknown type: ${getApType(object)}`);
} }
}; };

Some files were not shown because too many files have changed in this diff Show more