refactor: remove database migrations in Rust

commit 4356099fec (parent 5ff1afc4f2)

20 changed files with 434 additions and 2008 deletions

@@ -11,8 +11,6 @@ ENV PATH="/root/.cargo/bin:${PATH}"
 COPY packages/backend/native-utils/Cargo.toml packages/backend/native-utils/Cargo.toml
 COPY packages/backend/native-utils/Cargo.lock packages/backend/native-utils/Cargo.lock
 COPY packages/backend/native-utils/src/lib.rs packages/backend/native-utils/src/
-COPY packages/backend/native-utils/migration/Cargo.toml packages/backend/native-utils/migration/Cargo.toml
-COPY packages/backend/native-utils/migration/src/lib.rs packages/backend/native-utils/migration/src/
 
 # Install cargo dependencies
 RUN cargo fetch --locked --manifest-path /firefish/packages/backend/native-utils/Cargo.toml

@@ -1,3 +1,7 @@
+# Unreleased
+
+- refactoring
+
 # v20240212
 
 - refactoring

@@ -7,6 +7,19 @@
 sudo systemctl stop your-firefish-service.service
 ```
 1. Take a backup
+1. Revert database migrations
+```sh
+sudo --user=postgres psql --file=docs/downgrade.sql --dbname=database_name
+```
+
+The database name can be found in `.config/default.yml`.
+```yaml
+db:
+  port: 5432
+  db: database_name # this one
+  user: firefish
+  pass: password
+```
 1. Switch back to the `v20240206` tag
 ```sh
 git switch v20240206

@@ -29,6 +42,18 @@
 # or podman-compose down
 ```
 1. Take a backup
+1. Revert database migrations
+```sh
+docker-compose exec db psql --command="$(cat docs/downgrade.sql)" --user=user_name --dbname=database_name
+# or podman-compose exec db psql --command="$(cat docs/downgrade.sql)" --user=user_name --dbname=database_name
+```
+
+The user and database name can be found in `.config/docker.env`.
+```env
+POSTGRES_PASSWORD=password
+POSTGRES_USER=user_name # user name
+POSTGRES_DB=database_name # database name
+```
 1. Change the image tag to `registry.firefish.dev/firefish/firefish:v20240206`
 ```sh
 vim docker-compose.yml

docs/downgrade.sql (new file, 22 lines)

@@ -0,0 +1,22 @@
BEGIN;

DELETE FROM "migrations" WHERE name IN (
    'RemoveNativeUtilsMigration1705877093218'
);

-- remove-native-utils-migration
CREATE TABLE "seaql_migrations" (
    version character varying NOT NULL,
    applied_at bigint NOT NULL
);
INSERT INTO "seaql_migrations" (version, applied_at)
VALUES
('m20230531_180824_drop_reversi', 1705876632),
('m20230627_185451_index_note_url', 1705876632),
('m20230709_000510_move_antenna_to_cache', 1705876632),
('m20230806_170616_fix_antenna_stream_ids', 1705876632),
('m20230904_013244_is_indexable', 1705876632),
('m20231002_143323_remove_integrations', 1705876632)
;

COMMIT;
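
Editor's note: this file pairs with the new TypeORM migration below. Deleting the `RemoveNativeUtilsMigration1705877093218` row makes TypeORM forget the forward migration, while re-inserting the six version rows convinces the old Rust migrator that everything is already applied, so `migration up` on the old tag becomes a no-op. A conceptual sketch of that check, assuming SeaORM's standard bookkeeping (`fetch_applied_versions` is a hypothetical helper, not a real API):

```rust
// Conceptual sketch only, not the sea-orm-migration source: `up` applies
// just those migrations whose names are missing from "seaql_migrations".
let applied: Vec<String> = fetch_applied_versions(db).await?; // reads seaql_migrations
for migration in Migrator::migrations() {
    if !applied.contains(&migration.name().to_string()) {
        // would run `migration.up(&schema_manager).await?` here
    }
}
```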

@@ -0,0 +1,152 @@
export class RemoveNativeUtilsMigration1705877093218 {
	name = "RemoveNativeUtilsMigration1705877093218";

	async up(queryRunner) {
		await queryRunner.query(`DROP TABLE IF EXISTS "reversi_game"`);
		await queryRunner.query(`DROP TABLE IF EXISTS "reversi_matching"`);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_note_url" ON "note" ("url")`,
		);
		await queryRunner.query(`DROP TABLE IF EXISTS "antenna_note"`);
		await queryRunner.query(
			`ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "isIndexable" boolean NOT NULL DEFAULT true`,
		);
		await queryRunner.query(
			`ALTER TABLE "user_profile" ADD COLUMN IF NOT EXISTS "isIndexable" boolean NOT NULL DEFAULT true`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "enableTwitterIntegration"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "twitterConsumerKey"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "twitterConsumerSecret"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "enableGithubIntegration"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "githubClientId"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "githubClientSecret"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "enableDiscordIntegration"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "discordClientId"`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" DROP COLUMN IF EXISTS "discordClientSecret"`,
		);
		await queryRunner.query(
			`ALTER TABLE "user_profile" DROP COLUMN IF EXISTS "integrations"`,
		);
		await queryRunner.query(`DROP TABLE IF EXISTS "seaql_migrations"`);
	}

	async down(queryRunner) {
		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "seaql_migrations" (version character varying NOT NULL, applied_at bigint NOT NULL)`,
		);
		await queryRunner.query(
			`ALTER TABLE "user_profile" ADD COLUMN IF NOT EXISTS "integrations" jsonb DEFAULT '{}'`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "discordClientSecret" character varying(128)`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "discordClientId" character varying(128)`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "enableDiscordIntegration" boolean NOT NULL DEFAULT false`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "githubClientSecret" character varying(128)`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "githubClientId" character varying(128)`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "enableGithubIntegration" boolean NOT NULL DEFAULT false`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "twitterConsumerSecret" character varying(128)`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "twitterConsumerKey" character varying(128)`,
		);
		await queryRunner.query(
			`ALTER TABLE "meta" ADD COLUMN IF NOT EXISTS "enableTwitterIntegration" boolean NOT NULL DEFAULT false`,
		);
		await queryRunner.query(
			`ALTER TABLE "user_profile" DROP COLUMN IF EXISTS "isIndexable"`,
		);
		await queryRunner.query(
			`ALTER TABLE "user" DROP COLUMN IF EXISTS "isIndexable"`,
		);
		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "antenna_note" ("id" character varying(32) NOT NULL, "noteId" character varying(32) NOT NULL, "antennaId" character varying(32) NOT NULL, "read" boolean NOT NULL DEFAULT false, CONSTRAINT "PK_fb28d94d0989a3872df19fd6ef8" PRIMARY KEY ("id"))`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_0d775946662d2575dfd2068a5f" ON "antenna_note" ("antennaId")`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_bd0397be22147e17210940e125" ON "antenna_note" ("noteId")`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX IF NOT EXISTS "IDX_335a0bf3f904406f9ef3dd51c2" ON "antenna_note" ("noteId", "antennaId")`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_9937ea48d7ae97ffb4f3f063a4" ON "antenna_note" ("read")`,
		);
		await queryRunner.query(
			`ALTER TABLE "antenna_note" ADD CONSTRAINT IF NOT EXISTS "FK_0d775946662d2575dfd2068a5f5" FOREIGN KEY ("antennaId") REFERENCES "antenna"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`ALTER TABLE "antenna_note" ADD CONSTRAINT IF NOT EXISTS "FK_bd0397be22147e17210940e125b" FOREIGN KEY ("noteId") REFERENCES "note"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(`DROP INDEX IF EXISTS "IDX_note_url"`);
		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "reversi_matching" ("id" character varying(32) NOT NULL, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "parentId" character varying(32) NOT NULL, "childId" character varying(32) NOT NULL, CONSTRAINT "PK_880bd0afbab232f21c8b9d146cf" PRIMARY KEY ("id"))`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_b604d92d6c7aec38627f6eaf16" ON "reversi_matching" ("createdAt")`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_3b25402709dd9882048c2bbade" ON "reversi_matching" ("parentId")`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_e247b23a3c9b45f89ec1299d06" ON "reversi_matching" ("childId")`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_matching" ADD CONSTRAINT IF NOT EXISTS "FK_3b25402709dd9882048c2bbade0" FOREIGN KEY ("parentId") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_matching" ADD CONSTRAINT IF NOT EXISTS "FK_e247b23a3c9b45f89ec1299d066" FOREIGN KEY ("childId") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "reversi_matching"."createdAt" IS 'The created date of the ReversiMatching.'`,
		);
		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "reversi_game" ("id" character varying(32) NOT NULL, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "startedAt" TIMESTAMP WITH TIME ZONE, "user1Id" character varying(32) NOT NULL, "user2Id" character varying(32) NOT NULL, "user1Accepted" boolean NOT NULL DEFAULT false, "user2Accepted" boolean NOT NULL DEFAULT false, "black" integer, "isStarted" boolean NOT NULL DEFAULT false, "isEnded" boolean NOT NULL DEFAULT false, "winnerId" character varying(32), "surrendered" character varying(32), "logs" jsonb NOT NULL DEFAULT '[]', "map" character varying(64) array NOT NULL, "bw" character varying(32) NOT NULL, "isLlotheo" boolean NOT NULL DEFAULT false, "canPutEverywhere" boolean NOT NULL DEFAULT false, "loopedBoard" boolean NOT NULL DEFAULT false, "form1" jsonb DEFAULT null, "form2" jsonb DEFAULT null, "crc32" character varying(32), CONSTRAINT "PK_76b30eeba71b1193ad7c5311c3f" PRIMARY KEY ("id"))`,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS "IDX_b46ec40746efceac604142be1c" ON "reversi_game" ("createdAt")`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_game" ADD CONSTRAINT IF NOT EXISTS "FK_f7467510c60a45ce5aca6292743" FOREIGN KEY ("user1Id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`ALTER TABLE "reversi_game" ADD CONSTRAINT IF NOT EXISTS "FK_6649a4e8c5d5cf32fb03b5da9f6" FOREIGN KEY ("user2Id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "reversi_game"."createdAt" IS 'The created date of the ReversiGame.'`,
		);
		await queryRunner.query(
			`COMMENT ON COLUMN "reversi_game"."startedAt" IS 'The started date of the ReversiGame.'`,
		);
	}
}

packages/backend/native-utils/Cargo.lock (generated, 898 lines)
File diff suppressed because it is too large

@@ -3,9 +3,6 @@ edition = "2021"
 name = "native-utils"
 version = "0.0.0"
 
-[workspace]
-members = ["migration"]
-
 [features]
 default = []
 napi = ["dep:napi", "dep:napi-derive"]

@@ -1,37 +0,0 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
name = "migration"
path = "src/lib.rs"

[features]
default = []
convert = ["dep:native-utils", "dep:indicatif", "dep:futures"]

[dependencies]
serde_json = "1.0.108"
native-utils = { path = "../", optional = true }
indicatif = { version = "0.17.7", features = ["tokio"], optional = true }
tokio = { version = "1.35.1", features = ["full"] }
futures = { version = "0.3.30", optional = true }
serde_yaml = "0.9.29"
serde = { version = "1.0.193", features = ["derive"] }
urlencoding = "2.1.3"
redis = { version = "0.24.0", features = ["tokio-rustls-comp"] }
sea-orm = "0.12.10"
url = { version = "2.5.0", features = ["serde"] }
basen = "0.1.0"

[dependencies.sea-orm-migration]
version = "0.12.10"
features = [
    # Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
    # View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
    # e.g.
    "runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
    "sqlx-postgres",        # `DATABASE_DRIVER` feature
]

@@ -1,55 +0,0 @@
# Making migrations

For more information, please read https://www.sea-ql.org/SeaORM/docs/migration/setting-up-migration/

- Install `sea-orm-cli`
  ```sh
  cargo install sea-orm-cli
  ```

- Generate
  ```sh
  sea-orm-cli migrate generate ****
  ```

# Running Migrator CLI

- Generate a new migration file
  ```sh
  cargo run -- migrate generate MIGRATION_NAME
  ```
- Apply all pending migrations
  ```sh
  cargo run
  ```
  ```sh
  cargo run -- up
  ```
- Apply first 10 pending migrations
  ```sh
  cargo run -- up -n 10
  ```
- Rollback last applied migrations
  ```sh
  cargo run -- down
  ```
- Rollback last 10 applied migrations
  ```sh
  cargo run -- down -n 10
  ```
- Drop all tables from the database, then reapply all migrations
  ```sh
  cargo run -- fresh
  ```
- Rollback all applied migrations, then reapply all migrations
  ```sh
  cargo run -- refresh
  ```
- Rollback all applied migrations
  ```sh
  cargo run -- reset
  ```
- Check the status of all migrations
  ```sh
  cargo run -- status
  ```
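
Editor's note: as a point of reference for the workflow this deleted README described, `sea-orm-cli migrate generate` scaffolds a file of the same shape as the migrations deleted below; a minimal sketch (names illustrative, bodies left as the generated `todo!()`):

```rust
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    // Apply the change; `manager` wraps the live database connection.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        todo!()
    }

    // Undo the change made in `up`.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        todo!()
    }
}
```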

@@ -1,35 +0,0 @@
pub use sea_orm_migration::prelude::*;

use basen::BASE36;

mod m20230531_180824_drop_reversi;
mod m20230627_185451_index_note_url;
mod m20230709_000510_move_antenna_to_cache;
mod m20230806_170616_fix_antenna_stream_ids;
mod m20230904_013244_is_indexable;
mod m20231002_143323_remove_integrations;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20230531_180824_drop_reversi::Migration),
            Box::new(m20230627_185451_index_note_url::Migration),
            Box::new(m20230709_000510_move_antenna_to_cache::Migration),
            Box::new(m20230806_170616_fix_antenna_stream_ids::Migration),
            Box::new(m20230904_013244_is_indexable::Migration),
            Box::new(m20231002_143323_remove_integrations::Migration),
        ]
    }
}

pub fn get_timestamp(id: &str) -> i64 {
    const TIME_2000: i64 = 946_684_800_000;
    let n: Option<u64> = BASE36.decode_var_len(&id[0..8]);
    match n {
        None => -1,
        Some(n) => n as i64 + TIME_2000,
    }
}
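
Editor's note: `get_timestamp` recovers the creation time embedded in the first 8 base36 characters of an object ID, as milliseconds offset from the 2000-01-01 epoch constant `TIME_2000`. A rough illustration, assuming the function above is in scope (the IDs are made up):

```rust
#[test]
fn timestamp_decoding() {
    // A valid base36 prefix decodes to Some(n), so the result is n + TIME_2000.
    assert!(get_timestamp("9f62o1npexample0") >= 946_684_800_000);

    // A prefix containing non-base36 characters decodes to None, reported as -1.
    assert_eq!(get_timestamp("--------"), -1);
}
```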

@@ -1,51 +0,0 @@
use sea_orm_migration::{
    prelude::*,
    sea_orm::{DbBackend, Statement},
};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager.get_database_backend() == DbBackend::Sqlite {
            return Ok(());
        }

        let db = manager.get_connection();
        db.query_one(Statement::from_string(
            DbBackend::Postgres,
            Table::drop()
                .table(ReversiGame::Table)
                .if_exists()
                .to_string(PostgresQueryBuilder),
        ))
        .await?;
        db.query_one(Statement::from_string(
            DbBackend::Postgres,
            Table::drop()
                .table(ReversiMatching::Table)
                .if_exists()
                .to_string(PostgresQueryBuilder),
        ))
        .await?;

        Ok(())
    }

    async fn down(&self, _manager: &SchemaManager) -> Result<(), DbErr> {
        // Replace the sample below with your own migration scripts
        Ok(())
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum ReversiGame {
    Table,
}
#[derive(Iden)]
enum ReversiMatching {
    Table,
}
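
Editor's note: this migration hand-builds raw Postgres `DROP TABLE` statements and bails out early on SQLite. For comparison, the backend-agnostic form of the same drop, using the `SchemaManager` helper exactly as the antenna migration below does (assuming the same `manager` and `ReversiGame` are in scope), would be:

```rust
// Sketch: let SchemaManager render the statement for whichever backend is active.
manager
    .drop_table(
        Table::drop()
            .table(ReversiGame::Table)
            .if_exists()
            .to_owned(),
    )
    .await?;
```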

@@ -1,38 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_index(
                Index::create()
                    .name("IDX_note_url")
                    .table(Note::Table)
                    .col(Note::Url)
                    .if_not_exists()
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_index(
                Index::drop()
                    .name("IDX_note_url")
                    .table(Note::Table)
                    .to_owned(),
            )
            .await
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum Note {
    Table,
    Url,
}

@@ -1,248 +0,0 @@
use redis::streams::StreamMaxlen;
use sea_orm::Statement;
use sea_orm_migration::prelude::*;
use std::env;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let cache_url = env::var("CACHE_URL").unwrap();
        let skip_copy = env::var("ANTENNA_MIGRATION_SKIP").unwrap_or_default();
        let copy_limit = env::var("ANTENNA_MIGRATION_COPY_LIMIT").unwrap_or_default();
        let read_limit: u64 = env::var("ANTENNA_MIGRATION_READ_LIMIT")
            .unwrap_or("10000".to_string())
            .parse()
            .unwrap();
        let copy_limit: i64 = match copy_limit.parse() {
            Ok(limit) => limit,
            Err(_) => 0,
        };

        if skip_copy == "true" {
            println!("Skipped antenna migration");
        } else {
            let prefix = env::var("CACHE_PREFIX").unwrap();

            let db = manager.get_connection();
            let bk = manager.get_database_backend();

            let count_stmt =
                Statement::from_string(bk, "SELECT COUNT(1) FROM antenna_note".to_owned());
            let total_num = db
                .query_one(count_stmt)
                .await?
                .unwrap()
                .try_get_by_index::<i64>(0)?;
            let copy_limit = if copy_limit > 0 {
                copy_limit
            } else {
                total_num
            };
            println!(
                "Copying {} out of {} entries in antenna_note.",
                copy_limit, total_num
            );

            let stmt_base = Query::select()
                .column((AntennaNote::Table, AntennaNote::Id))
                .column(AntennaNote::AntennaId)
                .column(AntennaNote::NoteId)
                .from(AntennaNote::Table)
                .order_by((AntennaNote::Table, AntennaNote::Id), Order::Asc)
                .limit(read_limit)
                .to_owned();

            let mut stmt = stmt_base.clone();

            let client = redis::Client::open(cache_url).unwrap();
            let mut redis_conn = client.get_connection().unwrap();

            let mut remaining = total_num;
            let mut pagination: i64 = 0;

            loop {
                let res = db.query_all(bk.build(&stmt)).await?;
                if res.len() == 0 {
                    break;
                }
                let val: Vec<(String, String, String)> = res
                    .iter()
                    .filter_map(|q| q.try_get_many_by_index().ok())
                    .collect();

                remaining -= val.len() as i64;
                if remaining <= copy_limit {
                    let mut pipe = redis::pipe();
                    for v in &val {
                        pipe.xadd_maxlen(
                            format!("{}:antennaTimeline:{}", prefix, v.1),
                            StreamMaxlen::Approx(200),
                            "*",
                            &[("note", v.2.to_owned())],
                        )
                        .ignore();
                    }
                    pipe.query::<()>(&mut redis_conn).unwrap_or(());
                }

                let copied = total_num - remaining;
                let copied = std::cmp::min(copied, total_num);
                pagination += 1;
                if pagination % 10 == 0 {
                    println!(
                        "Migrating antenna [{:.2}%]",
                        (copied as f64 / total_num as f64) * 100_f64,
                    );
                }

                if let Some((last_id, _, _)) = val.last() {
                    stmt = stmt_base
                        .clone()
                        .and_where(
                            Expr::col((AntennaNote::Table, AntennaNote::Id)).gt(last_id.to_owned()),
                        )
                        .to_owned();
                } else {
                    break;
                }
            }

            println!("Migrating antenna [100.00%]");
        }

        manager
            .drop_table(
                Table::drop()
                    .table(AntennaNote::Table)
                    .if_exists()
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(AntennaNote::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(AntennaNote::Id)
                            .string_len(32)
                            .not_null()
                            .primary_key(),
                    )
                    .col(
                        ColumnDef::new(AntennaNote::NoteId)
                            .string_len(32)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(AntennaNote::AntennaId)
                            .string_len(32)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(AntennaNote::Read)
                            .boolean()
                            .default(false)
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_0d775946662d2575dfd2068a5f")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::AntennaId)
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_bd0397be22147e17210940e125")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::NoteId)
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_335a0bf3f904406f9ef3dd51c2")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::NoteId)
                    .col(AntennaNote::AntennaId)
                    .unique()
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_9937ea48d7ae97ffb4f3f063a4")
                    .table(AntennaNote::Table)
                    .col(AntennaNote::Read)
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("FK_0d775946662d2575dfd2068a5f5")
                    .from(AntennaNote::Table, AntennaNote::AntennaId)
                    .to(Antenna::Table, Antenna::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("FK_bd0397be22147e17210940e125b")
                    .from(AntennaNote::Table, AntennaNote::NoteId)
                    .to(Note::Table, Note::Id)
                    .on_delete(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum AntennaNote {
    Table,
    Id,
    #[iden = "noteId"]
    NoteId,
    #[iden = "antennaId"]
    AntennaId,
    Read,
}

#[derive(Iden)]
enum Antenna {
    Table,
    Id,
}

#[derive(Iden)]
enum Note {
    Table,
    Id,
}

@@ -1,60 +0,0 @@
use std::env;

use redis::Commands;
use sea_orm_migration::prelude::*;

use crate::get_timestamp;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, _manager: &SchemaManager) -> Result<(), DbErr> {
        // Replace the sample below with your own migration scripts
        let cache_url = env::var("CACHE_URL").unwrap();
        let prefix = env::var("CACHE_PREFIX").unwrap();

        let client = redis::Client::open(cache_url).unwrap();
        let mut redis_conn = client.get_connection().unwrap();

        let keys: Vec<String> = redis_conn
            .keys(format!("{}:antennaTimeline:*", prefix))
            .unwrap();
        let key_len = keys.len();

        println!(
            "Fixing corrupted stream IDs: {} timelines to be fixed",
            key_len
        );

        for (i, key) in keys.iter().enumerate() {
            let all_elems: Vec<Vec<Vec<String>>> = redis_conn.xrange_all(key).unwrap(); // Get all post IDs in stream
            let stream_ids = all_elems
                .iter()
                .map(|v| format!("{}-*", get_timestamp(&v[1][1]))); // Get correct stream id with timestamp
            redis_conn.del::<_, ()>(key).unwrap();
            for (j, v) in stream_ids.enumerate() {
                redis_conn
                    .xadd(key, v, &[("note", &all_elems[j][1][1])])
                    .unwrap_or(());
            }

            if i % 10 == 0 {
                println!(
                    "Fixing streams [{:.2}%]",
                    (i as f64 / key_len as f64) * 100_f64
                );
            }
        }

        println!("Fixing streams [100.00%]");

        Ok(())
    }

    async fn down(&self, _manager: &SchemaManager) -> Result<(), DbErr> {
        // Replace the sample below with your own migration scripts
        Ok(())
    }
}

@@ -1,74 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(User::Table)
                    .add_column(
                        ColumnDef::new(User::IsIndexable)
                            .boolean()
                            .not_null()
                            .default(true),
                    )
                    .to_owned(),
            )
            .await?;
        manager
            .alter_table(
                Table::alter()
                    .table(UserProfile::Table)
                    .add_column(
                        ColumnDef::new(UserProfile::IsIndexable)
                            .boolean()
                            .not_null()
                            .default(true),
                    )
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(User::Table)
                    .drop_column(User::IsIndexable)
                    .to_owned(),
            )
            .await?;
        manager
            .alter_table(
                Table::alter()
                    .table(UserProfile::Table)
                    .drop_column(UserProfile::IsIndexable)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }
}

/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum User {
    Table,
    #[iden = "isIndexable"]
    IsIndexable,
}

#[derive(Iden)]
enum UserProfile {
    Table,
    #[iden = "isIndexable"]
    IsIndexable,
}

@@ -1,117 +0,0 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(UserProfile::Table)
                    .drop_column(UserProfile::Integrations)
                    .to_owned(),
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(Meta::Table)
                    .drop_column(Meta::EnableTwitterIntegration)
                    .drop_column(Meta::TwitterConsumerKey)
                    .drop_column(Meta::TwitterConsumerSecret)
                    .drop_column(Meta::EnableGithubIntegration)
                    .drop_column(Meta::GithubClientId)
                    .drop_column(Meta::GithubClientSecret)
                    .drop_column(Meta::EnableDiscordIntegration)
                    .drop_column(Meta::DiscordClientId)
                    .drop_column(Meta::DiscordClientSecret)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Meta::Table)
                    .add_column(ColumnDef::new(Meta::DiscordClientSecret).string())
                    .add_column(ColumnDef::new(Meta::DiscordClientId).string())
                    .add_column(
                        ColumnDef::new(Meta::EnableDiscordIntegration)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .add_column(ColumnDef::new(Meta::GithubClientSecret).string())
                    .add_column(ColumnDef::new(Meta::GithubClientId).string())
                    .add_column(
                        ColumnDef::new(Meta::EnableGithubIntegration)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .add_column(ColumnDef::new(Meta::TwitterConsumerSecret).string())
                    .add_column(ColumnDef::new(Meta::TwitterConsumerKey).string())
                    .add_column(
                        ColumnDef::new(Meta::EnableTwitterIntegration)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .alter_table(
                Table::alter()
                    .table(UserProfile::Table)
                    .add_column(
                        ColumnDef::new(UserProfile::Integrations)
                            .json()
                            .default("{}"),
                    )
                    .to_owned(),
            )
            .await?;

        Ok(())
    }
}

#[derive(Iden)]
enum UserProfile {
    Table,
    #[iden = "integrations"]
    Integrations,
}

#[derive(Iden)]
enum Meta {
    Table,
    #[iden = "enableTwitterIntegration"]
    EnableTwitterIntegration,
    #[iden = "twitterConsumerKey"]
    TwitterConsumerKey,
    #[iden = "twitterConsumerSecret"]
    TwitterConsumerSecret,
    #[iden = "enableGithubIntegration"]
    EnableGithubIntegration,
    #[iden = "githubClientId"]
    GithubClientId,
    #[iden = "githubClientSecret"]
    GithubClientSecret,
    #[iden = "enableDiscordIntegration"]
    EnableDiscordIntegration,
    #[iden = "discordClientId"]
    DiscordClientId,
    #[iden = "discordClientSecret"]
    DiscordClientSecret,
}

@@ -1,109 +0,0 @@
use serde::Deserialize;
use std::env;
use std::fs;
use urlencoding::encode;

use sea_orm_migration::prelude::*;

const DB_URL_ENV: &str = "DATABASE_URL";
const CACHE_URL_ENV: &str = "CACHE_URL";
const CACHE_PREFIX_ENV: &str = "CACHE_PREFIX";

#[cfg(feature = "convert")]
mod vec_to_json;

#[tokio::main]
async fn main() {
    let cwd = env::current_dir().unwrap();
    let yml = fs::File::open(cwd.join("../../.config/default.yml"))
        .expect("Failed to open '.config/default.yml'");
    let config: Config = serde_yaml::from_reader(yml).expect("Failed to parse yaml");

    if env::var_os(DB_URL_ENV).is_none() {
        env::set_var(
            DB_URL_ENV,
            format!(
                "postgres://{}:{}@{}:{}/{}",
                config.db.user,
                encode(&config.db.pass),
                config.db.host,
                config.db.port,
                config.db.db,
            ),
        );
    };

    if env::var_os(CACHE_URL_ENV).is_none() {
        let redis_conf = match config.cache_server {
            None => config.redis,
            Some(conf) => conf,
        };
        let redis_proto = match redis_conf.tls {
            None => "redis",
            Some(_) => "rediss",
        };
        let redis_user = redis_conf.user.unwrap_or("default".to_string());
        let redis_uri_userpass = format!(
            "{}:{}",
            redis_user,
            encode(&redis_conf.pass.unwrap_or_default())
        );
        let redis_uri_hostport = format!("{}:{}", redis_conf.host, redis_conf.port);
        let redis_uri = format!(
            "{}://{}@{}/{}",
            redis_proto, redis_uri_userpass, redis_uri_hostport, redis_conf.db
        );
        env::set_var(CACHE_URL_ENV, redis_uri);
        env::set_var(
            CACHE_PREFIX_ENV,
            if redis_conf.prefix.is_empty() {
                config.url.host_str().unwrap()
            } else {
                &redis_conf.prefix
            },
        );
    }

    cli::run_cli(migration::Migrator).await;

    #[cfg(feature = "convert")]
    vec_to_json::convert().await;
}

#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
    pub url: url::Url,
    pub db: DbConfig,
    pub redis: RedisConfig,
    pub cache_server: Option<RedisConfig>,
}

#[derive(Debug, PartialEq, Deserialize)]
pub struct DbConfig {
    pub host: String,
    pub port: u32,
    pub db: String,
    pub user: String,
    pub pass: String,
}

#[derive(Debug, PartialEq, Deserialize)]
pub struct RedisConfig {
    pub host: String,
    pub port: u32,
    pub user: Option<String>,
    pub pass: Option<String>,
    pub tls: Option<TlsConfig>,
    #[serde(default)]
    pub db: u32,
    #[serde(default)]
    pub prefix: String,
}

#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TlsConfig {
    pub host: String,
    pub reject_unauthorized: bool,
}
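
Editor's note: one detail worth keeping in mind from this deleted entry point is that the database password is percent-encoded with `urlencoding::encode` before being spliced into `DATABASE_URL`, so reserved characters in the password cannot corrupt the connection string. A self-contained sketch of that behavior (credentials made up):

```rust
use urlencoding::encode;

fn main() {
    // Made-up credentials; '@' and ' ' must not appear raw inside a URL.
    let user = "firefish";
    let pass = "p@ss word";
    let url = format!("postgres://{}:{}@localhost:5432/firefish", user, encode(pass));
    assert_eq!(url, "postgres://firefish:p%40ss%20word@localhost:5432/firefish");
    println!("{url}");
}
```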

@@ -1,498 +0,0 @@
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use native_utils::model::entity::newtype::{I32Vec, StringVec};
use sea_orm_migration::{
    prelude::*,
    sea_orm::{Database, DbBackend, DbConn, Statement, TryGetable},
};
use serde_json::json;
use std::env;
use std::time::Duration;

pub async fn convert() {
    let uri = env::var("DATABASE_URL").expect("Environment variable 'DATABASE_URL' not set");

    let db = Database::connect(uri).await.expect("Unable to connect");
    let mp = MultiProgress::new();

    let handlers = vec![
        tokio::spawn(to_json::<AccessToken, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            AccessToken::Table,
            AccessToken::Id,
            AccessToken::Permission,
        )),
        tokio::spawn(to_json::<Antenna, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Antenna::Table,
            Antenna::Id,
            Antenna::Users,
        )),
        tokio::spawn(to_json::<App, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            App::Table,
            App::Id,
            App::Permission,
        )),
        tokio::spawn(to_json::<Emoji, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Emoji::Table,
            Emoji::Id,
            Emoji::Aliases,
        )),
        tokio::spawn(to_json::<GalleryPost, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            GalleryPost::Table,
            GalleryPost::Id,
            GalleryPost::FileIds,
        )),
        tokio::spawn(to_json::<GalleryPost, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            GalleryPost::Table,
            GalleryPost::Id,
            GalleryPost::Tags,
        )),
        tokio::spawn(to_json::<Hashtag, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Hashtag::Table,
            Hashtag::Id,
            Hashtag::MentionedUserIds,
        )),
        tokio::spawn(to_json::<Hashtag, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Hashtag::Table,
            Hashtag::Id,
            Hashtag::MentionedLocalUserIds,
        )),
        tokio::spawn(to_json::<Hashtag, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Hashtag::Table,
            Hashtag::Id,
            Hashtag::MentionedRemoteUserIds,
        )),
        tokio::spawn(to_json::<Hashtag, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Hashtag::Table,
            Hashtag::Id,
            Hashtag::AttachedUserIds,
        )),
        tokio::spawn(to_json::<Hashtag, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Hashtag::Table,
            Hashtag::Id,
            Hashtag::AttachedLocalUserIds,
        )),
        tokio::spawn(to_json::<Hashtag, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Hashtag::Table,
            Hashtag::Id,
            Hashtag::AttachedRemoteUserIds,
        )),
        tokio::spawn(to_json::<MessagingMessage, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            MessagingMessage::Table,
            MessagingMessage::Id,
            MessagingMessage::Reads,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::Langs,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::BlockedHosts,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::HiddenTags,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::PinnedUsers,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::PinnedPages,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::RecommendedInstances,
        )),
        tokio::spawn(to_json::<Meta, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Meta::Table,
            Meta::Id,
            Meta::SilencedHosts,
        )),
        tokio::spawn(to_json::<Note, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Note::Table,
            Note::Id,
            Note::FileIds,
        )),
        tokio::spawn(to_json::<Note, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Note::Table,
            Note::Id,
            Note::AttachedFileTypes,
        )),
        tokio::spawn(to_json::<Note, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Note::Table,
            Note::Id,
            Note::VisibleUserIds,
        )),
        tokio::spawn(to_json::<Note, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Note::Table,
            Note::Id,
            Note::Mentions,
        )),
        tokio::spawn(to_json::<Note, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Note::Table,
            Note::Id,
            Note::Emojis,
        )),
        tokio::spawn(to_json::<Note, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Note::Table,
            Note::Id,
            Note::Tags,
        )),
        tokio::spawn(to_json::<NoteEdit, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            NoteEdit::Table,
            NoteEdit::Id,
            NoteEdit::FileIds,
        )),
        tokio::spawn(to_json::<Page, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Page::Table,
            Page::Id,
            Page::VisibleUserIds,
        )),
        tokio::spawn(to_json::<RegistryItem, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            RegistryItem::Table,
            RegistryItem::Id,
            RegistryItem::Scope,
        )),
        tokio::spawn(to_json::<User, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            User::Table,
            User::Id,
            User::Tags,
        )),
        tokio::spawn(to_json::<User, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            User::Table,
            User::Id,
            User::Emojis,
        )),
        tokio::spawn(to_json::<Webhook, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Webhook::Table,
            Webhook::Id,
            Webhook::On,
        )),
        tokio::spawn(to_json::<Poll, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            Poll::Table,
            Poll::NoteId,
            Poll::Choices,
        )),
        tokio::spawn(to_json::<Poll, Vec<i32>, I32Vec>(
            db.clone(),
            mp.clone(),
            Poll::Table,
            Poll::NoteId,
            Poll::Votes,
        )),
        tokio::spawn(to_json::<UserProfile, Vec<String>, StringVec>(
            db.clone(),
            mp.clone(),
            UserProfile::Table,
            UserProfile::UserId,
            UserProfile::MutingNotificationTypes,
        )),
    ];

    futures::future::join_all(handlers).await;
}

fn select_query<T: Iden + 'static>(table: T, id: T, col: T) -> String {
    Query::select()
        .column(id)
        .column(col)
        .from(table)
        .to_string(PostgresQueryBuilder)
}

async fn get_vec<T: TryGetable>(db: &DbConn, query: String) -> Result<Vec<(String, T)>, DbErr> {
    let res: Vec<(String, T)> = db
        .query_all(Statement::from_string(DbBackend::Postgres, query))
        .await?
        .iter()
        .filter_map(|r| r.try_get_many_by_index().ok())
        .collect();
    Ok(res)
}

async fn convert_col<T: Iden + Clone + 'static>(
    db: &DbConn,
    table: T,
    col: T,
) -> Result<(), DbErr> {
    let stmt = Table::alter()
        .table(table)
        .drop_column(col.to_owned())
        .add_column(
            ColumnDef::new(col.to_owned())
                .json_binary()
                .not_null()
                .default(json!([])),
        )
        .to_string(PostgresQueryBuilder);
    db.query_one(Statement::from_string(DbBackend::Postgres, stmt))
        .await?;
    Ok(())
}

async fn to_json<T, U, V>(
    db: DbConn,
    mp: MultiProgress,
    table: T,
    id: T,
    col: T,
) -> Result<(), DbErr>
where
    T: Iden + Clone + 'static,
    U: TryGetable + IntoIterator + Clone,
    V: From<U> + Into<SimpleExpr>,
{
    let query = select_query(table.clone(), id.clone(), col.clone());
    let loading = ProgressBar::new_spinner()
        .with_style(ProgressStyle::with_template("{prefix} {msg} {spinner}").unwrap())
        .with_prefix("[-]")
        .with_message(format!(
            "Loading data from {}.{}",
            table.to_string(),
            col.to_string()
        ));
    let loading = mp.add(loading);
    loading.enable_steady_tick(Duration::from_millis(100));
    let res = get_vec::<U>(&db, query).await?;
    let models: Vec<(String, V)> = res
        .iter()
        .filter(|(_, r)| r.clone().into_iter().count() > 0)
        .map(|(id, r)| (id.clone(), <V>::from(r.clone())))
        .collect();
    loading.finish_and_clear();
    convert_col(&db, table.clone(), col.clone()).await?;

    let progress = ProgressBar::new(models.len() as u64)
        .with_style(
            ProgressStyle::with_template("{prefix} {msg} {wide_bar} {pos}/{len}")
                .unwrap()
                .progress_chars("##-"),
        )
        .with_prefix("[*]")
        .with_message(format!("Copying {}.{}", table.to_string(), col.to_string()));
    let progress = mp.add(progress);

    for model in models {
        progress.inc(1);
        let q = Query::update()
            .table(table.clone())
            .values([(col.clone(), model.1.into())])
            .and_where(Expr::col(id.clone()).eq(model.0))
            .to_string(PostgresQueryBuilder);
        db.query_one(Statement::from_string(DbBackend::Postgres, q))
            .await?;
    }
    progress.finish_with_message(format!("Done {}.{}", table.to_string(), col.to_string()));

    Ok(())
}

#[derive(Iden, Clone)]
enum AccessToken {
    Table,
    Id,
    Permission,
}
#[derive(Iden, Clone)]
enum Antenna {
    Table,
    Id,
    Users,
}
#[derive(Iden, Clone)]
enum App {
    Table,
    Id,
    Permission,
}
#[derive(Iden, Clone)]
enum Emoji {
    Table,
    Id,
    Aliases,
}
#[derive(Iden, Clone)]
enum GalleryPost {
    Table,
    Id,
    #[iden = "fileIds"]
    FileIds,
    Tags,
}
#[derive(Iden, Clone)]
enum Hashtag {
    Table,
    Id,
    #[iden = "mentionedUserIds"]
    MentionedUserIds,
    #[iden = "mentionedLocalUserIds"]
    MentionedLocalUserIds,
    #[iden = "mentionedRemoteUserIds"]
    MentionedRemoteUserIds,
    #[iden = "attachedUserIds"]
    AttachedUserIds,
    #[iden = "attachedLocalUserIds"]
    AttachedLocalUserIds,
    #[iden = "attachedRemoteUserIds"]
    AttachedRemoteUserIds,
}
#[derive(Iden, Clone)]
enum MessagingMessage {
    Table,
    Id,
    Reads,
}
#[derive(Iden, Clone)]
enum Meta {
    Table,
    Id,
    Langs,
    #[iden = "hiddenTags"]
    HiddenTags,
    #[iden = "blockedHosts"]
    BlockedHosts,
    #[iden = "pinnedUsers"]
    PinnedUsers,
    #[iden = "pinnedPages"]
    PinnedPages,
    #[iden = "recommendedInstances"]
    RecommendedInstances,
    #[iden = "silencedHosts"]
    SilencedHosts,
}
#[derive(Iden, Clone)]
enum Note {
    Table,
    Id,
    #[iden = "fileIds"]
    FileIds,
    #[iden = "attachedFileTypes"]
    AttachedFileTypes,
    #[iden = "visibleUserIds"]
    VisibleUserIds,
    Mentions,
    Emojis,
    Tags,
}
#[derive(Iden, Clone)]
enum NoteEdit {
    Table,
    Id,
    #[iden = "fileIds"]
    FileIds,
}
#[derive(Iden, Clone)]
enum Page {
    Table,
    Id,
    #[iden = "visibleUserIds"]
    VisibleUserIds,
}
#[derive(Iden, Clone)]
enum Poll {
    Table,
    #[iden = "noteId"]
    NoteId,
    Choices,
    Votes, // I32Vec
}
#[derive(Iden, Clone)]
enum RegistryItem {
    Table,
    Id,
    Scope,
}
#[derive(Iden, Clone)]
enum User {
    Table,
    Id,
    Tags,
    Emojis,
}
#[derive(Iden, Clone)]
enum UserProfile {
    Table,
    #[iden = "userId"]
    UserId,
    #[iden = "mutingNotificationTypes"]
    MutingNotificationTypes,
}
#[derive(Iden, Clone)]
enum Webhook {
    Table,
    Id,
    On,
}

@@ -34,10 +34,8 @@
 },
 "scripts": {
 	"artifacts": "napi artifacts",
-	"build": "pnpm run build:napi && pnpm run build:migration",
-	"build:napi": "napi build --features napi --platform --release ./built/",
-	"build:migration": "cargo build --locked --release --manifest-path ./migration/Cargo.toml && cp -v ./target/release/migration ./built/migration",
-	"build:debug": "napi build --features napi --platform ./built/ && cargo build --locked --manifest-path ./migration/Cargo.toml && cp -v ./target/debug/migration ./built/migration",
+	"build": "napi build --features napi --platform --release ./built/",
+	"build:debug": "napi build --features napi --platform ./built/",
 	"prepublishOnly": "napi prepublish -t npm",
 	"test": "pnpm run cargo:test && pnpm run build:napi && ava",
 	"universal": "napi universal",

@@ -6,12 +6,8 @@
 "scripts": {
 	"start": "pnpm node ./built/index.js",
 	"start:test": "NODE_ENV=test pnpm node ./built/index.js",
-	"migrate": "pnpm run migrate:typeorm && pnpm run migrate:cargo",
-	"migrate:typeorm": "typeorm migration:run -d ormconfig.js",
-	"migrate:cargo": "./native-utils/built/migration up",
-	"revertmigration": "pnpm run revertmigration:cargo && pnpm run revertmigration:typeorm",
-	"revertmigration:typeorm": "typeorm migration:revert -d ormconfig.js",
-	"revertmigration:cargo": "./native-utils/built/migration down",
+	"migrate": "typeorm migration:run -d ormconfig.js",
+	"revertmigration": "typeorm migration:revert -d ormconfig.js",
 	"check:connect": "node ./check_connect.js",
 	"build": "pnpm swc src -d built -D",
 	"build:debug": "pnpm swc src -d built -s -D",