Initial commit
Commit e7d2312a18
18 changed files with 2882 additions and 0 deletions
.gitignore (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
/target
.idea
*.pid
records.json
records.bak
Cargo.lock (generated, new file, 1852 lines)
File diff suppressed because it is too large
Cargo.toml (new file, 27 lines)
@@ -0,0 +1,27 @@
[package]
name = "fingerlink"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = ["server", "editor"]
server = ["tokio", "qpidfile", "axum"]
editor = ["reqwest", "tempfile", "which", "nix"]

[dependencies]
qpidfile = { version = "0.9.2", optional = true }
tokio = { version = "1.36.0", features = ["full"], optional = true }
# dashmap = { version = "5.5.3", features = ["inline"]}
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["fmt"] }
serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.113"
thiserror = "1.0.57"
clap = { version = "4.5.0", features = ["derive"]}
axum = { version = "0.7.4", optional = true }
reqwest = { version = "0.11.24", optional = true, features = ["rustls", "blocking", "json", "gzip", "brotli", "deflate"] }
tempfile = { version = "3.10.0", optional = true }
which = { version = "6.0.0", optional = true }
nix = { version = "0.27.1", optional = true, default-features = false, features = ["signal"] }
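Since both halves of the crate are optional features, either can be compiled on its own; an illustrative pair of cargo invocations (not part of the commit):

    cargo build --no-default-features --features server
    cargo build --no-default-features --features editor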
src/args_parser.rs (new file, 127 lines)
@@ -0,0 +1,127 @@
use std::net::IpAddr;
use std::path::PathBuf;
use clap::{Args, Parser, Subcommand, ValueEnum};
use tracing::Level;

#[derive(Debug, Parser)]
#[command(name = "Fingerlink", version, about, long_about = None)]
pub struct MainCommand {
    #[arg(long, value_enum, default_value = "info")]
    pub log_level: LoggingLevel,
    #[command(flatten)]
    pub data_paths: DataPaths,
    #[command(subcommand)]
    pub run_mode: Command
}
impl MainCommand {
    pub fn log_level(&self) -> Level {
        match self.log_level {
            LoggingLevel::Trace => Level::TRACE,
            LoggingLevel::Debug => Level::DEBUG,
            LoggingLevel::Info => Level::INFO,
            LoggingLevel::Warning => Level::WARN,
            LoggingLevel::Error => Level::ERROR
        }
    }
}

#[derive(Args, Debug)]
pub struct DataPaths {
    #[arg(long, short = 'd', default_value = "records.json")]
    pub database_path: PathBuf,
    #[arg(long, default_value = "server.pid")]
    pub pid_file_path: PathBuf,
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
pub enum LoggingLevel {
    Trace,
    Debug,
    Info,
    Warning,
    Error
}

#[derive(Debug, Clone, Args)]
pub struct ServerParameters {
    /// The IP address to bind to
    #[arg(long, short = 'b', default_value = "127.0.0.1")]
    pub bind_ip: IpAddr,
    /// The port to listen on
    #[arg(long, short = 'p', default_value = "8080")]
    pub bind_port: u16,
    /// Whether to ignore the PID file already existing.
    #[arg(long)]
    pub force_pid: bool,
    /// Limits how many rel parameters will be processed in a single query
    #[arg(long, default_value = "10")]
    pub max_allowed_rels_in_request: usize
}

#[derive(Debug, Args)]
pub struct SaveSettings {
    /// Specify this flag to write the fetched records into the database
    #[arg(long, short = 's')]
    pub save: bool,

    /// Save behaviour when encountering a collision
    #[arg(long, short = 'c', default_value = "overwrite-single-skip-multiple")]
    pub collision_handling: CollisionHandling,
}

#[derive(Debug, Subcommand)]
pub enum Command {
    /// Serve the database
    Serve(ServerParameters),
    /// Fetch one or multiple handles from Fediverse-compatible software
    Fetch {
        #[command(flatten)]
        save: SaveSettings,
        /// The handles to process
        handles: Vec<String>,
        /// Set this flag to treat HANDLES as a list of files that contain the handles to process
        #[arg(long)]
        handles_are_files: bool,
        /// The domain to rewrite the acquired handles' subject domain to. If left unspecified, the domain is kept as-is
        #[arg(long)]
        new_domain: Option<String>,
        #[command(flatten)]
        server_reload: ServerReloadOptions,
    },
    /// Runs a single query against the database and returns the Resource associated with the value
    Query {
        /// The resource to query for
        resource: String
    },
    /// Open the resource in your system editor
    Editor {
        #[command(flatten)]
        save: SaveSettings,
        /// Query for the resource to edit
        resource: String,
        #[command(flatten)]
        server_reload: ServerReloadOptions,
    }
}

#[derive(ValueEnum, Debug, Eq, PartialEq, Copy, Clone)]
pub enum CollisionHandling {
    /// Terminate import process when encountering a collision.
    /// Still inserts resources processed before detecting a collision
    Terminate,
    /// Skip adding new resource if it would collide with an existing resource
    Skip,
    /// Only overwrites if there's just a single resource the new item collides with
    OverwriteSingleSkipMultiple,
    /// Overwrites every already existing resource that the new item collides with
    OverwriteMultiple
    //TODO: Handlers to remove only the offending aliases, not the whole record?
}

#[derive(Args, Debug)]
pub struct ServerReloadOptions {
    /// Set this flag for the application to attempt to reload the running server process automatically
    #[arg(long, short = 'r')]
    pub reload_server: bool,
}
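A minimal sketch (not part of the commit) of how a command line maps onto these structs via clap's derived parser; the global flags sit on MainCommand, so they come before the subcommand:

    use clap::Parser;

    fn demo() {
        let cmd = MainCommand::try_parse_from([
            "fingerlink", "--log-level", "debug", "serve", "--bind-port", "3000",
        ]).expect("arguments should parse");
        // cmd.run_mode is Command::Serve(ServerParameters { bind_port: 3000, .. })
    }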
src/editor/commands.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod fetch;
pub mod query;
pub mod editor;
src/editor/commands/editor.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
use std::path::PathBuf;
use crate::args_parser::SaveSettings;
use crate::editor::open_in_editor::open_resource_in_editor;
use crate::schema::lookup_handler::LookupHandler;

pub fn editor(database_path: PathBuf, save_settings: SaveSettings, resource: String) {
    let resources = LookupHandler::load(&database_path).unwrap();
    let (index, resource) = resources.lookup_with_index(resource.as_str())
        .expect("Couldn't find a resource for that query");
    let resource = open_resource_in_editor(resource).unwrap();
    if save_settings.save {
        let mut resources = resources.into_inner();
        resources.0[index] = resource;
        resources.save(database_path).unwrap();
    }
}
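Illustrative invocation, with flag names following the derive attributes in src/args_parser.rs: fingerlink editor -s -r acct:user@domain.tld edits the matching record, saves it, and signals the running server to reload.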
src/editor/commands/fetch.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use tracing::{debug, error, info, instrument, trace, warn};
use crate::args_parser::{DataPaths, SaveSettings};
use crate::editor::finger_remote::finger_many_fedi;
use crate::schema::resource_list::ResourceList;

#[instrument(skip_all)]
pub fn fetch(database_path: PathBuf, save_settings: SaveSettings, handles: impl Iterator<Item = String>, handles_are_files: bool, new_domain: Option<String>) {
    let handles = handles.flat_map(|handle| {
        if handles_are_files {
            match std::fs::File::open(&handle) {
                Ok(file) => {
                    let mut file = BufReader::new(file);
                    let mut handles = vec![];
                    let mut buf = String::new();
                    // read_line returns Ok(0) at EOF, so loop on the byte count rather than
                    // is_ok(), which would never terminate; trim_end drops the trailing newline.
                    while file.read_line(&mut buf).map_or(false, |n| n > 0) {
                        handles.push(buf.trim_end().to_string());
                        buf.clear();
                    }
                    info!("Read {} handles from {handle}", handles.len());
                    handles
                }
                Err(e) => {
                    error!("Skipping file at {handle}: {e}");
                    vec![]
                }
            }
        } else { vec![handle] }
    });
    let new_resources = finger_many_fedi(handles).map(|mut new_resource| {
        info!("Fetched {}", new_resource.subject);
        if let Some(new_domain) = &new_domain {
            trace!("Processing {new_resource:?}");
            let current_subject = &new_resource.subject;
            let (start, old_domain) = match current_subject.rsplit_once('@') {
                None => {
                    warn!("Failed to parse the domain of {current_subject}, storing as-is.");
                    return new_resource
                },
                Some(data) => data
            };
            debug!("Replacing {old_domain} with {new_domain}");
            new_resource.add_new_primary_subject(format!("{start}@{new_domain}"));
            trace!("{new_resource:?}");
            new_resource
        } else { new_resource }
    });
    if save_settings.save {
        let mut resource_list = ResourceList::load(&database_path).unwrap();
        resource_list.merge_records(new_resources, save_settings.collision_handling);
        resource_list.save(database_path).unwrap();
    } else {
        for resource in new_resources {
            println!("{resource}")
        }
    }
}
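Illustrative invocation (flags per src/args_parser.rs): fingerlink fetch -s --new-domain newdomain.tld user@olddomain.tld fingers the handle, rewrites its subject domain, and merges it into records.json.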
src/editor/commands/query.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
use std::io::stdout;
use std::path::PathBuf;
use crate::schema::lookup_handler::LookupHandler;

pub fn query(database_path: PathBuf, handle: String) {
    let data = LookupHandler::load(database_path).unwrap();
    let resource = data.lookup(handle.trim()).unwrap();
    serde_json::to_writer_pretty(stdout(), resource).unwrap();
    println!()
}
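Illustrative invocation: fingerlink query acct:user@domain.tld pretty-prints the matching record to stdout.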
src/editor/finger_remote.rs (new file, 75 lines)
@@ -0,0 +1,75 @@
use std::fmt::Display;
use reqwest::blocking::Client;
use reqwest::Url;
use thiserror::Error;
use tracing::{debug, error, instrument};
use crate::editor::finger_remote::FediFingerError::UrlBuildFailed;
use crate::schema::resource::Resource;

/// Error type returned by `finger_url`
#[derive(Debug, Error)]
#[allow(clippy::enum_variant_names)]
pub enum FingerError {
    #[error("Failed to build request: {0}")]
    RequestBuildFailed(reqwest::Error),
    #[error("Failed to make request: {0}")]
    RequestFailed(reqwest::Error),
    #[error("Failed to parse response as JSON: {0}")]
    ParseFailed(reqwest::Error),
}

/// Run a WebFinger request at the provided URL and parse it as a `Resource`
#[instrument(skip(client))]
pub fn finger_url(client: &Client, url: Url) -> Result<Resource, FingerError> {
    use FingerError::*;
    debug!("Fingering {url}...");
    let request = client.get(url)
        .build().map_err(RequestBuildFailed)?;
    let response = client.execute(request).map_err(RequestFailed)?;
    debug!("Received response: {response:?}");
    let response = response.json::<Resource>().map_err(ParseFailed)?;
    debug!("Parsed response body: {response:?}");
    Ok(response)
}

/// Error type returned by `finger_fedi`
#[derive(Debug, Error)]
pub enum FediFingerError {
    #[error("Couldn't find the middle @ symbol")]
    MissingMiddleAt,
    #[error("Failed to build request URL")]
    UrlBuildFailed,
    #[error("Failed to run WebFinger request: {0}")]
    FingerFailed(#[from] FingerError)
}

/// Finger a Fediverse compatible handle
///
/// Connects to the server specified in the handle's domain and requests the user's Resource
#[instrument(skip(client))]
pub fn finger_fedi(client: &Client, handle: &str) -> Result<Resource, FediFingerError> {
    let handle = handle.strip_prefix('@').unwrap_or(handle);
    let (_account, domain) = handle.split_once('@').ok_or(FediFingerError::MissingMiddleAt)?;
    let account = format!("acct:{handle}");

    let url = Url::parse(format!("https://{domain}/.well-known/webfinger?resource={account}").as_str()).map_err(|_e| UrlBuildFailed)?;
    Ok(finger_url(client, url)?)
}

/// Finger multiple Fediverse compatible handles
///
/// Runs `finger_fedi` on the provided list of handles. If any requests fail, prints an error and continues.
pub fn finger_many_fedi(handles: impl Iterator<Item = impl AsRef<str> + Display>) -> impl Iterator<Item = Resource> {
    let client = Client::builder()
        .user_agent(concat!("Fingerlink/", env!("CARGO_PKG_VERSION")))
        .build().unwrap(); //Safety: setting user_agent can't cause an error.
    handles.filter_map(move |handle| {
        match finger_fedi(&client, handle.as_ref()) {
            Err(e) => {
                error!("Failed to finger {handle}: {e}");
                None
            }
            Ok(data) => Some(data)
        }
    })
}
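A minimal usage sketch (not part of the commit), assuming it runs inside a function that returns Result<(), FediFingerError>:

    let client = Client::new();
    let resource = finger_fedi(&client, "@user@domain.tld")?;
    println!("{}", resource.subject);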
src/editor/mod.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
pub mod finger_remote;
pub mod open_in_editor;
pub mod try_reload_server;
pub mod commands;
src/editor/open_in_editor.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
use std::fmt::{Debug, Display};
use std::io::{Read, Seek, SeekFrom, Write};
use std::process::Command;
use tempfile::NamedTempFile;
use thiserror::Error;
use tracing::{debug, instrument, trace};
use crate::schema::resource::Resource;

/// Error type returned by `spawn_editor`
#[derive(Debug, Error)]
pub enum EditorSpawnError {
    #[error("$EDITOR environment variable isn't set")]
    NoEditorEnv,
    #[error("failed to parse out absolute path of editor: {0}")]
    InvalidEditorPath(which::Error),
    #[error("failed to create temporary file for edit buffer: {0}")]
    TempFileCreationFailed(std::io::Error),
    #[error("failed to write buffer into temporary file: {0}")]
    BufferWriteFailed(std::io::Error),
    #[error("failed to spawn editor process: {0}")]
    EditorSpawnFailed(std::io::Error),
    #[error("failed to block until editor exits: {0}")]
    EditorWaitFailed(std::io::Error),
    #[error("editor failed to exit correctly")]
    EditorExitCode,
    #[error("failed to reopen buffer after editor returned: {0}")]
    BufferSeekFailed(std::io::Error),
    #[error("failed to read back the buffer from editor: {0}")]
    BufferReadbackFailed(std::io::Error),
}

/// Spawn the system editor to edit the provided `buffer`. Returns the edited buffer or an error if something goes wrong
#[instrument(skip_all)]
pub fn spawn_editor(buffer: impl AsRef<str> + Display) -> Result<String, EditorSpawnError> {
    use EditorSpawnError::*;
    trace!("Input buffer: {buffer}");
    // std::env::var reads $EDITOR at run time; option_env! would bake in whatever
    // value was present on the build machine instead.
    let editor = std::env::var("EDITOR").map_err(|_| NoEditorEnv)?;
    debug!("$EDITOR is {editor}");
    let editor = which::which(editor).map_err(InvalidEditorPath)?;
    debug!("$EDITOR's full path is {editor:?}");
    let mut temp_file = NamedTempFile::new().map_err(TempFileCreationFailed)?;
    debug!("Created temporary file at {temp_file:?}");
    temp_file.write_all(buffer.as_ref().as_bytes()).map_err(BufferWriteFailed)?;
    debug!("Wrote buffer");

    let mut editor = Command::new(editor);
    let editor = editor
        .args([temp_file.path().as_os_str()]);
    debug!("Spawning editor with command {editor:?}");
    let mut editor = editor.spawn().map_err(EditorSpawnFailed)?;
    let editor = editor.wait().map_err(EditorWaitFailed)?;
    debug!("Editor closed with {editor:?}");
    match editor.code() {
        Some(0) => {/*All good*/}
        None | Some(_) => {return Err(EditorExitCode)}
    }
    temp_file.seek(SeekFrom::Start(0)).map_err(BufferSeekFailed)?;
    let mut buffer = Default::default();
    temp_file.read_to_string(&mut buffer).map_err(BufferReadbackFailed)?;
    trace!("Read back buffer: {buffer}");
    Ok(buffer)
}

/// Error type returned by `open_resource_in_editor`
#[derive(Debug, Error)]
pub enum ResourceEditingError {
    #[error("Failed to pretty print resource")]
    PrettyPrintFailed(serde_json::Error),
    #[error("Downstream editor spawn failed: {0}")]
    EditorSpawnError(#[from] EditorSpawnError),
    #[error("Failed to parse edited JSON")]
    ParseFailed(serde_json::Error)
}

/// Opens the provided `resource` in the system editor and returns the edited version or an error if something goes wrong.
#[instrument(skip(resource))]
pub fn open_resource_in_editor(resource: &Resource) -> Result<Resource, ResourceEditingError> {
    use ResourceEditingError::*;
    let printed = serde_json::to_string_pretty(resource).map_err(PrettyPrintFailed)?;
    let edited = spawn_editor(printed)?;
    serde_json::from_str(edited.as_str()).map_err(ParseFailed)
}
src/editor/try_reload_server.rs (new file, 50 lines)
@@ -0,0 +1,50 @@
use std::path::{Path, PathBuf};
use std::io::Read;
use std::str::FromStr;
use nix::sys::signal::{kill, Signal};
use nix::unistd::Pid;
use thiserror::Error;
use tracing::{error, info};
use crate::args_parser::ServerReloadOptions;

#[derive(Debug, Error)]
pub enum ServerReloadError {
    #[error("Couldn't find PID file at {0}")]
    PIDFileNotFound(String),
    #[error("Failed to open PID file: {0}")]
    FailedToOpenPIDFile(std::io::Error),
    #[error("Failed to read PID from file: {0}")]
    FailedToReadPIDFile(std::io::Error),
    #[error("Failed to parse PID: {0}")]
    FailedToParsePID(#[from] std::num::ParseIntError),
    #[error("Failed to send signal: {0}")]
    FailedToSendSignal(#[from] nix::errno::Errno),
}

fn load_pid(pid_file_path: &Path) -> Result<Pid, ServerReloadError> {
    use ServerReloadError::*;
    if !pid_file_path.exists() {
        return Err(PIDFileNotFound(pid_file_path.to_string_lossy().to_string()))
    }
    let mut file = std::fs::File::open(pid_file_path).map_err(FailedToOpenPIDFile)?;
    let mut buffer = Default::default();
    file.read_to_string(&mut buffer).map_err(FailedToReadPIDFile)?;
    let pid = FromStr::from_str(buffer.trim())?;
    Ok(Pid::from_raw(pid))
}

fn try_reload_server(pid_file_path: &Path) -> Result<(), ServerReloadError> {
    let pid = load_pid(pid_file_path)?;
    kill(pid, Signal::SIGHUP)?;
    Ok(())
}

pub fn reload(pid_file_path: PathBuf, options: ServerReloadOptions) {
    if options.reload_server {
        info!("Attempting to reload server...");
        match try_reload_server(pid_file_path.as_path()) {
            Ok(_) => {info!("Server reloading!")}
            Err(e) => {error!("Failed to reload server: {e}")}
        }
    }
}
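The SIGHUP sent here is caught by hangup_handler in src/server/mod.rs below, which re-reads the database without restarting the listener; the manual equivalent is roughly kill -HUP $(cat server.pid).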
src/main.rs (new file, 50 lines)
@@ -0,0 +1,50 @@
use clap::Parser;
use tracing::info;
use tracing_subscriber::util::SubscriberInitExt;
use crate::args_parser::Command;
use crate::editor::open_in_editor::open_resource_in_editor;
use editor::commands::fetch::fetch;
use editor::commands::query::query;
use crate::editor::commands::editor::editor;
use crate::editor::try_reload_server::reload;
use crate::schema::lookup_handler::LookupHandler;

mod schema;
#[cfg(feature = "editor")]
mod editor;
#[cfg(feature = "server")]
mod server;
mod args_parser;

#[cfg(all(not(feature = "editor"), not(feature = "server")))]
compile_error!("Please enable either the \"editor\" or the \"server\" feature");

fn main() {
    let args = args_parser::MainCommand::parse();
    tracing_subscriber::FmtSubscriber::builder()
        .with_max_level(args.log_level())
        .finish().init();
    let args_parser::MainCommand { data_paths, run_mode, .. } = args;
    match run_mode {
        Command::Serve(params) => {
            #[cfg(not(feature = "server"))]
            {
                panic!("Server mode has not been compiled into this executable. Please rebuild with the \"server\" feature enabled.")
            }
            #[cfg(feature = "server")]
            server::init(data_paths, params);
        }
        Command::Fetch { save, handles, handles_are_files, new_domain, server_reload } => {
            fetch(data_paths.database_path, save, handles.into_iter(), handles_are_files, new_domain);
            reload(data_paths.pid_file_path, server_reload);
        }
        Command::Query { resource } => {
            query(data_paths.database_path, resource)
        }
        Command::Editor { save, resource, server_reload } => {
            editor(data_paths.database_path, save, resource);

            reload(data_paths.pid_file_path, server_reload);
        }
    }
}
src/schema/lookup_handler.rs (new file, 143 lines)
@@ -0,0 +1,143 @@
use std::collections::HashMap;
use std::fmt::Debug;
use tracing::{debug, info, instrument};
use std::path::Path;
use thiserror::Error;
use crate::schema::resource::Resource;
use crate::schema::resource_list::{ResourceList, ResourceLoadError};

/// Handles looking up resources based on the ?resource={} URL parameter
#[derive(Debug)]
pub struct LookupHandler {
    resources: ResourceList,
    lookup: HashMap<String, usize>,
}

#[derive(Debug, Error)]
pub enum DataLoadError {
    #[error("failed to load underlying resource: {0}")]
    ResourceLoadError(#[from] ResourceLoadError),
    #[error("duplicate lookup name \"{duplicated}\" found between subjects {subjects:?}")]
    DuplicateLookupFound {
        duplicated: String,
        subjects: [String; 2],
    },
}

impl LookupHandler {
    /// Load and prepare a new LookupHandler from the file at `path`
    #[instrument(level = "debug", skip(path))]
    pub fn load(path: impl AsRef<Path> + Debug) -> Result<Self, DataLoadError> {
        Self::build_from_resource_list(ResourceList::load(path)?)
    }
    fn load_from_reader(reader: impl std::io::Read) -> Result<Self, DataLoadError> {
        Self::build_from_resource_list(ResourceList::load_from_reader(reader)?)
    }

    fn build_from_resource_list(resources: ResourceList) -> Result<Self, DataLoadError> {
        let mut lookup = HashMap::new();
        debug!("Building lookup map...");
        for (index, resource) in resources.0.iter().enumerate() {
            for lookup_to_add in resource.keys() {
                let lookup_to_add = lookup_to_add.to_lowercase();
                debug!("Adding {lookup_to_add} for {}", resource.subject);
                if let Some(duplicate) = lookup.insert(lookup_to_add.clone(), index) {
                    return Err(DataLoadError::DuplicateLookupFound {
                        duplicated: lookup_to_add,
                        subjects: [
                            resources.0[duplicate].subject.clone(),
                            resource.subject.clone(),
                        ],
                    });
                }
            }
        }
        info!("Aggregated {} lookup strings", lookup.len());
        Ok(LookupHandler { resources, lookup })
    }

    #[instrument(level = "debug")]
    pub fn lookup(&self, resource_query: &str) -> Option<&Resource> {
        self.lookup_with_index(resource_query).map(|(_index, resource)| resource)
    }

    pub fn lookup_with_index(&self, resource_query: &str) -> Option<(usize, &Resource)> {
        // The map is keyed on lowercased strings, so lowercase the query too.
        let resource_index = *self.lookup.get(&resource_query.to_lowercase())?;
        let found_resource = &self.resources.0[resource_index];
        debug!("Lookup for {resource_query} returned {found_resource:?}");
        Some((resource_index, found_resource))
    }

    pub fn into_inner(self) -> ResourceList {
        self.resources
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn invalid_json() {
        let invalid = "{".as_bytes();
        let result = LookupHandler::load_from_reader(invalid);
        if let Err(DataLoadError::ResourceLoadError(ResourceLoadError::FileParse(_))) = result {
            // All good!
        } else {
            panic!("LookupHandler passed invalid JSON")
        }
    }

    #[test]
    fn duplicate_subject() {
        let duplicated = "[{\"subject\": \"testing\"},{\"subject\": \"testing\"}]".as_bytes();
        let result = LookupHandler::load_from_reader(duplicated);
        if let Err(DataLoadError::DuplicateLookupFound { duplicated, subjects }) = result {
            assert_eq!(duplicated, "testing".to_string());
            assert_eq!(subjects, ["testing".to_string(), "testing".to_string()]);
        } else {
            panic!("LookupHandler::load() returned {result:?}");
        };
    }

    #[test]
    fn duplicate_alias() {
        let duplicated = "[{\"subject\": \"testing\"},{\"subject\": \"more_testing\", \"aliases\": [\"testing\"]}]".as_bytes();
        let result = LookupHandler::load_from_reader(duplicated);
        if let Err(DataLoadError::DuplicateLookupFound { duplicated, mut subjects }) = result {
            assert_eq!(duplicated, "testing".to_string());
            subjects.sort(); // Because we don't care about order for testing purposes
            assert_eq!(subjects, ["more_testing".to_string(), "testing".to_string()]);
        } else {
            panic!("LookupHandler::load() returned {result:?}");
        };
    }

    #[test]
    fn successful_query() {
        let data = "[{\"subject\":\"testing\"},{\"subject\":\"more_testing\"}]".as_bytes();
        let data = LookupHandler::load_from_reader(data).unwrap();
        for subject in ["testing", "more_testing"] {
            assert_eq!(data.lookup(subject), Some(&Resource {
                subject: subject.to_string(),
                aliases: None,
                properties: None,
                links: None,
            }));
        }
    }

    #[test]
    fn successful_alias_query() {
        let data = "[{\"subject\":\"testing\",\"aliases\":[\"alias1\",\"alias2\"]},{\"subject\":\"red herring\",\"aliases\":[\"alias\",\"1\", \"2\"]}]".as_bytes();
        let data = LookupHandler::load_from_reader(data).unwrap();
        for subject in ["alias1", "alias2"] {
            assert_eq!(data.lookup(subject), Some(&Resource {
                subject: "testing".to_string(),
                aliases: Some(vec!["alias1".to_string(), "alias2".to_string()]),
                properties: None,
                links: None,
            }));
        }
    }
}
src/schema/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod resource;
pub mod lookup_handler;
pub mod resource_list;
src/schema/resource.rs (new file, 110 lines)
@@ -0,0 +1,110 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt::{Debug, Display, Formatter};
use tracing::debug;

/// A single WebFinger resource
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Resource {
    pub subject: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub aliases: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<HashMap<String, Option<String>>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub links: Option<Vec<Link>>,
}

impl Display for Resource {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let pretty = serde_json::to_string_pretty(self).unwrap();
        write!(f, "{pretty}")
    }
}

impl Resource {
    /// Returns every lookup key of the record: all aliases followed by the subject.
    /// If the `aliases` field is entirely missing, only the subject is yielded.
    pub fn keys(&self) -> impl Iterator<Item = &String> {
        let aliases = if let Some(aliases) = &self.aliases {
            aliases.as_slice()
        } else {
            &[]
        };

        aliases.iter().chain(std::iter::once(&self.subject))
    }

    pub fn add_new_primary_subject(&mut self, mut subject: String) {
        debug!("Swapping new and old subject");
        std::mem::swap(&mut subject, &mut self.subject);
        debug!("Pushing current subject into aliases");
        if let Some(ref mut aliases) = self.aliases {
            if !aliases.contains(&subject) {
                aliases.push(subject)
            } else {
                debug!("self.aliases already contained subject, skipping")
            }
        } else {
            debug!("Empty self.aliases, creating new array.");
            self.aliases = Some(vec![subject])
        }
    }
}

/// A link contained within a WebFinger resource
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Link {
    pub rel: String,
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub media_type: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub href: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub titles: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<HashMap<String, Option<String>>>,
}

#[cfg(test)]
/// Functions to generate data for testing functions that manipulate `Resource` structs
pub mod test_data {
    use crate::schema::resource::Resource;
    pub fn barebones_user() -> Resource {
        Resource {
            subject: "acct:user@domain.tld".to_string(),
            aliases: None,
            properties: None,
            links: None,
        }
    }
    pub fn user_with_single_alias() -> Resource {
        Resource {
            subject: "acct:user@domain.tld".to_string(),
            aliases: Some(vec![
                "https://domain.tld/@user".to_string()
            ]),
            properties: None,
            links: None,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn insert_new_primary_subject_into_barebones_user() {
        let mut data = test_data::barebones_user();
        data.add_new_primary_subject("username".to_string());
        assert_eq!(data, Resource {
            subject: "username".to_string(),
            aliases: Some(vec![test_data::barebones_user().subject]),
            properties: None,
            links: None,
        })
    }
}
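A sketch (not part of the commit) of building one record with these structs; the rel, type and href values are illustrative, everything else mirrors the definitions above:

    let resource = Resource {
        subject: "acct:user@domain.tld".to_string(),
        aliases: Some(vec!["https://domain.tld/@user".to_string()]),
        properties: None,
        links: Some(vec![Link {
            rel: "self".to_string(),
            media_type: Some("application/activity+json".to_string()),
            href: Some("https://domain.tld/users/user".to_string()),
            titles: None,
            properties: None,
        }]),
    };
    println!("{resource}"); // pretty-printed JSON via the Display impl above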
src/schema/resource_list.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
use std::collections::HashSet;
use tracing::{debug, info, instrument, trace, warn};
use std::path::Path;
use std::fmt::Debug;
use thiserror::Error;
use crate::args_parser::CollisionHandling;
use crate::schema::resource::Resource;

#[derive(Debug, Clone)]
pub struct ResourceList(pub Vec<Resource>);

#[derive(Debug, Error)]
pub enum ResourceLoadError {
    #[error("failed to open the resource database: {0}")]
    FileOpen(#[from] std::io::Error),
    #[error("failed to parse the resource database: {0}")]
    FileParse(#[from] serde_json::Error),
}

#[derive(Debug, Error)]
pub enum ResourceSaveError {
    #[error("Failed to open the resource database for writing: {0}")]
    FileOpen(std::io::Error),
    #[error("Failed to create backup of database before writing: {0}")]
    BackupFailed(std::io::Error),
    #[error("Failed to write the resource database: {0}")]
    FileSerialisation(#[from] serde_json::Error),
}

impl ResourceList {
    /// Loads the `Resource`s from the given `path`
    #[instrument(level = "debug")]
    pub fn load(path: impl AsRef<Path> + Debug) -> Result<Self, ResourceLoadError> {
        info!("Loading data from {path:?}...");
        let file = std::fs::File::open(path)?;
        Self::load_from_reader(file)
    }

    /// Loads the `Resource`s from the given reader
    pub fn load_from_reader(reader: impl std::io::Read) -> Result<Self, ResourceLoadError> {
        let reader = std::io::BufReader::new(reader);
        debug!("Parsing as JSON...");
        let resources: Vec<Resource> = serde_json::from_reader(reader)?;
        info!("Loaded {} resources", resources.len());
        trace!("{resources:?}");
        Ok(Self(resources))
    }

    #[instrument(level = "debug", skip(path, self))]
    pub fn save(&self, path: impl AsRef<Path> + Debug) -> Result<(), ResourceSaveError> {
        info!("Creating backup before writing...");
        let path = path.as_ref();
        std::fs::copy(path, path.with_extension("bak")).map_err(ResourceSaveError::BackupFailed)?;
        info!("Writing data to {path:?}...");
        let file = std::fs::File::create(path).map_err(ResourceSaveError::FileOpen)?;
        self.save_to_writer(file)
    }

    pub fn save_to_writer(&self, writer: impl std::io::Write) -> Result<(), ResourceSaveError> {
        trace!("{self:?}");
        let writer = std::io::BufWriter::new(writer);
        debug!("Serialising JSON...");
        Ok(serde_json::to_writer(writer, &self.0)?)
    }

    /// Merges `new_records` into the `ResourceList` with lookup collisions being handled as defined in `collision_handling`
    #[instrument(level = "debug", skip(self, new_records))]
    pub fn merge_records(&mut self, new_records: impl Iterator<Item = Resource>, collision_handling: CollisionHandling) -> &ResourceList {
        debug!("Building hashset of already taken queries...");
        let unique_check: HashSet<String> = HashSet::from_iter(self.0.iter().flat_map(Resource::keys).cloned());
        for record in new_records {
            let record_keys = HashSet::from_iter(record.keys().cloned());
            let collisions = unique_check.intersection(&record_keys).collect::<HashSet<_>>();
            if !collisions.is_empty() {
                warn!("Resource collision detected with {}: {collisions:?}", record.subject);
                match collision_handling {
                    CollisionHandling::Skip => {
                        warn!("Skipping record...");
                        continue
                    }
                    CollisionHandling::OverwriteSingleSkipMultiple => {
                        let mut collided_resources =
                            self.0.iter().enumerate()
                                .filter(|record| record.1.keys().any(|elem| collisions.contains(elem)));
                        if let Some((collided_index, collided_resource)) = collided_resources.next() {
                            if collided_resources.next().is_some() {
                                warn!("Resource collides with multiple records, skipping...");
                                continue
                            }
                            warn!("Removing {}", collided_resource.subject);
                            self.0.remove(collided_index);
                        }
                    }
                    CollisionHandling::OverwriteMultiple => {
                        warn!("Overwriting already existing record(s) with new data...");
                        self.0.retain(|record| {
                            if record.keys().any(|elem| collisions.contains(elem)) {
                                warn!("Removing {record:?}");
                                false
                            } else { true }
                        });
                    }
                    CollisionHandling::Terminate => {
                        warn!("Collision found, terminating merge process...");
                        return self
                    }
                }
            }
            info!("Inserting {}", record.subject);
            self.0.push(record);
        }

        self
    }
}
src/server/mod.rs (new file, 152 lines)
@@ -0,0 +1,152 @@
use std::io::Read;
use std::net::SocketAddr;
use std::ops::DerefMut;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::sync::RwLock;
use axum::body::Body;
use axum::extract::{Request, State};
use axum::http::StatusCode;
use axum::response::Response;
use axum::Router;
use axum::routing::get;
use tokio::select;
use tokio::signal::unix::SignalKind;
use tracing::{debug, error, info, instrument, trace};
use crate::args_parser::{DataPaths, ServerParameters};
use crate::schema::lookup_handler::LookupHandler;
use crate::schema::resource::Resource;

pub fn init(data_paths: DataPaths, server_parameters: ServerParameters) {
    let DataPaths { database_path, pid_file_path } = data_paths;
    //TODO: Apparently you're supposed to keep this file around and verify that the PID in it is still active?
    debug!("Attempting to install PID file at {pid_file_path:?}");
    if let Ok(mut file) = std::fs::File::open(&pid_file_path) {
        if !server_parameters.force_pid {
            let mut buffer = String::new();
            if file.read_to_string(&mut buffer).is_ok() {
                panic!("Server already running with PID {} according to {pid_file_path:?}. If you wish to overwrite it, pass the --force-pid parameter.", buffer.trim());
            }
            panic!("PID file already exists at path {pid_file_path:?}, but I was unable to open it. If you wish to try to overwrite it, pass the --force-pid parameter.");
        }
        info!("Found PID file at {pid_file_path:?}, overwriting...");
    }
    let pid_file = qpidfile::Pidfile::new(pid_file_path).unwrap();

    let runtime = match tokio::runtime::Runtime::new() {
        Err(e) => {
            error!("Failed to create tokio runtime: {e}");
            return;
        }
        Ok(runtime) => runtime
    };
    if let Err(e) = runtime.block_on(async_main(database_path, server_parameters)) {
        error!("Failed to block on server's async_main: {e}");
    }
    // To ensure that the pid_file variable lives until the end of the function
    drop(pid_file);
}

#[instrument(skip_all)]
async fn async_main(db_path: PathBuf, server_parameters: ServerParameters) -> Result<(), std::io::Error> {
    info!("Initialising server...");
    let datastore = Arc::new(RwLock::new(LookupHandler::load(&db_path).unwrap()));
    let _handler = tokio::spawn(hangup_handler(db_path, datastore.clone()));

    let listen_address = SocketAddr::new(server_parameters.bind_ip, server_parameters.bind_port);

    let router = Router::new()
        .route("/.well-known/webfinger", get(run_webfinger_query))
        .with_state(datastore);

    let listener = tokio::net::TcpListener::bind(listen_address).await.unwrap();
    axum::serve(listener, router)
        .with_graceful_shutdown(graceful_shutdown_handler())
        .await
}

async fn run_webfinger_query(State(datastore): State<Arc<RwLock<LookupHandler>>>, request: Request) -> Result<Response, StatusCode> {
    let uri = request.uri();
    info!("Received query with {uri}");
    let query = uri.query().ok_or(StatusCode::BAD_REQUEST)?;
    debug!("Query string is {query}");
    let params = query.split('&').filter_map(|query_part| {
        trace!("Processing part {query_part}");
        query_part.split_once('=')
    }).collect::<Vec<_>>();
    trace!("Query parts are {params:?}");

    let mut resource_query_iter = params.iter().filter_map(|(key, value)| if key.trim() == "resource" {Some(value)} else {None});
    let resource_query = *resource_query_iter.next().ok_or(StatusCode::BAD_REQUEST)?;
    if resource_query_iter.next().is_some() {
        debug!("Multiple resource parameters provided, bailing");
        return Err(StatusCode::BAD_REQUEST)
    }

    let resource = datastore.read().await.lookup(resource_query).ok_or(StatusCode::NOT_FOUND)?.clone();
    debug!("Found resource: {resource:?}");

    let mut rels = params.into_iter().filter_map(|(key, value)| if key.trim() == "rel" {Some(value)} else {None}).peekable();

    let response_body = if rels.peek().is_some() {
        debug!("There were rel parameters in the query");
        if let Some(links) = resource.links {
            debug!("Filtering links...");
            Resource {
                links: Some(rels.filter_map(|rel| links.iter().find(|link| link.rel == rel).cloned()).collect()),
                ..resource
            }
        } else {
            resource
        }
    } else {
        resource
    };

    debug!("Responding with {response_body:?}");

    Response::builder()
        .header("Content-Type", "application/jrd+json")
        .body(Body::new(serde_json::to_string(&response_body).map_err(|e| {
            error!("Server error occurred while serialising response body: {e}");
            StatusCode::INTERNAL_SERVER_ERROR
        })?))
        .map_err(|e| {
            error!("Server error occurred while building response: {e}");
            StatusCode::INTERNAL_SERVER_ERROR
        })
}

#[instrument(skip_all)]
async fn hangup_handler(db_path: PathBuf, datastore: Arc<RwLock<LookupHandler>>) {
    let db_path = Path::new(&db_path);
    debug!("Installing SIGHUP handler...");

    let mut handler = tokio::signal::unix::signal(SignalKind::hangup()).unwrap();
    while handler.recv().await.is_some() {
        info!("Received SIGHUP, reloading data...");
        match LookupHandler::load(db_path) {
            Ok(mut handler) => {
                info!("Data parsed, waiting for write lock on datastore...");
                let mut lock = datastore.write().await;
                std::mem::swap(lock.deref_mut(), &mut handler);
                info!("Data updated!");
            }
            Err(error) => {error!("Failed to reload datastore: {error}")}
        }
    }
}

async fn graceful_shutdown_handler() {
    debug!("Installing graceful shutdown handler...");
    let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt()).unwrap();
    let sigint = sigint.recv();
    let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate()).unwrap();
    let sigterm = sigterm.recv();

    select! {
        biased; // We don't care about fairness in poll order
        Some(()) = sigint => info!("SIGINT received, shutting down..."),
        Some(()) = sigterm => info!("SIGTERM received, shutting down..."),
    }
}
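A hedged client-side sketch (not part of the commit) of querying the route above once the server is running on the default 127.0.0.1:8080, using the blocking reqwest API the editor feature already pulls in:

    fn lookup_remote() -> Result<Resource, reqwest::Error> {
        let url = "http://127.0.0.1:8080/.well-known/webfinger?resource=acct:user@domain.tld&rel=self";
        reqwest::blocking::get(url)?.json::<Resource>()
    }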