Compare commits

...

16 commits

Author SHA1 Message Date
cca12f8d34 Update dependencies ()
Reviewed-on: 
2025-02-23 21:18:10 +01:00
e47d9770b3 Bump version 2025-02-23 21:17:15 +01:00
4423ed6099 Update dependencies 2025-02-23 21:12:52 +01:00
fdfc103554 Better error message for PID file creation failure ()
Reviewed-on: 
2025-02-23 21:02:36 +01:00
2c6f58b112 Make Fingerlink more configurable in docker ()
Reviewed-on: 
Co-authored-by: ArcaneSpark <arcanespark@noreply.code.thishorsie.rocks>
Co-committed-by: ArcaneSpark <arcanespark@noreply.code.thishorsie.rocks>
2025-02-23 20:53:24 +01:00
cb783226c6 Better error message for PID file creation failure 2025-02-23 20:51:12 +01:00
0795d6fa4e Merge pull request 'Add template field to Link because ostatus subscribe schema needs it.' () from template_link_type into master
Reviewed-on: 
2024-05-07 18:06:20 +02:00
efc7b83930 Add template field to Link because ostatus subscribe schema needs it.
Also fixes a bug where Resource::compress lost every link and adds a lot more tests around that functionality.
2024-05-07 18:04:43 +02:00
8eafcf86b9 Server only docker image, but the action's documentation is apparently wrong. 2024-04-01 16:55:07 +02:00
6cd503711f Server only docker image 2024-04-01 16:53:23 +02:00
a38ab85320 Bump version and update dependencies 2024-04-01 16:31:08 +02:00
12cb4fc254 Add more testing around case-insensitivity and patch an error in merge_records creating duplicates in the database 2024-04-01 16:30:37 +02:00
92c3239c00 Merge pull request 'Slim down tokio features' () from leaner-tokio into master
Reviewed-on: 
2024-03-18 02:13:02 +01:00
3dbe0eb589 Slim down tokio features 2024-03-18 02:11:01 +01:00
1804f455f6 Docker job shouldn't require test, we'll make sure the tests run in some other way. 2024-03-18 01:56:13 +01:00
2e86deb862 Enable very rudimentary CI/CD ()
Reviewed-on: 
Co-authored-by: Karcsesz <git@karcsesz.hu>
Co-committed-by: Karcsesz <git@karcsesz.hu>
2024-03-18 01:54:16 +01:00
10 changed files with 741 additions and 483 deletions

@@ -0,0 +1,27 @@
on:
  push:
    branches:
      - master
jobs:
  docker:
    name: docker build
    runs-on: ubuntu-22.04
    steps:
      - uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME}}
          password: ${{ secrets.DOCKERHUB_TOKEN}}
      - uses: docker/build-push-action@v5
        with:
          push: true
          tags: karcsesz/fingerlink:latest
      - uses: docker/build-push-action@v5
        with:
          push: true
          tags: karcsesz/fingerlink:latest-nano
          build-args: "editor=nano"
      - uses: docker/build-push-action@v5
        with:
          push: true
          tags: karcsesz/fingerlink:latest-server
          file: Dockerfile.serveonly

@@ -0,0 +1,9 @@
on: [push]
jobs:
  test:
    name: cargo test
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4
      - uses: actions/rust-toolchain@stable
      - run: cargo test --all-features

881
Cargo.lock generated

File diff suppressed because it is too large

@@ -1,6 +1,6 @@
[package]
name = "fingerlink"
version = "0.1.0"
version = "0.1.2"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,19 +12,19 @@ editor = ["reqwest", "tempfile", "which", "nix"]
[dependencies]
qpidfile = { version = "0.9.2", optional = true }
tokio = { version = "1.36.0", features = ["full"], optional = true }
tokio = { version = "1.43.0", features = ["rt-multi-thread", "signal"], optional = true }
# dashmap = { version = "5.5.3", features = ["inline"]}
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["fmt"] }
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114"
thiserror = "1.0.58"
clap = { version = "4.5.3", features = ["derive"]}
axum = { version = "0.7.4", optional = true }
reqwest = { version = "0.11.26", optional = true, default-features = false, features = ["rustls-tls", "blocking", "json", "gzip", "brotli", "deflate"] }
tempfile = { version = "3.10.1", optional = true }
which = { version = "6.0.0", optional = true }
nix = { version = "0.28.0", optional = true, default-features = false, features = ["signal"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["fmt"] }
serde = { version = "1.0.218", features = ["derive"] }
serde_json = "1.0.139"
thiserror = "2.0.11"
clap = { version = "4.5.30", features = ["derive"] }
axum = { version = "0.8.1", optional = true }
reqwest = { version = "0.12.12", optional = true, default-features = false, features = ["rustls-tls", "blocking", "json", "gzip", "brotli", "deflate"] }
tempfile = { version = "3.17.1", optional = true }
which = { version = "7.0.2", optional = true }
nix = { version = "0.29.0", optional = true, default-features = false, features = ["signal"] }
urlencoding = { version = "2.1.3"}
[profile.release] # 💛 @Ryze@equestria.social
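
Note on the tokio change above: the "Slim down tokio features" commit swaps the catch-all `full` feature for just `rt-multi-thread` and `signal`. As a rough, hypothetical sketch (not Fingerlink's actual server code), those two features are enough for the pattern the server relies on: build a multi-threaded runtime with `Runtime::new()` and block until a shutdown signal arrives.

```rust
// Hypothetical sketch only; assumes a Cargo.toml with
// tokio = { version = "1", features = ["rt-multi-thread", "signal"] }.
fn main() {
    // "rt-multi-thread" provides Runtime::new() (a multi-threaded runtime).
    let runtime = tokio::runtime::Runtime::new().expect("failed to build tokio runtime");

    runtime.block_on(async {
        // "signal" provides ctrl_c(); a real server would also be serving requests here.
        tokio::signal::ctrl_c()
            .await
            .expect("failed to listen for the shutdown signal");
        println!("shutting down");
    });
}
```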

28
Dockerfile Normal file
@@ -0,0 +1,28 @@
FROM rust:latest as builder
LABEL authors="karcsesz"
ARG target=x86_64-unknown-linux-musl
RUN apt update && apt install -y musl-tools musl-dev
RUN update-ca-certificates
RUN rustup target add $target
WORKDIR /fingerlink
COPY . .
RUN cargo build --target $target --release
FROM alpine:latest
ARG editor=vim
RUN apk add --no-cache $editor
ENV EDITOR=$editor
ENV PATH="${PATH}:/fingerlink"
WORKDIR /fingerlink
COPY --from=builder /fingerlink/target/x86_64-unknown-linux-musl/release/fingerlink /fingerlink/fingerlink
ENTRYPOINT ["/fingerlink/fingerlink"]
CMD ["serve", "--help"]

24
Dockerfile.serveonly Normal file
@@ -0,0 +1,24 @@
FROM rust:latest as builder
LABEL authors="karcsesz"
ARG target=x86_64-unknown-linux-musl
RUN apt update && apt install -y musl-tools musl-dev
RUN update-ca-certificates
RUN rustup target add $target
WORKDIR /fingerlink
COPY . .
RUN cargo build --target $target --release --no-default-features --features server
FROM scratch
ENV PATH="${PATH}:/fingerlink"
WORKDIR /fingerlink
COPY --from=builder /fingerlink/target/x86_64-unknown-linux-musl/release/fingerlink /fingerlink/fingerlink
ENTRYPOINT ["/fingerlink/fingerlink"]
CMD ["serve", "--help"]

@@ -46,7 +46,6 @@ impl LookupHandler {
let mut lookup = HashMap::new();
for (index, resource) in resources.0.iter().enumerate() {
for lookup_to_add in resource.keys() {
let lookup_to_add = lookup_to_add.to_lowercase();
debug!("Adding {lookup_to_add} for {}", resource.subject);
let duplicate = lookup.insert(lookup_to_add.clone(), index);
@@ -162,6 +161,23 @@
}
}
#[test]
fn successful_case_insensitive_query() {
let data = "[{\"subject\":\"testing\"},{\"subject\":\"more_testing\"}]".as_bytes();
let data = LookupHandler::load_from_reader(data).unwrap();
for subject in ["TESTING", "mOre_testiNg"] {
assert_eq!(
data.lookup(subject),
Some(&Resource {
subject: subject.to_lowercase(),
aliases: None,
properties: None,
links: None,
})
);
}
}
#[test]
fn successful_alias_query() {
let data = "[{\"subject\":\"testing\",\"aliases\":[\"alias1\",\"alias2\"]},{\"subject\":\"red herring\",\"aliases\":[\"alias\",\"1\", \"2\"]}]".as_bytes();
@@ -178,4 +194,21 @@
);
}
}
#[test]
fn successful_case_insensitive_alias_query() {
let data = "[{\"subject\":\"testing\",\"aliases\":[\"alias1\",\"alias2\"]},{\"subject\":\"red herring\",\"aliases\":[\"alias\",\"1\", \"2\"]}]".as_bytes();
let data = LookupHandler::load_from_reader(data).unwrap();
for subject in ["aliAS1", "aLiAs2"] {
assert_eq!(
data.lookup(subject),
Some(&Resource {
subject: "testing".to_string(),
aliases: Some(vec!["alias1".to_string(), "alias2".to_string()]),
properties: None,
links: None,
})
);
}
}
}
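
The tests added above exercise the case-insensitive lookup from the "Add more testing around case-insensitivity and patch an error in merge_records creating duplicates in the database" commit: keys are lowercased when the lookup table is built, and queries are lowercased before probing it. A toy, self-contained sketch of that idea (illustrative names only, not the real `LookupHandler` API):

```rust
use std::collections::HashMap;

// Toy lookup table: lowercased key -> index into a resource list.
struct ToyLookup {
    by_key: HashMap<String, usize>,
}

impl ToyLookup {
    fn build<'a>(keys_per_resource: impl Iterator<Item = Vec<&'a str>>) -> Self {
        let mut by_key = HashMap::new();
        for (index, keys) in keys_per_resource.enumerate() {
            for key in keys {
                // Normalise at insertion time so "Testing" and "testing" collide.
                if let Some(previous) = by_key.insert(key.to_lowercase(), index) {
                    eprintln!("duplicate key {key:?} (already mapped to resource {previous})");
                }
            }
        }
        Self { by_key }
    }

    fn lookup(&self, query: &str) -> Option<usize> {
        // Normalise at query time as well, so "TESTING" still finds "testing".
        self.by_key.get(&query.to_lowercase()).copied()
    }
}

fn main() {
    let lookup = ToyLookup::build([vec!["testing"], vec!["more_testing", "Alias1"]].into_iter());
    assert_eq!(lookup.lookup("TESTING"), Some(0));
    assert_eq!(lookup.lookup("aliAS1"), Some(1));
}
```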

@@ -39,16 +39,16 @@ impl Resource {
}
}
/// Returns the aliases of the given record. If the `aliases` field is
/// Returns the aliases of the given record, converted to lowercase. If the `aliases` field is
/// entirely missing, returns an empty array.
pub fn keys(&self) -> impl Iterator<Item = &String> {
pub fn keys(&self) -> impl Iterator<Item=String> + '_ {
let aliases = if let Some(aliases) = &self.aliases {
aliases.as_slice()
} else {
&[]
};
aliases.iter().chain(std::iter::once(&self.subject))
aliases.iter().chain(std::iter::once(&self.subject)).map(|key| key.to_lowercase())
}
/// Replaces the current `subject` field of the WebFinger resource
@@ -114,6 +114,7 @@ impl Resource {
.map(clone_hashmap_with_option_value_as_complete)
.unwrap_or_default(),
),
template: Some(link.template.clone().unwrap_or_default()),
}
}))
})
@@ -146,6 +147,36 @@ impl Resource {
.links
.filter(|links| !links.is_empty())
.map(|mut links| {
// Collapse default subvalues
for link in &mut links {
if link.media_type.as_ref().map_or(false, String::is_empty) {
link.media_type = None;
}
if link.href.as_ref().map_or(false, String::is_empty) {
link.href = None;
}
if let Some(titles) = &mut link.titles {
titles.retain(|key, value| !key.is_empty() || !value.is_empty())
}
if link.titles.as_ref().map_or(false, HashMap::is_empty) {
link.titles = None;
}
if let Some(properties) = &mut link.properties {
for value in properties.values_mut() {
if value.as_ref().map_or(false, String::is_empty) {
*value = None;
}
}
properties.retain(|key, value| !key.is_empty() || !value.is_none())
}
if link.properties.as_ref().map_or(false, HashMap::is_empty) {
link.properties = None;
}
if link.template.as_ref().map_or(false, String::is_empty) {
link.template = None;
}
}
// Delete completely default links
links.retain(|link| {
// Empty `rel` is invalid, but short-circuiting here would delete records
// that are only partially edited. Better to store invalid data than to delete
@@ -153,22 +184,19 @@
let mut is_default = link.rel.is_empty();
is_default &= link
.media_type
.as_ref()
.filter(|media_type| !media_type.is_empty())
.is_none();
is_default &= link.href.as_ref().filter(|href| !href.is_empty()).is_none();
is_default &= link
.titles
.as_ref()
.filter(|titles| !titles.is_empty())
.is_none();
is_default &= link
.properties
.as_ref()
.filter(|titles| !titles.is_empty())
.is_none();
is_default &= link
.template
.is_none();
is_default
!is_default
});
links
@@ -189,12 +217,15 @@ pub struct Link {
pub titles: Option<HashMap<String, String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub properties: Option<HashMap<String, Option<String>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub template: Option<String>,
}
#[cfg(test)]
/// Functions to generate data for testing functions that manipulate `Resource` structs
pub mod test_data {
use crate::schema::resource::Resource;
use std::collections::HashMap;
use crate::schema::resource::{Link, Resource};
/// A [`Resource`] with only the `subject` field set
pub fn barebones_user() -> Resource {
@@ -220,6 +251,100 @@
links: None,
}
}
pub fn example_mastodon_user() -> Resource {
Resource {
subject: "acct:user@example.com".to_string(),
aliases: Some(vec![
"https://example.com/@user".to_string(),
"https://example.com/users/user".to_string()
]),
properties: None,
links: Some(vec![
Link {
rel: "http://webfinger.net/rel/profile-page".to_string(),
media_type: Some("text/html".to_string()),
href: Some("https://example.com/@user".to_string()),
titles: None,
properties: None,
template: None,
},
Link {
rel: "self".to_string(),
media_type: Some("application/activity+json".to_string()),
href: Some("https://example.com/users/user".to_string()),
titles: None,
properties: None,
template: None,
},
Link {
rel: "http://ostatus.org/schema/1.0/subscribe".to_string(),
media_type: None,
href: None,
titles: None,
properties: None,
template: Some("https://example.com/authorize_interaction?uri={uri}".to_string()),
},
Link {
rel: "http://webfinger.net/rel/avatar".to_string(),
media_type: Some("image/png".to_string()),
href: Some("https://example.com/system/accounts/avatars/321/423/112/234/123/527/original/1j2ioff88a9wa.png".to_string()),
titles: None,
properties: None,
template: None,
}
]),
}
}
pub fn example_firefish_user() -> Resource {
Resource {
subject: "acct:user@example.com".to_string(),
aliases: None,
properties: None,
links: Some(vec![
Link {
rel: "self".to_string(),
media_type: Some("application/activity+json".to_string()),
href: Some("https://example.com/users/8fsua89lcieaj".to_string()),
titles: None,
properties: None,
template: None,
},
Link {
rel: "http://webfinger.net/rel/profile-page".to_string(),
media_type: Some("text/html".to_string()),
href: Some("https://example.com/@user".to_string()),
titles: None,
properties: None,
template: None,
},
Link {
rel: "http://ostatus.org/schema/1.0/subscribe".to_string(),
media_type: None,
href: None,
titles: None,
properties: None,
template: Some("https://example.com/authorize-follow?acct={uri}".to_string()),
}
])
}
}
pub fn full_resource() -> Resource {
Resource {
subject: "some_subject".to_string(),
aliases: Some(vec!["ThisIsAnAlias".to_string()]),
properties: Some(HashMap::from_iter(std::iter::once(("property_key".to_string(), Some("property_value".to_string()))))),
links: Some(vec![Link {
rel: "link_relation".to_string(),
media_type: Some("media/type".to_string()),
href: Some("https://example.com/link_href".to_string()),
titles: Some(HashMap::from_iter(std::iter::once(("title_key".to_string(), "title_value".to_string())))),
properties: Some(HashMap::from_iter(std::iter::once(("property_key".to_string(), Some("property_value".to_string()))))),
template: Some("template".to_string()),
}]),
}
}
}
#[cfg(test)]
@@ -288,6 +413,9 @@ mod tests {
test_data::barebones_user(),
test_data::user_with_matching_subject_and_alias(),
test_data::user_with_single_alias(),
test_data::example_firefish_user(),
test_data::example_mastodon_user(),
test_data::full_resource(),
] {
assert_eq!(data, data.as_completely_serializable().compress());
}
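
The new `template` field above is what the "Add template field to Link because ostatus subscribe schema needs it." commit refers to: subscribe links carry a URL template rather than an `href`. Below is a small standalone sketch of how such a link serialises, with field names copied from the diff but the serde attributes on `rel`, `media_type` and `href` assumed for illustration (this is not the crate's real module):

```rust
use std::collections::HashMap;

use serde::Serialize;

#[derive(Serialize)]
struct Link {
    rel: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    media_type: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    href: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    titles: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    properties: Option<HashMap<String, Option<String>>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    template: Option<String>,
}

fn main() {
    // An ostatus subscribe link: no href, only a template with a {uri} placeholder.
    let link = Link {
        rel: "http://ostatus.org/schema/1.0/subscribe".to_string(),
        media_type: None,
        href: None,
        titles: None,
        properties: None,
        template: Some("https://example.com/authorize_interaction?uri={uri}".to_string()),
    };
    // Thanks to skip_serializing_if, only `rel` and `template` end up in the JSON.
    println!("{}", serde_json::to_string_pretty(&link).unwrap());
}
```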

@@ -83,10 +83,10 @@ impl ResourceList {
) -> &ResourceList {
debug!("Building hashset of already taken queries...");
let unique_check: HashSet<String> =
HashSet::from_iter(self.0.iter().flat_map(Resource::keys).cloned());
HashSet::from_iter(self.0.iter().flat_map(Resource::keys));
for record in new_records {
let record_keys = HashSet::from_iter(record.keys().cloned());
let record_keys = HashSet::from_iter(record.keys());
let collisions = unique_check
.intersection(&record_keys)
.collect::<HashSet<_>>();
@@ -103,7 +103,7 @@
CollisionHandling::OverwriteSingleSkipMultiple => {
let mut collided_resources =
self.0.iter().enumerate().filter(|record| {
record.1.keys().any(|elem| collisions.contains(elem))
record.1.keys().any(|elem| collisions.contains(&elem))
});
if let Some((collided_index, collided_resource)) = collided_resources.next()
{
@@ -118,7 +118,7 @@
CollisionHandling::OverwriteMultiple => {
warn!("Overwriting already existing record(s) with new data...");
self.0.retain(|record| {
if record.keys().any(|elem| collisions.contains(elem)) {
if record.keys().any(|elem| collisions.contains(&elem)) {
warn!("Removing {record:?}");
false
} else {
@@ -138,4 +138,31 @@
self
}
#[cfg(test)]
/// Returns the amount of records stored
fn len(&self) -> usize {
self.0.len()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn check_merge_case_insensitivity() {
let mut base = ResourceList(vec![Resource::new("ThisIsASubject".to_string())]);
base.merge_records(std::iter::once(Resource::new("thisisasubject".to_string())), CollisionHandling::Skip);
assert_eq!(base.len(), 1);
}
#[test]
fn check_merge_case_insensitivity_alt() {
let mut base = Resource::new("ThisIsASubject".to_string());
base.add_new_primary_subject("ThisIsANewPrimarySubject".to_string());
let mut base = ResourceList(vec![base]);
base.merge_records(std::iter::once(Resource::new("thisisasubject".to_string())), CollisionHandling::Skip);
assert_eq!(base.len(), 1);
}
}

@@ -34,7 +34,14 @@ pub fn init(data_paths: DataPaths, server_parameters: ServerParameters) {
}
info!("Found PID file at {pid_file_path:?}, overwriting...");
}
let pid_file = qpidfile::Pidfile::new(pid_file_path).unwrap();
let pid_file = match qpidfile::Pidfile::new(&pid_file_path) {
Ok(pidfile) => pidfile,
Err(e) => {
error!("Failed to create PID file at {}", pid_file_path.display());
error!("{}", e);
return;
}
};
let runtime = match tokio::runtime::Runtime::new() {
Err(e) => {