Compare commits

..

3 Commits
mmm ... main

Author SHA1 Message Date
Natty 5363a0c137
Offload some blocking tasks to worker threads
ci/woodpecker/push/ociImagePush Pipeline was successful Details
2024-11-25 22:48:27 +01:00
Natty 5666bb4622
Frontend: Fixed prepending items when fresh and full
ci/woodpecker/push/ociImagePush Pipeline was successful Details
2024-11-25 19:39:05 +01:00
Natty 8aa2a4dac4
Frontend: SSE and pagination fixes
ci/woodpecker/push/ociImagePush Pipeline was successful Details
2024-11-25 17:20:22 +01:00
10 changed files with 1132 additions and 1058 deletions

View File

@ -1,12 +1,13 @@
use std::{borrow::Cow, fmt::Display, string::FromUtf8Error, sync::Arc};
use chrono::Utc;
use futures::{FutureExt, TryFutureExt};
use futures::TryFutureExt;
use http::{HeaderMap, HeaderName, HeaderValue, Method};
use indexmap::IndexSet;
use serde_json::Value;
use sha2::Digest;
use std::{fmt::Display, string::FromUtf8Error, sync::Arc};
use thiserror::Error;
use tokio::task;
use tokio::task::JoinError;
use url::Url;
use magnetar_core::web_model::content_type::ContentActivityStreams;
@ -77,6 +78,8 @@ pub enum ApClientError {
InvalidHeaderValue(#[from] http::header::InvalidHeaderValue),
#[error("UTF-8 parse error: {0}")]
Utf8ParseError(#[from] FromUtf8Error),
#[error("Task join error: {0}")]
JoinError(#[from] JoinError),
}
trait CreateField {
@ -244,7 +247,7 @@ impl SigningParts for SigningInputPostHs2019<'_> {
impl ApClientService for ApClientServiceDefaultProvider {
type Error = ApClientError;
fn sign_request(
async fn sign_request(
&self,
signing_key: ApSigningKey<'_>,
signing_algorithm: SigningAlgorithm,
@ -258,12 +261,16 @@ impl ApClientService for ApClientServiceDefaultProvider {
.collect::<Vec<_>>()
.join("\n");
let signature = signing_key
.key
.sign_base64(signing_algorithm, &message.into_bytes())?;
let key_id = signing_key.key_id.clone().into_owned();
let key = signing_key.into_owned();
let signature = task::spawn_blocking(move || {
key
.key
.sign_base64(signing_algorithm, &message.into_bytes())
}).await??;
Ok(ApSignature {
key_id: signing_key.key_id.clone().into_owned(),
key_id,
algorithm: Some(signing_algorithm),
created: request.get_created().cloned(),
expires: request.get_expires().cloned(),
@ -287,8 +294,7 @@ impl ApClientService for ApClientServiceDefaultProvider {
let signed = match signing_algorithm {
SigningAlgorithm::RsaSha256 => self.sign_request(
signing_key,
signing_algorithm,
&SigningInputGetRsaSha256 {
signing_algorithm, &SigningInputGetRsaSha256 {
request_target: RequestTarget {
url: &url,
method: Method::GET,
@ -297,7 +303,7 @@ impl ApClientService for ApClientServiceDefaultProvider {
date: DateHeader(time_created),
expires: expires.map(ExpiresPseudoHeader),
},
)?,
).await?,
SigningAlgorithm::Hs2019 => self.sign_request(
signing_key,
signing_algorithm,
@ -310,7 +316,7 @@ impl ApClientService for ApClientServiceDefaultProvider {
created: CreatedPseudoHeader(time_created),
expires: expires.map(ExpiresPseudoHeader),
},
)?,
).await?,
};
let mut headers = HeaderMap::new();
@ -348,9 +354,13 @@ impl ApClientService for ApClientServiceDefaultProvider {
) -> Result<String, Self::Error> {
let url = url.parse()?;
let body_bytes = serde_json::to_vec(body)?;
let mut sha = sha2::Sha256::new();
sha.update(&body_bytes);
let digest_raw = sha.finalize();
// Move in, move out :3
let (digest_raw, body_bytes) = task::spawn_blocking(move || {
let mut sha = sha2::Sha256::new();
sha.update(&body_bytes);
(sha.finalize(), body_bytes)
}).await?;
use base64::prelude::*;
let digest_base64 = format!("sha-256={}", BASE64_STANDARD.encode(digest_raw));
let time_created = Utc::now();
@ -368,7 +378,7 @@ impl ApClientService for ApClientServiceDefaultProvider {
digest: DigestHeader(&digest_base64),
expires: expires.map(ExpiresPseudoHeader),
},
)?,
).await?,
SigningAlgorithm::Hs2019 => self.sign_request(
signing_key,
signing_algorithm,
@ -382,7 +392,7 @@ impl ApClientService for ApClientServiceDefaultProvider {
digest: DigestHeader(&digest_base64),
expires: expires.map(ExpiresPseudoHeader),
},
)?,
).await?,
};
let mut headers = HeaderMap::new();
@ -450,7 +460,7 @@ mod test {
25,
UserAgent::from_static("magnetar/0.42 (https://astolfo.social)"),
)
.into_diagnostic()?,
.into_diagnostic()?,
)),
};

View File

@ -241,6 +241,25 @@ pub struct ApSigningKey<'a> {
pub key_id: Cow<'a, str>,
}
impl<'a> ApSigningKey<'a> {
    /// Converts this possibly-borrowed signing key into one with a `'static`
    /// lifetime by taking ownership of the inner key material and key id.
    ///
    /// Needed so the key can be moved into a `spawn_blocking` closure, which
    /// requires `'static` captures.
    pub fn into_owned(self) -> ApSigningKey<'static> {
        ApSigningKey {
            key: self.key.into_owned(),
            key_id: Cow::Owned(self.key_id.into_owned()),
        }
    }
}
impl<'a> ApHttpSigningKey<'a> {
    /// Converts the borrowed key variant into a `'static` owned variant.
    ///
    /// Each arm clones the underlying key out of its `Cow` and rewraps it as
    /// `Cow::Owned`, preserving the algorithm variant unchanged.
    pub fn into_owned(self) -> ApHttpSigningKey<'static> {
        match self {
            ApHttpSigningKey::RsaSha256(k) => ApHttpSigningKey::RsaSha256(Cow::Owned(k.into_owned())),
            ApHttpSigningKey::RsaSha512(k) => ApHttpSigningKey::RsaSha512(Cow::Owned(k.into_owned())),
            ApHttpSigningKey::Ed25519(k) => ApHttpSigningKey::Ed25519(Cow::Owned(k.into_owned())),
        }
    }
}
impl ApHttpSigningKey<'_> {
pub fn sign(
&self,

View File

@ -62,10 +62,10 @@ pub trait WebFingerResolverService: Send + Sync {
resolved_uri,
percent_encoding::NON_ALPHANUMERIC,
)
.to_string(),
.to_string(),
),
)
.await
.await
}
}
@ -137,12 +137,12 @@ pub struct ApSignature {
#[derive(Debug)]
pub struct ApSigningHeaders(pub(crate) IndexSet<ApSigningField>);
pub trait SigningParts {
pub trait SigningParts: Send {
fn get_created(&self) -> Option<&DateTime<Utc>>;
fn get_expires(&self) -> Option<&DateTime<Utc>>;
}
pub trait SigningInput: SigningParts {
pub trait SigningInput: SigningParts + Sync {
fn create_signing_input(&self) -> Vec<(ApSigningField, String)>;
}
@ -150,7 +150,7 @@ pub trait SigningInput: SigningParts {
pub trait ApClientService: Send + Sync {
type Error;
fn sign_request(
async fn sign_request(
&self,
signing_key: ApSigningKey<'_>,
signing_algorithm: SigningAlgorithm,

View File

@ -67,9 +67,11 @@ import {
computed,
ComputedRef,
isRef,
nextTick,
onActivated,
onDeactivated,
ref,
shallowRef,
watch,
} from "vue";
import * as misskey from "calckey-js";
@ -78,6 +80,7 @@ import {
getScrollContainer,
getScrollPosition,
isTopVisible,
onScrollBottom,
onScrollTop,
} from "@/scripts/scroll";
import MkButton from "@/components/MkButton.vue";
@ -85,7 +88,7 @@ import { magTransProperty } from "@/scripts-mag/mag-util";
import { i18n } from "@/i18n";
export type Paging<
E extends keyof misskey.Endpoints = keyof misskey.Endpoints,
E extends keyof misskey.Endpoints = keyof misskey.Endpoints
> = {
endpoint: E;
limit: number;
@ -119,7 +122,7 @@ const props = withDefaults(
}>(),
{
displayLimit: 30,
},
}
);
const emit = defineEmits<{
@ -129,8 +132,8 @@ const emit = defineEmits<{
type Item = { id: string; createdAt?: string; created_at?: string } & any;
const rootEl = ref<HTMLElement>();
const items = ref<Item[]>([]);
const queue = ref<Item[]>([]);
const items = shallowRef<Item[]>([]);
const queue = shallowRef<Item[]>([]);
const offset = ref(0);
const fetching = ref(true);
const moreFetching = ref(false);
@ -166,7 +169,7 @@ const init = async (): Promise<void> => {
(err) => {
error.value = true;
fetching.value = false;
},
}
);
};
@ -194,7 +197,7 @@ const refresh = async (): Promise<void> => {
for (let i = 0; i < res.length; i++) {
const item = res[i];
if (!updateItem(item.id, (old) => item)) {
append(item);
prepend(item);
}
ids.delete(item.id);
}
@ -206,7 +209,7 @@ const refresh = async (): Promise<void> => {
(err) => {
error.value = true;
fetching.value = false;
},
}
);
};
@ -241,8 +244,8 @@ const fetchMore = async (): Promise<void> => {
magTransProperty(
lastItem,
"createdAt",
"created_at",
),
"created_at"
)
).getTime()
: undefined,
untilId: lastItem?.id ?? undefined,
@ -259,7 +262,7 @@ const fetchMore = async (): Promise<void> => {
},
(err) => {
moreFetching.value = false;
},
}
);
};
@ -276,53 +279,64 @@ const isFresh = (): boolean => {
const pos = getScrollPosition(rootEl.value);
const viewHeight = container.clientHeight;
const height = container.scrollHeight;
const isBottom = pos + viewHeight > height - 32;
return isBottom;
return pos + viewHeight > height - 32;
} else {
const isTop =
return (
isBackTop.value ||
(document.body.contains(rootEl.value) &&
isTopVisible(rootEl.value));
return isTop;
(document.body.contains(rootEl.value) && isTopVisible(rootEl.value))
);
}
};
// Flushes the pending queue into the visible item list, capping the list at
// `displayLimit` entries. Queue items arrive newest-first, so they are
// reversed back into chronological order before merging.
const unqueue = () => {
	const queueRemoved = [...queue.value].reverse();
	queue.value = [];
	if (props.pagination.reversed) {
		// Reversed (bottom-up) layout: new items belong at the end, so trim
		// overflow from the front by keeping the last `displayLimit` items.
		// NOTE: `.slice(0, -props.displayLimit)` here was a bug — it kept the
		// OLDEST items (dropping the just-unqueued ones) and yields an empty
		// array whenever the list is shorter than the limit.
		items.value = [...items.value, ...queueRemoved].slice(
			-props.displayLimit
		);
	} else {
		// Normal layout: new items go on top, trim overflow from the bottom.
		items.value = [...queueRemoved, ...items.value].slice(
			0,
			props.displayLimit
		);
	}
};
const prepend = (item: Item): void => {
if (props.pagination.reversed) {
if (isFresh()) {
items.value = items.value.slice(-props.displayLimit);
hasMore.value = true;
}
items.value.push(item);
} else {
if (isFresh()) {
// Prepend the item
items.value = [item, ...items.value].slice(0, props.displayLimit);
} else {
if (!rootEl.value) {
items.value.unshift(item);
return;
}
if (!queue.value.length) {
onScrollTop(rootEl.value, () => {
const queueRemoved = [...queue.value].reverse();
queue.value = [];
items.value = [...queueRemoved, ...items.value].slice(
0,
props.displayLimit,
);
});
}
queue.value = [...queue.value, item].slice(-props.displayLimit);
}
if (!rootEl.value) {
queue.value = [...queue.value, item].slice(-props.displayLimit);
return;
}
};
const append = (item: Item): void => {
items.value.push(item);
if (!isFresh()) {
if (!queue.value.length) {
(props.pagination.reversed ? onScrollBottom : onScrollTop)(
rootEl.value,
unqueue
);
}
queue.value = [...queue.value, item].slice(-props.displayLimit);
return;
}
if (items.value.length >= props.displayLimit) {
if (!queue.value.length) {
nextTick(unqueue);
}
queue.value = [...queue.value, item].slice(-props.displayLimit);
return;
}
if (props.pagination.reversed) {
items.value = [...items.value, item];
} else {
items.value = [item, ...items.value];
}
};
const removeItem = (finder: (item: Item) => boolean): boolean => {
@ -331,7 +345,7 @@ const removeItem = (finder: (item: Item) => boolean): boolean => {
return false;
}
items.value.splice(i, 1);
items.value = items.value.toSpliced(i, 1);
return true;
};
@ -341,7 +355,9 @@ const updateItem = (id: Item["id"], replacer: (old: Item) => Item): boolean => {
return false;
}
items.value[i] = replacer(items.value[i]);
const newItems = [...items.value];
newItems[i] = replacer(items.value[i]);
items.value = newItems;
return true;
};
@ -349,14 +365,10 @@ if (props.pagination.params && isRef(props.pagination.params)) {
watch(props.pagination.params, init, { deep: true });
}
watch(
queue,
(a, b) => {
if (a.length === 0 && b.length === 0) return;
emit("queue", queue.value.length);
},
{ deep: true },
);
watch(queue, (a, b) => {
if (a.length === 0 && b.length === 0) return;
emit("queue", queue.value.length);
});
init();
@ -375,7 +387,6 @@ defineExpose({
reload,
refresh,
prepend,
append,
removeItem,
updateItem,
isFresh,
@ -387,6 +398,7 @@ defineExpose({
.fade-leave-active {
transition: opacity 0.125s ease;
}
.fade-enter-from,
.fade-leave-to {
opacity: 0;
@ -398,9 +410,11 @@ defineExpose({
margin-right: auto;
}
}
.list > :deep(._button) {
margin-inline: auto;
margin-bottom: 16px;
&:last-of-type:not(:first-child) {
margin-top: 16px;
}

View File

@ -113,7 +113,16 @@ export class MagEventChannel extends EventEmitter<{
let buf = "";
while (true) {
const res = await Promise.race([reader.read(), this.closePromise]);
const res = await Promise.race([
reader.read().catch((e) => {
console.error(e);
return {
done: true,
value: undefined,
};
}),
this.closePromise,
]);
if (res === "cancelled") break;
@ -122,7 +131,7 @@ export class MagEventChannel extends EventEmitter<{
setTimeout(
() => this.connect(),
this.backoffBase *
Math.pow(this.backoffFactor, this.attempts)
Math.pow(this.backoffFactor, this.attempts)
);
this.attempts++;
break;

File diff suppressed because it is too large Load Diff

View File

@ -1,785 +0,0 @@
#![cfg(test)]
use std::collections::HashMap;
use nom::bytes::complete::tag;
use crate::{xml_write::to_xml_string, Context, Span, SpanMeta, Token, DEFAULT_DEPTH_LIMIT};
/// Runs the full mmm parser over `string` and returns the merged token tree.
///
/// Panics if parsing fails; test inputs are expected to always parse.
fn parse_full(string: &str) -> Token {
    let ctx = Context::default();
    let input = Span::new_extra(string, SpanMeta::default());
    let (_rest, parsed) = ctx.full(input).unwrap();
    parsed.merged()
}
#[test]
fn parse_empty() {
    // An empty input must parse to an empty token sequence.
    let parsed = parse_full("");
    assert_eq!(parsed, Token::Sequence(Vec::new()));
}
// Checks URL character consumption, in particular how trailing parentheses
// and whitespace terminate a URL depending on the `allow_parens` flag.
#[test]
fn parse_url_chars() {
    let ctx = Context::default();
    // Balanced parens inside the URL are kept; the unbalanced trailing ')' is not.
    assert_eq!(
        ctx.url_chars(tag(")"), true)(Span::new_extra(
            "https://en.wikipedia.org/wiki/Sandbox_(computer_security))",
            SpanMeta::default(),
        ))
        .unwrap()
        .1
        .into_fragment(),
        "https://en.wikipedia.org/wiki/Sandbox_(computer_security)"
    );
    // Two trailing ')' after the balanced pair are both dropped.
    assert_eq!(
        ctx.url_chars(tag(")"), true)(Span::new_extra(
            "https://en.wikipedia.org/wiki/Sandbox_(computer_security)))",
            SpanMeta::default()
        ))
        .unwrap()
        .1
        .into_fragment(),
        "https://en.wikipedia.org/wiki/Sandbox_(computer_security)",
    );
    // Trailing whitespace terminates the URL.
    assert_eq!(
        ctx.url_chars(tag(")"), true)(Span::new_extra(
            "https://cs.wikipedia.org/wiki/Among_Us ",
            SpanMeta::default()
        ))
        .unwrap()
        .1
        .into_fragment(),
        "https://cs.wikipedia.org/wiki/Among_Us",
    );
    // With spaces allowed (first flag true), an embedded space is kept but the
    // trailing " )" is not.
    assert_eq!(
        ctx.url_chars(tag(")"), true)(Span::new_extra(
            "https://cs.wikipedia.org/wiki/Among Us )",
            SpanMeta::default(),
        ))
        .unwrap()
        .1
        .into_fragment(),
        "https://cs.wikipedia.org/wiki/Among Us"
    );
    // With the flag false, the URL stops at the first space.
    assert_eq!(
        ctx.url_chars(tag(")"), false)(Span::new_extra(
            "https://en.wikipedia.org/wiki/Among Us )",
            SpanMeta::default(),
        ))
        .unwrap()
        .1
        .into_fragment(),
        "https://en.wikipedia.org/wiki/Among"
    );
}
// Inline formatting: strikethrough, bold, italic, inline code, and the cases
// where markers must NOT trigger formatting (padded markers, snake_case, etc.).
#[test]
fn parse_formatting() {
    // ("stikethrough" is intentionally the same in input and expectation.)
    assert_eq!(
        parse_full(r#"~~stikethrough~~"#),
        Token::Strikethrough(Box::new(Token::PlainText("stikethrough".into()))),
    );
    assert_eq!(
        parse_full(r#"**bold**"#),
        Token::Bold(Box::new(Token::PlainText("bold".into()))),
    );
    assert_eq!(
        parse_full(r#"*italic*"#),
        Token::Italic(Box::new(Token::PlainText("italic".into()))),
    );
    // Space-padded asterisks do not form italics.
    assert_eq!(
        parse_full(r#"* italic *"#),
        Token::PlainText("* italic *".into())
    );
    // Underscores inside a word stay literal.
    assert_eq!(
        parse_full("snake_case_variable"),
        Token::PlainText("snake_case_variable".into())
    );
    // Asterisks, however, do trigger italics intra-word.
    assert_eq!(
        parse_full("intra*word*italic"),
        Token::Sequence(vec![
            Token::PlainText("intra".into()),
            Token::Italic(Box::new(Token::PlainText("word".into()))),
            Token::PlainText("italic".into()),
        ])
    );
    assert_eq!(
        parse_full(r#"_ italic *"#),
        Token::PlainText("_ italic *".into())
    );
    // An unmatched '*' stays literal while a following <b> tag still parses.
    assert_eq!(
        parse_full(r#"long text with a *footnote <b>text</b>"#),
        Token::Sequence(vec![
            Token::PlainText("long text with a *footnote ".into()),
            Token::Bold(Box::new(Token::PlainText("text".into()))),
        ])
    );
    assert_eq!(
        parse_full(r#"*"italic"*"#),
        Token::Italic(Box::new(Token::PlainText("\"italic\"".into())))
    );
    // Inline code spans; a dangling backtick stays literal.
    assert_eq!(
        parse_full(r#"not code `code` also not code"#),
        Token::Sequence(vec![
            Token::PlainText("not code ".into()),
            Token::InlineCode("code".into()),
            Token::PlainText(" also not code".into())
        ]),
    );
    assert_eq!(
        parse_full(r#"not code `code` also `not code"#),
        Token::Sequence(vec![
            Token::PlainText("not code ".into()),
            Token::InlineCode("code".into()),
            Token::PlainText(" also `not code".into())
        ]),
    );
    // Formatting markers inside inline code are not interpreted.
    assert_eq!(
        parse_full(r#"not code `*not bold*` also not code"#),
        Token::Sequence(vec![
            Token::PlainText("not code ".into()),
            Token::InlineCode("*not bold*".into()),
            Token::PlainText(" also not code".into())
        ]),
    );
    // Triple asterisks nest as Bold(Italic(..)); HTML tags nest the same way.
    assert_eq!(
        parse_full(r#"***bold italic***"#),
        Token::Bold(Box::new(Token::Italic(Box::new(Token::PlainText(
            "bold italic".into()
        )))))
    );
    assert_eq!(
        parse_full(r#"<b><i>bold italic</i></b>"#),
        Token::Bold(Box::new(Token::Italic(Box::new(Token::PlainText(
            "bold italic".into()
        )))))
    );
    // Unclosed '~~' stays literal; the italic span still crosses the newline.
    assert_eq!(
        parse_full("~~*hello\nworld*"),
        Token::Sequence(vec![
            Token::PlainText("~~".into()),
            Token::Italic(Box::new(Token::PlainText("hello\nworld".into()))),
        ])
    )
}
// Flanking rules: '*' delimiters work intra-word, while '_' delimiters only
// form italics when they flank the whole word (line boundaries count).
#[test]
fn parse_flanking() {
    assert_eq!(
        parse_full(r#"aaa*iii*bbb"#),
        Token::Sequence(vec![
            Token::PlainText("aaa".into()),
            Token::Italic(Box::new(Token::PlainText("iii".into()))),
            Token::PlainText("bbb".into()),
        ])
    );
    // '_' surrounded by word characters on both sides: no italics.
    assert_eq!(
        parse_full(r#"aaa_nnn_bbb"#),
        Token::PlainText("aaa_nnn_bbb".into())
    );
    // Newlines count as flank boundaries, so '_iii_' on its own line works.
    assert_eq!(
        parse_full("aaa\n_iii_\nbbb"),
        Token::Sequence(vec![
            Token::PlainText("aaa\n".into()),
            Token::Italic(Box::new(Token::PlainText("iii".into()))),
            Token::PlainText("\nbbb".into()),
        ])
    );
    assert_eq!(
        parse_full(r#"*iii*"#),
        Token::Italic(Box::new(Token::PlainText("iii".into())))
    );
    assert_eq!(
        parse_full(r#"_iii_"#),
        Token::Italic(Box::new(Token::PlainText("iii".into())))
    );
    // '*' only needs to flank on one side.
    assert_eq!(
        parse_full(r#"aaa*iii*"#),
        Token::Sequence(vec![
            Token::PlainText("aaa".into()),
            Token::Italic(Box::new(Token::PlainText("iii".into()))),
        ])
    );
    assert_eq!(
        parse_full(r#"*iii*bbb"#),
        Token::Sequence(vec![
            Token::Italic(Box::new(Token::PlainText("iii".into()))),
            Token::PlainText("bbb".into()),
        ])
    );
    // '_' must flank on both sides; one-sided word contact disables it.
    assert_eq!(
        parse_full(r#"aaa_nnn_"#),
        Token::PlainText("aaa_nnn_".into())
    );
    assert_eq!(
        parse_full(r#"_nnn_bbb"#),
        Token::PlainText("_nnn_bbb".into())
    );
}
// Smoke test: pathologically long inputs (plain text, repeated formatting
// markers, repeated mention markers) must parse without hanging or crashing.
// Results are intentionally discarded.
#[test]
fn parse_long() {
    for unit in ["A", "*A", "@A"] {
        parse_full(&unit.repeat(20000));
    }
}
// Compound constructs: math spans, code blocks, <center>, quotes, and
// $[fn ...] function tokens, including nesting and emoji inside them.
#[test]
fn parse_complex() {
    assert_eq!(
        parse_full(r"\( nya^3 \)"),
        Token::InlineMath(" nya^3 ".to_string())
    );
    // Inline math may not span lines; it falls back to plain text.
    assert_eq!(
        parse_full("\\( nya^3 \n \\)"),
        Token::PlainText("\\( nya^3 \n \\)".into())
    );
    assert_eq!(
        parse_full(r"`AbstractProxyFactoryBean`"),
        Token::InlineCode("AbstractProxyFactoryBean".to_string())
    );
    // Inline code may not span lines either.
    assert_eq!(
        parse_full("`let x = \n 5;`"),
        Token::PlainText("`let x = \n 5;`".into())
    );
    // Fenced code block with a language tag.
    assert_eq!(
        parse_full(
            r#"
```js
var x = undefined;
```"#
        ),
        Token::BlockCode {
            lang: Some("js".to_string()),
            inner: "var x = undefined;".to_string(),
        }
    );
    // Block math, both multi-line and single-line forms.
    assert_eq!(
        parse_full(
            r"
\[
a^2 + b^2 = c^2
\]"
        ),
        Token::BlockMath("a^2 + b^2 = c^2".to_string())
    );
    assert_eq!(
        parse_full(r"\[ x^2 + y^2 = z^2 \]"),
        Token::BlockMath("x^2 + y^2 = z^2".to_string())
    );
    // <center> block containing multi-codepoint (ZWJ-sequence) emoji.
    assert_eq!(
        parse_full(
            r#"<center>centered
🦋🏳
text</center>"#
        ),
        Token::Center(Box::new(Token::Sequence(vec![
            Token::PlainText("centered\n".into()),
            Token::UnicodeEmoji("🦋".into()),
            Token::UnicodeEmoji("🏳️‍⚧️".into()),
            Token::PlainText("\ntext".into()),
        ])))
    );
    // A quoted <center> block; the '>' markers are stripped from each line.
    assert_eq!(
        parse_full(
            r#"> <center>centered
> 👩🏽🤝👩🏼
> text</center>"#
        ),
        Token::Quote(Box::new(Token::Center(Box::new(Token::Sequence(vec![
            Token::PlainText("centered\n".into()),
            Token::UnicodeEmoji("👩🏽‍🤝‍👩🏼".into()),
            Token::PlainText("\ntext".into())
        ]))))),
    );
    // Nested $[..] functions with positional flags and key=value params.
    assert_eq!(
        parse_full(r#"$[x2 $[sparkle 🥺]💜$[spin.y,speed=5s ❤️]🦊]"#),
        Token::Function {
            name: "x2".into(),
            params: HashMap::new(),
            inner: Box::new(Token::Sequence(vec![
                Token::Function {
                    name: "sparkle".into(),
                    params: HashMap::new(),
                    inner: Box::new(Token::UnicodeEmoji("🥺".into())),
                },
                Token::UnicodeEmoji("💜".into()),
                Token::Function {
                    name: "spin".into(),
                    params: {
                        let mut params = HashMap::new();
                        params.insert("y".into(), None);
                        params.insert("speed".into(), Some("5s".into()));
                        params
                    },
                    inner: Box::new(Token::UnicodeEmoji("❤️".into())),
                },
                Token::UnicodeEmoji("🦊".into()),
            ]))
        },
    );
    // Mismatched HTML tags stay literal; mentions inside still parse.
    assert_eq!(
        parse_full(r#"<b>bold @tag1 <i> @tag2 </b>italic</i>"#),
        Token::Sequence(vec![
            Token::PlainText("<b>bold ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag1".into(),
                host: None
            },
            Token::PlainText(" <i> ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag2".into(),
                host: None
            },
            Token::PlainText(" </b>italic</i>".into())
        ]),
    );
    // Quotes may contain tags spanning lines, and '>>' nests a quote.
    assert_eq!(
        parse_full(
            r#"
> test
> <i>
> italic
> </i>
>> Nested quote
"#
        ),
        Token::Quote(Box::new(Token::Sequence(vec![
            Token::PlainText("test\n".into()),
            Token::Italic(Box::new(Token::PlainText("\nitalic\n".into()))),
            Token::Quote(Box::new(Token::PlainText("Nested quote".into())))
        ]))),
    );
}
// URLs (raw, <no-embed>, IP/IDN hosts), [label](href) links, ?[..] no-embed
// links, and hashtags.
#[test]
fn parse_link() {
    // Numeric / IP hosts.
    assert_eq!(
        parse_full("IPv4 test: <https://0>"),
        Token::Sequence(vec![
            Token::PlainText("IPv4 test: ".into()),
            Token::UrlNoEmbed("https://0".into()),
        ])
    );
    assert_eq!(
        parse_full("IPv4 test: <https://127.0.0.1>"),
        Token::Sequence(vec![
            Token::PlainText("IPv4 test: ".into()),
            Token::UrlNoEmbed("https://127.0.0.1".into()),
        ])
    );
    assert_eq!(
        parse_full("IPv6 test: <https://[::2f:1]/nya>"),
        Token::Sequence(vec![
            Token::PlainText("IPv6 test: ".into()),
            Token::UrlNoEmbed("https://[::2f:1]/nya".into()),
        ])
    );
    // Bare (unbracketed) URLs become UrlRaw.
    assert_eq!(
        parse_full("IPv6 test: https://[::2f:1]/nya"),
        Token::Sequence(vec![
            Token::PlainText("IPv6 test: ".into()),
            Token::UrlRaw("https://[::2f:1]/nya".into()),
        ])
    );
    // IDNs
    assert_eq!(
        parse_full("IDN test: https://www.háčkyčárky.cz/"),
        Token::Sequence(vec![
            Token::PlainText("IDN test: ".into()),
            Token::UrlRaw("https://www.háčkyčárky.cz/".into()),
        ])
    );
    // Markdown-style link with embed enabled by default.
    assert_eq!(
        parse_full("Link test: [label](https://example.com)"),
        Token::Sequence(vec![
            Token::PlainText("Link test: ".into()),
            Token::Link {
                label: Box::new(Token::PlainText("label".into())),
                href: "https://example.com".into(),
                embed: true,
            },
        ])
    );
    // Hashtags require a preceding boundary.
    assert_eq!(
        parse_full("test #hashtag tail"),
        Token::Sequence(vec![
            Token::PlainText("test ".into()),
            Token::Hashtag("hashtag".into()),
            Token::PlainText(" tail".into()),
        ])
    );
    assert_eq!(
        parse_full("not#hashtag tail"),
        Token::PlainText("not#hashtag tail".into())
    );
    assert_eq!(
        parse_full("<https://example.com>"),
        Token::UrlNoEmbed("https://example.com".into())
    );
    // Adjacent links okay
    assert_eq!(
        parse_full("<https://example.com/><https://awawa.gay/>"),
        Token::Sequence(vec![
            Token::UrlNoEmbed("https://example.com/".into()),
            Token::UrlNoEmbed("https://awawa.gay/".into()),
        ])
    );
    // '?[..](..)' produces a non-embedding link.
    assert_eq!(
        parse_full("Link test: ?[label](https://awawa.gay)"),
        Token::Sequence(vec![
            Token::PlainText("Link test: ".into()),
            Token::Link {
                label: Box::new(Token::PlainText("label".into())),
                href: "https://awawa.gay".into(),
                embed: false,
            },
        ])
    );
    assert_eq!(
        parse_full("Link test: ?[label](https://awawa.gay)test"),
        Token::Sequence(vec![
            Token::PlainText("Link test: ".into()),
            Token::Link {
                label: Box::new(Token::PlainText("label".into())),
                href: "https://awawa.gay".into(),
                embed: false,
            },
            Token::PlainText("test".into()),
        ])
    );
    // Surrounding parens are not consumed by the link.
    assert_eq!(
        parse_full("Link test: (?[label](https://awawa.gay))"),
        Token::Sequence(vec![
            Token::PlainText("Link test: (".into()),
            Token::Link {
                label: Box::new(Token::PlainText("label".into())),
                href: "https://awawa.gay".into(),
                embed: false,
            },
            Token::PlainText(")".into()),
        ])
    );
    assert_eq!(
        parse_full("Link test: ?[label](https://awawa.gay"), // Missing closing bracket
        Token::Sequence(vec![
            Token::PlainText("Link test: ?[label](".into()),
            Token::UrlRaw("https://awawa.gay".into()),
        ])
    );
}
// Nesting is clamped at DEFAULT_DEPTH_LIMIT: exactly that many <b> wrappers
// parse as Bold tokens, and anything deeper (<s><i> here) stays plain text.
#[test]
fn limit_nesting() {
    const INNER: &str = " <s><i>test</i></s> ";
    // Expected tree: the inner text wrapped in DEFAULT_DEPTH_LIMIT Bold layers.
    let expected = (0..DEFAULT_DEPTH_LIMIT).fold(
        Token::PlainText(INNER.into()),
        |acc, _| Token::Bold(Box::new(acc)),
    );
    let input = format!(
        "{}{}{}",
        "<b>".repeat(DEFAULT_DEPTH_LIMIT),
        INNER,
        "</b>".repeat(DEFAULT_DEPTH_LIMIT)
    );
    assert_eq!(parse_full(&input), expected);
}
// Mentions: @user, @user@host, !community@host and @matrix:host forms, plus
// punctuation/boundary handling around them.
#[test]
fn parse_mention() {
    assert_eq!(
        parse_full("@tag"),
        Token::Mention {
            mention_type: crate::MentionType::User,
            name: "tag".into(),
            host: None,
        }
    );
    // An e-mail-looking string is not a mention ('@' not at a boundary).
    assert_eq!(
        parse_full("email@notactuallyamenmtion.org"),
        Token::PlainText("email@notactuallyamenmtion.org".into())
    );
    assert_eq!(
        parse_full("hgsjlkdsa @tag fgahjsdkd"),
        Token::Sequence(vec![
            Token::PlainText("hgsjlkdsa ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag".into(),
                host: None,
            },
            Token::PlainText(" fgahjsdkd".into()),
        ])
    );
    // A trailing '@' with no host is left outside the mention.
    assert_eq!(
        parse_full("hgsjlkdsa @tag@ fgahjsdkd"),
        Token::Sequence(vec![
            Token::PlainText("hgsjlkdsa ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag".into(),
                host: None,
            },
            Token::PlainText("@ fgahjsdkd".into()),
        ])
    );
    assert_eq!(
        parse_full("aaaa @tag@domain bbbbb"),
        Token::Sequence(vec![
            Token::PlainText("aaaa ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag".into(),
                host: Some("domain".into()),
            },
            Token::PlainText(" bbbbb".into()),
        ])
    );
    // Trailing punctuation is not consumed into the host.
    assert_eq!(
        parse_full("test @tag@domain, test"),
        Token::Sequence(vec![
            Token::PlainText("test ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag".into(),
                host: Some("domain".into()),
            },
            Token::PlainText(", test".into()),
        ])
    );
    // A terminal '.' after the host is treated as sentence punctuation.
    assert_eq!(
        parse_full("test @tag@domain.gay. test"),
        Token::Sequence(vec![
            Token::PlainText("test ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag".into(),
                host: Some("domain.gay".into()),
            },
            Token::PlainText(". test".into()),
        ])
    );
    assert_eq!(
        parse_full("test @tag@domain? test"),
        Token::Sequence(vec![
            Token::PlainText("test ".into()),
            Token::Mention {
                mention_type: crate::MentionType::User,
                name: "tag".into(),
                host: Some("domain".into()),
            },
            Token::PlainText("? test".into()),
        ])
    );
    // '!' prefix produces a community mention.
    assert_eq!(
        parse_full("test !tag@domain.com test"),
        Token::Sequence(vec![
            Token::PlainText("test ".into()),
            Token::Mention {
                mention_type: crate::MentionType::Community,
                name: "tag".into(),
                host: Some("domain.com".into()),
            },
            Token::PlainText(" test".into()),
        ])
    );
    // '@name:host' is a Matrix-style user mention.
    assert_eq!(
        parse_full("@tag:domain.com"),
        Token::Mention {
            mention_type: crate::MentionType::MatrixUser,
            name: "tag".into(),
            host: Some("domain.com".into())
        },
    );
}
// Emoji shortcodes (:name: and :name@host:), including adjacency and the
// cases where a lone ':' must not start a shortcode.
#[test]
fn parse_shortcodes() {
    assert_eq!(
        parse_full(":bottom:"),
        Token::ShortcodeEmoji {
            shortcode: "bottom".into(),
            host: None,
        }
    );
    // Back-to-back shortcodes parse independently.
    assert_eq!(
        parse_full(":bottom::blobfox:"),
        Token::Sequence(vec![
            Token::ShortcodeEmoji {
                shortcode: "bottom".into(),
                host: None,
            },
            Token::ShortcodeEmoji {
                shortcode: "blobfox".into(),
                host: None,
            },
        ])
    );
    // Remote shortcode with an explicit host.
    assert_eq!(
        parse_full(":bottom@magnetar.social:"),
        Token::ShortcodeEmoji {
            shortcode: "bottom".into(),
            host: Some("magnetar.social".into()),
        }
    );
    // Missing closing/opening ':' keeps everything literal.
    assert_eq!(
        parse_full(":bottom:blobfox"),
        Token::PlainText(":bottom:blobfox".into())
    );
    assert_eq!(
        parse_full("bottom:blobfox:"),
        Token::PlainText("bottom:blobfox:".into())
    );
}
// Unicode emoji segmentation: single emoji, ZWJ sequences, and stray
// ZWJ/ZWNJ characters that must remain plain text.
#[test]
fn parse_emoji() {
    // Each standalone emoji becomes its own UnicodeEmoji token.
    assert_eq!(
        parse_full("🥺💜❤️🦊"),
        Token::Sequence(
            vec!["🥺", "💜", "❤️", "🦊"]
                .into_iter()
                .map(str::to_string)
                .map(Token::UnicodeEmoji)
                .collect::<Vec<_>>()
        )
    );
    // Trans flag, ZWJ
    assert_eq!(
        parse_full("\u{1f3f3}\u{0fe0f}\u{0200d}\u{026a7}\u{0fe0f}"),
        Token::UnicodeEmoji("\u{1f3f3}\u{0fe0f}\u{0200d}\u{026a7}\u{0fe0f}".into())
    );
    // A leading ZWJ cannot join into the following emoji.
    assert_eq!(
        parse_full("\u{0200d}\u{1f3f3}\u{0fe0f}"),
        Token::Sequence(vec![
            Token::PlainText("\u{0200d}".into()), // ZWJ
            Token::UnicodeEmoji("\u{1f3f3}\u{0fe0f}".into()), // White flag
        ])
    );
    // Trans flag, ZWNJ
    assert_eq!(
        parse_full("\u{1f3f3}\u{0fe0f}\u{0200c}\u{026a7}\u{0fe0f}"),
        Token::Sequence(vec![
            Token::UnicodeEmoji("\u{1f3f3}\u{0fe0f}".into()), // White flag
            Token::PlainText("\u{0200c}".into()), // ZWNJ
            Token::UnicodeEmoji("\u{026a7}\u{0fe0f}".into()), // Trans symbol
        ])
    );
    // Trailing ZWJs with nothing to join become plain text.
    assert_eq!(
        parse_full("\u{1f3f3}\u{0fe0f}\u{0200d}\u{0200d}\u{0200d}"),
        Token::Sequence(vec![
            Token::UnicodeEmoji("\u{1f3f3}\u{0fe0f}".into()), // White flag
            Token::PlainText("\u{0200d}\u{0200d}\u{0200d}".into()), // ZWJ
        ])
    );
}
// End-to-end: parse mmm input and serialize the token tree to the XML
// representation produced by `to_xml_string`.
#[test]
fn xml_serialization() {
    assert_eq!(
        &to_xml_string(&parse_full("***nyaaa***")).unwrap(),
        r#"<mmm><b><i>nyaaa</i></b></mmm>"#
    );
    // Mentions, functions with args, shortcodes and <plain> passthrough.
    assert_eq!(
        &to_xml_string(&parse_full(
            "@natty $[spin.speed=0.5s 🥺]:cat_attack: <plain>test</plain>"
        ))
        .unwrap(),
        r#"<mmm><mention name="natty" type="user"/> <fn name="spin" arg-speed="0.5s"><ue>🥺</ue></fn><ee>cat_attack</ee> test</mmm>"#
    );
    // Newlines survive as literal text; hashtags get their own element.
    assert_eq!(
        &to_xml_string(&parse_full(
            "Ring Galaxy AM 0644 741 from Hubble\nCredits: AURA, STScI, J. Higdon, Cornell, ESA, #NASA\n#nature #space #astrophotography"
        ))
        .unwrap(),
        r#"<mmm>Ring Galaxy AM 0644 741 from Hubble
Credits: AURA, STScI, J. Higdon, Cornell, ESA, <hashtag>NASA</hashtag>
<hashtag>nature</hashtag> <hashtag>space</hashtag> <hashtag>astrophotography</hashtag></mmm>"#
    );
    // Code blocks serialize with their language attribute.
    assert_eq!(
        &to_xml_string(&parse_full(
            r#"
```js
var x = undefined;
``` "#
        ))
        .unwrap(),
        "<mmm><code lang=\"js\">var x = undefined;</code></mmm>"
    );
}

View File

@ -1,156 +0,0 @@
use std::io::{Cursor, Write};
use quick_xml::events::{BytesText, Event};
use crate::Token;
impl Token {
    /// Recursively serializes this token and its children as XML events into
    /// `writer`. Container tokens become elements wrapping their inner
    /// content; leaf tokens become text nodes or text-content elements.
    fn write<T: Write>(&self, writer: &mut quick_xml::Writer<T>) -> quick_xml::Result<()> {
        match self {
            // Bare text node (quick_xml escapes it on write).
            Token::PlainText(plain) => {
                writer.write_event(Event::Text(BytesText::new(plain.as_str())))?;
            }
            // A sequence has no element of its own; children are written in order.
            Token::Sequence(sequence) => {
                sequence.iter().try_for_each(|item| item.write(writer))?;
            }
            Token::Quote(inner) => {
                writer
                    .create_element("quote")
                    .write_inner_content(|w| inner.write(w))?;
            }
            Token::Small(inner) => {
                writer
                    .create_element("small")
                    .write_inner_content(|w| inner.write(w))?;
            }
            Token::Bold(inner) => {
                writer
                    .create_element("b")
                    .write_inner_content(|w| inner.write(w))?;
            }
            Token::Italic(inner) => {
                writer
                    .create_element("i")
                    .write_inner_content(|w| inner.write(w))?;
            }
            Token::Center(inner) => {
                writer
                    .create_element("center")
                    .write_inner_content(|w| inner.write(w))?;
            }
            Token::Strikethrough(inner) => {
                writer
                    .create_element("s")
                    .write_inner_content(|w| inner.write(w))?;
            }
            // PlainTag is emitted identically to PlainText: as a bare text node.
            Token::PlainTag(plain) => {
                writer.write_event(Event::Text(BytesText::new(plain.as_str())))?;
            }
            Token::InlineCode(code) => {
                writer
                    .create_element("inline-code")
                    .write_text_content(BytesText::new(code))?;
            }
            Token::InlineMath(math) => {
                writer
                    .create_element("inline-math")
                    .write_text_content(BytesText::new(math))?;
            }
            // Raw URL: href attribute and the URL as the element text.
            Token::UrlRaw(url) => {
                writer
                    .create_element("a")
                    .with_attribute(("href", url.as_str()))
                    .write_text_content(BytesText::new(url))?;
            }
            Token::UrlNoEmbed(url) => {
                writer
                    .create_element("a")
                    .with_attribute(("href", url.as_str()))
                    .with_attribute(("embed", "false"))
                    .write_text_content(BytesText::new(url))?;
            }
            // Labeled link: the label is a token tree, written as inner content.
            Token::Link { label, href, embed } => {
                writer
                    .create_element("a")
                    .with_attribute(("href", href.as_str()))
                    .with_attribute(("embed", if *embed { "true" } else { "false" }))
                    .write_inner_content(|w| label.write(w))?;
            }
            // Code block: optional "lang" attribute, code as text content.
            Token::BlockCode { inner, lang } => {
                let mut ew = writer.create_element("code");
                if let Some(language) = lang {
                    ew = ew.with_attribute(("lang", language.as_str()));
                }
                ew.write_text_content(BytesText::new(inner))?;
            }
            Token::BlockMath(math) => {
                writer
                    .create_element("math")
                    .write_text_content(BytesText::new(math))?;
            }
            // mmm function token: each parameter becomes an "arg-<name>"
            // attribute; valueless flags serialize with an empty value.
            Token::Function {
                inner,
                name,
                params,
            } => {
                let mut ew = writer
                    .create_element("fn")
                    .with_attribute(("name", name.as_str()));
                for (k, v) in params {
                    ew = ew
                        .with_attribute((format!("arg-{k}").as_str(), v.as_deref().unwrap_or("")));
                }
                ew.write_inner_content(|w| inner.write(w))?;
            }
            // Mention: empty element carrying name/type and an optional host.
            Token::Mention {
                name,
                host,
                mention_type,
            } => {
                let mut ew = writer
                    .create_element("mention")
                    .with_attribute(("name", name.as_str()))
                    .with_attribute(("type", mention_type.into()));
                if let Some(host) = host {
                    ew = ew.with_attribute(("host", host.as_str()));
                }
                ew.write_empty()?;
            }
            Token::UnicodeEmoji(text) => {
                writer
                    .create_element("ue")
                    .write_text_content(BytesText::new(text))?;
            }
            // Shortcode emoji: shortcode as text, optional host attribute.
            Token::ShortcodeEmoji { shortcode, host } => {
                let mut ew = writer.create_element("ee");
                if let Some(host) = host {
                    ew = ew.with_attribute(("host", host.as_str()));
                }
                ew.write_text_content(BytesText::new(shortcode))?;
            }
            Token::Hashtag(tag) => {
                writer
                    .create_element("hashtag")
                    .write_text_content(BytesText::new(tag.as_str()))?;
            }
        }
        Ok(())
    }
}
/// Serializes a token tree into an XML string, wrapped in a root `<mmm>`
/// element.
///
/// # Errors
/// Returns a `quick_xml` error if event writing fails or the produced bytes
/// are not valid UTF-8.
pub fn to_xml_string(token: &Token) -> quick_xml::Result<String> {
    let mut writer = quick_xml::Writer::new(Cursor::new(Vec::new()));
    let root = writer.create_element("mmm");
    root.write_inner_content(|w| token.write(w))?;
    let bytes = writer.into_inner().into_inner();
    Ok(String::from_utf8(bytes)?)
}

View File

@ -41,6 +41,8 @@ pub enum RetriableLocalDeliveryTaskError {
JsonSerialization(String),
#[error("User cache error: {0}")]
UserCache(String),
#[error("Task join error: {0}")]
JoinError(String),
}
#[derive(Debug, Error, Serialize)]
@ -178,6 +180,9 @@ impl From<ApClientError> for DeliveryErrorKind {
ApClientError::Utf8ParseError(e) => {
RetriableRemoteDeliveryError::Utf8(e.to_string()).into()
}
ApClientError::JoinError(e) => {
RetriableLocalDeliveryTaskError::JoinError(e.to_string()).into()
}
}
}
}
@ -188,7 +193,7 @@ impl From<FederationClientError> for DeliveryErrorKind {
FederationClientError::TimeoutError => RetriableRemoteDeliveryError::Timeout(
"Reached maximum time for response".to_string(),
)
.into(),
.into(),
FederationClientError::ReqwestError(e) => e.into(),
FederationClientError::JsonError(e) => {
RetriableRemoteDeliveryError::Json(e.to_string()).into()

View File

@ -2,9 +2,12 @@ use std::collections::HashMap;
use std::sync::Arc;
use cached::{Cached, TimedCache};
use futures_util::future::OptionFuture;
use miette::Diagnostic;
use thiserror::Error;
use tokio::sync::Mutex;
use tokio::task;
use tokio::task::JoinError;
use tracing::error;
use crate::web::ApiError;
@ -27,6 +30,8 @@ pub enum UserCacheError {
PrivateKeyParseError(#[from] ApHttpPrivateKeyParseError),
#[error("Public key parse error: {0}")]
PublicKeyParseError(#[from] ApHttpPublicKeyParseError),
#[error("Task join error: {0}")]
JoinError(#[from] JoinError),
}
impl From<UserCacheError> for ApiError {
@ -216,8 +221,18 @@ impl LocalUserCacheService {
return Ok(Some(user));
}
self.map_cache_user(self.db.get_user_for_cache_by_token(token).await?.map(CachedLocalUser::try_from).transpose()?)
.await
let fetch: OptionFuture<_> = self.db
.get_user_for_cache_by_token(token)
.await?
.map(|p| task::spawn_blocking(move || CachedLocalUser::try_from(p)))
.into();
self.map_cache_user(
fetch
.await
.transpose()?
.transpose()?
).await
}
pub async fn get_by_id(
@ -230,6 +245,18 @@ impl LocalUserCacheService {
return Ok(Some(user));
}
self.map_cache_user(self.db.get_user_for_cache_by_id(id).await?.map(CachedLocalUser::try_from).transpose()?).await
let fetch: OptionFuture<_> = self.db
.get_user_for_cache_by_id(id)
.await?
.map(|p| task::spawn_blocking(move || CachedLocalUser::try_from(p)))
.into();
self.map_cache_user(
fetch
.await
.transpose()?
.transpose()?
).await
}
}