mirror of https://github.com/PluralKit/PluralKit.git
synced 2026-02-17 03:00:13 +00:00

Compare commits: 10 commits, f848e260db ... b8b98e7fd0
| Author | SHA1 | Date |
|---|---|---|
| | b8b98e7fd0 | |
| | 4ba5b785e5 | |
| | 4c940fa925 | |
| | 214f164fbc | |
| | a49dbefe83 | |
| | dd14e7daef | |
| | 9c1acd84e1 | |
| | 2d40a1ee16 | |
| | 2248403140 | |
| | b3eb108a13 | |
55 changed files with 1108 additions and 185 deletions
Cargo.lock (generated) — 34 lines changed

@@ -88,13 +88,17 @@ dependencies = [
  "lazy_static",
  "libpk",
  "metrics",
+ "pk_macros",
  "pluralkit_models",
  "reqwest 0.12.15",
  "reverse-proxy-service",
+ "sea-query",
+ "sea-query-sqlx",
  "serde",
  "serde_json",
  "serde_urlencoded",
  "sqlx",
+ "subtle",
  "tokio",
  "tower 0.4.13",
  "tower-http",

@@ -2529,6 +2533,7 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
 name = "pk_macros"
 version = "0.1.0"
 dependencies = [
+ "prettyplease",
  "proc-macro2",
  "quote",
  "syn",

@@ -2610,9 +2615,9 @@ dependencies = [

 [[package]]
 name = "prettyplease"
-version = "0.2.31"
+version = "0.2.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5316f57387668042f561aae71480de936257848f9c43ce528e311d89a07cadeb"
+checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2"
 dependencies = [
  "proc-macro2",
  "syn",

@@ -3311,6 +3316,7 @@ dependencies = [
  "chrono",
  "croner",
  "fred",
+ "lazy_static",
  "libpk",
  "metrics",
  "num-format",

@@ -3341,19 +3347,20 @@ dependencies = [

 [[package]]
 name = "sea-query"
-version = "0.32.3"
+version = "1.0.0-rc.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f5a24d8b9fcd2674a6c878a3d871f4f1380c6c43cc3718728ac96864d888458e"
+checksum = "ab621a8d8b03a3e513ea075f71aa26830a55c977d7b40f09e825bb91910db823"
 dependencies = [
+ "chrono",
  "inherent",
  "sea-query-derive",
 ]

 [[package]]
 name = "sea-query-derive"
-version = "0.4.3"
+version = "1.0.0-rc.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bae0cbad6ab996955664982739354128c58d16e126114fe88c2a493642502aab"
+checksum = "217e9422de35f26c16c5f671fce3c075a65e10322068dbc66078428634af6195"
 dependencies = [
  "darling",
  "heck 0.4.1",

@@ -3363,6 +3370,17 @@ dependencies = [
  "thiserror 2.0.12",
 ]

+[[package]]
+name = "sea-query-sqlx"
+version = "0.8.0-rc.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed5eb19495858d8ae3663387a4f5298516c6f0171a7ca5681055450f190236b8"
+dependencies = [
+ "chrono",
+ "sea-query",
+ "sqlx",
+]
+
 [[package]]
 name = "security-framework"
 version = "3.2.0"

@@ -3963,9 +3981,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"

 [[package]]
 name = "syn"
-version = "2.0.100"
+version = "2.0.104"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
+checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -14,6 +14,7 @@ futures = "0.3.30"
 lazy_static = "1.4.0"
 metrics = "0.23.0"
 reqwest = { version = "0.12.7", default-features = false, features = ["rustls-tls", "trust-dns"] }
+sea-query = { version = "1.0.0-rc.10", features = ["with-chrono"] }
 sentry = { version = "0.36.0", default-features = false, features = ["backtrace", "contexts", "panic", "debug-images", "reqwest", "rustls"] } # replace native-tls with rustls
 serde = { version = "1.0.196", features = ["derive"] }
 serde_json = "1.0.117"
@@ -116,7 +116,7 @@ public class ErrorMessageService
 return new EmbedBuilder()
     .Color(0xE74C3C)
     .Title("Internal error occurred")
-    .Description($"For support, please send the error code above as text in {channelInfo} with a description of what you were doing at the time.")
+    .Description($"**If you need support,** please send/forward the error code above **as text** in {channelInfo} with a description of what you were doing at the time.")
     .Footer(new Embed.EmbedFooter(errorId))
     .Timestamp(now.ToDateTimeOffset().ToString("O"))
     .Build();
@@ -4,7 +4,7 @@ WORKDIR /build

 RUN apk add rustup build-base
 # todo: arm64 target
-RUN rustup-init --default-host x86_64-unknown-linux-musl --default-toolchain nightly-2024-08-20 --profile default -y
+RUN rustup-init --default-host x86_64-unknown-linux-musl --default-toolchain nightly-2025-08-22 --profile default -y

 ENV PATH=/root/.cargo/bin:$PATH
 ENV RUSTFLAGS='-C link-arg=-s'
@@ -42,5 +42,10 @@ build api
 build dispatch
 build gateway
 build avatars "COPY .docker-bin/avatar_cleanup /bin/avatar_cleanup"
-build scheduled_tasks
+build scheduled_tasks "$(cat <<EOF
+RUN wget https://github.com/wal-g/wal-g/releases/download/v3.0.7/wal-g-pg-ubuntu-22.04-amd64 -O /usr/local/bin/wal-g
+RUN chmod +x /usr/local/bin/wal-g
+RUN apk add gcompat
+EOF
+)"
 build gdpr_worker
@@ -1,10 +1,11 @@
 [package]
 name = "api"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 pluralkit_models = { path = "../models" }
+pk_macros = { path = "../macros" }
 libpk = { path = "../libpk" }

 anyhow = { workspace = true }

@@ -13,6 +14,7 @@ fred = { workspace = true }
 lazy_static = { workspace = true }
 metrics = { workspace = true }
 reqwest = { workspace = true }
+sea-query = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 sqlx = { workspace = true }

@@ -26,3 +28,5 @@ reverse-proxy-service = { version = "0.2.1", features = ["axum"] }
 serde_urlencoded = "0.7.1"
 tower = "0.4.13"
 tower-http = { version = "0.5.2", features = ["catch-panic"] }
+subtle = "2.6.1"
+sea-query-sqlx = { version = "0.8.0-rc.8", features = ["sqlx-postgres", "with-chrono"] }
@@ -7,11 +7,16 @@ pub const INTERNAL_APPID_HEADER: &'static str = "x-pluralkit-appid";
 pub struct AuthState {
     system_id: Option<i32>,
     app_id: Option<i32>,
+    internal: bool,
 }

 impl AuthState {
-    pub fn new(system_id: Option<i32>, app_id: Option<i32>) -> Self {
-        Self { system_id, app_id }
+    pub fn new(system_id: Option<i32>, app_id: Option<i32>, internal: bool) -> Self {
+        Self {
+            system_id,
+            app_id,
+            internal,
+        }
     }

     pub fn system_id(&self) -> Option<i32> {

@@ -22,6 +27,10 @@ impl AuthState {
         self.app_id
     }

+    pub fn internal(&self) -> bool {
+        self.internal
+    }
+
     pub fn access_level_for(&self, a: &impl Authable) -> PrivacyLevel {
         if self
             .system_id
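A minimal, standalone sketch (simplified types; `require_internal` and its error string are illustrative, not part of this change) of how a route guard could consume the new `internal` flag:

    #[allow(dead_code)]
    struct AuthState {
        system_id: Option<i32>,
        app_id: Option<i32>,
        internal: bool,
    }

    impl AuthState {
        fn internal(&self) -> bool {
            self.internal
        }
    }

    // Reject non-internal callers before touching any internal-only route logic.
    fn require_internal(auth: &AuthState) -> Result<(), &'static str> {
        if auth.internal() {
            Ok(())
        } else {
            Err("401: Missing or invalid Authorization header")
        }
    }

    fn main() {
        let auth = AuthState { system_id: None, app_id: Some(1), internal: true };
        assert!(require_internal(&auth).is_ok());
    }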
crates/api/src/endpoints/bulk.rs (new file, 211 lines)

@@ -0,0 +1,211 @@
use axum::{
    Extension, Json,
    extract::{Json as ExtractJson, State},
    response::IntoResponse,
};
use pk_macros::api_endpoint;
use sea_query::{Expr, ExprTrait, PostgresQueryBuilder};
use sea_query_sqlx::SqlxBinder;
use serde_json::{Value, json};

use pluralkit_models::{PKGroup, PKGroupPatch, PKMember, PKMemberPatch, PKSystem};

use crate::{
    ApiContext,
    auth::AuthState,
    error::{
        GENERIC_AUTH_ERROR, NOT_OWN_GROUP, NOT_OWN_MEMBER, PKError, TARGET_GROUP_NOT_FOUND,
        TARGET_MEMBER_NOT_FOUND,
    },
};

#[derive(serde::Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BulkActionRequestFilter {
    All,
    Ids { ids: Vec<String> },
    Connection { id: String },
}

#[derive(serde::Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BulkActionRequest {
    Member {
        filter: BulkActionRequestFilter,
        patch: PKMemberPatch,
    },
    Group {
        filter: BulkActionRequestFilter,
        patch: PKGroupPatch,
    },
}

#[api_endpoint]
pub async fn bulk(
    Extension(auth): Extension<AuthState>,
    State(ctx): State<ApiContext>,
    ExtractJson(req): ExtractJson<BulkActionRequest>,
) -> Json<Value> {
    let Some(system_id) = auth.system_id() else {
        return Err(GENERIC_AUTH_ERROR);
    };

    #[derive(sqlx::FromRow)]
    struct Ider {
        id: i32,
        hid: String,
        uuid: String,
    }

    #[derive(sqlx::FromRow)]
    struct GroupMemberEntry {
        member_id: i32,
        group_id: i32,
    }

    #[allow(dead_code)]
    #[derive(sqlx::FromRow)]
    struct OnlyIder {
        id: i32,
    }

    println!("BulkActionRequest::{req:#?}");
    match req {
        BulkActionRequest::Member { filter, mut patch } => {
            patch.validate_bulk();
            if patch.errors().len() > 0 {
                return Err(PKError::from_validation_errors(patch.errors()));
            }

            let ids: Vec<i32> = match filter {
                BulkActionRequestFilter::All => {
                    let ids: Vec<Ider> = sqlx::query_as("select id from members where system = $1")
                        .bind(system_id as i64)
                        .fetch_all(&ctx.db)
                        .await?;
                    ids.iter().map(|v| v.id).collect()
                }
                BulkActionRequestFilter::Ids { ids } => {
                    let members: Vec<PKMember> = sqlx::query_as(
                        "select * from members where hid = any($1::array) or uuid::text = any($1::array)",
                    )
                    .bind(&ids)
                    .fetch_all(&ctx.db)
                    .await?;

                    // todo: better errors
                    if members.len() != ids.len() {
                        return Err(TARGET_MEMBER_NOT_FOUND);
                    }

                    if members.iter().any(|m| m.system != system_id) {
                        return Err(NOT_OWN_MEMBER);
                    }

                    members.iter().map(|m| m.id).collect()
                }
                BulkActionRequestFilter::Connection { id } => {
                    let Some(group): Option<PKGroup> =
                        sqlx::query_as("select * from groups where hid = $1 or uuid::text = $1")
                            .bind(id)
                            .fetch_optional(&ctx.db)
                            .await?
                    else {
                        return Err(TARGET_GROUP_NOT_FOUND);
                    };

                    if group.system != system_id {
                        return Err(NOT_OWN_GROUP);
                    }

                    let entries: Vec<GroupMemberEntry> =
                        sqlx::query_as("select * from group_members where group_id = $1")
                            .bind(group.id)
                            .fetch_all(&ctx.db)
                            .await?;

                    entries.iter().map(|v| v.member_id).collect()
                }
            };

            let (q, pms) = patch
                .to_sql()
                .table("members") // todo: this should be in the model definition
                .and_where(Expr::col("id").is_in(ids))
                .returning_col("id")
                .build_sqlx(PostgresQueryBuilder);

            let res: Vec<OnlyIder> = sqlx::query_as_with(&q, pms).fetch_all(&ctx.db).await?;
            Ok(Json(json! {{ "updated": res.len() }}))
        }
        BulkActionRequest::Group { filter, mut patch } => {
            patch.validate_bulk();
            if patch.errors().len() > 0 {
                return Err(PKError::from_validation_errors(patch.errors()));
            }

            let ids: Vec<i32> = match filter {
                BulkActionRequestFilter::All => {
                    let ids: Vec<Ider> = sqlx::query_as("select id from groups where system = $1")
                        .bind(system_id as i64)
                        .fetch_all(&ctx.db)
                        .await?;
                    ids.iter().map(|v| v.id).collect()
                }
                BulkActionRequestFilter::Ids { ids } => {
                    let groups: Vec<PKGroup> = sqlx::query_as(
                        "select * from groups where hid = any($1) or uuid::text = any($1)",
                    )
                    .bind(&ids)
                    .fetch_all(&ctx.db)
                    .await?;

                    // todo: better errors
                    if groups.len() != ids.len() {
                        return Err(TARGET_GROUP_NOT_FOUND);
                    }

                    if groups.iter().any(|m| m.system != system_id) {
                        return Err(NOT_OWN_GROUP);
                    }

                    groups.iter().map(|m| m.id).collect()
                }
                BulkActionRequestFilter::Connection { id } => {
                    let Some(member): Option<PKMember> =
                        sqlx::query_as("select * from members where hid = $1 or uuid::text = $1")
                            .bind(id)
                            .fetch_optional(&ctx.db)
                            .await?
                    else {
                        return Err(TARGET_MEMBER_NOT_FOUND);
                    };

                    if member.system != system_id {
                        return Err(NOT_OWN_MEMBER);
                    }

                    let entries: Vec<GroupMemberEntry> =
                        sqlx::query_as("select * from group_members where member_id = $1")
                            .bind(member.id)
                            .fetch_all(&ctx.db)
                            .await?;

                    entries.iter().map(|v| v.group_id).collect()
                }
            };

            let (q, pms) = patch
                .to_sql()
                .table("groups") // todo: this should be in the model definition
                .and_where(Expr::col("id").is_in(ids))
                .returning_col("id")
                .build_sqlx(PostgresQueryBuilder);

            println!("{q:#?} {pms:#?}");

            let res: Vec<OnlyIder> = sqlx::query_as_with(&q, pms).fetch_all(&ctx.db).await?;
            Ok(Json(json! {{ "updated": res.len() }}))
        }
    }
}
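For reference, a standalone sketch of the wire format those internally-tagged enums accept (the `patch` keys shown are illustrative; the real `PKMemberPatch` defines its own fields):

    use serde::Deserialize;
    use serde_json::json;

    #[derive(Deserialize, Debug)]
    #[serde(tag = "type", rename_all = "snake_case")]
    enum Filter {
        All,
        Ids { ids: Vec<String> },
        Connection { id: String },
    }

    #[derive(Deserialize, Debug)]
    #[serde(tag = "type", rename_all = "snake_case")]
    enum Request {
        Member { filter: Filter, patch: serde_json::Value },
        Group { filter: Filter, patch: serde_json::Value },
    }

    fn main() {
        // Patch every member connected to one group, in a single request.
        let body = json!({
            "type": "member",
            "filter": { "type": "connection", "id": "abcde" },
            "patch": { "display_name": "Example" }
        });
        let req: Request = serde_json::from_value(body).unwrap();
        println!("{req:#?}");
    }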
@@ -1,2 +1,3 @@
+pub mod bulk;
 pub mod private;
 pub mod system;
@@ -2,10 +2,12 @@ use crate::ApiContext;
 use axum::{extract::State, response::Json};
 use fred::interfaces::*;
 use libpk::state::ShardState;
+use pk_macros::api_endpoint;
 use serde::Deserialize;
-use serde_json::{json, Value};
+use serde_json::{Value, json};
 use std::collections::HashMap;

+#[allow(dead_code)]
 #[derive(Deserialize)]
 #[serde(rename_all = "PascalCase")]
 struct ClusterStats {

@@ -13,34 +15,33 @@ struct ClusterStats {
     pub channel_count: i32,
 }

+#[api_endpoint]
 pub async fn discord_state(State(ctx): State<ApiContext>) -> Json<Value> {
     let mut shard_status = ctx
         .redis
         .hgetall::<HashMap<String, String>, &str>("pluralkit:shardstatus")
-        .await
-        .unwrap()
+        .await?
         .values()
         .map(|v| serde_json::from_str(v).expect("could not deserialize shard"))
         .collect::<Vec<ShardState>>();

     shard_status.sort_by(|a, b| b.shard_id.cmp(&a.shard_id));

-    Json(json!({
+    Ok(Json(json!({
         "shards": shard_status,
-    }))
+    })))
 }

+#[api_endpoint]
 pub async fn meta(State(ctx): State<ApiContext>) -> Json<Value> {
     let stats = serde_json::from_str::<Value>(
         ctx.redis
             .get::<String, &'static str>("statsapi")
-            .await
-            .unwrap()
+            .await?
             .as_str(),
-    )
-    .unwrap();
+    )?;

-    Json(stats)
+    Ok(Json(stats))
 }

 use std::time::Duration;
@@ -1,22 +1,18 @@
-use axum::{
-    extract::State,
-    http::StatusCode,
-    response::{IntoResponse, Response},
-    Extension, Json,
-};
-use serde_json::json;
+use axum::{Extension, Json, extract::State, response::IntoResponse};
+use pk_macros::api_endpoint;
+use serde_json::{Value, json};
 use sqlx::Postgres;
-use tracing::error;

 use pluralkit_models::{PKSystem, PKSystemConfig, PrivacyLevel};

-use crate::{auth::AuthState, util::json_err, ApiContext};
+use crate::{ApiContext, auth::AuthState, error::fail};

+#[api_endpoint]
 pub async fn get_system_settings(
     Extension(auth): Extension<AuthState>,
     Extension(system): Extension<PKSystem>,
     State(ctx): State<ApiContext>,
-) -> Response {
+) -> Json<Value> {
     let access_level = auth.access_level_for(&system);

     let mut config = match sqlx::query_as::<Postgres, PKSystemConfig>(

@@ -27,23 +23,11 @@ pub async fn get_system_settings(
     .await
     {
         Ok(Some(config)) => config,
-        Ok(None) => {
-            error!(
-                system = system.id,
-                "failed to find system config for existing system"
-            );
-            return json_err(
-                StatusCode::INTERNAL_SERVER_ERROR,
-                r#"{"message": "500: Internal Server Error", "code": 0}"#.to_string(),
-            );
-        }
-        Err(err) => {
-            error!(?err, "failed to query system config");
-            return json_err(
-                StatusCode::INTERNAL_SERVER_ERROR,
-                r#"{"message": "500: Internal Server Error", "code": 0}"#.to_string(),
-            );
-        }
+        Ok(None) => fail!(
+            system = system.id,
+            "failed to find system config for existing system"
+        ),
+        Err(err) => fail!(?err, "failed to query system config"),
     };

     // fix this

@@ -51,7 +35,7 @@ pub async fn get_system_settings(
         config.name_format = Some("{name} {tag}".to_string());
     }

-    Json(&match access_level {
+    Ok(Json(match access_level {
         PrivacyLevel::Private => config.to_json(),
         PrivacyLevel::Public => json!({
             "pings_enabled": config.pings_enabled,

@@ -64,6 +48,5 @@ pub async fn get_system_settings(
             "proxy_switch": config.proxy_switch,
             "name_format": config.name_format,
         }),
-    })
-    .into_response()
+    }))
 }
@@ -1,13 +1,20 @@
-use axum::http::StatusCode;
+use axum::{
+    http::StatusCode,
+    response::{IntoResponse, Response},
+};
+use pluralkit_models::ValidationError;
 use std::fmt;

-// todo
-#[allow(dead_code)]
+// todo: model parse errors
 #[derive(Debug)]
 pub struct PKError {
     pub response_code: StatusCode,
     pub json_code: i32,
     pub message: &'static str,
+
+    pub errors: Vec<ValidationError>,
+
+    pub inner: Option<anyhow::Error>,
 }

 impl fmt::Display for PKError {

@@ -16,17 +23,95 @@ impl fmt::Display for PKError {
     }
 }

-impl std::error::Error for PKError {}
+impl Clone for PKError {
+    fn clone(&self) -> PKError {
+        if self.inner.is_some() {
+            panic!("cannot clone PKError with inner error");
+        }
+        PKError {
+            response_code: self.response_code,
+            json_code: self.json_code,
+            message: self.message,
+            inner: None,
+            errors: self.errors.clone(),
+        }
+    }
+}
+
+// can't `impl From<Vec<ValidationError>>`
+// because "upstream crate may add a new impl" >:(
+impl PKError {
+    pub fn from_validation_errors(errs: Vec<ValidationError>) -> Self {
+        Self {
+            message: "Error parsing JSON model",
+            json_code: 40001,
+            errors: errs,
+            response_code: StatusCode::BAD_REQUEST,
+            inner: None,
+        }
+    }
+}
+
+impl<E> From<E> for PKError
+where
+    E: std::fmt::Display + Into<anyhow::Error>,
+{
+    fn from(err: E) -> Self {
+        let mut res = GENERIC_SERVER_ERROR.clone();
+        res.inner = Some(err.into());
+        res
+    }
+}
+
+impl IntoResponse for PKError {
+    fn into_response(self) -> Response {
+        if let Some(inner) = self.inner {
+            tracing::error!(?inner, "error returned from handler");
+        }
+        let json = if self.errors.len() > 0 {
+            serde_json::json!({
+                "message": self.message,
+                "code": self.json_code,
+                "errors": self.errors,
+            })
+        } else {
+            serde_json::json!({
+                "message": self.message,
+                "code": self.json_code,
+            })
+        };
+        crate::util::json_err(self.response_code, serde_json::to_string(&json).unwrap())
+    }
+}
+
+macro_rules! fail {
+    ($($stuff:tt)+) => {{
+        tracing::error!($($stuff)+);
+        return Err(crate::error::GENERIC_SERVER_ERROR);
+    }};
+}
+
+pub(crate) use fail;
+
-#[allow(unused_macros)]
 macro_rules! define_error {
     ( $name:ident, $response_code:expr, $json_code:expr, $message:expr ) => {
-        const $name: PKError = PKError {
+        #[allow(dead_code)]
+        pub const $name: PKError = PKError {
             response_code: $response_code,
             json_code: $json_code,
             message: $message,
+            inner: None,
+            errors: vec![],
         };
     };
 }

-// define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
 define_error! { GENERIC_AUTH_ERROR, StatusCode::UNAUTHORIZED, 0, "401: Missing or invalid Authorization header" }
+define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
+define_error! { GENERIC_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR, 0, "500: Internal Server Error" }
+
+define_error! { NOT_OWN_MEMBER, StatusCode::FORBIDDEN, 30006, "Target member is not part of your system." }
+define_error! { NOT_OWN_GROUP, StatusCode::FORBIDDEN, 30007, "Target group is not part of your system." }
+
+define_error! { TARGET_MEMBER_NOT_FOUND, StatusCode::BAD_REQUEST, 40010, "Target member not found." }
+define_error! { TARGET_GROUP_NOT_FOUND, StatusCode::BAD_REQUEST, 40011, "Target group not found." }
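The blanket `From` above is what lets handler bodies use `?` freely. A standalone sketch of the same conversion pattern (simplified `PkError`; the real type also carries status and validation data). Note the target type must not implement `std::error::Error`, or the blanket impl would overlap with the reflexive `From` via `anyhow` — likely why this change swapped `impl std::error::Error for PKError` for a hand-written `Clone`:

    use std::fmt;

    #[derive(Debug)]
    struct PkError {
        message: &'static str,
        inner: Option<anyhow::Error>,
    }

    // Any displayable error that converts into anyhow::Error becomes a
    // generic 500-style PkError carrying the original for logging.
    impl<E> From<E> for PkError
    where
        E: fmt::Display + Into<anyhow::Error>,
    {
        fn from(err: E) -> Self {
            PkError { message: "500: Internal Server Error", inner: Some(err.into()) }
        }
    }

    fn handler() -> Result<i32, PkError> {
        // `?` converts the ParseIntError via the blanket impl above.
        Ok("not a number".parse::<i32>()?)
    }

    fn main() {
        let err = handler().unwrap_err();
        println!("{} ({:?})", err.message, err.inner);
    }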
@@ -1,19 +1,19 @@
-#![feature(let_chains)]
-
 use auth::{AuthState, INTERNAL_APPID_HEADER, INTERNAL_SYSTEMID_HEADER};
 use axum::{
+    Extension, Router,
     body::Body,
     extract::{Request as ExtractRequest, State},
-    http::{Response, StatusCode, Uri},
-    response::IntoResponse,
+    http::Uri,
+    response::{IntoResponse, Response},
     routing::{delete, get, patch, post},
-    Extension, Router,
 };
 use hyper_util::{
-    client::legacy::{connect::HttpConnector, Client},
+    client::legacy::{Client, connect::HttpConnector},
     rt::TokioExecutor,
 };
-use tracing::{error, info};
+use tracing::info;
+
+use pk_macros::api_endpoint;

 mod auth;
 mod endpoints;

@@ -30,11 +30,12 @@ pub struct ApiContext {
     rproxy_client: Client<HttpConnector, Body>,
 }

+#[api_endpoint]
 async fn rproxy(
     Extension(auth): Extension<AuthState>,
     State(ctx): State<ApiContext>,
     mut req: ExtractRequest<Body>,
-) -> Result<Response<Body>, StatusCode> {
+) -> Response {
     let path = req.uri().path();
     let path_query = req
         .uri()

@@ -59,15 +60,7 @@ async fn rproxy(
         headers.append(INTERNAL_APPID_HEADER, aid.into());
     }

-    Ok(ctx
-        .rproxy_client
-        .request(req)
-        .await
-        .map_err(|error| {
-            error!(?error, "failed to serve reverse proxy to dotnet-api");
-            StatusCode::BAD_GATEWAY
-        })?
-        .into_response())
+    Ok(ctx.rproxy_client.request(req).await?.into_response())
 }

 // this function is manually formatted for easier legibility of route_services

@@ -122,6 +115,8 @@ fn router(ctx: ApiContext) -> Router {

     .route("/v2/messages/{message_id}", get(rproxy))

+    .route("/v2/bulk", post(endpoints::bulk::bulk))
+
     .route("/private/bulk_privacy/member", post(rproxy))
     .route("/private/bulk_privacy/group", post(rproxy))
     .route("/private/discord/callback", post(rproxy))
@@ -5,10 +5,12 @@ use axum::{
     response::Response,
 };

+use subtle::ConstantTimeEq;
+
 use tracing::error;

 use crate::auth::AuthState;
-use crate::{util::json_err, ApiContext};
+use crate::{ApiContext, util::json_err};

 pub async fn auth(State(ctx): State<ApiContext>, mut req: Request, next: Next) -> Response {
     let mut authed_system_id: Option<i32> = None;

@@ -48,15 +50,31 @@ pub async fn auth(State(ctx): State<ApiContext>, mut req: Request, next: Next) -> Response {
         .expect("missing api config")
         .temp_token2
         .as_ref()
-        // this is NOT how you validate tokens
-        // but this is low abuse risk so we're keeping it for now
-        && app_auth_header == config_token2
+        && app_auth_header
+            .as_bytes()
+            .ct_eq(config_token2.as_bytes())
+            .into()
     {
         authed_app_id = Some(1);
     }

+    // todo: fix syntax
+    let internal = if req.headers().get("x-pluralkit-client-ip").is_none()
+        && let Some(auth_header) = req
+            .headers()
+            .get("x-pluralkit-internalauth")
+            .map(|h| h.to_str().ok())
+            .flatten()
+        && let Some(real_token) = libpk::config.internal_auth.clone()
+        && auth_header.as_bytes().ct_eq(real_token.as_bytes()).into()
+    {
+        true
+    } else {
+        false
+    };
+
     req.extensions_mut()
-        .insert(AuthState::new(authed_system_id, authed_app_id));
+        .insert(AuthState::new(authed_system_id, authed_app_id, internal));

     next.run(req).await
 }
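A standalone sketch of the comparison swap above: `==` on byte strings can return at the first mismatch, so response timing may reveal how many leading token bytes were correct, while `subtle::ConstantTimeEq` examines every byte (length differences are still visible, but token lengths aren't secret here):

    use subtle::ConstantTimeEq;

    // Constant-time equality for bearer-style tokens.
    fn tokens_match(presented: &[u8], expected: &[u8]) -> bool {
        presented.ct_eq(expected).into()
    }

    fn main() {
        assert!(tokens_match(b"correct-token", b"correct-token"));
        assert!(!tokens_match(b"guessed-token", b"correct-token"));
    }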
@@ -2,7 +2,7 @@ use std::time::Instant;

 use axum::{extract::MatchedPath, extract::Request, middleware::Next, response::Response};
 use metrics::{counter, histogram};
-use tracing::{info, span, warn, Instrument, Level};
+use tracing::{Instrument, Level, info, span, warn};

 use crate::{auth::AuthState, util::header_or_unknown};
@@ -6,11 +6,11 @@ use axum::{
     routing::url_params::UrlParams,
 };

-use sqlx::{types::Uuid, Postgres};
+use sqlx::{Postgres, types::Uuid};
 use tracing::error;

 use crate::auth::AuthState;
-use crate::{util::json_err, ApiContext};
+use crate::{ApiContext, util::json_err};
 use pluralkit_models::PKSystem;

 // move this somewhere else

@@ -31,7 +31,7 @@ pub async fn params(State(ctx): State<ApiContext>, mut req: Request, next: Next)
             StatusCode::BAD_REQUEST,
             r#"{"message":"400: Bad Request","code": 0}"#.to_string(),
         )
-        .into()
+        .into();
     }
 };
@@ -45,21 +45,6 @@ pub fn ratelimiter<F, T>(f: F) -> FromFnLayer<F, Option<RedisPool>, T> {

     tokio::spawn(async move { handle });

-    let rscript = r.clone();
-    tokio::spawn(async move {
-        if let Ok(()) = rscript.wait_for_connect().await {
-            match rscript
-                .script_load::<String, String>(LUA_SCRIPT.to_string())
-                .await
-            {
-                Ok(_) => info!("connected to redis for request rate limiting"),
-                Err(error) => error!(?error, "could not load redis script"),
-            }
-        } else {
-            error!("could not wait for connection to load redis script!");
-        }
-    });
-
     r
 });

@@ -152,12 +137,34 @@ pub async fn do_request_ratelimited(
     let period = 1; // seconds
     let cost = 1; // todo: update this for group member endpoints

+    let script_exists: Vec<usize> =
+        match redis.script_exists(vec![LUA_SCRIPT_SHA.to_string()]).await {
+            Ok(exists) => exists,
+            Err(error) => {
+                error!(?error, "failed to check ratelimit script");
+                return json_err(
+                    StatusCode::INTERNAL_SERVER_ERROR,
+                    r#"{"message": "500: internal server error", "code": 0}"#.to_string(),
+                );
+            }
+        };
+
+    if script_exists[0] != 1 {
+        match redis
+            .script_load::<String, String>(LUA_SCRIPT.to_string())
+            .await
+        {
+            Ok(_) => info!("successfully loaded ratelimit script to redis"),
+            Err(error) => {
+                error!(?error, "could not load redis script")
+            }
+        }
+    }
+
     // local rate_limit_key = KEYS[1]
     // local rate = ARGV[1]
     // local period = ARGV[2]
     // return {remaining, tostring(retry_after), reset_after}

-    // todo: check if error is script not found and reload script
     let resp = redis
         .evalsha::<(i32, String, u64), String, Vec<String>, Vec<i32>>(
             LUA_SCRIPT_SHA.to_string(),

@@ -219,7 +226,7 @@ pub async fn do_request_ratelimited(
             return response;
         }
         Err(error) => {
-            tracing::error!(?error, "error getting ratelimit info");
+            error!(?error, "error getting ratelimit info");
             return json_err(
                 StatusCode::INTERNAL_SERVER_ERROR,
                 r#"{"message": "500: internal server error", "code": 0}"#.to_string(),
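The motivation for the new per-request check: EVALSHA fails with NOSCRIPT whenever Redis loses its script cache (restart, failover), and the old load-once-at-startup task could not recover from that. A minimal sketch of the same check-then-load guard against fred's Lua interface (script body and SHA here are illustrative; the crate's real constants live beside the middleware):

    use fred::{clients::RedisPool, error::RedisError, interfaces::LuaInterface};

    const LUA_SCRIPT: &str = "return 1";
    // SHA-1 of the script body, as SCRIPT LOAD would report it.
    const LUA_SCRIPT_SHA: &str = "e0e1f9fabfc9d4800c877a703b823ac0578ff831";

    async fn ensure_script(redis: &RedisPool) -> Result<(), RedisError> {
        let exists: Vec<usize> = redis.script_exists(vec![LUA_SCRIPT_SHA.to_string()]).await?;
        if exists[0] != 1 {
            // Cache miss: re-upload so the following EVALSHA can succeed.
            redis.script_load::<String, String>(LUA_SCRIPT.to_string()).await?;
        }
        Ok(())
    }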
@@ -3,7 +3,7 @@ use axum::{
     http::{HeaderValue, StatusCode},
     response::IntoResponse,
 };
-use serde_json::{json, to_string, Value};
+use serde_json::{Value, json, to_string};
 use tracing::error;

 pub fn header_or_unknown(header: Option<&HeaderValue>) -> &str {
@@ -1,7 +1,7 @@
 [package]
 name = "avatars"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [[bin]]
 name = "avatar_cleanup"
@@ -8,10 +8,10 @@ use anyhow::Context;
 use axum::extract::State;
 use axum::routing::get;
 use axum::{
+    Json, Router,
     http::StatusCode,
     response::{IntoResponse, Response},
     routing::post,
-    Json, Router,
 };
 use libpk::_config::AvatarsConfig;
 use libpk::db::repository::avatars as db;

@@ -153,7 +153,7 @@ async fn verify(
     )
     .await?;

-    let encoded = process::process_async(result.data, req.kind).await?;
+    process::process_async(result.data, req.kind).await?;

     Ok(())
 }
@@ -4,7 +4,7 @@ use std::io::Cursor;
 use std::time::Instant;
 use tracing::{debug, error, info, instrument};

-use crate::{hash::Hash, ImageKind, PKAvatarError};
+use crate::{ImageKind, PKAvatarError, hash::Hash};

 const MAX_DIMENSION: u32 = 4000;

@@ -62,7 +62,7 @@ pub async fn pull(
     let size = match response.content_length() {
         None => return Err(PKAvatarError::MissingHeader("Content-Length")),
         Some(size) if size > MAX_SIZE => {
-            return Err(PKAvatarError::ImageFileSizeTooLarge(size, MAX_SIZE))
+            return Err(PKAvatarError::ImageFileSizeTooLarge(size, MAX_SIZE));
         }
         Some(size) => size,
     };

@@ -162,7 +162,7 @@ pub fn parse_url(url: &str) -> anyhow::Result<ParsedUrl> {
             attachment_id: 0,
             filename: "".to_string(),
             full_url: url.to_string(),
-        })
+        });
     }
     _ => anyhow::bail!("not a discord cdn url"),
 }
@@ -1,7 +1,7 @@
 [package]
 name = "dispatch"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 anyhow = { workspace = true }
@@ -1,7 +1,7 @@
 use std::time::Instant;

 use axum::{extract::MatchedPath, extract::Request, middleware::Next, response::Response};
-use tracing::{info, span, warn, Instrument, Level};
+use tracing::{Instrument, Level, info, span, warn};

 // log any requests that take longer than 2 seconds
 // todo: change as necessary
@@ -5,17 +5,16 @@ use hickory_client::{
     rr::{DNSClass, Name, RData, RecordType},
     udp::UdpClientStream,
 };
-use reqwest::{redirect::Policy, StatusCode};
+use reqwest::{StatusCode, redirect::Policy};
 use std::{
     net::{Ipv4Addr, SocketAddr, SocketAddrV4},
     sync::Arc,
     time::Duration,
 };
 use tokio::{net::UdpSocket, sync::RwLock};
-use tracing::{debug, error, info};
-use tracing_subscriber::EnvFilter;
+use tracing::{debug, error};

-use axum::{extract::State, http::Uri, routing::post, Json, Router};
+use axum::{Json, Router, extract::State, http::Uri, routing::post};

 mod logger;

@@ -128,7 +127,7 @@ async fn dispatch(

     match res {
         Ok(res) if res.status() != 200 => {
-            return DispatchResponse::InvalidResponseCode(res.status()).to_string()
+            return DispatchResponse::InvalidResponseCode(res.status()).to_string();
         }
         Err(error) => {
             error!(?error, url = req.url.clone(), "failed to fetch");
@@ -1,7 +1,7 @@
 [package]
 name = "gateway"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 anyhow = { workspace = true }
@@ -1,18 +1,18 @@
 use axum::{
+    Router,
     extract::{ConnectInfo, Path, State},
     http::StatusCode,
     response::{IntoResponse, Response},
     routing::{delete, get, post},
-    Router,
 };
 use libpk::runtime_config::RuntimeConfig;
 use serde_json::{json, to_string};
 use tracing::{error, info};
-use twilight_model::id::{marker::ChannelMarker, Id};
+use twilight_model::id::{Id, marker::ChannelMarker};

 use crate::{
     discord::{
-        cache::{dm_channel, DiscordCache, DM_PERMISSIONS},
+        cache::{DM_PERMISSIONS, DiscordCache, dm_channel},
         gateway::cluster_config,
         shard_state::ShardStateManager,
     },
@@ -4,18 +4,18 @@ use serde::Serialize;
 use std::{collections::HashMap, sync::Arc};
 use tokio::sync::RwLock;
 use twilight_cache_inmemory::{
+    InMemoryCache, ResourceType,
     model::CachedMember,
     permission::{MemberRoles, RootError},
     traits::CacheableChannel,
-    InMemoryCache, ResourceType,
 };
 use twilight_gateway::Event;
 use twilight_model::{
     channel::{Channel, ChannelType},
     guild::{Guild, Member, Permissions},
     id::{
-        marker::{ChannelMarker, GuildMarker, MessageMarker, UserMarker},
         Id,
+        marker::{ChannelMarker, GuildMarker, MessageMarker, UserMarker},
     },
 };
 use twilight_util::permission_calculator::PermissionCalculator;
@@ -6,17 +6,17 @@ use std::sync::Arc;
 use tokio::sync::mpsc::Sender;
 use tracing::{error, info, warn};
 use twilight_gateway::{
-    create_iterator, CloseFrame, ConfigBuilder, Event, EventTypeFlags, Message, Shard, ShardId,
+    CloseFrame, ConfigBuilder, Event, EventTypeFlags, Message, Shard, ShardId, create_iterator,
 };
 use twilight_model::gateway::{
+    Intents,
     payload::outgoing::update_presence::UpdatePresencePayload,
     presence::{Activity, ActivityType, Status},
-    Intents,
 };

 use crate::{
-    discord::identify_queue::{self, RedisQueue},
     RUNTIME_CONFIG_KEY_EVENT_TARGET,
+    discord::identify_queue::{self, RedisQueue},
 };

 use super::cache::DiscordCache;
@@ -3,7 +3,7 @@
 // - interaction: (custom_id where not_includes "help-menu")

 use std::{
-    collections::{hash_map::Entry, HashMap},
+    collections::{HashMap, hash_map::Entry},
     net::{IpAddr, SocketAddr},
     time::Duration,
 };

@@ -15,8 +15,8 @@ use twilight_gateway::Event;
 use twilight_model::{
     application::interaction::InteractionData,
     id::{
-        marker::{ChannelMarker, MessageMarker, UserMarker},
         Id,
+        marker::{ChannelMarker, MessageMarker, UserMarker},
     },
 };

@@ -103,7 +103,13 @@ impl EventAwaiter {
             }
         }
     }
-    info!("ran event_awaiter cleanup loop, took {}us, {} reactions, {} messages, {} interactions", Instant::now().duration_since(now).as_micros(), counts.0, counts.1, counts.2);
+    info!(
+        "ran event_awaiter cleanup loop, took {}us, {} reactions, {} messages, {} interactions",
+        Instant::now().duration_since(now).as_micros(),
+        counts.0,
+        counts.1,
+        counts.2
+    );
     }
 }
@@ -4,7 +4,7 @@ use axum::{
     extract::MatchedPath, extract::Request, http::StatusCode, middleware::Next, response::Response,
 };
 use metrics::{counter, histogram};
-use tracing::{info, span, warn, Instrument, Level};
+use tracing::{Instrument, Level, info, span, warn};

 // log any requests that take longer than 2 seconds
 // todo: change as necessary
@@ -1,4 +1,3 @@
-#![feature(let_chains)]
 #![feature(if_let_guard)]
 #![feature(duration_constructors)]

@@ -10,7 +9,7 @@ use libpk::{runtime_config::RuntimeConfig, state::ShardStateEvent};
 use reqwest::{ClientBuilder, StatusCode};
 use std::{sync::Arc, time::Duration, vec::Vec};
 use tokio::{
-    signal::unix::{signal, SignalKind},
+    signal::unix::{SignalKind, signal},
     sync::mpsc::channel,
     task::JoinSet,
 };
@@ -1,7 +1,7 @@
 [package]
 name = "gdpr_worker"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 libpk = { path = "../libpk" }
@@ -1,12 +1,10 @@
-#![feature(let_chains)]
-
 use sqlx::prelude::FromRow;
 use std::{sync::Arc, time::Duration};
 use tracing::{error, info, warn};
 use twilight_http::api_error::{ApiError, GeneralApiError};
 use twilight_model::id::{
-    marker::{ChannelMarker, MessageMarker},
     Id,
+    marker::{ChannelMarker, MessageMarker},
 };

 // create table messages_gdpr_jobs (mid bigint not null references messages(mid) on delete cascade, channel bigint not null);
@@ -1,7 +1,7 @@
 [package]
 name = "libpk"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 anyhow = { workspace = true }
@@ -3,7 +3,7 @@ use lazy_static::lazy_static;
 use serde::Deserialize;
 use std::sync::Arc;

-use twilight_model::id::{marker::UserMarker, Id};
+use twilight_model::id::{Id, marker::UserMarker};

 #[derive(Clone, Deserialize, Debug)]
 pub struct ClusterSettings {

@@ -128,6 +128,9 @@ pub struct PKConfig {

     #[serde(default)]
     pub sentry_url: Option<String>,
+
+    #[serde(default)]
+    pub internal_auth: Option<String>,
 }

 impl PKConfig {

@@ -148,11 +151,11 @@ lazy_static! {
     // hacks
     if let Ok(var) = std::env::var("NOMAD_ALLOC_INDEX")
         && std::env::var("pluralkit__discord__cluster__total_nodes").is_ok() {
-        std::env::set_var("pluralkit__discord__cluster__node_id", var);
+        unsafe { std::env::set_var("pluralkit__discord__cluster__node_id", var); }
     }
     if let Ok(var) = std::env::var("STATEFULSET_NAME_FOR_INDEX")
         && std::env::var("pluralkit__discord__cluster__total_nodes").is_ok() {
-        std::env::set_var("pluralkit__discord__cluster__node_id", var.split("-").last().unwrap());
+        unsafe { std::env::set_var("pluralkit__discord__cluster__node_id", var.split("-").last().unwrap()); }
     }

     Arc::new(Config::builder()
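The new `unsafe` wrappers follow from the edition bump elsewhere in this set: under edition 2024, `std::env::set_var` is an `unsafe fn`, since mutating the process environment can race with concurrent reads from other threads. A minimal illustration:

    fn main() {
        // Sound only while no other thread is reading the environment,
        // e.g. during single-threaded startup/config loading.
        unsafe { std::env::set_var("EXAMPLE_NODE_ID", "0") };
        assert_eq!(std::env::var("EXAMPLE_NODE_ID").as_deref(), Ok("0"));
    }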
@@ -52,7 +52,7 @@ pub async fn remove_deletion_queue(pool: &PgPool, attachment_id: u64) -> anyhow::Result<()> {

 pub async fn pop_queue(
     pool: &PgPool,
-) -> anyhow::Result<Option<(Transaction<Postgres>, ImageQueueEntry)>> {
+) -> anyhow::Result<Option<(Transaction<'_, Postgres>, ImageQueueEntry)>> {
     let mut tx = pool.begin().await?;
     let res: Option<ImageQueueEntry> = sqlx::query_as("delete from image_queue where itemid = (select itemid from image_queue order by itemid for update skip locked limit 1) returning *")
         .fetch_optional(&mut *tx).await?;
@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 use sqlx::{
-    types::chrono::{DateTime, Utc},
     FromRow,
+    types::chrono::{DateTime, Utc},
 };
 use uuid::Uuid;
@@ -1,9 +1,8 @@
-#![feature(let_chains)]
 use std::net::SocketAddr;

 use metrics_exporter_prometheus::PrometheusBuilder;
 use sentry::IntoDsn;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
+use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitExt};

 use sentry_tracing::event_from_event;
@@ -1,7 +1,7 @@
 [package]
 name = "pk_macros"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [lib]
 proc-macro = true

@@ -10,4 +10,5 @@ proc-macro = true
 quote = "1.0"
 proc-macro2 = "1.0"
 syn = "2.0"
+prettyplease = "0.2.36"
crates/macros/src/api.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
+use quote::quote;
+use syn::{FnArg, ItemFn, Pat, parse_macro_input};
+
+fn _pretty_print(ts: &proc_macro2::TokenStream) -> String {
+    let file = syn::parse_file(&ts.to_string()).unwrap();
+    prettyplease::unparse(&file)
+}
+
+pub fn macro_impl(
+    _args: proc_macro::TokenStream,
+    input: proc_macro::TokenStream,
+) -> proc_macro::TokenStream {
+    let input = parse_macro_input!(input as ItemFn);
+
+    let fn_name = &input.sig.ident;
+    let fn_params = &input.sig.inputs;
+    let fn_body = &input.block;
+    let syn::ReturnType::Type(_, fn_return_type) = &input.sig.output else {
+        panic!("handler return type must not be nothing");
+    };
+    let pms: Vec<proc_macro2::TokenStream> = fn_params
+        .iter()
+        .map(|v| {
+            let FnArg::Typed(pat) = v else {
+                panic!("must not have self param in handler");
+            };
+            let mut pat = pat.pat.clone();
+            if let Pat::Ident(ident) = *pat {
+                let mut ident = ident.clone();
+                ident.mutability = None;
+                pat = Box::new(Pat::Ident(ident));
+            }
+            quote! { #pat }
+        })
+        .collect();
+
+    let res = quote! {
+        #[allow(unused_mut)]
+        pub async fn #fn_name(#fn_params) -> axum::response::Response {
+            async fn inner(#fn_params) -> Result<#fn_return_type, crate::error::PKError> {
+                #fn_body
+            }
+
+            match inner(#(#pms),*).await {
+                Ok(res) => res.into_response(),
+                Err(err) => err.into_response(),
+            }
+        }
+    };
+
+    res.into()
+}
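A hedged sketch of a handler as this macro expects it (handler name, extractor, and types are invented, not from the diff): the declared return type is the success type only; the body returns a `Result`, and the attribute rewrites the whole item into a `pub async fn … -> axum::response::Response` that matches `Ok`/`Err` into responses via `crate::error::PKError`:

    #[pk_macros::api_endpoint]
    async fn get_group(Path(gid): Path<String>) -> Json<PKGroup> {
        // hypothetical lookup; `?` bubbles errors into PKError
        let group = lookup_group(&gid).await?;
        Ok(Json(group))
    }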
@@ -1,8 +1,14 @@
 use proc_macro::TokenStream;

+mod api;
 mod entrypoint;
 mod model;

+#[proc_macro_attribute]
+pub fn api_endpoint(args: TokenStream, input: TokenStream) -> TokenStream {
+    api::macro_impl(args, input)
+}
+
 #[proc_macro_attribute]
 pub fn main(args: TokenStream, input: TokenStream) -> TokenStream {
     entrypoint::macro_impl(args, input)
@@ -1,6 +1,6 @@
 use proc_macro2::{Span, TokenStream};
 use quote::quote;
-use syn::{parse_macro_input, DeriveInput, Expr, Ident, Meta, Type};
+use syn::{DeriveInput, Expr, Ident, Meta, Type, parse_macro_input};

 #[derive(Clone, Debug)]
 enum ElemPatchability {
@@ -85,8 +85,14 @@ fn parse_field(field: syn::Field) -> ModelField {
         panic!("must have json name to be publicly patchable");
     }

-    if f.json.is_some() && f.is_privacy {
-        panic!("cannot set custom json name for privacy field");
+    if f.is_privacy && f.json.is_none() {
+        f.json = Some(syn::Expr::Lit(syn::ExprLit {
+            attrs: vec![],
+            lit: syn::Lit::Str(syn::LitStr::new(
+                f.name.clone().to_string().as_str(),
+                proc_macro2::Span::call_site(),
+            )),
+        }))
     }

     f
@@ -122,17 +128,17 @@ pub fn macro_impl(

     let fields: Vec<ModelField> = fields
         .iter()
-        .filter(|f| !matches!(f.patch, ElemPatchability::None))
+        .filter(|f| f.is_privacy || !matches!(f.patch, ElemPatchability::None))
         .cloned()
         .collect();

     let patch_fields = mk_patch_fields(fields.clone());
-    let patch_from_json = mk_patch_from_json(fields.clone());
     let patch_validate = mk_patch_validate(fields.clone());
+    let patch_validate_bulk = mk_patch_validate_bulk(fields.clone());
     let patch_to_json = mk_patch_to_json(fields.clone());
     let patch_to_sql = mk_patch_to_sql(fields.clone());

-    return quote! {
+    let code = quote! {
         #[derive(sqlx::FromRow, Debug, Clone)]
         pub struct #tname {
             #tfields
@@ -146,31 +152,42 @@ pub fn macro_impl(
             #to_json
         }

-        #[derive(Debug, Clone)]
+        #[derive(Debug, Clone, Default)]
         pub struct #patchable_name {
             #patch_fields
+
+            errors: Vec<crate::ValidationError>,
         }

         impl #patchable_name {
-            pub fn from_json(input: String) -> Self {
-                #patch_from_json
-            }
-
-            pub fn validate(self) -> bool {
+            pub fn validate(&mut self) {
                 #patch_validate
             }

+            pub fn errors(&self) -> Vec<crate::ValidationError> {
+                self.errors.clone()
+            }
+
+            pub fn validate_bulk(&mut self) {
+                #patch_validate_bulk
+            }
+
             pub fn to_sql(self) -> sea_query::UpdateStatement {
-                // sea_query::Query::update()
-                #patch_to_sql
+                use sea_query::types::*;
+                let mut patch = &mut sea_query::Query::update();
+                #patch_to_sql
+                patch.clone()
             }

             pub fn to_json(self) -> serde_json::Value {
                 #patch_to_json
             }
         }
-    }
-    .into();
+    };
+
+    // panic!("{:#?}", code.to_string());
+
+    return code.into();
 }

 fn mk_tfields(fields: Vec<ModelField>) -> TokenStream {
@@ -225,7 +242,7 @@ fn mk_tto_json(fields: Vec<ModelField>) -> TokenStream {
         .filter_map(|f| {
             if f.is_privacy {
                 let tname = f.name.clone();
-                let tnamestr = f.name.clone().to_string();
+                let tnamestr = f.json.clone();
                 Some(quote! {
                     #tnamestr: self.#tname,
                 })
@@ -280,13 +297,48 @@ fn mk_patch_fields(fields: Vec<ModelField>) -> TokenStream {
         .collect()
 }
 fn mk_patch_validate(_fields: Vec<ModelField>) -> TokenStream {
-    quote! { true }
-}
-fn mk_patch_from_json(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
 }
-fn mk_patch_to_sql(_fields: Vec<ModelField>) -> TokenStream {
-    quote! { unimplemented!(); }
+fn mk_patch_validate_bulk(fields: Vec<ModelField>) -> TokenStream {
+    // iterate over all nullable patchable fields other than privacy
+    // add an error if any field is set to a value other than null
+    fields
+        .iter()
+        .map(|f| {
+            if let syn::Type::Path(path) = &f.ty && let Some(inner) = path.path.segments.last() && inner.ident != "Option" {
+                return quote! {};
+            }
+            let name = f.name.clone();
+            if matches!(f.patch, ElemPatchability::Public) {
+                let json = f.json.clone().unwrap();
+                quote! {
+                    if let Some(val) = self.#name.clone() && val.is_some() {
+                        self.errors.push(ValidationError::simple(#json, "Only null values are supported in bulk endpoint"));
+                    }
+                }
+            } else {
+                quote! {}
+            }
+        })
+        .collect()
+}
+fn mk_patch_to_sql(fields: Vec<ModelField>) -> TokenStream {
+    fields
+        .iter()
+        .filter_map(|f| {
+            if !matches!(f.patch, ElemPatchability::None) || f.is_privacy {
+                let name = f.name.clone();
+                let column = f.name.to_string();
+                Some(quote! {
+                    if let Some(value) = self.#name {
+                        patch = patch.value(#column, value);
+                    }
+                })
+            } else {
+                None
+            }
+        })
+        .collect()
 }
 fn mk_patch_to_json(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
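To make the two new generators concrete, here is roughly what they emit for a single `#[patchable] display_name: Option<String>` field (a hand-expanded sketch, not captured macro output):

    // from mk_patch_to_sql: `patch` is the sea_query update builder in to_sql()
    if let Some(value) = self.display_name {
        patch = patch.value("display_name", value);
    }

    // from mk_patch_validate_bulk: bulk endpoints may only clear fields
    if let Some(val) = self.display_name.clone() && val.is_some() {
        self.errors.push(ValidationError::simple(
            "display_name",
            "Only null values are supported in bulk endpoint",
        ));
    }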
@@ -1,7 +1,7 @@
 [package]
 name = "migrate"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 libpk = { path = "../libpk" }
@@ -1,5 +1,3 @@
-#![feature(let_chains)]
-
 use tracing::info;

 include!(concat!(env!("OUT_DIR"), "/data.rs"));
@@ -1,12 +1,12 @@
 [package]
 name = "pluralkit_models"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 chrono = { workspace = true, features = ["serde"] }
 pk_macros = { path = "../macros" }
-sea-query = "0.32.1"
+sea-query = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true, features = ["preserve_order"] }
 # in theory we want to default-features = false for sqlx
crates/models/src/group.rs (new file, 132 lines)
@@ -0,0 +1,132 @@
+use pk_macros::pk_model;
+
+use chrono::{DateTime, Utc};
+use serde::Deserialize;
+use serde_json::Value;
+use uuid::Uuid;
+
+use crate::{PrivacyLevel, SystemId, ValidationError};
+
+// todo: fix
+pub type GroupId = i32;
+
+#[pk_model]
+struct Group {
+    id: GroupId,
+    #[json = "hid"]
+    #[private_patchable]
+    hid: String,
+    #[json = "uuid"]
+    uuid: Uuid,
+    // TODO fix
+    #[json = "system"]
+    system: SystemId,
+
+    #[json = "name"]
+    #[privacy = name_privacy]
+    #[patchable]
+    name: String,
+    #[json = "display_name"]
+    #[patchable]
+    display_name: Option<String>,
+    #[json = "color"]
+    #[patchable]
+    color: Option<String>,
+    #[json = "icon"]
+    #[patchable]
+    icon: Option<String>,
+    #[json = "banner_image"]
+    #[patchable]
+    banner_image: Option<String>,
+    #[json = "description"]
+    #[privacy = description_privacy]
+    #[patchable]
+    description: Option<String>,
+    #[json = "created"]
+    created: DateTime<Utc>,
+
+    #[privacy]
+    name_privacy: PrivacyLevel,
+    #[privacy]
+    description_privacy: PrivacyLevel,
+    #[privacy]
+    banner_privacy: PrivacyLevel,
+    #[privacy]
+    icon_privacy: PrivacyLevel,
+    #[privacy]
+    list_privacy: PrivacyLevel,
+    #[privacy]
+    metadata_privacy: PrivacyLevel,
+    #[privacy]
+    visibility: PrivacyLevel,
+}
+
+impl<'de> Deserialize<'de> for PKGroupPatch {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let mut patch: PKGroupPatch = Default::default();
+        let value: Value = Value::deserialize(deserializer)?;
+
+        if let Some(v) = value.get("name") {
+            if let Some(name) = v.as_str() {
+                patch.name = Some(name.to_string());
+            } else if v.is_null() {
+                patch.errors.push(ValidationError::simple(
+                    "name",
+                    "Group name cannot be set to null.",
+                ));
+            }
+        }
+
+        macro_rules! parse_string_simple {
+            ($k:expr) => {
+                match value.get($k) {
+                    None => None,
+                    Some(Value::Null) => Some(None),
+                    Some(Value::String(s)) => Some(Some(s.clone())),
+                    _ => {
+                        patch.errors.push(ValidationError::new($k));
+                        None
+                    }
+                }
+            };
+        }
+
+        patch.display_name = parse_string_simple!("display_name");
+        patch.description = parse_string_simple!("description");
+        patch.icon = parse_string_simple!("icon");
+        patch.banner_image = parse_string_simple!("banner");
+        patch.color = parse_string_simple!("color").map(|v| v.map(|t| t.to_lowercase()));
+
+        if let Some(privacy) = value.get("privacy").and_then(Value::as_object) {
+            macro_rules! parse_privacy {
+                ($v:expr) => {
+                    match privacy.get($v) {
+                        None => None,
+                        Some(Value::Null) => Some(PrivacyLevel::Private),
+                        Some(Value::String(s)) if s == "" || s == "private" => {
+                            Some(PrivacyLevel::Private)
+                        }
+                        Some(Value::String(s)) if s == "public" => Some(PrivacyLevel::Public),
+                        _ => {
+                            patch.errors.push(ValidationError::new($v));
+                            None
+                        }
+                    }
+                };
+            }
+
+            patch.name_privacy = parse_privacy!("name_privacy");
+            patch.description_privacy = parse_privacy!("description_privacy");
+            patch.banner_privacy = parse_privacy!("banner_privacy");
+            patch.icon_privacy = parse_privacy!("icon_privacy");
+            patch.list_privacy = parse_privacy!("list_privacy");
+            patch.metadata_privacy = parse_privacy!("metadata_privacy");
+            patch.visibility = parse_privacy!("visibility");
+        }
+
+        Ok(patch)
+    }
+}
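A worked illustration of the `Option<Option<T>>` convention in `parse_string_simple!` above (input values invented; the field access assumes crate-internal visibility of the patch fields): an absent key means "leave unchanged" (`None`), JSON `null` means "clear" (`Some(None)`), and a string means "set" (`Some(Some(..))`):

    let body = r#"{
        "display_name": null,
        "description": "our new description",
        "privacy": { "visibility": "public" }
    }"#;
    let patch: PKGroupPatch = serde_json::from_str(body)?;
    assert!(matches!(patch.display_name, Some(None)));  // clear the field
    assert_eq!(patch.description, Some(Some("our new description".to_string())));
    assert!(patch.icon.is_none());                      // left unchanged
    assert!(matches!(patch.visibility, Some(PrivacyLevel::Public)));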
@@ -9,6 +9,8 @@ macro_rules! model {

 model!(system);
 model!(system_config);
+model!(member);
+model!(group);

 #[derive(serde::Serialize, Debug, Clone)]
 #[serde(rename_all = "snake_case")]
@@ -18,7 +20,7 @@ pub enum PrivacyLevel {
 }

 // this sucks, put it somewhere else
-use sqlx::{postgres::PgTypeInfo, Database, Decode, Postgres, Type};
+use sqlx::{Database, Decode, Postgres, Type, postgres::PgTypeInfo};
 use std::error::Error;
 _util::fake_enum_impls!(PrivacyLevel);
@@ -31,3 +33,30 @@ impl From<i32> for PrivacyLevel {
     }
 }
 }
+
+impl From<PrivacyLevel> for sea_query::Value {
+    fn from(level: PrivacyLevel) -> sea_query::Value {
+        match level {
+            PrivacyLevel::Public => sea_query::Value::Int(Some(1)),
+            PrivacyLevel::Private => sea_query::Value::Int(Some(2)),
+        }
+    }
+}
+
+#[derive(serde::Serialize, Debug, Clone)]
+pub enum ValidationError {
+    Simple { key: String, value: String },
+}
+
+impl ValidationError {
+    fn new(key: &str) -> Self {
+        Self::simple(key, "is invalid")
+    }
+
+    fn simple(key: &str, value: &str) -> Self {
+        Self::Simple {
+            key: key.to_string(),
+            value: value.to_string(),
+        }
+    }
+}
crates/models/src/member.rs (new file, 208 lines)
@@ -0,0 +1,208 @@
+use pk_macros::pk_model;
+
+use chrono::NaiveDateTime;
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+use uuid::Uuid;
+
+use crate::{PrivacyLevel, SystemId, ValidationError};
+
+// todo: fix
+pub type MemberId = i32;
+
+#[derive(Clone, Debug, Serialize, Deserialize, sqlx::Type)]
+#[sqlx(type_name = "proxy_tag")]
+pub struct ProxyTag {
+    pub prefix: Option<String>,
+    pub suffix: Option<String>,
+}
+
+#[pk_model]
+struct Member {
+    id: MemberId,
+    #[json = "hid"]
+    #[private_patchable]
+    hid: String,
+    #[json = "uuid"]
+    uuid: Uuid,
+    // TODO fix
+    #[json = "system"]
+    system: SystemId,
+
+    #[json = "color"]
+    #[patchable]
+    color: Option<String>,
+    #[json = "webhook_avatar_url"]
+    #[patchable]
+    webhook_avatar_url: Option<String>,
+    #[json = "avatar_url"]
+    #[patchable]
+    avatar_url: Option<String>,
+    #[json = "banner_image"]
+    #[patchable]
+    banner_image: Option<String>,
+    #[json = "name"]
+    #[privacy = name_privacy]
+    #[patchable]
+    name: String,
+    #[json = "display_name"]
+    #[patchable]
+    display_name: Option<String>,
+    #[json = "birthday"]
+    #[patchable]
+    birthday: Option<String>,
+    #[json = "pronouns"]
+    #[privacy = pronoun_privacy]
+    #[patchable]
+    pronouns: Option<String>,
+    #[json = "description"]
+    #[privacy = description_privacy]
+    #[patchable]
+    description: Option<String>,
+    #[json = "proxy_tags"]
+    // #[patchable]
+    proxy_tags: Vec<ProxyTag>,
+    #[json = "keep_proxy"]
+    #[patchable]
+    keep_proxy: bool,
+    #[json = "tts"]
+    #[patchable]
+    tts: bool,
+    #[json = "created"]
+    created: NaiveDateTime,
+    #[json = "message_count"]
+    #[private_patchable]
+    message_count: i32,
+    #[json = "last_message_timestamp"]
+    #[private_patchable]
+    last_message_timestamp: Option<NaiveDateTime>,
+    #[json = "allow_autoproxy"]
+    #[patchable]
+    allow_autoproxy: bool,
+
+    #[privacy]
+    #[json = "visibility"]
+    member_visibility: PrivacyLevel,
+    #[privacy]
+    description_privacy: PrivacyLevel,
+    #[privacy]
+    banner_privacy: PrivacyLevel,
+    #[privacy]
+    avatar_privacy: PrivacyLevel,
+    #[privacy]
+    name_privacy: PrivacyLevel,
+    #[privacy]
+    birthday_privacy: PrivacyLevel,
+    #[privacy]
+    pronoun_privacy: PrivacyLevel,
+    #[privacy]
+    metadata_privacy: PrivacyLevel,
+    #[privacy]
+    proxy_privacy: PrivacyLevel,
+}
+
+impl<'de> Deserialize<'de> for PKMemberPatch {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let mut patch: PKMemberPatch = Default::default();
+        let value: Value = Value::deserialize(deserializer)?;
+
+        if let Some(v) = value.get("name") {
+            if let Some(name) = v.as_str() {
+                patch.name = Some(name.to_string());
+            } else if v.is_null() {
+                patch.errors.push(ValidationError::simple(
+                    "name",
+                    "Member name cannot be set to null.",
+                ));
+            }
+        }
+
+        macro_rules! parse_string_simple {
+            ($k:expr) => {
+                match value.get($k) {
+                    None => None,
+                    Some(Value::Null) => Some(None),
+                    Some(Value::String(s)) => Some(Some(s.clone())),
+                    _ => {
+                        patch.errors.push(ValidationError::new($k));
+                        None
+                    }
+                }
+            };
+        }
+
+        patch.color = parse_string_simple!("color").map(|v| v.map(|t| t.to_lowercase()));
+        patch.display_name = parse_string_simple!("display_name");
+        patch.avatar_url = parse_string_simple!("avatar_url");
+        patch.banner_image = parse_string_simple!("banner");
+        patch.birthday = parse_string_simple!("birthday"); // fix
+        patch.pronouns = parse_string_simple!("pronouns");
+        patch.description = parse_string_simple!("description");
+
+        if let Some(keep_proxy) = value.get("keep_proxy").and_then(Value::as_bool) {
+            patch.keep_proxy = Some(keep_proxy);
+        }
+        if let Some(tts) = value.get("tts").and_then(Value::as_bool) {
+            patch.tts = Some(tts);
+        }
+
+        // todo: legacy import handling
+
+        // todo: fix proxy_tag type in sea_query
+
+        // if let Some(proxy_tags) = value.get("proxy_tags").and_then(Value::as_array) {
+        //     patch.proxy_tags = Some(
+        //         proxy_tags
+        //             .iter()
+        //             .filter_map(|tag| {
+        //                 tag.as_object().map(|tag_obj| {
+        //                     let prefix = tag_obj
+        //                         .get("prefix")
+        //                         .and_then(Value::as_str)
+        //                         .map(|s| s.to_string());
+        //                     let suffix = tag_obj
+        //                         .get("suffix")
+        //                         .and_then(Value::as_str)
+        //                         .map(|s| s.to_string());
+        //                     ProxyTag { prefix, suffix }
+        //                 })
+        //             })
+        //             .collect(),
+        //     )
+        // }
+
+        if let Some(privacy) = value.get("privacy").and_then(Value::as_object) {
+            macro_rules! parse_privacy {
+                ($v:expr) => {
+                    match privacy.get($v) {
+                        None => None,
+                        Some(Value::Null) => Some(PrivacyLevel::Private),
+                        Some(Value::String(s)) if s == "" || s == "private" => {
+                            Some(PrivacyLevel::Private)
+                        }
+                        Some(Value::String(s)) if s == "public" => Some(PrivacyLevel::Public),
+                        _ => {
+                            patch.errors.push(ValidationError::new($v));
+                            None
+                        }
+                    }
+                };
+            }
+
+            patch.member_visibility = parse_privacy!("visibility");
+            patch.name_privacy = parse_privacy!("name_privacy");
+            patch.description_privacy = parse_privacy!("description_privacy");
+            patch.banner_privacy = parse_privacy!("banner_privacy");
+            patch.avatar_privacy = parse_privacy!("avatar_privacy");
+            patch.birthday_privacy = parse_privacy!("birthday_privacy");
+            patch.pronoun_privacy = parse_privacy!("pronoun_privacy");
+            patch.proxy_privacy = parse_privacy!("proxy_privacy");
+            patch.metadata_privacy = parse_privacy!("metadata_privacy");
+        }
+
+        Ok(patch)
+    }
+}
@@ -1,7 +1,7 @@
 [package]
 name = "scheduled_tasks"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 libpk = { path = "../libpk" }
@@ -9,6 +9,7 @@ libpk = { path = "../libpk" }
 anyhow = { workspace = true }
 chrono = { workspace = true }
 fred = { workspace = true }
+lazy_static = { workspace = true }
 metrics = { workspace = true }
 reqwest = { workspace = true }
 serde = { workspace = true }
@@ -99,13 +99,25 @@ async fn main() -> anyhow::Result<()> {
         update_db_message_meta
     );
     doforever!("* * * * *", "discord stats updater", update_discord_stats);
-    // on :00 and :30
+    // on hh:00 and hh:30
     doforever!(
         "0,30 * * * *",
         "queue deleted image cleanup job",
         queue_deleted_image_cleanup
     );
+    // non-standard cron: at hh:mm:00, hh:mm:30
    doforever!("0,30 * * * * *", "stats api updater", update_stats_api);
+    // every hour (could probably even be less frequent, basebackups are taken rarely)
+    doforever!(
+        "* * * * *",
+        "data basebackup info updater",
+        update_data_basebackup_prometheus
+    );
+    doforever!(
+        "* * * * *",
+        "messages basebackup info updater",
+        update_messages_basebackup_prometheus
+    );

     set.join_next()
         .await
@@ -1,4 +1,4 @@
-use std::time::Duration;
+use std::{collections::HashMap, time::Duration};

 use anyhow::anyhow;
 use fred::prelude::KeysInterface;
@@ -10,10 +10,22 @@ use metrics::gauge;
 use num_format::{Locale, ToFormattedString};
 use reqwest::ClientBuilder;
 use sqlx::Executor;
+use tokio::{process::Command, sync::Mutex};

 use crate::AppCtx;

 pub async fn update_prometheus(ctx: AppCtx) -> anyhow::Result<()> {
+    let data_ts = *BASEBACKUP_TS.lock().await.get("data").unwrap_or(&0) as f64;
+    let messages_ts = *BASEBACKUP_TS.lock().await.get("messages").unwrap_or(&0) as f64;
+
+    let now_ts = chrono::Utc::now().timestamp() as f64;
+
+    gauge!("pluralkit_latest_backup_ts", "repo" => "data").set(data_ts);
+    gauge!("pluralkit_latest_backup_ts", "repo" => "messages").set(messages_ts);
+
+    gauge!("pluralkit_latest_backup_age", "repo" => "data").set(now_ts - data_ts);
+    gauge!("pluralkit_latest_backup_age", "repo" => "messages").set(now_ts - messages_ts);
+
     #[derive(sqlx::FromRow)]
     struct Count {
         count: i64,
@@ -41,6 +53,83 @@ pub async fn update_prometheus(ctx: AppCtx) -> anyhow::Result<()> {
     Ok(())
 }

+lazy_static::lazy_static! {
+    static ref BASEBACKUP_TS: Mutex<HashMap<String, i64>> = Mutex::new(HashMap::new());
+}
+
+pub async fn update_data_basebackup_prometheus(_: AppCtx) -> anyhow::Result<()> {
+    update_basebackup_ts("data".to_string()).await
+}
+
+pub async fn update_messages_basebackup_prometheus(_: AppCtx) -> anyhow::Result<()> {
+    update_basebackup_ts("messages".to_string()).await
+}
+
+async fn update_basebackup_ts(repo: String) -> anyhow::Result<()> {
+    let mut env = HashMap::new();
+
+    for (key, value) in std::env::vars() {
+        if key.starts_with("AWS") {
+            env.insert(key, value);
+        }
+    }
+
+    env.insert(
+        "WALG_S3_PREFIX".to_string(),
+        format!("s3://pluralkit-backups/{repo}/"),
+    );
+
+    let output = Command::new("wal-g")
+        .arg("backup-list")
+        .arg("--json")
+        .envs(env)
+        .output()
+        .await?;
+
+    if !output.status.success() {
+        // todo: we should return error here
+        tracing::error!(
+            status = output.status.code(),
+            "failed to execute wal-g command"
+        );
+        return Ok(());
+    }
+
+    #[derive(serde::Deserialize)]
+    struct WalgBackupInfo {
+        backup_name: String,
+        time: String,
+        ts_parsed: Option<i64>,
+    }
+
+    let mut info =
+        serde_json::from_str::<Vec<WalgBackupInfo>>(&String::from_utf8_lossy(&output.stdout))?
+            .into_iter()
+            .filter(|v| v.backup_name.contains("base"))
+            .filter_map(|mut v| {
+                chrono::DateTime::parse_from_rfc3339(&v.time)
+                    .ok()
+                    .map(|dt| {
+                        v.ts_parsed = Some(dt.with_timezone(&chrono::Utc).timestamp());
+                        v
+                    })
+            })
+            .collect::<Vec<WalgBackupInfo>>();
+
+    info.sort_by(|a, b| b.ts_parsed.cmp(&a.ts_parsed));
+
+    let Some(info) = info.first() else {
+        anyhow::bail!("could not find any basebackups in repo {repo}");
+    };
+
+    BASEBACKUP_TS
+        .lock()
+        .await
+        .insert(repo, info.ts_parsed.unwrap());
+
+    Ok(())
+}
+
 pub async fn update_db_meta(ctx: AppCtx) -> anyhow::Result<()> {
     ctx.data
         .execute(
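For orientation, an invented sample of the `wal-g backup-list --json` element shape that `WalgBackupInfo` above assumes (real output carries more fields, which serde ignores by default; `ts_parsed` is absent in the JSON and filled in locally after RFC 3339 parsing):

    let sample = r#"[
        { "backup_name": "base_000000010000000000000042", "time": "2024-05-01T03:00:00Z" },
        { "backup_name": "base_000000010000000000000051", "time": "2024-05-02T03:00:00Z" }
    ]"#;
    // after the filter/parse/sort pipeline the newest entry wins, so
    // BASEBACKUP_TS would record the 2024-05-02 timestamp for this repo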
@@ -154,7 +154,7 @@
 </CardHeader>
 <CardBody>
     <p>If you've lost access to your discord account, you can retrieve your token here.</p>
-    <p>Send a direct message to a staff member (a helper, moderator or developer <a href="https://discord.gg/PczBt78">in the support server</a>), they can recover your system with this token.</p>
+    <p>Ask in the #bot-support channel <a href="https://discord.gg/PczBt78">of the support server</a> for a staff member to DM you, they can recover your system with this token. <b>Do not post the token in the channel.</b></p>
     <Button color="danger" on:click={() => revealToken()}>Reveal token</Button>
     {#if showToken}
     <Row>
flake.lock (generated, 10 lines changed)
@@ -297,16 +297,16 @@
     },
     "systems": {
       "locked": {
-        "lastModified": 1680978846,
-        "narHash": "sha256-Gtqg8b/v49BFDpDetjclCYXm8mAnTrUzR0JnE2nv5aw=",
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
         "owner": "nix-systems",
-        "repo": "x86_64-linux",
-        "rev": "2ecfcac5e15790ba6ce360ceccddb15ad16d08a8",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
         "type": "github"
       },
       "original": {
         "owner": "nix-systems",
-        "repo": "x86_64-linux",
+        "repo": "default",
         "type": "github"
       }
     },
@@ -4,7 +4,7 @@
   inputs = {
     nixpkgs.url = "nixpkgs/nixpkgs-unstable";
     parts.url = "github:hercules-ci/flake-parts";
-    systems.url = "github:nix-systems/x86_64-linux";
+    systems.url = "github:nix-systems/default";
     # process compose
     process-compose.url = "github:Platonic-Systems/process-compose-flake";
     services.url = "github:juspay/services-flake";
@@ -54,6 +54,7 @@
             gcc
             omnisharp-roslyn
             bashInteractive
+            rust-analyzer
           ];
           runScript = cmd;
         };