Compare commits

...

20 commits

Author SHA1 Message Date
alyssa
c174a5bd80 comply with payment provider requirements
2026-01-18 06:08:07 -05:00
alyssa
70c476ed3b fix 2026-01-18 06:08:07 -05:00
alyssa
fd8f91527b chore: build premium docker image 2026-01-18 06:08:07 -05:00
alyssa
19348ec7b9 feat(premium): info page 2026-01-18 06:08:07 -05:00
alyssa
3b3e779d44 try shortening http idle timeout for discord client 2026-01-18 06:08:07 -05:00
alyssa
226947e6aa feat(premium): initial subscription implementation through paddle 2026-01-18 06:08:07 -05:00
alyssa
81cde5e688 fix(scheduled_tasks): get wal-g backups bucket from env 2026-01-18 06:08:07 -05:00
alyssa
f1471088d2 feat: premium service boilerplate 2026-01-18 06:08:07 -05:00
alyssa
c4f820e114 oops 2026-01-18 06:08:07 -05:00
alyssa
698f01ab9c most of a dash views api impl 2026-01-18 06:08:07 -05:00
alyssa
5e462a0ca2 feat: add basic premium scaffolding 2026-01-18 06:08:07 -05:00
alyssa
578c09c216 chore: clean up some rust code 2026-01-18 06:08:07 -05:00
alyssa
b19c900cc3 port docs to sveltekit (very broken) 2026-01-18 06:08:06 -05:00
alyssa
17ee73f264 add /api/v2/bulk endpoint
also, initial support for patch models in rust!
2026-01-18 06:07:06 -05:00
alyssa
4a947c01fc fix: cast hid parameter to char(6) in GetMemberByHid query
avoids casting the hid column, which does a full table scan, which is slow
2026-01-17 17:08:55 -05:00
Iris System
4973c0b992 docs: add patreon/bmac link to premium announce 2026-01-16 13:45:42 +13:00
Iris System
f61731a915 docs: add premium announcement 2026-01-16 11:13:00 +13:00
Petal Ladenson
3e1a310884
fix(bot): make reproxy inherit SUPPRESS_NOTIFICATIONS (#776)
2026-01-14 15:41:27 +13:00
Petal Ladenson
952bb02285 fix(docs): Correctly display bellhop emoji in user guide 2026-01-12 11:57:50 -07:00
Petal Ladenson
9dfbf64dac fix(docs): Correctly reflect what permissions PluralKit needs to work 2026-01-12 11:57:31 -07:00
100 changed files with 6497 additions and 8539 deletions

Cargo.lock (generated, 679 lines changed)

File diff suppressed because it is too large

View file

@ -6,7 +6,6 @@ resolver = "2"
[workspace.dependencies]
anyhow = "1"
axum-macros = "0.4.1"
bytes = "1.6.0"
chrono = "0.4"
fred = { version = "9.3.0", default-features = false, features = ["tracing", "i-keys", "i-hashes", "i-scripts", "sha-1"] }
@ -14,6 +13,7 @@ futures = "0.3.30"
lazy_static = "1.4.0"
metrics = "0.23.0"
reqwest = { version = "0.12.7" , default-features = false, features = ["rustls-tls", "trust-dns"]}
sea-query = { version = "1.0.0-rc.10", features = ["with-chrono"] }
sentry = { version = "0.36.0", default-features = false, features = ["backtrace", "contexts", "panic", "debug-images", "reqwest", "rustls"] } # replace native-tls with rustls
serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.117"
@ -24,6 +24,9 @@ tracing-subscriber = { version = "0.3.20", features = ["env-filter", "json"] }
uuid = { version = "1.7.0", features = ["serde"] }
axum = { git = "https://github.com/pluralkit/axum", branch = "v0.8.4-pluralkit" }
axum-macros = "0.4.1"
axum-extra = { version = "0.10", features = ["cookie"] }
tower-http = { version = "0.5.2", features = ["catch-panic", "fs"] }
twilight-gateway = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95" }
twilight-cache-inmemory = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95", features = ["permission-calculator"] }
@ -36,3 +39,6 @@ twilight-http = { git = "https://github.com/pluralkit/twilight", branch = "plura
# twilight-util = { path = "../twilight/twilight-util", features = ["permission-calculator"] }
# twilight-model = { path = "../twilight/twilight-model" }
# twilight-http = { path = "../twilight/twilight-http", default-features = false, features = ["rustls-aws_lc_rs", "rustls-native-roots"] }
[patch.crates-io]
axum = { git = "https://github.com/pluralkit/axum", branch = "v0.8.4-pluralkit" }

View file

@ -35,7 +35,10 @@ public class BaseRestClient: IAsyncDisposable
if (!token.StartsWith("Bot "))
token = "Bot " + token;
Client = new HttpClient();
Client = new HttpClient(new SocketsHttpHandler
{
PooledConnectionIdleTimeout = TimeSpan.FromSeconds(3),
});
Client.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", userAgent);
Client.DefaultRequestHeaders.TryAddWithoutValidation("Authorization", token);

View file

@ -36,6 +36,10 @@ public class BotConfig
public bool IsBetaBot { get; set; } = false!;
public string BetaBotAPIUrl { get; set; }
public String? PremiumSubscriberEmoji { get; set; }
public String? PremiumLifetimeEmoji { get; set; }
public String? PremiumDashboardUrl { get; set; }
public record ClusterSettings
{
// this is zero-indexed

View file

@ -92,6 +92,7 @@ public partial class CommandTree
if (ctx.Match("sus")) return ctx.Execute<Fun>(null, m => m.Sus(ctx));
if (ctx.Match("error")) return ctx.Execute<Fun>(null, m => m.Error(ctx));
if (ctx.Match("stats", "status")) return ctx.Execute<Misc>(null, m => m.Stats(ctx));
if (ctx.Match("premium")) return ctx.Execute<Misc>(null, m => m.Premium(ctx));
if (ctx.Match("permcheck"))
return ctx.Execute<Checks>(PermCheck, m => m.PermCheckGuild(ctx));
if (ctx.Match("proxycheck"))

View file

@ -28,6 +28,8 @@ public class Context
private Command? _currentCommand;
private BotConfig _botConfig;
public Context(ILifetimeScope provider, int shardId, Guild? guild, Channel channel, MessageCreateEvent message,
int commandParseOffset, PKSystem senderSystem, SystemConfig config,
GuildConfig? guildConfig, string[] prefixes)
@ -46,6 +48,7 @@ public class Context
_metrics = provider.Resolve<IMetrics>();
_provider = provider;
_commandMessageService = provider.Resolve<CommandMessageService>();
_botConfig = provider.Resolve<BotConfig>();
CommandPrefix = message.Content?.Substring(0, commandParseOffset);
DefaultPrefix = prefixes[0];
Parameters = new Parameters(message.Content?.Substring(commandParseOffset));
@ -74,6 +77,23 @@ public class Context
public readonly SystemConfig Config;
public DateTimeZone Zone => Config?.Zone ?? DateTimeZone.Utc;
public bool Premium
{
get
{
if (Config?.PremiumLifetime ?? false) return true;
// generate _this_ current instant _before_ the check, otherwise it will always be true...
var premiumUntil = Config?.PremiumUntil ?? SystemClock.Instance.GetCurrentInstant();
return SystemClock.Instance.GetCurrentInstant() < premiumUntil;
}
}
public string PremiumEmoji => (Config?.PremiumLifetime ?? false)
? ($"<:lifetime_premium:{_botConfig.PremiumLifetimeEmoji}>" ?? "\u2729")
: Premium
? ($"<:premium_subscriber:{_botConfig.PremiumSubscriberEmoji}>" ?? "\u2729")
: "";
public readonly string CommandPrefix;
public readonly string DefaultPrefix;
public readonly Parameters Parameters;

View file

@ -31,6 +31,32 @@ public class Misc
_shards = shards;
}
public async Task Premium(Context ctx)
{
ctx.CheckSystem();
String message;
if (ctx.Config?.PremiumLifetime ?? false)
{
message = $"Your system has lifetime PluralKit Premium. {ctx.PremiumEmoji} Thanks for the support!";
}
else if (ctx.Premium)
{
message = $"Your system has PluralKit Premium active until <t:{ctx.Config.PremiumUntil?.ToUnixTimeSeconds()}>. {ctx.PremiumEmoji} Thanks for the support!";
}
else
{
message = "PluralKit Premium is not currently active for your system.";
if (ctx.Config?.PremiumUntil != null)
{
message += $" The subscription expired at <t:{ctx.Config.PremiumUntil?.ToUnixTimeSeconds()}> (<t:{ctx.Config.PremiumUntil?.ToUnixTimeSeconds()}:R>)";
}
}
await ctx.Reply(message + $"\n\nManage your subscription at <{_botConfig.PremiumDashboardUrl}>");
}
public async Task Invite(Context ctx)
{
var permissions =

View file

@ -324,6 +324,12 @@ public class ProxyService
// Mangle embeds (for reply embed color changing)
var mangledEmbeds = originalMsg.Embeds!.Select(embed => MangleReproxyEmbed(embed, member)).Where(embed => embed != null).ToArray();
Message.MessageFlags flags = 0;
if (originalMsg.Flags.HasFlag(Message.MessageFlags.SuppressNotifications))
flags |= Message.MessageFlags.SuppressNotifications;
if (originalMsg.Flags.HasFlag(Message.MessageFlags.VoiceMessage))
flags |= Message.MessageFlags.VoiceMessage;
// Send the reproxied webhook
var proxyMessage = await _webhookExecutor.ExecuteWebhook(new ProxyRequest
{
@ -339,7 +345,7 @@ public class ProxyService
Embeds = mangledEmbeds,
Stickers = originalMsg.StickerItems!,
AllowEveryone = allowEveryone,
Flags = originalMsg.Flags.HasFlag(Message.MessageFlags.VoiceMessage) ? Message.MessageFlags.VoiceMessage : null,
Flags = flags,
Tts = tts,
Poll = originalMsg.Poll,
});

View file

@ -21,14 +21,16 @@ public class EmbedService
private readonly IDatabase _db;
private readonly ModelRepository _repo;
private readonly DiscordApiClient _rest;
private readonly BotConfig _config;
private readonly CoreConfig _coreConfig;
public EmbedService(IDatabase db, ModelRepository repo, IDiscordCache cache, DiscordApiClient rest, CoreConfig coreConfig)
public EmbedService(IDatabase db, ModelRepository repo, IDiscordCache cache, DiscordApiClient rest, BotConfig config, CoreConfig coreConfig)
{
_db = db;
_repo = repo;
_cache = cache;
_rest = rest;
_config = config;
_coreConfig = coreConfig;
}
@ -192,7 +194,7 @@ public class EmbedService
new MessageComponent()
{
Type = ComponentType.Text,
Content = $"-# System ID: `{system.DisplayHid(cctx.Config)}`\n-# Created: {system.Created.FormatZoned(cctx.Zone)}",
Content = $"-# System ID: `{system.DisplayHid(cctx.Config)}`{cctx.PremiumEmoji}\n-# Created: {system.Created.FormatZoned(cctx.Zone)}",
},
],
Accessory = new MessageComponent()

View file

@ -13,7 +13,7 @@ public partial class ModelRepository
public Task<PKMember?> GetMemberByHid(string hid, SystemId? system = null)
{
var query = new Query("members").Where("hid", hid.ToLower());
var query = new Query("members").WhereRaw("hid = (?)::char(6)", hid.ToLower());
if (system != null)
query = query.Where("system", system);
return _db.QueryFirst<PKMember?>(query);
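
For context on the GetMemberByHid change above: hid is stored as char(6), so comparing it against a plain text parameter makes Postgres cast the column side, which defeats the index on hid and forces the full table scan mentioned in the commit message. Casting the bound parameter instead leaves the column untouched. A minimal sketch of the two query shapes from the Rust/sqlx side (illustrative only; the C# code above goes through SqlKata, and the exact SQL it emits is an assumption):

```rust
use sqlx::PgPool;

// Hypothetical helper mirroring GetMemberByHid; only the WHERE clause matters here.
async fn member_id_by_hid(pool: &PgPool, hid: &str) -> sqlx::Result<Option<i32>> {
    // Slow shape (old query): the char(6) column is cast to text to match the
    // parameter, so the index on members(hid) can't be used:
    //   select id from members where hid = $1          -- $1 bound as text
    // Fast shape (this commit): cast the parameter to the column's type instead:
    sqlx::query_scalar("select id from members where hid = ($1)::char(6)")
        .bind(hid.to_lowercase())
        .fetch_optional(pool)
        .await
}
```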

View file

@ -28,6 +28,9 @@ public class SystemConfig
public ProxySwitchAction ProxySwitch { get; }
public string NameFormat { get; }
public bool PremiumLifetime { get; }
public Instant? PremiumUntil { get; }
public enum HidPadFormat
{
None = 0,

View file

@ -34,6 +34,7 @@ RUN cargo build --bin avatars --release --target x86_64-unknown-linux-musl
RUN cargo build --bin avatar_cleanup --release --target x86_64-unknown-linux-musl
RUN cargo build --bin scheduled_tasks --release --target x86_64-unknown-linux-musl
RUN cargo build --bin gdpr_worker --release --target x86_64-unknown-linux-musl
RUN cargo build --bin premium --release --target x86_64-unknown-linux-musl
FROM alpine:latest
@ -45,3 +46,4 @@ COPY --from=binary-builder /build/target/x86_64-unknown-linux-musl/release/avata
COPY --from=binary-builder /build/target/x86_64-unknown-linux-musl/release/avatar_cleanup /avatar_cleanup
COPY --from=binary-builder /build/target/x86_64-unknown-linux-musl/release/scheduled_tasks /scheduled_tasks
COPY --from=binary-builder /build/target/x86_64-unknown-linux-musl/release/gdpr_worker /gdpr_worker
COPY --from=binary-builder /build/target/x86_64-unknown-linux-musl/release/premium /premium

View file

@ -49,3 +49,4 @@ RUN apk add gcompat
EOF
)"
build gdpr_worker
build premium

View file

@ -14,10 +14,12 @@ fred = { workspace = true }
lazy_static = { workspace = true }
metrics = { workspace = true }
reqwest = { workspace = true }
sea-query = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sqlx = { workspace = true }
tokio = { workspace = true }
tower-http = { workspace = true }
tracing = { workspace = true }
twilight-http = { workspace = true }
@ -26,5 +28,5 @@ hyper-util = { version = "0.1.5", features = ["client", "client-legacy", "http1"
reverse-proxy-service = { version = "0.2.1", features = ["axum"] }
serde_urlencoded = "0.7.1"
tower = "0.4.13"
tower-http = { version = "0.5.2", features = ["catch-panic"] }
subtle = "2.6.1"
sea-query-sqlx = { version = "0.8.0-rc.8", features = ["sqlx-postgres", "with-chrono"] }

View file

@ -0,0 +1,211 @@
use axum::{
Extension, Json,
extract::{Json as ExtractJson, State},
response::IntoResponse,
};
use pk_macros::api_endpoint;
use sea_query::{Expr, ExprTrait, PostgresQueryBuilder};
use sea_query_sqlx::SqlxBinder;
use serde_json::{Value, json};
use pluralkit_models::{PKGroup, PKGroupPatch, PKMember, PKMemberPatch, PKSystem};
use crate::{
ApiContext,
auth::AuthState,
error::{
GENERIC_AUTH_ERROR, NOT_OWN_GROUP, NOT_OWN_MEMBER, PKError, TARGET_GROUP_NOT_FOUND,
TARGET_MEMBER_NOT_FOUND,
},
};
#[derive(serde::Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BulkActionRequestFilter {
All,
Ids { ids: Vec<String> },
Connection { id: String },
}
#[derive(serde::Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BulkActionRequest {
Member {
filter: BulkActionRequestFilter,
patch: PKMemberPatch,
},
Group {
filter: BulkActionRequestFilter,
patch: PKGroupPatch,
},
}
#[api_endpoint]
pub async fn bulk(
Extension(auth): Extension<AuthState>,
State(ctx): State<ApiContext>,
ExtractJson(req): ExtractJson<BulkActionRequest>,
) -> Json<Value> {
let Some(system_id) = auth.system_id() else {
return Err(GENERIC_AUTH_ERROR);
};
#[derive(sqlx::FromRow)]
struct Ider {
id: i32,
hid: String,
uuid: String,
}
#[derive(sqlx::FromRow)]
struct GroupMemberEntry {
member_id: i32,
group_id: i32,
}
#[allow(dead_code)]
#[derive(sqlx::FromRow)]
struct OnlyIder {
id: i32,
}
println!("BulkActionRequest::{req:#?}");
match req {
BulkActionRequest::Member { filter, mut patch } => {
patch.validate_bulk();
if patch.errors().len() > 0 {
return Err(PKError::from_validation_errors(patch.errors()));
}
let ids: Vec<i32> = match filter {
BulkActionRequestFilter::All => {
let ids: Vec<Ider> = sqlx::query_as("select id from members where system = $1")
.bind(system_id as i64)
.fetch_all(&ctx.db)
.await?;
ids.iter().map(|v| v.id).collect()
}
BulkActionRequestFilter::Ids { ids } => {
let members: Vec<PKMember> = sqlx::query_as(
"select * from members where hid = any($1::array) or uuid::text = any($1::array)",
)
.bind(&ids)
.fetch_all(&ctx.db)
.await?;
// todo: better errors
if members.len() != ids.len() {
return Err(TARGET_MEMBER_NOT_FOUND);
}
if members.iter().any(|m| m.system != system_id) {
return Err(NOT_OWN_MEMBER);
}
members.iter().map(|m| m.id).collect()
}
BulkActionRequestFilter::Connection { id } => {
let Some(group): Option<PKGroup> =
sqlx::query_as("select * from groups where hid = $1 or uuid::text = $1")
.bind(id)
.fetch_optional(&ctx.db)
.await?
else {
return Err(TARGET_GROUP_NOT_FOUND);
};
if group.system != system_id {
return Err(NOT_OWN_GROUP);
}
let entries: Vec<GroupMemberEntry> =
sqlx::query_as("select * from group_members where group_id = $1")
.bind(group.id)
.fetch_all(&ctx.db)
.await?;
entries.iter().map(|v| v.member_id).collect()
}
};
let (q, pms) = patch
.to_sql()
.table("members") // todo: this should be in the model definition
.and_where(Expr::col("id").is_in(ids))
.returning_col("id")
.build_sqlx(PostgresQueryBuilder);
let res: Vec<OnlyIder> = sqlx::query_as_with(&q, pms).fetch_all(&ctx.db).await?;
Ok(Json(json! {{ "updated": res.len() }}))
}
BulkActionRequest::Group { filter, mut patch } => {
patch.validate_bulk();
if patch.errors().len() > 0 {
return Err(PKError::from_validation_errors(patch.errors()));
}
let ids: Vec<i32> = match filter {
BulkActionRequestFilter::All => {
let ids: Vec<Ider> = sqlx::query_as("select id from groups where system = $1")
.bind(system_id as i64)
.fetch_all(&ctx.db)
.await?;
ids.iter().map(|v| v.id).collect()
}
BulkActionRequestFilter::Ids { ids } => {
let groups: Vec<PKGroup> = sqlx::query_as(
"select * from groups where hid = any($1) or uuid::text = any($1)",
)
.bind(&ids)
.fetch_all(&ctx.db)
.await?;
// todo: better errors
if groups.len() != ids.len() {
return Err(TARGET_GROUP_NOT_FOUND);
}
if groups.iter().any(|m| m.system != system_id) {
return Err(NOT_OWN_GROUP);
}
groups.iter().map(|m| m.id).collect()
}
BulkActionRequestFilter::Connection { id } => {
let Some(member): Option<PKMember> =
sqlx::query_as("select * from members where hid = $1 or uuid::text = $1")
.bind(id)
.fetch_optional(&ctx.db)
.await?
else {
return Err(TARGET_MEMBER_NOT_FOUND);
};
if member.system != system_id {
return Err(NOT_OWN_MEMBER);
}
let entries: Vec<GroupMemberEntry> =
sqlx::query_as("select * from group_members where member_id = $1")
.bind(member.id)
.fetch_all(&ctx.db)
.await?;
entries.iter().map(|v| v.group_id).collect()
}
};
let (q, pms) = patch
.to_sql()
.table("groups") // todo: this should be in the model definition
.and_where(Expr::col("id").is_in(ids))
.returning_col("id")
.build_sqlx(PostgresQueryBuilder);
println!("{q:#?} {pms:#?}");
let res: Vec<OnlyIder> = sqlx::query_as_with(&q, pms).fetch_all(&ctx.db).await?;
Ok(Json(json! {{ "updated": res.len() }}))
}
}
}
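
As a usage illustration for the new /v2/bulk route: the request body is the serde shape of BulkActionRequest above, i.e. a `type` tag choosing member or group, a `filter` that is itself tagged (`all`, `ids`, or `connection`), and a `patch` whose patchable fields may only be set to null in bulk. A self-contained sketch of that wire format using simplified stand-in types (the real PKMemberPatch/PKGroupPatch are macro-generated and not reproduced here):

```rust
use serde::Deserialize;
use serde_json::json;

// Simplified stand-ins mirroring the serde attributes in bulk.rs; the real patch
// types are generated by #[pk_model] and carry many more fields.
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
enum Filter {
    All,
    Ids { ids: Vec<String> },
    Connection { id: String },
}

#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
enum BulkRequest {
    Member { filter: Filter, patch: serde_json::Value },
    Group { filter: Filter, patch: serde_json::Value },
}

fn main() {
    // Null out the description of a few members; "exmpl" stands in for a real hid or uuid.
    let body = json!({
        "type": "member",
        "filter": { "type": "ids", "ids": ["exmpl"] },
        "patch": { "description": null }
    });
    let parsed: BulkRequest = serde_json::from_value(body).unwrap();
    println!("{parsed:#?}");
}
```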

View file

@ -1,2 +1,3 @@
pub mod bulk;
pub mod private;
pub mod system;

View file

@ -1,10 +1,15 @@
use crate::ApiContext;
use axum::{extract::State, response::Json};
use crate::{ApiContext, auth::AuthState, fail};
use axum::{
Extension,
extract::{Path, State},
response::Json,
};
use fred::interfaces::*;
use libpk::state::ShardState;
use pk_macros::api_endpoint;
use serde::Deserialize;
use serde_json::{Value, json};
use sqlx::Postgres;
use std::collections::HashMap;
#[allow(dead_code)]
@ -53,7 +58,7 @@ use axum::{
};
use hyper::StatusCode;
use libpk::config;
use pluralkit_models::{PKSystem, PKSystemConfig, PrivacyLevel};
use pluralkit_models::{PKDashView, PKSystem, PKSystemConfig, PrivacyLevel};
use reqwest::ClientBuilder;
#[derive(serde::Deserialize, Debug)]
@ -83,8 +88,8 @@ pub async fn discord_callback(
.expect("error making client");
let reqbody = serde_urlencoded::to_string(&CallbackDiscordData {
client_id: config.discord.as_ref().unwrap().client_id.get().to_string(),
client_secret: config.discord.as_ref().unwrap().client_secret.clone(),
client_id: config.discord().client_id.get().to_string(),
client_secret: config.discord().client_secret.clone(),
grant_type: "authorization_code".to_string(),
redirect_uri: request_data.redirect_domain, // change this!
code: request_data.code,
@ -187,3 +192,128 @@ pub async fn discord_callback(
)
.into_response()
}
#[derive(serde::Deserialize, Debug)]
#[serde(tag = "action", rename_all = "snake_case")]
pub enum DashViewRequest {
Add {
name: String,
value: String,
},
Patch {
id: String,
name: Option<String>,
value: Option<String>,
},
Remove {
id: String,
},
}
#[api_endpoint]
pub async fn dash_views(
Extension(auth): Extension<AuthState>,
State(ctx): State<ApiContext>,
extract::Json(body): extract::Json<DashViewRequest>,
) -> Json<Value> {
let Some(system_id) = auth.system_id() else {
return Err(crate::error::GENERIC_AUTH_ERROR);
};
match body {
DashViewRequest::Add { name, value } => {
match sqlx::query_as::<Postgres, PKDashView>(
"select * from dash_views where name = $1 and system = $2",
)
.bind(&name)
.bind(system_id)
.fetch_optional(&ctx.db)
.await
{
Ok(val) => {
if val.is_some() {
return Err(crate::error::GENERIC_BAD_REQUEST);
};
match sqlx::query_as::<Postgres, PKDashView>(
"insert into dash_views (system, name, value) values ($1, $2, $3) returning *",
)
.bind(system_id)
.bind(name)
.bind(value)
.fetch_one(&ctx.db)
.await
{
Ok(res) => Ok(Json(res.to_json())),
Err(err) => fail!(?err, "failed to insert dash views"),
}
}
Err(err) => fail!(?err, "failed to query dash views"),
}
}
DashViewRequest::Patch { id, name, value } => {
match sqlx::query_as::<Postgres, PKDashView>(
"select * from dash_views where id = $1 and system = $2",
)
.bind(id)
.bind(system_id)
.fetch_optional(&ctx.db)
.await
{
Ok(val) => {
let Some(val) = val else {
return Err(crate::error::GENERIC_BAD_REQUEST);
};
// update
Ok(Json(Value::Null))
}
Err(err) => fail!(?err, "failed to query dash views"),
}
}
DashViewRequest::Remove { id } => {
match sqlx::query_as::<Postgres, PKDashView>(
"select * from dash_views where id = $1 and system = $2",
)
.bind(id)
.bind(system_id)
.fetch_optional(&ctx.db)
.await
{
Ok(val) => {
let Some(val) = val else {
return Err(crate::error::GENERIC_BAD_REQUEST);
};
match sqlx::query::<Postgres>(
"delete from dash_views where id = $1 and system = $2 returning *",
)
.bind(val.id)
.bind(system_id)
.fetch_one(&ctx.db)
.await
{
Ok(_) => Ok(Json(Value::Null)),
Err(err) => fail!(?err, "failed to remove dash views"),
}
}
Err(err) => fail!(?err, "failed to query dash views"),
}
}
}
}
#[api_endpoint]
pub async fn dash_view(State(ctx): State<ApiContext>, Path(id): Path<String>) -> Json<Value> {
match sqlx::query_as::<Postgres, PKDashView>("select * from dash_views where id = $1")
.bind(id)
.fetch_optional(&ctx.db)
.await
{
Ok(val) => {
let Some(val) = val else {
return Err(crate::error::GENERIC_BAD_REQUEST);
};
Ok(Json(val.to_json()))
}
Err(err) => fail!(?err, "failed to query dash views"),
}
}
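
For reference, /private/dash_views takes a single JSON object tagged by `action`, matching DashViewRequest above. A short sketch of the three request shapes (all values are placeholders; the `value` column is opaque text whose format isn't defined in this diff):

```rust
use serde_json::json;

fn main() {
    // Create a named view; the unique (system, name) constraint rejects duplicates.
    let add = json!({ "action": "add", "name": "work", "value": "placeholder" });
    // Update a view by id; name/value are optional. Note the Patch branch above is
    // still a stub ("// update") and currently just returns null.
    let patch = json!({ "action": "patch", "id": "aieu234567", "name": "school" });
    // Delete a view by id.
    let remove = json!({ "action": "remove", "id": "aieu234567" });
    println!("{add}\n{patch}\n{remove}");
}
```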

View file

@ -3,9 +3,9 @@ use pk_macros::api_endpoint;
use serde_json::{Value, json};
use sqlx::Postgres;
use pluralkit_models::{PKSystem, PKSystemConfig, PrivacyLevel};
use pluralkit_models::{PKDashView, PKSystem, PKSystemConfig, PrivacyLevel};
use crate::{ApiContext, auth::AuthState, error::fail};
use crate::{ApiContext, auth::AuthState, fail};
#[api_endpoint]
pub async fn get_system_settings(
@ -36,7 +36,32 @@ pub async fn get_system_settings(
}
Ok(Json(match access_level {
PrivacyLevel::Private => config.to_json(),
PrivacyLevel::Private => {
let mut config_json = config.clone().to_json();
match sqlx::query_as::<Postgres, PKDashView>(
"select * from dash_views where system = $1",
)
.bind(system.id)
.fetch_all(&ctx.db)
.await
{
Ok(val) => {
config_json.as_object_mut().unwrap().insert(
"dash_views".to_string(),
serde_json::to_value(
&val.iter()
.map(|v| v.clone().to_json())
.collect::<Vec<serde_json::Value>>(),
)
.unwrap(),
);
}
Err(err) => fail!(?err, "failed to query dash views"),
};
config_json
}
PrivacyLevel::Public => json!({
"pings_enabled": config.pings_enabled,
"latch_timeout": config.latch_timeout,

View file

@ -2,6 +2,7 @@ use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use pluralkit_models::ValidationError;
use std::fmt;
// todo: model parse errors
@ -11,6 +12,8 @@ pub struct PKError {
pub json_code: i32,
pub message: &'static str,
pub errors: Vec<ValidationError>,
pub inner: Option<anyhow::Error>,
}
@ -30,6 +33,21 @@ impl Clone for PKError {
json_code: self.json_code,
message: self.message,
inner: None,
errors: self.errors.clone(),
}
}
}
// can't `impl From<Vec<ValidationError>>`
// because "upstream crate may add a new impl" >:(
impl PKError {
pub fn from_validation_errors(errs: Vec<ValidationError>) -> Self {
Self {
message: "Error parsing JSON model",
json_code: 40001,
errors: errs,
response_code: StatusCode::BAD_REQUEST,
inner: None,
}
}
}
@ -50,25 +68,37 @@ impl IntoResponse for PKError {
if let Some(inner) = self.inner {
tracing::error!(?inner, "error returned from handler");
}
crate::util::json_err(
self.response_code,
serde_json::to_string(&serde_json::json!({
let json = if self.errors.len() > 0 {
serde_json::json!({
"message": self.message,
"code": self.json_code,
}))
.unwrap(),
)
"errors": self.errors,
})
} else {
serde_json::json!({
"message": self.message,
"code": self.json_code,
})
};
crate::util::json_err(self.response_code, serde_json::to_string(&json).unwrap())
}
}
#[macro_export]
macro_rules! fail {
($($stuff:tt)+) => {{
tracing::error!($($stuff)+);
return Err(crate::error::GENERIC_SERVER_ERROR);
return Err($crate::error::GENERIC_SERVER_ERROR);
}};
}
pub(crate) use fail;
#[macro_export]
macro_rules! fail_html {
($($stuff:tt)+) => {{
tracing::error!($($stuff)+);
return (axum::http::StatusCode::INTERNAL_SERVER_ERROR, "internal server error").into_response();
}};
}
macro_rules! define_error {
( $name:ident, $response_code:expr, $json_code:expr, $message:expr ) => {
@ -78,9 +108,17 @@ macro_rules! define_error {
json_code: $json_code,
message: $message,
inner: None,
errors: vec![],
};
};
}
define_error! { GENERIC_AUTH_ERROR, StatusCode::UNAUTHORIZED, 0, "401: Missing or invalid Authorization header" }
define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
define_error! { GENERIC_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR, 0, "500: Internal Server Error" }
define_error! { NOT_OWN_MEMBER, StatusCode::FORBIDDEN, 30006, "Target member is not part of your system." }
define_error! { NOT_OWN_GROUP, StatusCode::FORBIDDEN, 30007, "Target group is not part of your system." }
define_error! { TARGET_MEMBER_NOT_FOUND, StatusCode::BAD_REQUEST, 40010, "Target member not found." }
define_error! { TARGET_GROUP_NOT_FOUND, StatusCode::BAD_REQUEST, 40011, "Target group not found." }
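
With the new `errors` field, a request rejected by PKError::from_validation_errors returns HTTP 400 and includes the individual validation errors next to the usual message/code pair. A rough sketch of the body shape, assuming each entry serializes via ValidationError's derived Serialize (externally tagged `Simple { key, value }`):

```rust
use serde_json::json;

fn main() {
    // Approximate response body for a failed bulk validation (HTTP 400, code 40001).
    let body = json!({
        "message": "Error parsing JSON model",
        "code": 40001,
        "errors": [
            { "Simple": { "key": "description",
                          "value": "Only null values are supported in bulk endpoint" } }
        ]
    });
    println!("{body}");
}
```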

crates/api/src/lib.rs (new file, 10 lines)
View file

@ -0,0 +1,10 @@
mod auth;
pub mod error;
pub mod middleware;
pub mod util;
#[derive(Clone)]
pub struct ApiContext {
pub db: sqlx::postgres::PgPool,
pub redis: fred::clients::RedisPool,
}

View file

@ -1,132 +1,105 @@
use auth::{AuthState, INTERNAL_APPID_HEADER, INTERNAL_SYSTEMID_HEADER};
use api::ApiContext;
use auth::AuthState;
use axum::{
Extension, Router,
body::Body,
extract::{Request as ExtractRequest, State},
extract::Request as ExtractRequest,
http::Uri,
response::{IntoResponse, Response},
routing::{delete, get, patch, post},
};
use hyper_util::{
client::legacy::{Client, connect::HttpConnector},
rt::TokioExecutor,
};
use tracing::info;
use hyper_util::{client::legacy::connect::HttpConnector, rt::TokioExecutor};
use libpk::config;
use tracing::{info, warn};
use pk_macros::api_endpoint;
use crate::proxyer::Proxyer;
mod auth;
mod endpoints;
mod error;
mod middleware;
mod proxyer;
mod util;
#[derive(Clone)]
pub struct ApiContext {
pub db: sqlx::postgres::PgPool,
pub redis: fred::clients::RedisPool,
rproxy_uri: String,
rproxy_client: Client<HttpConnector, Body>,
}
#[api_endpoint]
async fn rproxy(
Extension(auth): Extension<AuthState>,
State(ctx): State<ApiContext>,
mut req: ExtractRequest<Body>,
) -> Response {
let path = req.uri().path();
let path_query = req
.uri()
.path_and_query()
.map(|v| v.as_str())
.unwrap_or(path);
let uri = format!("{}{}", ctx.rproxy_uri, path_query);
*req.uri_mut() = Uri::try_from(uri).unwrap();
let headers = req.headers_mut();
headers.remove(INTERNAL_SYSTEMID_HEADER);
headers.remove(INTERNAL_APPID_HEADER);
if let Some(sid) = auth.system_id() {
headers.append(INTERNAL_SYSTEMID_HEADER, sid.into());
}
if let Some(aid) = auth.app_id() {
headers.append(INTERNAL_APPID_HEADER, aid.into());
}
Ok(ctx.rproxy_client.request(req).await?.into_response())
}
// this function is manually formatted for easier legibility of route_services
#[rustfmt::skip]
fn router(ctx: ApiContext) -> Router {
fn router(ctx: ApiContext, proxyer: Proxyer) -> Router {
let rproxy = |Extension(auth): Extension<AuthState>, req: ExtractRequest<Body>| {
proxyer.rproxy(auth, req)
};
// processed upside down (???) so we have to put middleware at the end
Router::new()
.route("/v2/systems/{system_id}", get(rproxy))
.route("/v2/systems/{system_id}", patch(rproxy))
.route("/v2/systems/{system_id}", get(rproxy.clone()))
.route("/v2/systems/{system_id}", patch(rproxy.clone()))
.route("/v2/systems/{system_id}/settings", get(endpoints::system::get_system_settings))
.route("/v2/systems/{system_id}/settings", patch(rproxy))
.route("/v2/systems/{system_id}/settings", patch(rproxy.clone()))
.route("/v2/systems/{system_id}/members", get(rproxy))
.route("/v2/members", post(rproxy))
.route("/v2/members/{member_id}", get(rproxy))
.route("/v2/members/{member_id}", patch(rproxy))
.route("/v2/members/{member_id}", delete(rproxy))
.route("/v2/systems/{system_id}/members", get(rproxy.clone()))
.route("/v2/members", post(rproxy.clone()))
.route("/v2/members/{member_id}", get(rproxy.clone()))
.route("/v2/members/{member_id}", patch(rproxy.clone()))
.route("/v2/members/{member_id}", delete(rproxy.clone()))
.route("/v2/systems/{system_id}/groups", get(rproxy))
.route("/v2/groups", post(rproxy))
.route("/v2/groups/{group_id}", get(rproxy))
.route("/v2/groups/{group_id}", patch(rproxy))
.route("/v2/groups/{group_id}", delete(rproxy))
.route("/v2/systems/{system_id}/groups", get(rproxy.clone()))
.route("/v2/groups", post(rproxy.clone()))
.route("/v2/groups/{group_id}", get(rproxy.clone()))
.route("/v2/groups/{group_id}", patch(rproxy.clone()))
.route("/v2/groups/{group_id}", delete(rproxy.clone()))
.route("/v2/groups/{group_id}/members", get(rproxy))
.route("/v2/groups/{group_id}/members/add", post(rproxy))
.route("/v2/groups/{group_id}/members/remove", post(rproxy))
.route("/v2/groups/{group_id}/members/overwrite", post(rproxy))
.route("/v2/groups/{group_id}/members", get(rproxy.clone()))
.route("/v2/groups/{group_id}/members/add", post(rproxy.clone()))
.route("/v2/groups/{group_id}/members/remove", post(rproxy.clone()))
.route("/v2/groups/{group_id}/members/overwrite", post(rproxy.clone()))
.route("/v2/members/{member_id}/groups", get(rproxy))
.route("/v2/members/{member_id}/groups/add", post(rproxy))
.route("/v2/members/{member_id}/groups/remove", post(rproxy))
.route("/v2/members/{member_id}/groups/overwrite", post(rproxy))
.route("/v2/members/{member_id}/groups", get(rproxy.clone()))
.route("/v2/members/{member_id}/groups/add", post(rproxy.clone()))
.route("/v2/members/{member_id}/groups/remove", post(rproxy.clone()))
.route("/v2/members/{member_id}/groups/overwrite", post(rproxy.clone()))
.route("/v2/systems/{system_id}/switches", get(rproxy))
.route("/v2/systems/{system_id}/switches", post(rproxy))
.route("/v2/systems/{system_id}/fronters", get(rproxy))
.route("/v2/systems/{system_id}/switches", get(rproxy.clone()))
.route("/v2/systems/{system_id}/switches", post(rproxy.clone()))
.route("/v2/systems/{system_id}/fronters", get(rproxy.clone()))
.route("/v2/systems/{system_id}/switches/{switch_id}", get(rproxy))
.route("/v2/systems/{system_id}/switches/{switch_id}", patch(rproxy))
.route("/v2/systems/{system_id}/switches/{switch_id}/members", patch(rproxy))
.route("/v2/systems/{system_id}/switches/{switch_id}", delete(rproxy))
.route("/v2/systems/{system_id}/switches/{switch_id}", get(rproxy.clone()))
.route("/v2/systems/{system_id}/switches/{switch_id}", patch(rproxy.clone()))
.route("/v2/systems/{system_id}/switches/{switch_id}/members", patch(rproxy.clone()))
.route("/v2/systems/{system_id}/switches/{switch_id}", delete(rproxy.clone()))
.route("/v2/systems/{system_id}/guilds/{guild_id}", get(rproxy))
.route("/v2/systems/{system_id}/guilds/{guild_id}", patch(rproxy))
.route("/v2/systems/{system_id}/guilds/{guild_id}", get(rproxy.clone()))
.route("/v2/systems/{system_id}/guilds/{guild_id}", patch(rproxy.clone()))
.route("/v2/members/{member_id}/guilds/{guild_id}", get(rproxy))
.route("/v2/members/{member_id}/guilds/{guild_id}", patch(rproxy))
.route("/v2/members/{member_id}/guilds/{guild_id}", get(rproxy.clone()))
.route("/v2/members/{member_id}/guilds/{guild_id}", patch(rproxy.clone()))
.route("/v2/systems/{system_id}/autoproxy", get(rproxy))
.route("/v2/systems/{system_id}/autoproxy", patch(rproxy))
.route("/v2/systems/{system_id}/autoproxy", get(rproxy.clone()))
.route("/v2/systems/{system_id}/autoproxy", patch(rproxy.clone()))
.route("/v2/messages/{message_id}", get(rproxy))
.route("/v2/messages/{message_id}", get(rproxy.clone()))
.route("/private/bulk_privacy/member", post(rproxy))
.route("/private/bulk_privacy/group", post(rproxy))
.route("/private/discord/callback", post(rproxy))
.route("/v2/bulk", post(endpoints::bulk::bulk))
.route("/private/bulk_privacy/member", post(rproxy.clone()))
.route("/private/bulk_privacy/group", post(rproxy.clone()))
.route("/private/discord/callback", post(rproxy.clone()))
.route("/private/discord/callback2", post(endpoints::private::discord_callback))
.route("/private/discord/shard_state", get(endpoints::private::discord_state))
.route("/private/dash_views", post(endpoints::private::dash_views))
.route("/private/dash_view/{id}", get(endpoints::private::dash_view))
.route("/private/stats", get(endpoints::private::meta))
.route("/v2/systems/{system_id}/oembed.json", get(rproxy))
.route("/v2/members/{member_id}/oembed.json", get(rproxy))
.route("/v2/groups/{group_id}/oembed.json", get(rproxy))
.route("/v2/systems/{system_id}/oembed.json", get(rproxy.clone()))
.route("/v2/members/{member_id}/oembed.json", get(rproxy.clone()))
.route("/v2/groups/{group_id}/oembed.json", get(rproxy.clone()))
.layer(middleware::ratelimit::ratelimiter(middleware::ratelimit::do_request_ratelimited)) // this sucks
.layer(axum::middleware::from_fn_with_state(
if config.api().use_ratelimiter {
Some(ctx.redis.clone())
} else {
warn!("running without request rate limiting!");
None
},
middleware::ratelimit::do_request_ratelimited)
)
.layer(axum::middleware::from_fn(middleware::ignore_invalid_routes::ignore_invalid_routes))
.layer(axum::middleware::from_fn_with_state(ctx.clone(), middleware::params::params))
.layer(axum::middleware::from_fn_with_state(ctx.clone(), middleware::auth::auth))
@ -144,33 +117,20 @@ async fn main() -> anyhow::Result<()> {
let db = libpk::db::init_data_db().await?;
let redis = libpk::db::init_redis().await?;
let rproxy_uri = Uri::from_static(
&libpk::config
.api
.as_ref()
.expect("missing api config")
.remote_url,
)
.to_string();
let rproxy_uri = Uri::from_static(&libpk::config.api().remote_url).to_string();
let rproxy_client = hyper_util::client::legacy::Client::<(), ()>::builder(TokioExecutor::new())
.build(HttpConnector::new());
let ctx = ApiContext {
db,
redis,
let proxyer = Proxyer {
rproxy_uri: rproxy_uri[..rproxy_uri.len() - 1].to_string(),
rproxy_client,
};
let app = router(ctx);
let ctx = ApiContext { db, redis };
let addr: &str = libpk::config
.api
.as_ref()
.expect("missing api config")
.addr
.as_ref();
let app = router(ctx, proxyer);
let addr: &str = libpk::config.api().addr.as_ref();
let listener = tokio::net::TcpListener::bind(addr).await?;
info!("listening on {}", addr);

View file

@ -44,12 +44,7 @@ pub async fn auth(State(ctx): State<ApiContext>, mut req: Request, next: Next) -
.get("x-pluralkit-app")
.map(|h| h.to_str().ok())
.flatten()
&& let Some(config_token2) = libpk::config
.api
.as_ref()
.expect("missing api config")
.temp_token2
.as_ref()
&& let Some(config_token2) = libpk::config.api().temp_token2.as_ref()
&& app_auth_header
.as_bytes()
.ct_eq(config_token2.as_bytes())

View file

@ -3,12 +3,12 @@ use std::time::{Duration, SystemTime};
use axum::{
extract::{MatchedPath, Request, State},
http::{HeaderValue, Method, StatusCode},
middleware::{FromFnLayer, Next},
middleware::Next,
response::Response,
};
use fred::{clients::RedisPool, interfaces::ClientLike, prelude::LuaInterface, util::sha1_hash};
use fred::{clients::RedisPool, prelude::LuaInterface, util::sha1_hash};
use metrics::counter;
use tracing::{debug, error, info, warn};
use tracing::{debug, error, info};
use crate::{
auth::AuthState,
@ -21,40 +21,6 @@ lazy_static::lazy_static! {
static ref LUA_SCRIPT_SHA: String = sha1_hash(LUA_SCRIPT);
}
// this is awful but it works
pub fn ratelimiter<F, T>(f: F) -> FromFnLayer<F, Option<RedisPool>, T> {
let redis = libpk::config
.api
.as_ref()
.expect("missing api config")
.ratelimit_redis_addr
.as_ref()
.map(|val| {
// todo: this should probably use the global pool
let r = RedisPool::new(
fred::types::RedisConfig::from_url_centralized(val.as_ref())
.expect("redis url is invalid"),
None,
None,
Some(Default::default()),
10,
)
.expect("failed to connect to redis");
let handle = r.connect();
tokio::spawn(async move { handle });
r
});
if redis.is_none() {
warn!("running without request rate limiting!");
}
axum::middleware::from_fn_with_state(redis, f)
}
enum RatelimitType {
GenericGet,
GenericUpdate,

crates/api/src/proxyer.rs (new file, 51 lines)
View file

@ -0,0 +1,51 @@
use crate::{
auth::{AuthState, INTERNAL_APPID_HEADER, INTERNAL_SYSTEMID_HEADER},
error::PKError,
};
use axum::{
body::Body,
extract::Request as ExtractRequest,
http::Uri,
response::{IntoResponse, Response},
};
use hyper_util::client::legacy::{Client, connect::HttpConnector};
#[derive(Clone)]
pub struct Proxyer {
pub rproxy_uri: String,
pub rproxy_client: Client<HttpConnector, Body>,
}
impl Proxyer {
pub async fn rproxy(
self,
auth: AuthState,
mut req: ExtractRequest<Body>,
) -> Result<Response, PKError> {
let path = req.uri().path();
let path_query = req
.uri()
.path_and_query()
.map(|v| v.as_str())
.unwrap_or(path);
let uri = format!("{}{}", self.rproxy_uri, path_query);
*req.uri_mut() = Uri::try_from(uri).unwrap();
let headers = req.headers_mut();
headers.remove(INTERNAL_SYSTEMID_HEADER);
headers.remove(INTERNAL_APPID_HEADER);
if let Some(sid) = auth.system_id() {
headers.append(INTERNAL_SYSTEMID_HEADER, sid.into());
}
if let Some(aid) = auth.app_id() {
headers.append(INTERNAL_APPID_HEADER, aid.into());
}
Ok(self.rproxy_client.request(req).await?.into_response())
}
}

View file

@ -1,30 +1,14 @@
use twilight_model::{
application::command::{Command, CommandType},
guild::IntegrationApplication,
};
use twilight_model::application::command::CommandType;
use twilight_util::builder::command::CommandBuilder;
#[libpk::main]
async fn main() -> anyhow::Result<()> {
let discord = twilight_http::Client::builder()
.token(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_token
.clone(),
)
.token(libpk::config.discord().bot_token.clone())
.build();
let interaction = discord.interaction(twilight_model::id::Id::new(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.client_id
.clone()
.get(),
libpk::config.discord().client_id.clone().get(),
));
let commands = vec![

View file

@ -6,10 +6,7 @@ use tracing::{error, info};
#[libpk::main]
async fn main() -> anyhow::Result<()> {
let config = libpk::config
.avatars
.as_ref()
.expect("missing avatar service config");
let config = libpk::config.avatars();
let bucket = {
let region = s3::Region::Custom {
@ -83,10 +80,7 @@ async fn cleanup_job(pool: sqlx::PgPool, bucket: Arc<s3::Bucket>) -> anyhow::Res
}
let image_data = image_data.unwrap();
let config = libpk::config
.avatars
.as_ref()
.expect("missing avatar service config");
let config = libpk::config.avatars();
let path = image_data
.url

View file

@ -172,10 +172,7 @@ pub struct AppState {
#[libpk::main]
async fn main() -> anyhow::Result<()> {
let config = libpk::config
.avatars
.as_ref()
.expect("missing avatar service config");
let config = libpk::config.avatars();
let bucket = {
let region = s3::Region::Custom {

View file

@ -45,7 +45,7 @@ pub async fn run_server(cache: Arc<DiscordCache>, shard_state: Arc<ShardStateMan
.route(
"/guilds/{guild_id}/members/@me",
get(|State(cache): State<Arc<DiscordCache>>, Path(guild_id): Path<u64>| async move {
match cache.0.member(Id::new(guild_id), libpk::config.discord.as_ref().expect("missing discord config").client_id) {
match cache.0.member(Id::new(guild_id), libpk::config.discord().client_id) {
Some(member) => status_code(StatusCode::FOUND, to_string(member.value()).unwrap()),
None => status_code(StatusCode::NOT_FOUND, "".to_string()),
}
@ -54,7 +54,7 @@ pub async fn run_server(cache: Arc<DiscordCache>, shard_state: Arc<ShardStateMan
.route(
"/guilds/{guild_id}/permissions/@me",
get(|State(cache): State<Arc<DiscordCache>>, Path(guild_id): Path<u64>| async move {
match cache.guild_permissions(Id::new(guild_id), libpk::config.discord.as_ref().expect("missing discord config").client_id).await {
match cache.guild_permissions(Id::new(guild_id), libpk::config.discord().client_id).await {
Ok(val) => {
status_code(StatusCode::FOUND, to_string(&val.bits()).unwrap())
},
@ -122,7 +122,7 @@ pub async fn run_server(cache: Arc<DiscordCache>, shard_state: Arc<ShardStateMan
if guild_id == 0 {
return status_code(StatusCode::FOUND, to_string(&*DM_PERMISSIONS).unwrap());
}
match cache.channel_permissions(Id::new(channel_id), libpk::config.discord.as_ref().expect("missing discord config").client_id).await {
match cache.channel_permissions(Id::new(channel_id), libpk::config.discord().client_id).await {
Ok(val) => status_code(StatusCode::FOUND, to_string(&val).unwrap()),
Err(err) => {
error!(?err, ?channel_id, ?guild_id, "failed to get own channelpermissions");
@ -219,7 +219,7 @@ pub async fn run_server(cache: Arc<DiscordCache>, shard_state: Arc<ShardStateMan
.layer(axum::middleware::from_fn(crate::logger::logger))
.with_state(cache);
let addr: &str = libpk::config.discord.as_ref().expect("missing discord config").cache_api_addr.as_ref();
let addr: &str = libpk::config.discord().cache_api_addr.as_ref();
let listener = tokio::net::TcpListener::bind(addr).await?;
info!("listening on {}", addr);
axum::serve(listener, app.into_make_service_with_connect_info::<SocketAddr>()).await?;

View file

@ -91,22 +91,10 @@ fn member_to_cached_member(item: Member, id: Id<UserMarker>) -> CachedMember {
}
pub fn new() -> DiscordCache {
let mut client_builder = twilight_http::Client::builder().token(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_token
.clone(),
);
let mut client_builder =
twilight_http::Client::builder().token(libpk::config.discord().bot_token.clone());
if let Some(base_url) = libpk::config
.discord
.as_ref()
.expect("missing discord config")
.api_base_url
.clone()
{
if let Some(base_url) = libpk::config.discord().api_base_url.clone() {
client_builder = client_builder.proxy(base_url, true).ratelimiter(None);
}
@ -268,13 +256,7 @@ impl DiscordCache {
return Ok(Permissions::all());
}
let member = if user_id
== libpk::config
.discord
.as_ref()
.expect("missing discord config")
.client_id
{
let member = if user_id == libpk::config.discord().client_id {
self.0
.member(guild_id, user_id)
.ok_or(format_err!("self member not found"))?
@ -340,13 +322,7 @@ impl DiscordCache {
return Ok(Permissions::all());
}
let member = if user_id
== libpk::config
.discord
.as_ref()
.expect("missing discord config")
.client_id
{
let member = if user_id == libpk::config.discord().client_id {
self.0
.member(guild_id, user_id)
.ok_or_else(|| {

View file

@ -23,9 +23,7 @@ use super::cache::DiscordCache;
pub fn cluster_config() -> ClusterSettings {
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.discord()
.cluster
.clone()
.unwrap_or(libpk::_config::ClusterSettings {
@ -63,28 +61,15 @@ pub fn create_shards(redis: fred::clients::RedisPool) -> anyhow::Result<Vec<Shar
)
};
let prefix = libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_prefix_for_gateway
.clone();
let prefix = libpk::config.discord().bot_prefix_for_gateway.clone();
let shards = create_iterator(
start_shard..end_shard + 1,
cluster_settings.total_shards,
ConfigBuilder::new(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_token
.to_owned(),
intents,
)
.presence(presence(format!("{prefix}help").as_str(), false))
.queue(queue.clone())
.build(),
ConfigBuilder::new(libpk::config.discord().bot_token.to_owned(), intents)
.presence(presence(format!("{prefix}help").as_str(), false))
.queue(queue.clone())
.build(),
|_, builder| builder.build(),
);
@ -105,11 +90,7 @@ pub async fn runner(
// let _span = info_span!("shard_runner", shard_id = shard.id().number()).entered();
let shard_id = shard.id().number();
let our_user_id = libpk::config
.discord
.as_ref()
.expect("missing discord config")
.client_id;
let our_user_id = libpk::config.discord().client_id;
info!("waiting for events");
while let Some(item) = shard.next().await {

View file

@ -13,11 +13,7 @@ use twilight_gateway::queue::Queue;
pub fn new(redis: RedisPool) -> RedisQueue {
RedisQueue {
redis,
concurrency: libpk::config
.discord
.as_ref()
.expect("missing discord config")
.max_concurrency,
concurrency: libpk::config.discord().max_concurrency,
}
}

View file

@ -41,13 +41,7 @@ async fn main() -> anyhow::Result<()> {
);
// hacky, but needed for selfhost for now
if let Some(target) = libpk::config
.discord
.as_ref()
.unwrap()
.gateway_target
.clone()
{
if let Some(target) = libpk::config.discord().gateway_target.clone() {
runtime_config
.set(RUNTIME_CONFIG_KEY_EVENT_TARGET.to_string(), target)
.await?;
@ -237,12 +231,7 @@ async fn main() -> anyhow::Result<()> {
}
async fn scheduled_task(redis: RedisPool, senders: Vec<(ShardId, MessageSender)>) {
let prefix = libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_prefix_for_gateway
.clone();
let prefix = libpk::config.discord().bot_prefix_for_gateway.clone();
println!("{prefix}");

View file

@ -14,23 +14,10 @@ async fn main() -> anyhow::Result<()> {
let db = libpk::db::init_messages_db().await?;
let mut client_builder = twilight_http::Client::builder()
.token(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_token
.clone(),
)
.token(libpk::config.discord().bot_token.clone())
.timeout(Duration::from_secs(30));
if let Some(base_url) = libpk::config
.discord
.as_ref()
.expect("missing discord config")
.api_base_url
.clone()
{
if let Some(base_url) = libpk::config.discord().api_base_url.clone() {
client_builder = client_builder.proxy(base_url, true).ratelimiter(None);
}

View file

@ -56,7 +56,7 @@ pub struct ApiConfig {
pub addr: String,
#[serde(default)]
pub ratelimit_redis_addr: Option<String>,
pub use_ratelimiter: bool,
pub remote_url: String,
@ -95,6 +95,21 @@ pub struct ScheduledTasksConfig {
pub expected_gateway_count: usize,
pub gateway_url: String,
pub prometheus_url: String,
pub walg_s3_bucket: String,
}
#[derive(Deserialize, Clone, Debug)]
pub struct PremiumConfig {
pub paddle_webhook_secret: String,
pub paddle_api_key: String,
pub paddle_client_token: String,
pub paddle_price_id: String,
#[serde(default)]
pub is_paddle_production: bool,
pub postmark_token: String,
pub from_email: String,
pub base_url: String,
}
fn _metrics_default() -> bool {
@ -109,13 +124,15 @@ pub struct PKConfig {
pub db: DatabaseConfig,
#[serde(default)]
pub discord: Option<DiscordConfig>,
discord: Option<DiscordConfig>,
#[serde(default)]
pub api: Option<ApiConfig>,
api: Option<ApiConfig>,
#[serde(default)]
pub avatars: Option<AvatarsConfig>,
avatars: Option<AvatarsConfig>,
#[serde(default)]
pub scheduled_tasks: Option<ScheduledTasksConfig>,
#[serde(default)]
premium: Option<PremiumConfig>,
#[serde(default = "_metrics_default")]
pub run_metrics_server: bool,
@ -134,12 +151,28 @@ pub struct PKConfig {
}
impl PKConfig {
pub fn api(self) -> ApiConfig {
self.api.expect("missing api config")
pub fn api(&self) -> &ApiConfig {
self.api.as_ref().expect("missing api config")
}
pub fn discord_config(self) -> DiscordConfig {
self.discord.expect("missing discord config")
pub fn discord(&self) -> &DiscordConfig {
self.discord.as_ref().expect("missing discord config")
}
pub fn avatars(&self) -> &AvatarsConfig {
self.avatars
.as_ref()
.expect("missing avatar service config")
}
pub fn scheduled_tasks(&self) -> &ScheduledTasksConfig {
self.scheduled_tasks
.as_ref()
.expect("missing scheduled_tasks config")
}
pub fn premium(&self) -> &PremiumConfig {
self.premium.as_ref().expect("missing premium config")
}
}

View file

@ -85,8 +85,14 @@ fn parse_field(field: syn::Field) -> ModelField {
panic!("must have json name to be publicly patchable");
}
if f.json.is_some() && f.is_privacy {
panic!("cannot set custom json name for privacy field");
if f.is_privacy && f.json.is_none() {
f.json = Some(syn::Expr::Lit(syn::ExprLit {
attrs: vec![],
lit: syn::Lit::Str(syn::LitStr::new(
f.name.clone().to_string().as_str(),
proc_macro2::Span::call_site(),
)),
}))
}
f
@ -122,17 +128,17 @@ pub fn macro_impl(
let fields: Vec<ModelField> = fields
.iter()
.filter(|f| !matches!(f.patch, ElemPatchability::None))
.filter(|f| f.is_privacy || !matches!(f.patch, ElemPatchability::None))
.cloned()
.collect();
let patch_fields = mk_patch_fields(fields.clone());
let patch_from_json = mk_patch_from_json(fields.clone());
let patch_validate = mk_patch_validate(fields.clone());
let patch_validate_bulk = mk_patch_validate_bulk(fields.clone());
let patch_to_json = mk_patch_to_json(fields.clone());
let patch_to_sql = mk_patch_to_sql(fields.clone());
return quote! {
let code = quote! {
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct #tname {
#tfields
@ -146,31 +152,42 @@ pub fn macro_impl(
#to_json
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Default)]
pub struct #patchable_name {
#patch_fields
errors: Vec<crate::ValidationError>,
}
impl #patchable_name {
pub fn from_json(input: String) -> Self {
#patch_from_json
}
pub fn validate(self) -> bool {
pub fn validate(&mut self) {
#patch_validate
}
pub fn errors(&self) -> Vec<crate::ValidationError> {
self.errors.clone()
}
pub fn validate_bulk(&mut self) {
#patch_validate_bulk
}
pub fn to_sql(self) -> sea_query::UpdateStatement {
// sea_query::Query::update()
#patch_to_sql
use sea_query::types::*;
let mut patch = &mut sea_query::Query::update();
#patch_to_sql
patch.clone()
}
pub fn to_json(self) -> serde_json::Value {
#patch_to_json
}
}
}
.into();
};
// panic!("{:#?}", code.to_string());
return code.into();
}
fn mk_tfields(fields: Vec<ModelField>) -> TokenStream {
@ -225,7 +242,7 @@ fn mk_tto_json(fields: Vec<ModelField>) -> TokenStream {
.filter_map(|f| {
if f.is_privacy {
let tname = f.name.clone();
let tnamestr = f.name.clone().to_string();
let tnamestr = f.json.clone();
Some(quote! {
#tnamestr: self.#tname,
})
@ -280,13 +297,48 @@ fn mk_patch_fields(fields: Vec<ModelField>) -> TokenStream {
.collect()
}
fn mk_patch_validate(_fields: Vec<ModelField>) -> TokenStream {
quote! { true }
}
fn mk_patch_from_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}
fn mk_patch_to_sql(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
fn mk_patch_validate_bulk(fields: Vec<ModelField>) -> TokenStream {
// iterate over all nullable patchable fields other than privacy
// add an error if any field is set to a value other than null
fields
.iter()
.map(|f| {
if let syn::Type::Path(path) = &f.ty && let Some(inner) = path.path.segments.last() && inner.ident != "Option" {
return quote! {};
}
let name = f.name.clone();
if matches!(f.patch, ElemPatchability::Public) {
let json = f.json.clone().unwrap();
quote! {
if let Some(val) = self.#name.clone() && val.is_some() {
self.errors.push(ValidationError::simple(#json, "Only null values are supported in bulk endpoint"));
}
}
} else {
quote! {}
}
})
.collect()
}
fn mk_patch_to_sql(fields: Vec<ModelField>) -> TokenStream {
fields
.iter()
.filter_map(|f| {
if !matches!(f.patch, ElemPatchability::None) || f.is_privacy {
let name = f.name.clone();
let column = f.name.to_string();
Some(quote! {
if let Some(value) = self.#name {
patch = patch.value(#column, value);
}
})
} else {
None
}
})
.collect()
}
fn mk_patch_to_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
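
To make the generated code concrete: mk_patch_to_sql emits one conditional `.value(column, value)` per settable field on a sea_query UpdateStatement, which the bulk endpoint then finishes with `.table(...)`, `.and_where(...)` and `.returning_col(...)`. A hand-expanded sketch for a patch that only sets display_name, mirroring the calls used in bulk.rs (API per the workspace sea-query 1.0.0-rc; literals are illustrative):

```rust
use sea_query::{Expr, ExprTrait, PostgresQueryBuilder, Query};

fn main() {
    // Roughly what `patch.to_sql()` builds when only display_name was set...
    let mut patch = Query::update();
    patch.value("display_name", "Example");

    // ...and how the bulk endpoint scopes and renders it before binding via sqlx:
    let (sql, values) = patch
        .table("members")
        .and_where(Expr::col("id").is_in([1, 2, 3]))
        .returning_col("id")
        .build(PostgresQueryBuilder);

    println!("{sql}");
    println!("{values:?}");
}
```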

View file

@ -0,0 +1,7 @@
-- database version 54
-- initial support for premium
alter table system_config add column premium_until timestamp;
alter table system_config add column premium_lifetime bool default false;
update info set schema_version = 54;
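
These two columns back the bot-side Context.Premium check shown earlier (lifetime flag, or a premium_until still in the future). A hedged sqlx sketch of how a Rust service might read them; only the column names come from this migration, the helper and query text are assumptions:

```rust
use sqlx::PgPool;

// Illustrative helper only; `system_config.system` as the key column is assumed.
async fn system_has_premium(pool: &PgPool, system_id: i32) -> sqlx::Result<bool> {
    // Lifetime premium always counts; otherwise premium_until must not have passed yet.
    sqlx::query_scalar(
        "select coalesce(premium_lifetime, false)
             or coalesce(premium_until > now(), false)
         from system_config where system = $1",
    )
    .bind(system_id)
    .fetch_one(pool)
    .await
}
```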

View file

@ -0,0 +1,27 @@
-- database version 55
-- dashboard views
create function generate_dash_view_id_inner() returns char(10) as $$
select string_agg(substr('aieu234567890', ceil(random() * 13)::integer, 1), '') from generate_series(1, 10)
$$ language sql volatile;
create function generate_dash_view_id() returns char(10) as $$
declare newid char(10);
begin
loop
newid := generate_dash_view_id_inner();
if not exists (select 1 from dash_views where id = newid) then return newid; end if;
end loop;
end
$$ language plpgsql volatile;
create table dash_views (
id text not null primary key default generate_dash_view_id(),
system int references systems(id) on delete cascade,
name text not null,
value text not null,
unique (system, name)
);
update info set schema_version = 55;

View file

@ -6,7 +6,7 @@ edition = "2024"
[dependencies]
chrono = { workspace = true, features = ["serde"] }
pk_macros = { path = "../macros" }
sea-query = "0.32.1"
sea-query = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true, features = ["preserve_order"] }
# in theory we want to default-features = false for sqlx

crates/models/src/group.rs (new file, 132 lines)
View file

@ -0,0 +1,132 @@
use pk_macros::pk_model;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_json::Value;
use uuid::Uuid;
use crate::{PrivacyLevel, SystemId, ValidationError};
// todo: fix
pub type GroupId = i32;
#[pk_model]
struct Group {
id: GroupId,
#[json = "hid"]
#[private_patchable]
hid: String,
#[json = "uuid"]
uuid: Uuid,
// TODO fix
#[json = "system"]
system: SystemId,
#[json = "name"]
#[privacy = name_privacy]
#[patchable]
name: String,
#[json = "display_name"]
#[patchable]
display_name: Option<String>,
#[json = "color"]
#[patchable]
color: Option<String>,
#[json = "icon"]
#[patchable]
icon: Option<String>,
#[json = "banner_image"]
#[patchable]
banner_image: Option<String>,
#[json = "description"]
#[privacy = description_privacy]
#[patchable]
description: Option<String>,
#[json = "created"]
created: DateTime<Utc>,
#[privacy]
name_privacy: PrivacyLevel,
#[privacy]
description_privacy: PrivacyLevel,
#[privacy]
banner_privacy: PrivacyLevel,
#[privacy]
icon_privacy: PrivacyLevel,
#[privacy]
list_privacy: PrivacyLevel,
#[privacy]
metadata_privacy: PrivacyLevel,
#[privacy]
visibility: PrivacyLevel,
}
impl<'de> Deserialize<'de> for PKGroupPatch {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let mut patch: PKGroupPatch = Default::default();
let value: Value = Value::deserialize(deserializer)?;
if let Some(v) = value.get("name") {
if let Some(name) = v.as_str() {
patch.name = Some(name.to_string());
} else if v.is_null() {
patch.errors.push(ValidationError::simple(
"name",
"Group name cannot be set to null.",
));
}
}
macro_rules! parse_string_simple {
($k:expr) => {
match value.get($k) {
None => None,
Some(Value::Null) => Some(None),
Some(Value::String(s)) => Some(Some(s.clone())),
_ => {
patch.errors.push(ValidationError::new($k));
None
}
}
};
}
patch.display_name = parse_string_simple!("display_name");
patch.description = parse_string_simple!("description");
patch.icon = parse_string_simple!("icon");
patch.banner_image = parse_string_simple!("banner");
patch.color = parse_string_simple!("color").map(|v| v.map(|t| t.to_lowercase()));
if let Some(privacy) = value.get("privacy").and_then(Value::as_object) {
macro_rules! parse_privacy {
($v:expr) => {
match privacy.get($v) {
None => None,
Some(Value::Null) => Some(PrivacyLevel::Private),
Some(Value::String(s)) if s == "" || s == "private" => {
Some(PrivacyLevel::Private)
}
Some(Value::String(s)) if s == "public" => Some(PrivacyLevel::Public),
_ => {
patch.errors.push(ValidationError::new($v));
None
}
}
};
}
patch.name_privacy = parse_privacy!("name_privacy");
patch.description_privacy = parse_privacy!("description_privacy");
patch.banner_privacy = parse_privacy!("banner_privacy");
patch.icon_privacy = parse_privacy!("icon_privacy");
patch.list_privacy = parse_privacy!("list_privacy");
patch.metadata_privacy = parse_privacy!("metadata_privacy");
patch.visibility = parse_privacy!("visibility");
}
Ok(patch)
}
}
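
As a usage note for the hand-written Deserialize above: a group patch distinguishes an omitted key (leave unchanged) from an explicit null (clear the field), `name` may not be null at all, the wire key `banner` maps to the banner_image column, and privacy values accept "public", "private", an empty string, or null (the latter three all meaning private). A small illustrative body (values are placeholders):

```rust
use serde_json::json;

fn main() {
    // A group patch as the custom Deserialize reads it (placeholder values):
    let body = json!({
        "name": "Library",           // must be a non-null string; null is rejected
        "description": null,         // explicit null clears the field (Some(None))
        "banner": null,              // wire key "banner", stored as banner_image
        "privacy": {
            "visibility": "private"  // "", "private" or null -> Private; "public" -> Public
        }
    });
    println!("{body}");
}
```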

View file

@ -9,6 +9,8 @@ macro_rules! model {
model!(system);
model!(system_config);
model!(member);
model!(group);
#[derive(serde::Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
@ -31,3 +33,30 @@ impl From<i32> for PrivacyLevel {
}
}
}
impl From<PrivacyLevel> for sea_query::Value {
fn from(level: PrivacyLevel) -> sea_query::Value {
match level {
PrivacyLevel::Public => sea_query::Value::Int(Some(1)),
PrivacyLevel::Private => sea_query::Value::Int(Some(2)),
}
}
}
#[derive(serde::Serialize, Debug, Clone)]
pub enum ValidationError {
Simple { key: String, value: String },
}
impl ValidationError {
fn new(key: &str) -> Self {
Self::simple(key, "is invalid")
}
fn simple(key: &str, value: &str) -> Self {
Self::Simple {
key: key.to_string(),
value: value.to_string(),
}
}
}

crates/models/src/member.rs (new file, 208 lines)
View file

@ -0,0 +1,208 @@
use pk_macros::pk_model;
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use uuid::Uuid;
use crate::{PrivacyLevel, SystemId, ValidationError};
// todo: fix
pub type MemberId = i32;
#[derive(Clone, Debug, Serialize, Deserialize, sqlx::Type)]
#[sqlx(type_name = "proxy_tag")]
pub struct ProxyTag {
pub prefix: Option<String>,
pub suffix: Option<String>,
}
#[pk_model]
struct Member {
id: MemberId,
#[json = "hid"]
#[private_patchable]
hid: String,
#[json = "uuid"]
uuid: Uuid,
// TODO fix
#[json = "system"]
system: SystemId,
#[json = "color"]
#[patchable]
color: Option<String>,
#[json = "webhook_avatar_url"]
#[patchable]
webhook_avatar_url: Option<String>,
#[json = "avatar_url"]
#[patchable]
avatar_url: Option<String>,
#[json = "banner_image"]
#[patchable]
banner_image: Option<String>,
#[json = "name"]
#[privacy = name_privacy]
#[patchable]
name: String,
#[json = "display_name"]
#[patchable]
display_name: Option<String>,
#[json = "birthday"]
#[patchable]
birthday: Option<String>,
#[json = "pronouns"]
#[privacy = pronoun_privacy]
#[patchable]
pronouns: Option<String>,
#[json = "description"]
#[privacy = description_privacy]
#[patchable]
description: Option<String>,
#[json = "proxy_tags"]
// #[patchable]
proxy_tags: Vec<ProxyTag>,
#[json = "keep_proxy"]
#[patchable]
keep_proxy: bool,
#[json = "tts"]
#[patchable]
tts: bool,
#[json = "created"]
created: NaiveDateTime,
#[json = "message_count"]
#[private_patchable]
message_count: i32,
#[json = "last_message_timestamp"]
#[private_patchable]
last_message_timestamp: Option<NaiveDateTime>,
#[json = "allow_autoproxy"]
#[patchable]
allow_autoproxy: bool,
#[privacy]
#[json = "visibility"]
member_visibility: PrivacyLevel,
#[privacy]
description_privacy: PrivacyLevel,
#[privacy]
banner_privacy: PrivacyLevel,
#[privacy]
avatar_privacy: PrivacyLevel,
#[privacy]
name_privacy: PrivacyLevel,
#[privacy]
birthday_privacy: PrivacyLevel,
#[privacy]
pronoun_privacy: PrivacyLevel,
#[privacy]
metadata_privacy: PrivacyLevel,
#[privacy]
proxy_privacy: PrivacyLevel,
}
impl<'de> Deserialize<'de> for PKMemberPatch {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let mut patch: PKMemberPatch = Default::default();
let value: Value = Value::deserialize(deserializer)?;
if let Some(v) = value.get("name") {
if let Some(name) = v.as_str() {
patch.name = Some(name.to_string());
} else if v.is_null() {
patch.errors.push(ValidationError::simple(
"name",
"Member name cannot be set to null.",
));
}
}
macro_rules! parse_string_simple {
($k:expr) => {
match value.get($k) {
None => None,
Some(Value::Null) => Some(None),
Some(Value::String(s)) => Some(Some(s.clone())),
_ => {
patch.errors.push(ValidationError::new($k));
None
}
}
};
}
patch.color = parse_string_simple!("color").map(|v| v.map(|t| t.to_lowercase()));
patch.display_name = parse_string_simple!("display_name");
patch.avatar_url = parse_string_simple!("avatar_url");
patch.banner_image = parse_string_simple!("banner");
patch.birthday = parse_string_simple!("birthday"); // fix
patch.pronouns = parse_string_simple!("pronouns");
patch.description = parse_string_simple!("description");
if let Some(keep_proxy) = value.get("keep_proxy").and_then(Value::as_bool) {
patch.keep_proxy = Some(keep_proxy);
}
if let Some(tts) = value.get("tts").and_then(Value::as_bool) {
patch.tts = Some(tts);
}
// todo: legacy import handling
// todo: fix proxy_tag type in sea_query
// if let Some(proxy_tags) = value.get("proxy_tags").and_then(Value::as_array) {
// patch.proxy_tags = Some(
// proxy_tags
// .iter()
// .filter_map(|tag| {
// tag.as_object().map(|tag_obj| {
// let prefix = tag_obj
// .get("prefix")
// .and_then(Value::as_str)
// .map(|s| s.to_string());
// let suffix = tag_obj
// .get("suffix")
// .and_then(Value::as_str)
// .map(|s| s.to_string());
// ProxyTag { prefix, suffix }
// })
// })
// .collect(),
// )
// }
if let Some(privacy) = value.get("privacy").and_then(Value::as_object) {
macro_rules! parse_privacy {
($v:expr) => {
match privacy.get($v) {
None => None,
Some(Value::Null) => Some(PrivacyLevel::Private),
Some(Value::String(s)) if s == "" || s == "private" => {
Some(PrivacyLevel::Private)
}
Some(Value::String(s)) if s == "public" => Some(PrivacyLevel::Public),
_ => {
patch.errors.push(ValidationError::new($v));
None
}
}
};
}
patch.member_visibility = parse_privacy!("visibility");
patch.name_privacy = parse_privacy!("name_privacy");
patch.description_privacy = parse_privacy!("description_privacy");
patch.banner_privacy = parse_privacy!("banner_privacy");
patch.avatar_privacy = parse_privacy!("avatar_privacy");
patch.birthday_privacy = parse_privacy!("birthday_privacy");
patch.pronoun_privacy = parse_privacy!("pronoun_privacy");
patch.proxy_privacy = parse_privacy!("proxy_privacy");
patch.metadata_privacy = parse_privacy!("metadata_privacy");
}
Ok(patch)
}
}


@ -1,3 +1,4 @@
use chrono::NaiveDateTime;
use pk_macros::pk_model;
use sqlx::{postgres::PgTypeInfo, Database, Decode, Postgres, Type};
@ -87,4 +88,19 @@ struct SystemConfig {
name_format: Option<String>,
#[json = "description_templates"]
description_templates: Vec<String>,
#[json = "premium_until"]
premium_until: Option<NaiveDateTime>,
#[json = "premium_lifetime"]
premium_lifetime: bool
}
#[pk_model]
struct DashView {
#[json = "id"]
id: String,
system: SystemId,
#[json = "name"]
name: String,
#[json = "value"]
value: String
}

crates/premium/Cargo.toml Normal file

@ -0,0 +1,36 @@
[package]
name = "premium"
version = "0.1.0"
edition = "2024"
[dependencies]
pluralkit_models = { path = "../models" }
pk_macros = { path = "../macros" }
libpk = { path = "../libpk" }
api = { path = "../api" }
anyhow = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true }
axum-extra = { workspace = true }
fred = { workspace = true }
lazy_static = { workspace = true }
metrics = { workspace = true }
reqwest = { workspace = true }
sea-query = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sqlx = { workspace = true }
tokio = { workspace = true }
tower-http = { workspace = true }
tracing = { workspace = true }
twilight-http = { workspace = true }
askama = "0.14.0"
postmark = { version = "0.11", features = ["reqwest"] }
rand = "0.8"
thiserror = "1.0"
hex = "0.4"
paddle-rust-sdk = { version = "0.16.0", default-features = false, features = ["rustls-native-roots"] }
serde_urlencoded = "0.7"
time = "0.3"

crates/premium/init.sql Normal file

@ -0,0 +1,10 @@
create table premium_subscriptions (
id serial primary key,
provider text not null,
provider_id text not null,
email text not null,
system_id int references systems(id) on delete set null,
status text,
next_renewal_at text,
unique (provider, provider_id)
);

crates/premium/src/auth.rs Normal file

@ -0,0 +1,349 @@
use api::{ApiContext, fail_html};
use askama::Template;
use axum::{
extract::{MatchedPath, Request, State},
http::header::SET_COOKIE,
middleware::Next,
response::{AppendHeaders, IntoResponse, Redirect, Response},
};
use axum_extra::extract::cookie::CookieJar;
use fred::{
prelude::{KeysInterface, LuaInterface},
util::sha1_hash,
};
use rand::{Rng, distributions::Alphanumeric};
use serde::{Deserialize, Serialize};
use crate::web::{message, render};
const LOGIN_TOKEN_TTL_SECS: i64 = 60 * 10;
const SESSION_LUA_SCRIPT: &str = r#"
local session_key = KEYS[1]
local ttl = ARGV[1]
local session_data = redis.call('GET', session_key)
if session_data then
redis.call('EXPIRE', session_key, ttl)
end
return session_data
"#;
const SESSION_TTL_SECS: i64 = 60 * 60 * 4;
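// SESSION_LUA_SCRIPT reads the session and, if it exists, bumps its TTL in one
// atomic round trip, so active sessions get a sliding expiration window.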
lazy_static::lazy_static! {
static ref SESSION_LUA_SCRIPT_SHA: String = sha1_hash(SESSION_LUA_SCRIPT);
}
fn rand_token() -> String {
rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(64)
.map(char::from)
.collect()
}
#[derive(Clone, Serialize, Deserialize)]
pub struct AuthState {
pub email: String,
pub csrf_token: String,
pub session_id: String,
}
impl AuthState {
fn new(email: String) -> Self {
Self {
email,
csrf_token: rand_token(),
session_id: rand_token(),
}
}
async fn from_request(
headers: axum::http::HeaderMap,
ctx: &ApiContext,
) -> anyhow::Result<Option<Self>> {
let jar = CookieJar::from_headers(&headers);
let Some(session_cookie) = jar.get("pk-session") else {
return Ok(None);
};
let session_id = session_cookie.value();
let session_key = format!("premium:session:{}", session_id);
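        // make sure the Lua script is cached server-side before calling EVALSHA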
let script_exists: Vec<usize> = ctx
.redis
.script_exists(vec![SESSION_LUA_SCRIPT_SHA.to_string()])
.await?;
if script_exists[0] != 1 {
ctx.redis
.script_load::<String, String>(SESSION_LUA_SCRIPT.to_string())
.await?;
}
let session_data: Option<String> = ctx
.redis
.evalsha(
SESSION_LUA_SCRIPT_SHA.to_string(),
vec![session_key],
vec![SESSION_TTL_SECS],
)
.await?;
let Some(session_data) = session_data else {
return Ok(None);
};
let session: AuthState = serde_json::from_str(&session_data)?;
Ok(Some(session))
}
async fn save(&self, ctx: &ApiContext) -> anyhow::Result<()> {
let session_key = format!("premium:session:{}", self.session_id);
let session_data = serde_json::to_string(&self)?;
ctx.redis
.set::<(), _, _>(
session_key,
session_data,
Some(fred::types::Expiration::EX(SESSION_TTL_SECS)),
None,
false,
)
.await?;
Ok(())
}
async fn delete(&self, ctx: &ApiContext) -> anyhow::Result<()> {
let session_key = format!("premium:session:{}", self.session_id);
ctx.redis.del::<(), _>(session_key).await?;
Ok(())
}
}
fn refresh_session_cookie(session: &AuthState, mut response: Response) -> Response {
let cookie_value = format!(
"pk-session={}; Path=/; HttpOnly; Secure; SameSite=Lax; Max-Age={}",
session.session_id, SESSION_TTL_SECS
);
response
.headers_mut()
.insert(SET_COOKIE, cookie_value.parse().unwrap());
response
}
pub async fn middleware(
State(ctx): State<ApiContext>,
mut request: Request,
next: Next,
) -> Response {
let extensions = request.extensions().clone();
let endpoint = extensions
.get::<MatchedPath>()
.cloned()
.map(|v| v.as_str().to_string())
.unwrap_or("unknown".to_string());
let session = match AuthState::from_request(request.headers().clone(), &ctx).await {
Ok(s) => s,
Err(err) => fail_html!(?err, "failed to fetch auth state from redis"),
};
if let Some(session) = session.clone() {
request.extensions_mut().insert(session);
}
match endpoint.as_str() {
"/" => {
if let Some(ref session) = session {
let response = next.run(request).await;
refresh_session_cookie(session, response)
} else {
return render!(crate::web::Index {
base_url: libpk::config.premium().base_url.clone(),
session: None,
show_login_form: true,
message: None,
subscriptions: vec![],
paddle: None,
});
}
}
"/info/" => {
let response = next.run(request).await;
if let Some(ref session) = session {
refresh_session_cookie(session, response)
} else {
response
}
}
"/login" => {
if let Some(ref session) = session {
// no session here because that shows the "you're logged in as" component
let response = render!(message("you are already logged in! go back home and log out if you need to log in to a different account.".to_string(), None));
return refresh_session_cookie(session, response);
} else {
let body = match axum::body::to_bytes(request.into_body(), 1024 * 16).await {
Ok(b) => b,
Err(err) => fail_html!(?err, "failed to read request body"),
};
let form: std::collections::HashMap<String, String> =
match serde_urlencoded::from_bytes(&body) {
Ok(f) => f,
Err(err) => fail_html!(?err, "failed to parse form data"),
};
let Some(email) = form.get("email") else {
return render!(crate::web::Index {
base_url: libpk::config.premium().base_url.clone(),
session: None,
show_login_form: true,
message: Some("email field is required".to_string()),
subscriptions: vec![],
paddle: None,
});
};
let email = email.trim().to_lowercase();
if email.is_empty() {
return render!(crate::web::Index {
base_url: libpk::config.premium().base_url.clone(),
session: None,
show_login_form: true,
message: Some("email field is required".to_string()),
subscriptions: vec![],
paddle: None,
});
}
let token = rand_token();
let token_key = format!("premium:login_token:{}", token);
if let Err(err) = ctx
.redis
.set::<(), _, _>(
token_key,
&email,
Some(fred::types::Expiration::EX(LOGIN_TOKEN_TTL_SECS)),
None,
false,
)
.await
{
fail_html!(?err, "failed to store login token in redis");
}
if let Err(err) = crate::mailer::login_token(email, token).await {
fail_html!(?err, "failed to send login email");
}
return render!(message(
"check your email for a login link! it will expire in 10 minutes.".to_string(),
None
));
}
}
"/login/{token}" => {
if let Some(ref session) = session {
// no session here because that shows the "you're logged in as" component
let response = render!(message("you are already logged in! go back home and log out if you need to log in to a different account.".to_string(), None));
return refresh_session_cookie(session, response);
}
let path = request.uri().path();
let token = path.strip_prefix("/login/").unwrap_or("");
if token.is_empty() {
return render!(crate::web::Index {
base_url: libpk::config.premium().base_url.clone(),
session: None,
show_login_form: true,
message: Some("invalid login link".to_string()),
subscriptions: vec![],
paddle: None,
});
}
let token_key = format!("premium:login_token:{}", token);
let email: Option<String> = match ctx.redis.get(&token_key).await {
Ok(e) => e,
Err(err) => fail_html!(?err, "failed to fetch login token from redis"),
};
let Some(email) = email else {
return render!(crate::web::Index {
base_url: libpk::config.premium().base_url.clone(),
session: None,
show_login_form: true,
message: Some(
"invalid or expired login link. please request a new one.".to_string()
),
subscriptions: vec![],
paddle: None,
});
};
if let Err(err) = ctx.redis.del::<(), _>(&token_key).await {
fail_html!(?err, "failed to delete login token from redis");
}
let session = AuthState::new(email);
if let Err(err) = session.save(&ctx).await {
fail_html!(?err, "failed to save session to redis");
}
let cookie_value = format!(
"pk-session={}; Path=/; HttpOnly; Secure; SameSite=Lax; Max-Age={}",
session.session_id, SESSION_TTL_SECS
);
(
AppendHeaders([(SET_COOKIE, cookie_value)]),
Redirect::to("/"),
)
.into_response()
}
"/logout" => {
let Some(session) = session else {
return Redirect::to("/").into_response();
};
let body = match axum::body::to_bytes(request.into_body(), 1024 * 16).await {
Ok(b) => b,
Err(err) => fail_html!(?err, "failed to read request body"),
};
let form: std::collections::HashMap<String, String> =
match serde_urlencoded::from_bytes(&body) {
Ok(f) => f,
Err(err) => fail_html!(?err, "failed to parse form data"),
};
let csrf_valid = form
.get("csrf_token")
.map(|t| t == &session.csrf_token)
.unwrap_or(false);
if !csrf_valid {
return (axum::http::StatusCode::FORBIDDEN, "invalid csrf token").into_response();
}
if let Err(err) = session.delete(&ctx).await {
fail_html!(?err, "failed to delete session from redis");
}
let cookie_value = "pk-session=; Path=/; HttpOnly; Max-Age=0";
(
AppendHeaders([(SET_COOKIE, cookie_value)]),
Redirect::to("/"),
)
.into_response()
}
"/cancel" | "/validate-token" => {
if let Some(ref session) = session {
let response = next.run(request).await;
refresh_session_cookie(session, response)
} else {
Redirect::to("/").into_response()
}
}
_ => (axum::http::StatusCode::NOT_FOUND, "404 not found").into_response(),
}
}


@ -0,0 +1 @@
pub use api::error::*;


@ -0,0 +1,44 @@
use lazy_static::lazy_static;
use postmark::{
Query,
api::{Body, email::SendEmailRequest},
reqwest::PostmarkClient,
};
lazy_static! {
pub static ref CLIENT: PostmarkClient = {
PostmarkClient::builder()
.server_token(&libpk::config.premium().postmark_token)
.build()
};
}
const LOGIN_TEXT: &'static str = r#"Hello,
Someone (hopefully you) has requested a link to log in to the PluralKit Premium website.
Click here to log in: {link}
This link will expire in 10 minutes.
If you did not request this link, please ignore this message.
Thanks,
- PluralKit Team
"#;
pub async fn login_token(rcpt: String, token: String) -> anyhow::Result<()> {
SendEmailRequest::builder()
.from(&libpk::config.premium().from_email)
.to(rcpt)
.subject("[PluralKit Premium] Your login link")
.body(Body::text(LOGIN_TEXT.replace(
"{link}",
format!("{}/login/{token}", libpk::config.premium().base_url).as_str(),
)))
.build()
.execute(&(CLIENT.to_owned()))
.await?;
Ok(())
}

crates/premium/src/main.rs Normal file

@ -0,0 +1,105 @@
use askama::Template;
use axum::{
Extension, Router,
extract::State,
response::{Html, IntoResponse, Response},
routing::{get, post},
};
use tower_http::{catch_panic::CatchPanicLayer, services::ServeDir};
use tracing::info;
use api::{ApiContext, middleware};
mod auth;
mod error;
mod mailer;
mod paddle;
mod system;
mod web;
pub use api::fail;
async fn home_handler(
State(ctx): State<ApiContext>,
Extension(session): Extension<auth::AuthState>,
) -> Response {
let subscriptions = match paddle::fetch_subscriptions_for_email(&ctx, &session.email).await {
Ok(subs) => subs,
Err(err) => {
tracing::error!(?err, "failed to fetch subscriptions for {}", session.email);
vec![]
}
};
Html(
web::Index {
base_url: libpk::config.premium().base_url.clone(),
session: Some(session),
show_login_form: false,
message: None,
subscriptions,
paddle: Some(web::PaddleData {
client_token: libpk::config.premium().paddle_client_token.clone(),
price_id: libpk::config.premium().paddle_price_id.clone(),
environment: if libpk::config.premium().is_paddle_production {
"production"
} else {
"sandbox"
}
.to_string(),
}),
}
.render()
.unwrap(),
)
.into_response()
}
// this function is manually formatted for easier legibility of route_services
#[rustfmt::skip]
fn router(ctx: ApiContext) -> Router {
    // axum's .layer() only wraps routes registered before it, so middleware has to be added after the routes it should apply to
Router::new()
.route("/", get(home_handler))
.route("/info/", get(|| async { Html(include_str!("../templates/info.html")) }))
.route("/login/{token}", get(|| async {
"handled in auth middleware"
}))
.route("/login", post(|| async {
"handled in auth middleware"
}))
.route("/logout", post(|| async {
"handled in auth middleware"
}))
.route("/cancel", get(paddle::cancel_page).post(paddle::cancel))
.route("/validate-token", post(system::validate_token))
.layer(axum::middleware::from_fn_with_state(ctx.clone(), auth::middleware))
.route("/paddle", post(paddle::webhook))
.layer(axum::middleware::from_fn(middleware::logger::logger))
.nest_service("/static", ServeDir::new("static"))
.layer(CatchPanicLayer::custom(api::util::handle_panic))
.with_state(ctx)
}
#[libpk::main]
async fn main() -> anyhow::Result<()> {
let db = libpk::db::init_data_db().await?;
let redis = libpk::db::init_redis().await?;
let ctx = ApiContext { db, redis };
let app = router(ctx);
let addr: &str = libpk::config.api().addr.as_ref();
let listener = tokio::net::TcpListener::bind(addr).await?;
info!("listening on {}", addr);
axum::serve(listener, app).await?;
Ok(())
}


@ -0,0 +1,493 @@
use std::{collections::HashSet, vec};
use api::ApiContext;
use askama::Template;
use axum::{
extract::State,
http::{HeaderMap, StatusCode},
response::{IntoResponse, Response},
};
use lazy_static::lazy_static;
use paddle_rust_sdk::{
Paddle,
entities::{Customer, Subscription},
enums::{EventData, SubscriptionStatus},
webhooks::MaximumVariance,
};
use pk_macros::api_endpoint;
use serde::Serialize;
use sqlx::postgres::Postgres;
use tracing::{error, info};
use crate::fail;
// ew
fn html_escape(s: &str) -> String {
s.replace('&', "&amp;")
.replace('<', "&lt;")
.replace('>', "&gt;")
.replace('"', "&quot;")
.replace('\'', "&#x27;")
}
lazy_static! {
static ref PADDLE_CLIENT: Paddle = {
let config = libpk::config.premium();
let base_url = if config.is_paddle_production {
Paddle::PRODUCTION
} else {
Paddle::SANDBOX
};
Paddle::new(&config.paddle_api_key, base_url).expect("failed to create paddle client")
};
}
pub async fn fetch_customer(customer_id: &str) -> anyhow::Result<Customer> {
let customer = PADDLE_CLIENT.customer_get(customer_id).send().await?;
Ok(customer.data)
}
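// base query for stored subscriptions, joined against systems so the page can
// display the linked system's hid and name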
const SUBSCRIPTION_QUERY: &str = r#"
select
p.id, p.provider, p.provider_id, p.email, p.system_id,
s.hid as system_hid, s.name as system_name,
p.status, p.next_renewal_at
from premium_subscriptions p
left join systems s on p.system_id = s.id
"#;
async fn get_subscriptions_by_email(
ctx: &ApiContext,
email: &str,
) -> anyhow::Result<Vec<DbSubscription>> {
let query = format!("{} where p.email = $1", SUBSCRIPTION_QUERY);
let subs = sqlx::query_as(&query)
.bind(email)
.fetch_all(&ctx.db)
.await?;
Ok(subs)
}
async fn get_subscription(
ctx: &ApiContext,
provider_id: &str,
email: &str,
) -> anyhow::Result<Option<DbSubscription>> {
let query = format!(
"{} where p.provider_id = $1 and p.email = $2",
SUBSCRIPTION_QUERY
);
let sub = sqlx::query_as(&query)
.bind(provider_id)
.bind(email)
.fetch_optional(&ctx.db)
.await?;
Ok(sub)
}
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct DbSubscription {
pub id: i32,
pub provider: String,
pub provider_id: String,
pub email: String,
pub system_id: Option<i32>,
pub system_hid: Option<String>,
pub system_name: Option<String>,
pub status: Option<String>,
pub next_renewal_at: Option<String>,
}
#[derive(Debug, Clone, Serialize)]
pub struct SubscriptionInfo {
pub db: Option<DbSubscription>,
pub paddle: Option<Subscription>,
}
impl SubscriptionInfo {
pub fn subscription_id(&self) -> &str {
if let Some(paddle) = &self.paddle {
paddle.id.as_ref()
} else if let Some(db) = &self.db {
&db.provider_id
} else {
"unknown"
}
}
pub fn status(&self) -> String {
if let Some(paddle) = &self.paddle {
if let Some(ref scheduled) = paddle.scheduled_change {
if matches!(
scheduled.action,
paddle_rust_sdk::enums::ScheduledChangeAction::Cancel
) {
return format!("expires {}", scheduled.effective_at.format("%Y-%m-%d"));
}
}
format!("{:?}", paddle.status).to_lowercase()
} else if let Some(db) = &self.db {
db.status.clone().unwrap_or_else(|| "unknown".to_string())
} else {
"unknown".to_string()
}
}
pub fn next_renewal(&self) -> String {
if let Some(paddle) = &self.paddle {
            // if a cancellation is scheduled there is no upcoming renewal; the end
            // date is already surfaced via status(), so just show a dash here
if paddle.scheduled_change.as_ref().is_some_and(|s| {
matches!(
s.action,
paddle_rust_sdk::enums::ScheduledChangeAction::Cancel
)
}) {
return "-".to_string();
}
if let Some(next) = paddle.next_billed_at {
return next.format("%Y-%m-%d").to_string();
}
}
if let Some(db) = &self.db {
if let Some(next) = &db.next_renewal_at {
return next.split('T').next().unwrap_or(next).to_string();
}
}
"-".to_string()
}
pub fn system_id_display(&self) -> String {
if let Some(db) = &self.db {
if let Some(hid) = &db.system_hid {
if let Some(name) = &db.system_name {
// ew, this needs to be fixed
let escaped_name = html_escape(name);
return format!("{} (<code>{}</code>)", escaped_name, hid);
}
return format!("<code>{}</code>", hid);
}
if db.system_id.is_some() {
return "unknown system (contact us at billing@pluralkit.me to fix this)"
.to_string();
}
return "not linked".to_string();
}
"not linked".to_string()
}
pub fn is_cancellable(&self) -> bool {
if let Some(paddle) = &self.paddle {
if paddle.scheduled_change.as_ref().is_some_and(|s| {
matches!(
s.action,
paddle_rust_sdk::enums::ScheduledChangeAction::Cancel
)
}) {
return false;
}
matches!(
paddle.status,
SubscriptionStatus::Active | SubscriptionStatus::PastDue
)
} else if let Some(db) = &self.db {
matches!(db.status.as_deref(), Some("active") | Some("past_due"))
} else {
false
}
}
}
// this is slightly terrible, but works
// the paddle sdk is a mess which does not help
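// merges the subscriptions recorded in our database with whatever Paddle reports
// for the customer matching this email, pairing the two sets by provider_id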
pub async fn fetch_subscriptions_for_email(
ctx: &ApiContext,
email: &str,
) -> anyhow::Result<Vec<SubscriptionInfo>> {
let db_subs = get_subscriptions_by_email(ctx, email).await?;
let mut paddle_subs: Vec<Subscription> = Vec::new();
// there's no method to look up customer by email, so we have to do this nonsense
let Some(customer) = PADDLE_CLIENT
.customers_list()
.emails([email])
.send()
.next()
.await?
.and_then(|v| v.data.into_iter().next())
else {
return Ok(vec![]);
};
// why
let mut temp_paddle_for_sub_list = PADDLE_CLIENT.subscriptions_list();
let mut subs_pages = temp_paddle_for_sub_list.customer_id([customer.id]).send();
while let Some(subs_page) = subs_pages.next().await? {
paddle_subs.extend(subs_page.data);
}
let mut results: Vec<SubscriptionInfo> = Vec::new();
let mut found_ids: HashSet<String> = HashSet::new();
for db_sub in &db_subs {
let paddle_match = paddle_subs
.iter()
.find(|p| p.id.as_ref() == db_sub.provider_id);
if let Some(paddle) = paddle_match {
found_ids.insert(paddle.id.as_ref().to_string());
results.push(SubscriptionInfo {
db: Some(db_sub.clone()),
paddle: Some(paddle.clone()),
});
} else {
results.push(SubscriptionInfo {
db: Some(db_sub.clone()),
paddle: None,
});
}
}
for paddle_sub in paddle_subs {
if !found_ids.contains(paddle_sub.id.as_ref()) {
results.push(SubscriptionInfo {
db: None,
paddle: Some(paddle_sub),
});
}
}
// todo: show some error if a sub is only in db/provider but not both
// todo: we may want to show canceled subscriptions in the future
results.retain(|sub| sub.status() != "canceled");
Ok(results)
}
async fn save_subscription(
ctx: &ApiContext,
sub: &Subscription,
email: &str,
) -> anyhow::Result<()> {
let status = format!("{:?}", sub.status).to_lowercase();
let next_renewal_at = sub.next_billed_at.map(|dt| dt.to_rfc3339());
let system_id: Option<i32> = sub
.custom_data
.as_ref()
.and_then(|d| d.get("system_id"))
.and_then(|v| v.as_i64())
.map(|v| v as i32);
sqlx::query::<Postgres>(
r#"
insert into premium_subscriptions (provider, provider_id, email, system_id, status, next_renewal_at)
values ('paddle', $1, $2, $3, $4, $5)
on conflict (provider, provider_id) do update set
status = excluded.status,
next_renewal_at = excluded.next_renewal_at
"#,
)
.bind(sub.id.as_ref())
.bind(email)
.bind(system_id)
.bind(&status)
.bind(&next_renewal_at)
.execute(&ctx.db)
.await?;
// if has a linked system, also update system_config
// just in case we get out of order webhooks, never reduce the premium_until
// todo: this will obviously break if we refund someone's subscription
if let Some(system_id) = system_id {
if matches!(sub.status, SubscriptionStatus::Active) {
if let Some(next_billed_at) = sub.next_billed_at {
let premium_until = next_billed_at.naive_utc();
sqlx::query::<Postgres>(
r#"
update system_config set
premium_until = greatest(system_config.premium_until, $2)
where system = $1
"#,
)
.bind(system_id)
.bind(premium_until)
.execute(&ctx.db)
.await?;
info!(
"updated premium_until for system {} to {}",
system_id, premium_until
);
}
}
}
Ok(())
}
#[api_endpoint]
pub async fn webhook(State(ctx): State<ApiContext>, headers: HeaderMap, body: String) -> Response {
let Some(signature) = headers
.get("paddle-signature")
.and_then(|h| h.to_str().ok())
else {
return Ok(StatusCode::BAD_REQUEST.into_response());
};
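    // verify the webhook signature and parse the event payload in one step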
match match Paddle::unmarshal(
body,
&libpk::config.premium().paddle_webhook_secret,
signature,
MaximumVariance::default(),
) {
Ok(event) => event,
Err(err) => {
error!(?err, "failed to unmarshal paddle data");
return Ok(StatusCode::BAD_REQUEST.into_response());
}
}
.data
{
EventData::SubscriptionCreated(sub)
| EventData::SubscriptionActivated(sub)
| EventData::SubscriptionUpdated(sub) => {
match sub.status {
SubscriptionStatus::Trialing => {
error!(
"got status trialing for subscription {}, this should never happen",
sub.id
);
return Ok("".into_response());
}
SubscriptionStatus::Active
| SubscriptionStatus::Canceled
| SubscriptionStatus::PastDue
| SubscriptionStatus::Paused => {}
unk => {
error!("got unknown status {unk:?} for subscription {}", sub.id);
return Ok("".into_response());
}
}
let email = match fetch_customer(sub.customer_id.as_ref()).await {
Ok(cus) => cus.email,
Err(err) => {
fail!(
?err,
"failed to fetch customer email for subscription {}",
sub.id
);
}
};
if let Err(err) = save_subscription(&ctx, &sub, &email).await {
fail!(?err, "failed to save subscription {}", sub.id);
}
info!("saved subscription {} with status {:?}", sub.id, sub.status);
}
_ => {}
}
Ok("".into_response())
}
pub async fn cancel_subscription(subscription_id: &str) -> anyhow::Result<Subscription> {
let result = PADDLE_CLIENT
.subscription_cancel(subscription_id)
.send()
.await?;
Ok(result.data)
}
#[api_endpoint]
pub async fn cancel(
State(ctx): State<ApiContext>,
axum::Extension(session): axum::Extension<crate::auth::AuthState>,
axum::Form(form): axum::Form<CancelForm>,
) -> Response {
if form.csrf_token != session.csrf_token {
return Ok((StatusCode::FORBIDDEN, "invalid csrf token").into_response());
}
let db_sub = get_subscription(&ctx, &form.subscription_id, &session.email)
.await
.map_err(|e| {
error!(?e, "failed to fetch subscription from db");
crate::error::GENERIC_SERVER_ERROR
})?;
if db_sub.is_none() {
return Ok((
StatusCode::FORBIDDEN,
"subscription not found or not owned by you",
)
.into_response());
}
match cancel_subscription(&form.subscription_id).await {
Ok(sub) => {
info!("cancelled subscription {} for {}", sub.id, session.email);
Ok(axum::response::Redirect::to("/").into_response())
}
Err(err) => {
fail!(
?err,
"failed to cancel subscription {}",
form.subscription_id
);
}
}
}
#[derive(serde::Deserialize)]
pub struct CancelForm {
pub csrf_token: String,
pub subscription_id: String,
}
#[derive(serde::Deserialize)]
pub struct CancelQuery {
pub id: String,
}
pub async fn cancel_page(
State(ctx): State<ApiContext>,
axum::Extension(session): axum::Extension<crate::auth::AuthState>,
axum::extract::Query(query): axum::extract::Query<CancelQuery>,
) -> Response {
let subscriptions = match fetch_subscriptions_for_email(&ctx, &session.email).await {
Ok(subs) => subs,
Err(e) => {
error!(?e, "failed to fetch subscriptions");
return (
StatusCode::INTERNAL_SERVER_ERROR,
"failed to fetch subscriptions",
)
.into_response();
}
};
let subscription = subscriptions
.into_iter()
.find(|s| s.subscription_id() == query.id);
let Some(subscription) = subscription else {
return (
StatusCode::FORBIDDEN,
"subscription not found or not owned by you",
)
.into_response();
};
axum::response::Html(
crate::web::Cancel {
csrf_token: session.csrf_token,
subscription,
}
.render()
.unwrap(),
)
.into_response()
}


@ -0,0 +1,67 @@
use axum::{
Extension, Json,
extract::State,
http::StatusCode,
response::{IntoResponse, Response},
};
use serde::{Deserialize, Serialize};
use crate::auth::AuthState;
use api::ApiContext;
#[derive(Deserialize)]
pub(crate) struct ValidateTokenRequest {
csrf_token: String,
token: String,
}
#[derive(Serialize)]
struct ValidateTokenResponse {
system_id: i32,
}
#[derive(Serialize)]
struct ValidateTokenError {
error: String,
}
pub(crate) async fn validate_token(
State(ctx): State<ApiContext>,
Extension(session): Extension<AuthState>,
Json(body): Json<ValidateTokenRequest>,
) -> Response {
if body.csrf_token != session.csrf_token {
return (
StatusCode::FORBIDDEN,
Json(ValidateTokenError {
error: "Invalid CSRF token.".to_string(),
}),
)
.into_response();
}
let system_id = match libpk::db::repository::legacy_token_auth(&ctx.db, &body.token).await {
Ok(Some(id)) => id,
Ok(None) => {
return (
StatusCode::BAD_REQUEST,
Json(ValidateTokenError {
error: "Invalid system token.".to_string(),
}),
)
.into_response();
}
Err(err) => {
tracing::error!(?err, "failed to validate system token");
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(ValidateTokenError {
error: "Failed to validate token.".to_string(),
}),
)
.into_response();
}
};
Json(ValidateTokenResponse { system_id }).into_response()
}

crates/premium/src/web.rs Normal file

@ -0,0 +1,53 @@
use askama::Template;
use crate::auth::AuthState;
use crate::paddle::SubscriptionInfo;
macro_rules! render {
($stuff:expr) => {{
let mut response = $stuff.render().unwrap().into_response();
let headers = response.headers_mut();
headers.insert(
"content-type",
axum::http::HeaderValue::from_static("text/html"),
);
response
}};
}
pub(crate) use render;
pub fn message(message: String, session: Option<AuthState>) -> Index {
Index {
base_url: libpk::config.premium().base_url.clone(),
session,
show_login_form: false,
message: Some(message),
subscriptions: vec![],
paddle: None,
}
}
#[derive(Template)]
#[template(path = "index.html")]
pub struct Index {
pub base_url: String,
pub session: Option<AuthState>,
pub show_login_form: bool,
pub message: Option<String>,
pub subscriptions: Vec<SubscriptionInfo>,
pub paddle: Option<PaddleData>,
}
pub struct PaddleData {
pub client_token: String,
pub price_id: String,
pub environment: String,
}
#[derive(Template)]
#[template(path = "cancel.html")]
pub struct Cancel {
pub csrf_token: String,
pub subscription: crate::paddle::SubscriptionInfo,
}


@ -0,0 +1,32 @@
<!DOCTYPE html>
<head>
<title>Cancel Subscription - PluralKit Premium</title>
<link rel="stylesheet" href="/static/stylesheet.css" />
</head>
<body>
<h2>PluralKit Premium</h2>
{% if subscription.is_cancellable() %}
<h3>Cancel Subscription</h3>
<p>Are you sure you want to cancel subscription <strong>{{ subscription.subscription_id() }}</strong>?</p>
<p>Your subscription will remain active until the end of the current billing period.</p>
<form action="/cancel" method="post">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}" />
<input type="hidden" name="subscription_id" value="{{ subscription.subscription_id() }}" />
<button type="submit">Yes, cancel subscription</button>
<a href="/"><button type="button">No, go back</button></a>
</form>
{% else %}
<p>This subscription (<strong>{{ subscription.subscription_id() }}</strong>) has already been canceled and will end on <strong>{{ subscription.next_renewal() }}</strong>.</p>
<a href="/"><button type="button">Go back</button></a>
{% endif %}
<br/><br/>
<span>for assistance please email us at <a href="mailto:billing@pluralkit.me">billing@pluralkit.me</a></span>
<br/>
<br/><a href="/info/">pricing/refunds</a> | <a href="https://pluralkit.me/terms-of-service/">terms of service</a> | <a href="https://pluralkit.me/privacy/">privacy policy</a>
<br/><a href="/">home</a>
</body>


@ -0,0 +1,153 @@
<!DOCTYPE html>
<head>
<title>PluralKit Premium</title>
<link rel="stylesheet" href="/static/stylesheet.css" />
<script src="https://cdn.paddle.com/paddle/v2/paddle.js"></script>
</head>
<body>
<h2>PluralKit Premium</h2>
{% if let Some(session) = session %}
<form action="/logout" method="post">
<input type="hidden" name="csrf_token" value="{{ session.csrf_token }}" />
<p>
logged in as <strong>{{ session.email }}.</strong>
<button type="submit">log out</button>
</p>
</form>
<br/>
{% if subscriptions.is_empty() %}
<p>You are not currently subscribed to PluralKit Premium.</p>
<p>Enter your system token to subscribe. yes this will be fixed before release</p>
<div>
<input type="text" id="system-token" placeholder="token" required />
<button id="buy-button">Subscribe to PluralKit Premium</button>
</div>
<p id="token-error" style="color: red; display: none;"></p>
<p id="system-info" style="color: green; display: none;"></p>
{% else %}
You are currently subscribed to PluralKit Premium. Thanks for the support!
<br/>
{% for sub in &subscriptions %}
<p>
<strong>Subscription ID:</strong> {{ sub.subscription_id() }}<br/>
<strong>Status:</strong> {{ sub.status() }}<br/>
<strong>Next Renewal:</strong> {{ sub.next_renewal() }}<br/>
<strong>Linked System:</strong> {{ sub.system_id_display()|safe }}<br/>
{% if sub.is_cancellable() %}
<a href="/cancel?id={{ sub.subscription_id() }}">Cancel</a><br/>
{% endif %}
        </p>
{% endfor %}
{% endif %}
{% if let Some(paddle) = paddle %}
<script>
Paddle.Environment.set("{{ paddle.environment }}");
Paddle.Initialize({
token: "{{ paddle.client_token }}",
eventCallback: function(event) {
if (event.name === "checkout.completed") {
// webhook request sometimes takes a while, artificially delay here
document.body.innerHTML = "<h2>PluralKit Premium</h2><p>Processing your subscription, please wait...</p>";
setTimeout(function() {
window.location.href = "{{ base_url }}";
}, 3000);
}
}
});
const buyButton = document.getElementById("buy-button");
if (buyButton) {
buyButton.addEventListener("click", async function() {
const tokenInput = document.getElementById("system-token");
const errorEl = document.getElementById("token-error");
const infoEl = document.getElementById("system-info");
if (!tokenInput || !tokenInput.value.trim()) {
errorEl.textContent = "Please enter your system token.";
errorEl.style.display = "block";
infoEl.style.display = "none";
return;
}
// Validate the token
buyButton.disabled = true;
buyButton.textContent = "Validating...";
errorEl.style.display = "none";
infoEl.style.display = "none";
try {
const response = await fetch("/validate-token", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
csrf_token: "{{ session.csrf_token }}",
token: tokenInput.value.trim()
})
});
const data = await response.json();
if (!response.ok) {
errorEl.textContent = data.error || "Invalid token.";
errorEl.style.display = "block";
buyButton.disabled = false;
buyButton.textContent = "Subscribe to PluralKit Premium";
return;
}
// Token is valid, open Paddle checkout
Paddle.Checkout.open({
settings: {
allowLogout: false,
},
items: [
{ priceId: "{{ paddle.price_id }}", quantity: 1 }
],
customer: {
email: "{{ session.email }}"
},
customData: {
email: "{{ session.email }}",
system_id: data.system_id
}
});
buyButton.disabled = false;
buyButton.textContent = "Subscribe to PluralKit Premium";
} catch (err) {
errorEl.textContent = "Failed to validate token. Please try again.";
errorEl.style.display = "block";
buyButton.disabled = false;
buyButton.textContent = "Subscribe to PluralKit Premium";
}
});
}
</script>
{% else %}
error initializing paddle client
{% endif %}
{% endif %}
{% if show_login_form %}
<p>Enter your email address to log in.</p>
<form method="POST" action="/login">
<input type="email" name="email" placeholder="you@example.com" required />
<button type="submit">Send</button>
</form>
{% endif %}
{% if let Some(msg) = message %}
<div>{{ msg }}</div>
{% endif %}
<br/><br/>
<span>for assistance please email us at <a href="mailto:billing@pluralkit.me">billing@pluralkit.me</a></span>
<br/>
<br/><a href="/info/">pricing/refunds</a> | <a href="https://pluralkit.me/terms-of-service/">terms of service</a> | <a href="https://pluralkit.me/privacy/">privacy policy</a>
<br/><a href="/">home</a>
</body>

View file

@ -0,0 +1,19 @@
<!DOCTYPE html>
<head>
<title>Billing information - PluralKit Premium</title>
<link rel="stylesheet" href="/static/stylesheet.css" />
</head>
<body>
<h2>PluralKit Premium</h2>
<h3>Pricing</h3>
<p>PluralKit Premium costs $5/mo plus tax applied as per your region.<br/>For any plans longer than 1 month, the equivalent price is applied - for instance, a 3-month plan is $15/3mo plus tax, or a yearly plan is $60/year plus tax.<br/>There is no discount for pre-paying multiple months.</p>
<h3>Refund policy</h3>
<p>We will process any refund requests at our discretion, or where required by law.</p>
<br/><br/>
<span>for assistance please email us at <a href="mailto:billing@pluralkit.me">billing@pluralkit.me</a></span>
<br/>
<br/><a href="/info/">pricing/refunds</a> | <a href="https://pluralkit.me/terms-of-service/">terms of service</a> | <a href="https://pluralkit.me/privacy/">privacy policy</a>
<br/><a href="/">home</a>
</body>


@ -22,22 +22,10 @@ pub struct AppCtx {
#[libpk::main]
async fn main() -> anyhow::Result<()> {
let mut client_builder = twilight_http::Client::builder().token(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_token
.clone(),
);
let mut client_builder =
twilight_http::Client::builder().token(libpk::config.discord().bot_token.clone());
if let Some(base_url) = libpk::config
.discord
.as_ref()
.expect("missing discord config")
.api_base_url
.clone()
{
if let Some(base_url) = libpk::config.discord().api_base_url.clone() {
client_builder = client_builder.proxy(base_url, true).ratelimiter(None);
}


@ -76,7 +76,10 @@ async fn update_basebackup_ts(repo: String) -> anyhow::Result<()> {
env.insert(
"WALG_S3_PREFIX".to_string(),
format!("s3://pluralkit-backups/{repo}/"),
format!(
"s3://{}/{repo}/",
libpk::config.scheduled_tasks().walg_s3_bucket
),
);
let output = Command::new("wal-g")

docs/.gitignore vendored

@ -1,12 +1,23 @@
pids
logs
node_modules
npm-debug.log
coverage/
run
dist
# Output
.output
.vercel
.netlify
.wrangler
/.svelte-kit
/build
# OS
.DS_Store
.nyc_output
.basement
config.local.js
basement_dist
Thumbs.db
# Env
.env
.env.*
!.env.example
!.env.test
# Vite
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

docs/.npmrc Normal file

@ -0,0 +1 @@
engine-strict=true


@ -1,19 +0,0 @@
# PluralKit docs
The documentation is built using [Vuepress](https://vuepress.vuejs.org/). All website content is located in the `content/` subdirectory.
Most site parameters, including the sidebar layout, are defined in `content/.vuepress/config.js`. Some additional CSS is defined in `content/.vuepress/styles`.
## Building
First, install [Node.js](https://nodejs.org/en/download/) and [Yarn](https://classic.yarnpkg.com/en/). Then, run the `dev` command:
```sh
$ yarn
$ yarn dev
```
This will start a development server on http://localhost:8080/. Note that changes to the sidebar or similar generally need a full restart (Ctrl-C) to take effect, while content-only changes will hot-reload.
For a full HTML build, run `yarn build`. Files will be output in `content/.vuepress/dist` by default.
## Deployment
The docs are deployed using [Netlify](https://www.netlify.com/) with CI.


@ -467,7 +467,7 @@ Features:
Bugfixes:
- fixed importing pronouns and message count
- fixed looking up messages with a discord canary link (and then fixed looking up normal links >.<)
- fixed looking up messages with a discord canary link (and then fixed looking up normal links >.\<)
- fixed a few "internal error" messages and other miscellaneous bugs
(also, `pk;member <name> soulscream` is a semi-secret command for the time being, if you know what this means, have fun :3 🍬)


@ -0,0 +1,89 @@
---
title: Announcing PluralKit Premium
permalink: /posts/2026-01-16-pluralkit-premium/
---
## Announcing PluralKit Premium
As we've teased earlier in [the support server](https://discord.gg/PczBt78),
we will be adding a "premium" subscription tier to PluralKit. We do want to
assure everyone, however, that **the bot will always be free to use, and no
existing features (or new core features) will ever be paywalled.**
Our goal for PK Premium is to have the income from subscriptions cover all the
running costs of the bot - which, up until now, has been paid for partly by
donations to our [Patreon](https://patreon.com/pluralkit) /
[Buy Me A Coffee](https://buymeacoffee.com/pluralkit); and partly out of the
pockets of the developers.
PluralKit Premium will cost **US$5/month** (plus tax), and will be **launching
before the end of February 2026**.
PluralKit Premium will offer both cosmetic perks and "power user" features.
At launch, PK Premium will offer the following:
- the ability to set custom system/member/group IDs,
- lossless, higher resolution image hosting on PluralKit's CDN,
- the ability to upload avatars/banners directly from the PluralKit Dashboard,
- and a badge on your PluralKit system card to show off your support.
We will be adding more perks to PK Premium in future - including, but not
limited to:
- more description customisation options,
- automatic regex-based proxy text substitution ("automated typing quirks"),
- and more!
<div style="text-align:center;">
![PluralKit Premium teaser screenshot](../assets/premiumTeaser.png)
</div>
### FAQs
#### I didn't read any of the stuff above, give me a tl;dr!
- PK Premium is launching before the end of February 2026, at US$5/month (+ tax)
- No existing features, or new core features, will be paywalled - the premium
subscription offers cosmetic perks and power-user features only
- Features at launch include custom IDs; high-resolution image hosting; and
direct image upload from the web dashboard
- More premium features are still to come!
#### Can I pay yearly?
Yes, there will be a yearly subscription option. There is no discount for paying
yearly.
#### Can I gift a PK Premium subscription to someone?
Not at launch, but we will likely be revisiting this in future.
#### How will ID changes work?
There will be bot commands for changing your own system, member, and group IDs.
Each month of PK Premium you pay for will grant you a number of ID change "tokens,"
and each ID you change uses one of those "tokens." The exact number of ID changes
you will get each month has not yet been confirmed, but they will stack over time.
In addition to the "token" system, to ensure fairness, there is a cap on the number
of IDs you can change in a 24-hour period.
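For the curious, here is a rough sketch of how an accrual-plus-daily-cap check *could* look. This is purely illustrative: every name and number below is a placeholder, not a confirmed detail of the final implementation.

```rust
// hypothetical sketch only - grant rates and caps are not the real values
struct IdChangeState {
    tokens_available: u32, // accrued per paid month; stacks over time
    changes_today: u32,    // resets every 24 hours
}

const DAILY_CAP: u32 = 3; // placeholder for the real (unannounced) cap

fn can_change_id(state: &IdChangeState) -> bool {
    state.tokens_available > 0 && state.changes_today < DAILY_CAP
}

fn apply_id_change(state: &mut IdChangeState) {
    // only call this after can_change_id() has returned true
    state.tokens_available -= 1;
    state.changes_today += 1;
}
```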
#### Can I transfer custom IDs to another system?
No, sorry.
#### If I subscribe; customise my IDs; and then cancel, do I keep my custom IDs?
Yes!
#### How does this affect PluralKit being open source?
PluralKit will remain open source, and there is no change to the licensing.
Premium features are included in the open source code.
#### I have another question!
Please ask in [#bot-support in the PluralKit support server](https://discord.gg/PczBt78),
and we'll update this FAQ!


@ -4,6 +4,7 @@ title: Announcements & other posts
# Announcements & other posts
- 2026-01-16: [Announcing PluralKit Premium](/posts/2026-01-16-pluralkit-premium/)
- 2025-09-08: [on the switch to Components V2](/posts/2025-09-08-components-v2/)
- 2025-01-14: [january 2025 funding update](/posts/2025-01-14-funding-update/)
- 2024-12-05: [late 2024 downtime notes & funding update](/posts/2024-12-05-downtime-notes/)


@ -2,21 +2,23 @@
PluralKit requires some channel permissions in order to function properly:
- Message proxying requires the **Manage Messages** and **Manage Webhooks** permissions in a channel.
- Most commands require the **Embed Links**, **Attach Files** and **Add Reactions** permissions to function properly.
- *Everything* PluralKit does aside from the Message Info app command requires **View Channel** permissions in a channel.
- Message proxying requires the **Manage Messages**, **Manage Webhooks**, and **Send Messages** permissions in a channel.
- Most commands require the **Embed Links** and **Add Reactions** permissions to function properly.
- Commands with reaction menus also require **Manage Messages** to remove reactions after clicking.
- Commands executed via reactions (for example the :x:, :bell:, and :question: reactions, as well as any commands with reaction menus) need **Read Message History** to be able to see that reactions were added.
- A couple commands (`pk;s color` and `pk;m <name> color`) currently require **Attach Files**.
- [Proxy logging](/staff/logging) requires the **Send Messages** permission in the log channel.
- [Log cleanup](/staff/compatibility/#log-cleanup) requires the **Manage Messages** permission in the log channels.
Denying the **Send Messages** permission will *not* stop the bot from proxying, although it will prevent it from sending command responses. Denying the **Read Messages** permission will, as any other bot, prevent the bot from interacting in that channel at all.
## Webhook permissions
Webhooks exist outside of the normal Discord permissions system, and (with a few exceptions) it's not possible to modify their permissions.
Webhooks exist outside of the normal Discord permissions system, but as of August 2022 they mostly follow the permissions of the webhook owner (in this case, PluralKit).
However, PluralKit will make an attempt to apply the sender account's permissions to proxied messages. For example, role mentions, `@everyone`, and `@here`
PluralKit will also make an attempt to apply the sender account's permissions to proxied messages. For example, role mentions, `@everyone`, and `@here`
will only function if the sender account has that permission. The same applies to link embeds.
For external emojis to work in proxied messages, the `@everyone` role must have the "Use External Emojis" permission. If it still doesn't work, check if the permission was denied in channel-specific permission settings.
For external emojis to work in proxied messages, PluralKit or one of its roles must have the "Use External Emojis" permission. If it still doesn't work,
check if the permission was denied in channel-specific permission settings. PluralKit must also be in the server the external emoji belongs to.
## Troubleshooting
@ -30,4 +32,4 @@ For example:
pk;debug permissions 466707357099884544
You can find this ID [by enabling Developer Mode and right-clicking (or long-pressing) on the server icon](https://discordia.me/developer-mode).
You can find this ID [by enabling Developer Mode and right-clicking (or long-pressing) on the server icon](https://discordia.me/developer-mode).

View file

@ -453,7 +453,7 @@ You can
#### Pinging the user who sent it
If you'd like to "ping" the account behind a proxied message without having to query the message and ping them yourself,
you can react to the message with the `:bell:` :bell: emoji (or `:bellhop:` :bellhop:, `:exclamation:` :exclamation:, or even `:ping_pong:` :ping_pong:), and PluralKit will ping the relevant member and account in the same channel on your behalf with a link to the message you reacted to.
you can react to the message with the `:bell:` :bell: emoji (or `:bellhop:` :bellhop_bell:, `:exclamation:` :exclamation:, or even `:ping_pong:` :ping_pong:), and PluralKit will ping the relevant member and account in the same channel on your behalf with a link to the message you reacted to.
## Autoproxy
The bot's *autoproxy* feature allows you to have messages be proxied without directly including the proxy tags. Autoproxy can be set up in various ways. There are three autoproxy modes currently implemented:


@ -1,3 +1,3 @@
app = "pluralkit-docs"
primary_region = "arn"
primary_region = "sjc"
http_service.internal_port = 8000


@ -1,20 +1,38 @@
{
"name": "pluralkit-docs",
"private": true,
"description": "Documentation for PluralKit",
"scripts": {
"dev": "vuepress dev content",
"build": "vuepress build content"
},
"license": "AGPL-3.0-or-later",
"devDependencies": {
"@vuepress/plugin-back-to-top": "1.8.2",
"markdown-it-custom-header-link": "^1.0.5",
"vuepress": "1.8.2",
"vuepress-plugin-clean-urls": "1.1.2",
"vuepress-plugin-dehydrate": "1.1.5"
},
"dependencies": {
"vuepress-theme-default-prefers-color-scheme": "2.0.0"
}
"name": "docs",
"private": true,
"version": "0.0.1",
"type": "module",
"scripts": {
"dev": "vite dev",
"build": "vite build",
"preview": "vite preview",
"prepare": "svelte-kit sync || echo ''",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch"
},
"devDependencies": {
"@sveltejs/adapter-node": "^5.4.0",
"@sveltejs/kit": "^2.49.1",
"@sveltejs/vite-plugin-svelte": "^6.2.1",
"@tabler/icons-svelte": "^3.36.0",
"@tailwindcss/postcss": "^4.1.18",
"@tailwindcss/typography": "^0.5.13",
"@tailwindcss/vite": "^4.1.17",
"@types/node": "^25.0.3",
"@types/nprogress": "^0.2.3",
"autoprefixer": "^10.4.23",
"daisyui": "^4.12.24",
"nprogress": "^0.2.0",
"postcss": "^8.5.3",
"sass": "^1.77.8",
"svelte": "^5.45.6",
"svelte-check": "^4.3.4",
"tailwindcss": "^3.4.17",
"typescript": "^5.9.3",
"vite": "^7.2.6"
},
"dependencies": {
"mdsvex": "^0.12.6"
}
}

docs/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large

docs/postcss.config.js Normal file

@ -0,0 +1,6 @@
export default {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

docs/src/app.d.ts vendored Normal file

@ -0,0 +1,13 @@
// See https://svelte.dev/docs/kit/types#app.d.ts
// for information about these interfaces
declare global {
namespace App {
// interface Error {}
// interface Locals {}
// interface PageData {}
// interface PageState {}
// interface Platform {}
}
}
export {};

docs/src/app.html Normal file

@ -0,0 +1,11 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover">
<div style="display: contents">%sveltekit.body%</div>
</body>
</html>


@ -0,0 +1,27 @@
<script lang="ts">
import { env } from "$env/dynamic/public"
// @ts-ignore
const version = __COMMIT_HASH__.slice(1, __COMMIT_HASH__.length - 1)
</script>
<footer class="footer items-center p-4">
<nav class="grid-flow-col gap-4">
<span
>Commit: <a
aria-label="View commit on github"
class="underline"
href={`${
env.PUBLIC_REPOSITORY_URL
? env.PUBLIC_REPOSITORY_URL
: "https://github.com/Draconizations/pk-dashboard-sveltekit"
}/commit/${version}`}>{version}</a
>
</span>
</nav>
<nav class="grid-flow-col gap-4 md:place-self-center md:justify-self-end">
<a class="link-hover" href="/about">About</a>
<a class="link-hover" href="/privacy">Privacy</a>
<a class="link-hover" href="/changelog">Changelog</a>
</nav>
</footer>


@ -0,0 +1,139 @@
<script lang="ts">
import {
IconMenu2,
IconBook,
IconBrandDiscord,
IconShare3,
IconUsers,
IconBoxMultiple,
IconAdjustments,
IconPaint,
IconLogout,
IconAddressBook,
IconHome,
IconSettings,
IconStatusChange,
IconInfoCircle,
IconDashboard,
IconLayoutDashboard,
} from "@tabler/icons-svelte"
let userMenu: HTMLDetailsElement
let navbarMenu: HTMLDetailsElement
</script>
<div class="navbar bg-base-100">
<div class="navbar-start flex-1">
<details class="dropdown" bind:this={navbarMenu}>
<summary class="btn btn-ghost md:hidden">
<IconMenu2 />
</summary>
<ul class="menu menu-sm dropdown-content mt-3 z-[1] p-2 shadow bg-base-100 rounded-box w-52">
<li>
<a href="/" onclick={() => (navbarMenu.open = false)}>
<IconHome /> Homepage
</a>
</li>
<li>
<a
href="https://discord.com/oauth2/authorize?client_id=466378653216014359&scope=bot%20applications.commands&permissions=536995904"
onclick={() => (navbarMenu.open = false)}
>
<IconShare3 /> Invite bot
</a>
</li>
<li>
<a href="https://pluralkit.me/" onclick={() => (navbarMenu.open = false)}
><IconBook /> Documentation</a
>
</li>
<li>
<a href="https://discord.gg/PczBt78" onclick={() => (navbarMenu.open = false)}
><IconBrandDiscord /> Support server</a
>
</li>
</ul>
</details>
<a href="/" class="hidden text-xl btn btn-ghost md:inline-flex">PluralKit</a>
</div>
<div class="hidden navbar-center md:flex">
<ul class="px-1 menu menu-horizontal">
<li><a href="https://dash.pluralkit.me/"><IconLayoutDashboard /> Web dashboard</a></li>
<li><a href="https://discord.gg/PczBt78"><IconBrandDiscord /> Support server</a></li>
<li>
<a
href="https://discord.com/oauth2/authorize?client_id=466378653216014359&scope=bot%20applications.commands&permissions=536995904"
>
<IconShare3 /> Invite bot
</a>
</li>
<li><a href="https://status.pluralkit.me"><IconInfoCircle /> Status</a></li>
</ul>
</div>
<div class="navbar-end w-auto">
<a href="/settings#theme" class="mr-4 tooltip tooltip-bottom" data-tip="Change theme"
><IconPaint /></a
>
{#if false /*dash.user*/}
<details class="dropdown dropdown-left" bind:this={userMenu}>
<summary class="mr-2 list-none">
{#if false /*dash.user.avatar_url*/}
<div class="avatar">
<div class="w-12 rounded-full">
<!-- <img alt="your system avatar" src={dash.user.avatar_url} /> -->
</div>
</div>
{:else}
<div class="avatar">
<div class="w-12 rounded-full">
<img alt="An icon of myriad" src="/myriad_write.png" />
</div>
</div>
{/if}
</summary>
<ul
data-sveltekit-preload-data="tap"
class="menu menu-sm menu-dropdown dropdown-content mt-3 z-[1] p-2 shadow bg-base-100 rounded-box w-36"
>
<!-- <li>
<a href={`/dash/${dash.user?.id}?tab=overview`} onclick={() => (userMenu.open = false)}
><IconAdjustments /> Overview</a
>
</li>
<li>
<a href={`/dash/${dash.user?.id}?tab=system`} onclick={() => (userMenu.open = false)}
><IconAddressBook /> System</a
>
</li>
<li>
<a href={`/dash/${dash.user?.id}?tab=members`} onclick={() => (userMenu.open = false)}
><IconUsers /> Members</a
>
</li>
<li>
<a href={`/dash/${dash.user?.id}?tab=groups`} onclick={() => (userMenu.open = false)}
><IconBoxMultiple /> Groups</a
>
</li> -->
<hr class="my-2" />
<li>
<a href="/settings/general" onclick={() => (userMenu.open = false)}
><IconSettings /> Settings</a
>
</li>
<li>
<form method="post" action="/?/logout">
<IconLogout />
<input
onclick={() => (userMenu.open = false)}
class="text-error w-min"
type="submit"
value="Logout"
/>
</form>
</li>
</ul>
</details>
{/if}
</div>
</div>


@ -0,0 +1,107 @@
<script lang="ts">
import { page } from "$app/stores";
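  // eagerly import every docs markdown file so each page's frontmatter title
  // can be used for the sidebar labels below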
const mdModules = import.meta.glob('/content/**/*.md', { eager: true }) as Record<string, { metadata?: { title?: string; permalink?: string } }>;
const pathToTitle: Record<string, string> = {};
for (const [filePath, mod] of Object.entries(mdModules)) {
const urlPath = filePath
.replace('/content', '')
.replace(/\/index\.md$/, '')
.replace(/\.md$/, '');
if (mod.metadata?.title) {
pathToTitle[urlPath || '/'] = mod.metadata.title;
}
}
function getTitle(path: string): string {
return pathToTitle[path] || path.split('/').pop() || path;
}
const sidebar = [
{
title: "Home",
href: "/",
},
{
title: "Add to your server",
href: "https://discord.com/oauth2/authorize?client_id=466378653216014359&scope=bot%20applications.commands&permissions=536995904",
},
{
title: "Updates",
sidebarDepth: 1,
children: [
"/posts",
"/changelog",
]
},
{
title: "Documentation",
sidebarDepth: 2,
children: [
"/getting-started",
"/user-guide",
"/command-list",
"/privacy-policy",
"/terms-of-service",
"/faq",
"/tips-and-tricks"
]
},
{
title: "For server staff",
children: [
"/staff/permissions",
"/staff/moderation",
"/staff/disabling",
"/staff/logging",
"/staff/compatibility",
]
},
{
title: "API Documentation",
children: [
"/api/changelog",
"/api/reference",
"/api/endpoints",
"/api/models",
"/api/errors",
"/api/dispatch"
]
},
{
title: "Join the support server",
href: "https://discord.gg/PczBt78",
},
];
function isActive(href: string): boolean {
return $page.url.pathname === href;
}
</script>
<aside class="w-80 bg-base-200 p-4 overflow-y-auto shrink-0 min-h-0">
<ul class="menu w-full">
{#each sidebar as item}
{#if item.children}
<li class="menu-title flex flex-row items-center gap-2 mt-4">
{item.title}
</li>
{#each item.children as child}
<li>
<a href={child} class:active={isActive(child)}>
{getTitle(child)}
</a>
</li>
{/each}
{:else}
<li>
<a href={item.href} class:active={isActive(item.href)}>
{item.title}
</a>
</li>
{/if}
{/each}
</ul>
</aside>
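
For context on the sidebar above: the glob keys are absolute paths under /content, and each markdown module is expected to expose its frontmatter as an mdsvex metadata export. A minimal sketch of the path-to-title mapping, using an assumed example file (the path and title below are illustrative, not taken from this diff):

// Illustrative only — mirrors the transform in the component above.
// Assumed glob key: "/content/staff/permissions.md" with frontmatter `title: "Permissions"`.
const filePath = "/content/staff/permissions.md";
const urlPath = filePath
  .replace("/content", "")
  .replace(/\/index\.md$/, "")
  .replace(/\.md$/, ""); // -> "/staff/permissions"
// pathToTitle["/staff/permissions"] then resolves to "Permissions",
// so the sidebar child entry "/staff/permissions" is rendered with that title.
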

212
docs/src/lib/app.scss Normal file

@ -0,0 +1,212 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
html {
height: 100%;
}
@layer components {
hr {
@apply border-muted/50;
}
.btn-menu {
@apply px-4 py-2 h-auto min-h-0 justify-start;
}
.box {
@apply rounded-xl bg-base-200 p-4;
}
.menu :where(li:not(.menu-title) > :not(ul):not(details):not(.menu-title)),
.menu :where(li:not(.menu-title) > details > summary:not(.menu-title)) {
@apply select-text;
}
.tabs-lifted.tabs-box > .tab.tab-active:not(.tab-disabled):not([disabled]) {
@apply bg-base-200;
}
.tabs-lifted.tabs-box .tab.tab-active:not(.tab-disabled):not([disabled])::before,
.tabs-lifted.tabs-box .tab.tab-active:not(.tab-disabled):not([disabled]):first-child::before,
.tabs-lifted.tabs-box .tab.tab-active:not(.tab-disabled):not([disabled]):last-child::before {
background-image: none;
}
/* start of discord markdown styling */
.discord-markdown {
blockquote {
@apply pl-3 border-l-4 border-muted/50;
}
ul {
@apply list-disc pl-4;
}
ol {
@apply list-decimal pl-4;
}
.d-emoji {
@apply h-4 w-auto inline;
}
.d-spoiler {
@apply bg-base-content text-base-content;
border-radius: 4px;
transition-delay: 6000s;
&::selection {
@apply text-base-content;
background-color: transparent;
}
&:active {
@apply bg-base-300;
transition-delay: 0s;
}
}
code {
@apply px-1 text-sm rounded-sm bg-base-200;
}
pre > code {
@apply py-1 px-2 md:px-3 md:py-2 rounded-xl;
}
a {
@apply link-primary;
}
small {
@apply block text-muted;
}
}
/* end of discord markdown styling */
/* button styling! */
.btn {
@apply font-normal;
}
/* daisyUI applies some styling to lists in .menu that we don't want */
/* so we reset them here */
:where(.menu li),
:where(.menu ul) {
position: static;
}
.discord-markdown ul {
position: static;
white-space: normal;
margin-inline-start: 0;
margin-inline-end: 0;
}
.discord-markdown li {
position: static;
display: list-item;
}
.menu .discord-markdown :where(li:not(.menu-title) > :not(ul, details, .menu-title, .btn)),
.menu .discord-markdown :where(li:not(.menu-title) > details > summary:not(.menu-title)) {
display: unset;
padding: unset;
}
/* end of the .menu reset */
}
[data-theme="dark"],
[data-theme="light"],
[data-theme="acid"],
[data-theme="cotton"],
[data-theme="autumn"],
[data-theme="coffee"] {
--sv-min-height: 40px;
--sv-bg: var(--fallback-b1, oklch(var(--b1) / var(--tw-bg-opacity)));
--sv-disabled-bg: var(--fallback-b3, oklch(var(--b3) / var(--tw-bg-opacity)));
--sv-border: 1px solid oklch(var(--muted) / 0.5);
--sv-border-radius: 6px;
--sv-general-padding: 0.25rem;
--sv-control-bg: var(--sv-bg);
--sv-item-wrap-padding: 3px 3px 3px 6px;
--sv-item-selected-bg: var(--fallback-b3, oklch(var(--b3) / var(--tw-bg-opacity)));
--sv-item-btn-color: var(--fallback-bc, oklch(var(--bc) / 1));
--sv-item-btn-color-hover: var(
--fallback-bc,
oklch(var(--bc) / 0.6)
); /* same as icon-color-hover in default theme */
--sv-item-btn-bg: transparent;
--sv-item-btn-bg-hover: transparent;
--sv-icon-color: var(--sv-item-btn-color);
--sv-icon-color-hover: var(--sv-item-btn-color-hover);
--sv-icon-bg: transparent;
--sv-icon-size: 20px;
--sv-separator-bg: transparent;
--sv-btn-border: 0;
--sv-placeholder-color: transparent;
--sv-dropdown-bg: var(--sv-bg);
--sv-dropdown-offset: 1px;
--sv-dropdown-border: 1px solid oklch(var(--muted) / 0.5);
--sv-dropdown-width: auto;
--sv-dropdown-shadow: none;
--sv-dropdown-height: 320px;
--sv-dropdown-active-bg: var(--fallback-b3, oklch(var(--b3) / var(--tw-bg-opacity)));
--sv-dropdown-selected-bg: oklch(var(--p) / 0.2);
--sv-create-kbd-border: none;
--sv-create-kbd-bg: transparent;
--sv-create-disabled-bg: transparent;
--sv-loader-border: none;
--sv-item-wrap-padding: 0.375rem 0.25rem;
}
.join-item.svelecte-control-pk {
--sv-min-height: 2rem;
.sv-control {
border-top-left-radius: 0;
border-bottom-left-radius: 0;
}
}
.group-control {
--sv-dropdown-active-bg: transparent;
--sv-item-wrap-padding: 0.25rem 0;
}
.group-control .option {
width: calc(100% + 0.5rem);
}
.sv-item--wrap {
border-radius: 4px;
padding: 0.25rem;
font-size: 14px;
}
.sv-item--wrap.in-dropdown {
padding: 0;
position: relative;
}
.sv-item--wrap.in-dropdown:not(:last-child)::after {
content: "";
position: absolute;
bottom: 0;
left: 0;
width: 100%;
height: 0;
border-bottom: 1px solid oklch(var(--muted) / 0.5);
}
.sv-dropdown-scroll {
padding: 0 0.75rem !important;
}
.svelecte {
flex: auto !important;
}


@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="107" height="128" viewBox="0 0 107 128"><title>svelte-logo</title><path d="M94.157 22.819c-10.4-14.885-30.94-19.297-45.792-9.835L22.282 29.608A29.92 29.92 0 0 0 8.764 49.65a31.5 31.5 0 0 0 3.108 20.231 30 30 0 0 0-4.477 11.183 31.9 31.9 0 0 0 5.448 24.116c10.402 14.887 30.942 19.297 45.791 9.835l26.083-16.624A29.92 29.92 0 0 0 98.235 78.35a31.53 31.53 0 0 0-3.105-20.232 30 30 0 0 0 4.474-11.182 31.88 31.88 0 0 0-5.447-24.116" style="fill:#ff3e00"/><path d="M45.817 106.582a20.72 20.72 0 0 1-22.237-8.243 19.17 19.17 0 0 1-3.277-14.503 18 18 0 0 1 .624-2.435l.49-1.498 1.337.981a33.6 33.6 0 0 0 10.203 5.098l.97.294-.09.968a5.85 5.85 0 0 0 1.052 3.878 6.24 6.24 0 0 0 6.695 2.485 5.8 5.8 0 0 0 1.603-.704L69.27 76.28a5.43 5.43 0 0 0 2.45-3.631 5.8 5.8 0 0 0-.987-4.371 6.24 6.24 0 0 0-6.698-2.487 5.7 5.7 0 0 0-1.6.704l-9.953 6.345a19 19 0 0 1-5.296 2.326 20.72 20.72 0 0 1-22.237-8.243 19.17 19.17 0 0 1-3.277-14.502 17.99 17.99 0 0 1 8.13-12.052l26.081-16.623a19 19 0 0 1 5.3-2.329 20.72 20.72 0 0 1 22.237 8.243 19.17 19.17 0 0 1 3.277 14.503 18 18 0 0 1-.624 2.435l-.49 1.498-1.337-.98a33.6 33.6 0 0 0-10.203-5.1l-.97-.294.09-.968a5.86 5.86 0 0 0-1.052-3.878 6.24 6.24 0 0 0-6.696-2.485 5.8 5.8 0 0 0-1.602.704L37.73 51.72a5.42 5.42 0 0 0-2.449 3.63 5.79 5.79 0 0 0 .986 4.372 6.24 6.24 0 0 0 6.698 2.486 5.8 5.8 0 0 0 1.602-.704l9.952-6.342a19 19 0 0 1 5.295-2.328 20.72 20.72 0 0 1 22.237 8.242 19.17 19.17 0 0 1 3.277 14.503 18 18 0 0 1-8.13 12.053l-26.081 16.622a19 19 0 0 1-5.3 2.328" style="fill:#fff"/></svg>


1
docs/src/lib/index.ts Normal file

@ -0,0 +1 @@
// place files you want to import through the `$lib` alias in this folder.


@ -0,0 +1,77 @@
#themed-container {
--nprogress-color: var(--fallback-p, oklch(var(--p) / 1));
}
/* Make clicks pass-through */
#nprogress {
pointer-events: none;
.bar {
background: var(--nprogress-color);
position: fixed;
z-index: 1031;
top: 0;
left: 0;
width: 100%;
height: 4px;
}
}
/* Fancy blur effect */
/* #nprogress .peg {
display: block;
position: absolute;
right: 0px;
width: 100px;
height: 100%;
box-shadow: 0 0 10px var(--nprogress-color), 0 0 5px var(--nprogress-color);
opacity: 1.0;
-webkit-transform: rotate(3deg) translate(0px, -4px);
-ms-transform: rotate(3deg) translate(0px, -4px);
transform: rotate(3deg) translate(0px, -4px);
} */
/* Remove these to get rid of the spinner */
/* #nprogress .spinner {
display: block;
position: fixed;
z-index: 1031;
top: 15px;
right: 15px;
}
#nprogress .spinner-icon {
width: 18px;
height: 18px;
box-sizing: border-box;
border: solid 2px transparent;
border-top-color: var(--nprogress-color);
border-left-color: var(--nprogress-color);
border-radius: 50%;
-webkit-animation: nprogress-spinner 400ms linear infinite;
animation: nprogress-spinner 400ms linear infinite;
}
.nprogress-custom-parent {
overflow: hidden;
position: relative;
}
.nprogress-custom-parent #nprogress .spinner,
.nprogress-custom-parent #nprogress .bar {
position: absolute;
}
@-webkit-keyframes nprogress-spinner {
0% { -webkit-transform: rotate(0deg); }
100% { -webkit-transform: rotate(360deg); }
}
@keyframes nprogress-spinner {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
} */


@ -0,0 +1,64 @@
<script lang="ts">
import { browser } from "$app/environment"
import NavBar from "$components/NavBar.svelte"
import Sidebar from "$components/Sidebar.svelte"
import "$lib/app.scss"
import "$lib/nprogress.scss"
import type { LayoutData } from "./$types"
import Footer from "$components/Footer.svelte"
import { page, navigating } from "$app/stores"
import nprogress from "nprogress"
// import apiClient from "$api"
export let data: LayoutData
// if (browser) {
// window.api = apiClient(fetch, data.apiBaseUrl)
// }
if (data.token && browser) {
localStorage.setItem("pk-token", data.token)
} else if (browser) {
localStorage.removeItem("pk-token")
}
nprogress.configure({
parent: "#themed-container",
})
$: {
if ($navigating) nprogress.start()
else nprogress.done()
}
// dash.initUser(data.system)
</script>
<div
id="themed-container"
class="max-w-screen h-screen bg-base-100 flex flex-col"
data-theme="coffee"
>
<NavBar />
<div class="flex flex-row flex-1 min-h-0">
<Sidebar />
<main class="flex-1 overflow-y-auto min-h-0">
<slot />
</main>
</div>
<Footer />
</div>
<svelte:head>
<title>PluralKit | {$page.data?.meta?.title ?? "Home"}</title>
<meta
property="og:title"
content={`PluralKit | ${$page.data?.meta?.ogTitle ?? "Web Dashboard"}`}
/>
<meta property="theme-color" content={`#${$page.data?.meta?.color ?? "da9317"}`} />
<meta
property="og:description"
content={$page.data?.meta?.ogDescription ?? "PluralKit's official dashboard."}
/>
</svelte:head>
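
The layout persists data.token to localStorage, but the server load that supplies it is not part of this diff. One way it could plausibly be produced — assuming a cookie named pk-token; the file name, cookie name, and return shape below are assumptions, not code from this PR:

// docs/src/routes/+layout.server.ts (hypothetical sketch)
import type { LayoutServerLoad } from "./$types";

export const load: LayoutServerLoad = ({ cookies }) => {
  // "pk-token" is an assumed cookie name; the real source of `data.token` may differ.
  return { token: cookies.get("pk-token") ?? null };
};
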


@ -0,0 +1,7 @@
<script>
let { data } = $props();
</script>
<div class="max-w-full bg-base-200 prose">
<div class="m-5" style="max-width: 900px"><data.PageContent /></div>
</div>


@ -0,0 +1,16 @@
import { error } from '@sveltejs/kit';
const pages = import.meta.glob('/content/**/*.md', { eager: true }) as Record<string, { default: unknown }>;
export async function load({ params }) {
const slug = params.slug || 'index';
const page = pages[`/content/${slug}.md`] || pages[`/content/${slug}/index.md`];
if (!page) {
throw error(404, `Page not found: ${slug}`);
}
return {
PageContent: page.default
};
}
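
As a usage note for the loader above: a route like /staff/permissions is looked up first as /content/staff/permissions.md and then as /content/staff/permissions/index.md, and the bare / route falls back to the index slug. A small illustrative sketch of that resolution order (the example slug is assumed):

// Illustrative only — mirrors the lookup in the load function above.
const slug = "staff/permissions"; // assumed example value of params.slug
const candidates = [`/content/${slug}.md`, `/content/${slug}/index.md`];
// The first key present in `pages` is used; if neither exists, load() throws a 404.
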


@ -0,0 +1 @@
@import 'tailwindcss';

[Image previews omitted: nine image assets shown with identical before/after sizes — 88 KiB, 32 KiB, 25 KiB, 38 KiB, 158 KiB, 70 KiB, 48 KiB, 22 KiB, 664 KiB.]

BIN
docs/static/assets/premiumTeaser.png vendored Normal file

Binary file not shown. (new file; 130 KiB)

[Image previews omitted: three image assets shown with identical before/after sizes — 12 KiB, 34 KiB, 47 KiB.]

26
docs/svelte.config.js Normal file

@ -0,0 +1,26 @@
import adapter from '@sveltejs/adapter-node';
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
import { mdsvex } from 'mdsvex';
/** @type {import('@sveltejs/kit').Config} */
const config = {
// Consult https://svelte.dev/docs/kit/integrations
// for more information about preprocessors
preprocess: [
mdsvex({
extensions: [".md"]
}),
vitePreprocess(),
],
extensions: [".svelte", ".md"],
kit: {
adapter: adapter(),
alias: {
$components: "src/components",
$lib: "src/lib",
}
}
};
export default config;
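
Because mdsvex compiles each .md file into a Svelte component module and re-exports its YAML frontmatter as a named metadata export, the import.meta.glob calls elsewhere in this diff can read both the page component and its title. A minimal ambient declaration sketch for those modules — the file name and the exact frontmatter fields are assumptions:

// docs/src/md.d.ts (hypothetical sketch)
declare module "*.md" {
  import type { Component } from "svelte";
  // mdsvex exposes YAML frontmatter as a named `metadata` export.
  export const metadata: { title?: string; permalink?: string } | undefined;
  const page: Component;
  export default page;
}
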

113
docs/tailwind.config.js Normal file

@ -0,0 +1,113 @@
import daisyui from "daisyui"
import typography from "@tailwindcss/typography"
import { light, dark, night, autumn, coffee, halloween, pastel } from "daisyui/src/theming/themes"
/** @type {import('tailwindcss').Config} */
export default {
content: ["./src/**/*.{html,js,svelte,ts}"],
plugins: [typography, daisyui],
theme: {
extend: {
colors: {
muted: "oklch(var(--muted) / <alpha-value>)",
},
},
},
daisyui: {
themes: [
{
// cool light
light: {
...light,
primary: "#da9317",
secondary: "#9e66ff",
accent: "#22ded8",
// DARK BUTTONS
/*
"--muted": "69.38% 0.01 252.85",
"base-200": "#ededed",
"base-300": "#e1e3e3",
"primary-content": "#090101",
"neutral-content": "#f9fcff",
"base-content": "10161e",
"secondary-content": "fafafa"
*/
// LIGHT BUTTONS
"--muted": "59.37% 0.01 252.85",
"base-200": "#ededed",
"base-300": "#e5e7e7",
"primary-content": "#090101",
neutral: "#f8f8f8",
"neutral-content": "#040507",
"base-content": "10161e",
"secondary-content": "#090101",
},
// cool dark
dark: {
...dark,
primary: "#da9317",
secondary: "#ae81fc",
accent: "#6df1fc",
"--muted": "59.37% 0.0117 254.07",
"base-100": "#22262b",
"base-200": "#191c1f",
"base-300": "#17191b",
"base-content": "#ced3dc",
neutral: "#33383e",
"neutral-content": "#e1e2e3",
},
// bright dark
acid: {
...night,
primary: "#49c701",
secondary: "#00c6cf",
accent: "#f29838",
"base-100": "#1a2433",
"base-200": "#101a27",
"base-300": "#111724",
neutral: "#242e41",
"--muted": "60.8% 0.05 272",
},
// bright light (trans rights!)
cotton: {
...pastel,
primary: "#ff69a8",
secondary: "#63a7f9",
accent: "#f8b939",
neutral: "#f8f8f8",
"base-200": "#eeecf1",
"base-300": "#e2e1e7",
"--muted": "59% 0.01 252.85",
"--rounded-btn": "0.5rem",
},
// warm light
autumn: {
...autumn,
primary: "#e38010",
success: "#2c7866",
"success-content": "#eeeeee",
error: "#97071a",
"error-content": "#eeeeee",
neutral: "#ebebeb",
"neutral-content": "#141414",
"base-100": "#fcfcfc",
"--muted": "67.94% 0.01 39.18",
},
// warm dark
coffee: {
...halloween,
secondary: "#bc4b2b",
accent: coffee.accent,
primary: coffee.primary,
info: "#3499c0",
neutral: "#120f12",
"neutral-content": "#dfe0de",
"base-200": "#1a1a1a",
"base-300": "#181818",
"base-content": "#d9dbd8",
"--muted": "57.65% 0 54",
},
},
],
},
}

20
docs/tsconfig.json Normal file

@ -0,0 +1,20 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"rewriteRelativeImportExtensions": true,
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"moduleResolution": "bundler"
}
// Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias
// except $lib which is handled by https://svelte.dev/docs/kit/configuration#files
//
// To make changes to top-level options such as include and exclude, we recommend extending
// the generated config; see https://svelte.dev/docs/kit/configuration#typescript
}

18
docs/vite.config.ts Normal file

@ -0,0 +1,18 @@
import { sveltekit } from '@sveltejs/kit/vite';
import { mdsvex } from 'mdsvex';
import { defineConfig } from 'vite';
import { execSync } from "node:child_process"
const hash = execSync("git rev-parse --short HEAD").toString().trim()
export default defineConfig({
plugins: [sveltekit(), mdsvex({ extensions: [".md"] })],
server: {
fs: {
allow: ["."]
}
},
define: {
__COMMIT_HASH__: JSON.stringify("_" + hash),
},
})
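
Since __COMMIT_HASH__ is injected at build time through Vite's define option, TypeScript also needs an ambient declaration for it to type-check; a minimal sketch, assuming it is added to the project's app.d.ts (the location and the usage example are assumptions):

// docs/src/app.d.ts (hypothetical addition)
declare const __COMMIT_HASH__: string;

// Example usage, e.g. in a footer component:
// console.log(`docs build ${__COMMIT_HASH__}`);
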

File diff suppressed because it is too large