Compare commits

...

8 commits

Author SHA1 Message Date
alyssa
2e3172df35 add /api/v2/bulk endpoint
Some checks failed
Build and push Docker image / .net docker build (push) Has been cancelled
.net checks / run .net tests (push) Has been cancelled
.net checks / dotnet-format (push) Has been cancelled
also, initial support for patch models in rust!
2025-11-09 09:16:28 +00:00
alyssa
85b2b77730 chore(docs): update catalogger website link
Some checks failed
Build and push Docker image / .net docker build (push) Has been cancelled
.net checks / run .net tests (push) Has been cancelled
.net checks / dotnet-format (push) Has been cancelled
Build and push Rust service Docker images / rust docker build (push) Has been cancelled
rust checks / cargo fmt (push) Has been cancelled
2025-11-09 09:11:21 +00:00
alyssa
6a7ab2b853 feat: disable autoproxy with 3 backslashes 2025-11-09 09:09:59 +00:00
alyssa
83dd880374 chore: move app-commands script to rust 2025-11-09 09:09:50 +00:00
alyssa
c0a5bc81a0 feat(api): improve logging 2025-11-09 09:09:47 +00:00
asleepyskye
0983179240 fix(bot): add allowed mentions to msg info replies
Some checks failed
Build and push Docker image / .net docker build (push) Has been cancelled
.net checks / run .net tests (push) Has been cancelled
.net checks / dotnet-format (push) Has been cancelled
2025-10-24 21:26:33 -04:00
asleepyskye
49ce00e675 fix(bot): check for null avatar in msg info 2025-10-24 19:55:53 -04:00
asleepyskye
83f2d33c3d feat(bot): port message info embeds to cv2
Some checks are pending
Build and push Docker image / .net docker build (push) Waiting to run
.net checks / run .net tests (push) Waiting to run
.net checks / dotnet-format (push) Waiting to run
2025-10-24 10:23:38 -04:00
34 changed files with 1148 additions and 222 deletions

37
Cargo.lock generated
View file

@ -92,6 +92,8 @@ dependencies = [
"pluralkit_models",
"reqwest 0.12.15",
"reverse-proxy-service",
"sea-query",
"sea-query-sqlx",
"serde",
"serde_json",
"serde_urlencoded",
@ -104,6 +106,20 @@ dependencies = [
"twilight-http",
]
[[package]]
name = "app-commands"
version = "0.1.0"
dependencies = [
"anyhow",
"futures",
"libpk",
"tokio",
"tracing",
"twilight-http",
"twilight-model",
"twilight-util",
]
[[package]]
name = "arc-swap"
version = "1.7.1"
@ -3358,19 +3374,20 @@ dependencies = [
[[package]]
name = "sea-query"
version = "0.32.3"
version = "1.0.0-rc.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5a24d8b9fcd2674a6c878a3d871f4f1380c6c43cc3718728ac96864d888458e"
checksum = "ab621a8d8b03a3e513ea075f71aa26830a55c977d7b40f09e825bb91910db823"
dependencies = [
"chrono",
"inherent",
"sea-query-derive",
]
[[package]]
name = "sea-query-derive"
version = "0.4.3"
version = "1.0.0-rc.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bae0cbad6ab996955664982739354128c58d16e126114fe88c2a493642502aab"
checksum = "217e9422de35f26c16c5f671fce3c075a65e10322068dbc66078428634af6195"
dependencies = [
"darling",
"heck 0.4.1",
@ -3380,6 +3397,17 @@ dependencies = [
"thiserror 2.0.12",
]
[[package]]
name = "sea-query-sqlx"
version = "0.8.0-rc.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed5eb19495858d8ae3663387a4f5298516c6f0171a7ca5681055450f190236b8"
dependencies = [
"chrono",
"sea-query",
"sqlx",
]
[[package]]
name = "security-framework"
version = "3.2.0"
@ -4560,6 +4588,7 @@ version = "0.16.0"
source = "git+https://github.com/pluralkit/twilight?branch=pluralkit-7f08d95#054a2aa5d29fb46220af1cd5df568b73511cdb26"
dependencies = [
"twilight-model",
"twilight-validate",
]
[[package]]

View file

@ -14,6 +14,7 @@ futures = "0.3.30"
lazy_static = "1.4.0"
metrics = "0.23.0"
reqwest = { version = "0.12.7" , default-features = false, features = ["rustls-tls", "trust-dns"]}
sea-query = { version = "1.0.0-rc.10", features = ["with-chrono"] }
sentry = { version = "0.36.0", default-features = false, features = ["backtrace", "contexts", "panic", "debug-images", "reqwest", "rustls"] } # replace native-tls with rustls
serde = { version = "1.0.196", features = ["derive"] }
serde_json = "1.0.117"
@ -27,7 +28,7 @@ axum = { git = "https://github.com/pluralkit/axum", branch = "v0.8.4-pluralkit"
twilight-gateway = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95" }
twilight-cache-inmemory = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95", features = ["permission-calculator"] }
twilight-util = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95", features = ["permission-calculator"] }
twilight-util = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95", features = ["permission-calculator", "builder"] }
twilight-model = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95" }
twilight-http = { git = "https://github.com/pluralkit/twilight", branch = "pluralkit-7f08d95", default-features = false, features = ["rustls-aws_lc_rs", "rustls-native-roots"] }

View file

@ -11,6 +11,7 @@ public record MessageComponent
public string? Url { get; init; }
public bool? Disabled { get; init; }
public uint? AccentColor { get; init; }
public int? Spacing { get; init; }
public ComponentMedia? Media { get; init; }
public ComponentMediaItem[]? Items { get; init; }

View file

@ -43,11 +43,10 @@ public class ApplicationCommandProxiedMessage
if (channel == null)
showContent = false;
var embeds = new List<Embed>();
var components = new List<MessageComponent>();
var guild = await _cache.GetGuild(ctx.GuildId);
if (msg.Member != null)
embeds.Add(await _embeds.CreateMemberEmbed(
components.AddRange(await _embeds.CreateMemberMessageComponents(
msg.System,
msg.Member,
guild,
@ -55,10 +54,12 @@ public class ApplicationCommandProxiedMessage
LookupContext.ByNonOwner,
DateTimeZone.Utc
));
embeds.Add(await _embeds.CreateMessageInfoEmbed(msg, showContent, ctx.Config));
await ctx.Reply(embeds: embeds.ToArray());
components.Add(new MessageComponent()
{
Type = ComponentType.Separator
});
components.AddRange(await _embeds.CreateMessageInfoMessageComponents(msg, showContent, ctx.Config));
await ctx.Reply(components: components.ToArray());
}
private async Task QueryCommandMessage(InteractionContext ctx)
@ -68,11 +69,7 @@ public class ApplicationCommandProxiedMessage
if (msg == null)
throw Errors.MessageNotFound(messageId);
var embeds = new List<Embed>();
embeds.Add(await _embeds.CreateCommandMessageInfoEmbed(msg, true));
await ctx.Reply(embeds: embeds.ToArray());
await ctx.Reply(components: await _embeds.CreateCommandMessageInfoMessageComponents(msg, true));
}
public async Task DeleteMessage(InteractionContext ctx)

View file

@ -426,21 +426,33 @@ public class ProxiedMessage
if (ctx.Match("author") || ctx.MatchFlag("author"))
{
var user = await _rest.GetUser(message.Message.Sender);
var eb = new EmbedBuilder()
.Author(new Embed.EmbedAuthor(
user != null
? $"{user.Username}#{user.Discriminator}"
: $"Deleted user ${message.Message.Sender}",
IconUrl: user != null ? user.AvatarUrl() : null))
.Description(message.Message.Sender.ToString());
if (ctx.MatchFlag("show-embed", "se"))
{
var eb = new EmbedBuilder()
.Author(new Embed.EmbedAuthor(
user != null
? $"{user.Username}#{user.Discriminator}"
: $"Deleted user ${message.Message.Sender}",
IconUrl: user != null ? user.AvatarUrl() : null))
.Description(message.Message.Sender.ToString());
await ctx.Reply(
user != null ? $"{user.Mention()} ({user.Id})" : $"*(deleted user {message.Message.Sender})*",
eb.Build());
await ctx.Reply(
user != null ? $"{user.Mention()} ({user.Id})" : $"*(deleted user {message.Message.Sender})*",
eb.Build());
return;
}
await ctx.Reply(components: await _embeds.CreateAuthorMessageComponents(user, message));
return;
}
await ctx.Reply(embed: await _embeds.CreateMessageInfoEmbed(message, showContent, ctx.Config));
if (ctx.MatchFlag("show-embed", "se"))
{
await ctx.Reply(embed: await _embeds.CreateMessageInfoEmbed(message, showContent, ctx.Config));
return;
}
await ctx.Reply(components: await _embeds.CreateMessageInfoMessageComponents(message, showContent, ctx.Config));
}
private async Task GetCommandMessage(Context ctx, ulong messageId, bool isDelete)
@ -472,6 +484,11 @@ public class ProxiedMessage
else if (!await ctx.CheckPermissionsInGuildChannel(channel, PermissionSet.ViewChannel))
showContent = false;
await ctx.Reply(embed: await _embeds.CreateCommandMessageInfoEmbed(msg, showContent));
if (ctx.MatchFlag("show-embed", "se"))
{
await ctx.Reply(embed: await _embeds.CreateCommandMessageInfoEmbed(msg, showContent));
return;
}
await ctx.Reply(components: await _embeds.CreateCommandMessageInfoMessageComponents(msg, showContent));
}
}

View file

@ -186,10 +186,9 @@ public class ReactionAdded: IEventHandler<MessageReactionAddEvent>
{
var dm = await _dmCache.GetOrCreateDmChannel(evt.UserId);
var embeds = new List<Embed>();
var components = new List<MessageComponent>();
if (msg.Member != null)
embeds.Add(await _embeds.CreateMemberEmbed(
components.AddRange(await _embeds.CreateMemberMessageComponents(
msg.System,
msg.Member,
guild,
@ -197,10 +196,12 @@ public class ReactionAdded: IEventHandler<MessageReactionAddEvent>
LookupContext.ByNonOwner,
DateTimeZone.Utc
));
embeds.Add(await _embeds.CreateMessageInfoEmbed(msg, true, config));
await _rest.CreateMessage(dm, new MessageRequest { Embeds = embeds.ToArray() });
components.Add(new MessageComponent()
{
Type = ComponentType.Separator
});
components.AddRange(await _embeds.CreateMessageInfoMessageComponents(msg, true, config));
await _rest.CreateMessage(dm, new MessageRequest { Components = components.ToArray(), Flags = Message.MessageFlags.IsComponentsV2, AllowedMentions = new AllowedMentions() });
}
catch (ForbiddenException) { } // No permissions to DM, can't check for this :(

View file

@ -66,6 +66,15 @@ public class ProxyService
var autoproxySettings = await _repo.GetAutoproxySettings(ctx.SystemId.Value, guild.Id, null);
if (IsDisableAutoproxy(message))
{
await _repo.UpdateAutoproxy(ctx.SystemId.Value, guild.Id, null, new()
{
AutoproxyMode = AutoproxyMode.Off
});
return false;
}
if (autoproxySettings.AutoproxyMode == AutoproxyMode.Latch && IsUnlatch(message))
{
// "unlatch"
@ -495,6 +504,9 @@ public class ProxyService
// True when the message starts with two backslashes — either the literal `\\`
// or two backslashes separated by a zero-width space (U+200B); used to break
// out of latch-mode autoproxy.
public static bool IsUnlatch(Message message)
=> message.Content.StartsWith(@"\\") || message.Content.StartsWith("\\\u200b\\");
// True when the message starts with three backslashes (optionally interleaved
// with zero-width spaces); used to turn autoproxy off entirely.
public static bool IsDisableAutoproxy(Message message)
=> message.Content.StartsWith(@"\\\") || message.Content.StartsWith("\\\u200b\\\u200b\\");
private async Task HandleProxyExecutedActions(MessageContext ctx, AutoproxySettings autoproxySettings,
Message triggerMessage, Message proxyMessage, ProxyMatch match,
bool deletePrevious = true)

View file

@ -766,6 +766,158 @@ public class EmbedService
.Build();
}
/// <summary>
/// Builds the Components-v2 reply for a message-info lookup on a proxied message:
/// a container holding system/member/sender details (with a member-avatar
/// thumbnail when available) plus, when <paramref name="showContent"/> allows,
/// the message content and its attachments — followed by a footer text line
/// carrying the original message id and a timestamp.
/// </summary>
/// <param name="msg">The proxied message with its resolved system/member rows.</param>
/// <param name="showContent">Whether content, nickname, roles and attachments may be shown.</param>
/// <param name="ccfg">Config of the querying system, used for hid display; may be null.</param>
public async Task<MessageComponent[]> CreateMessageInfoMessageComponents(FullMessage msg, bool showContent, SystemConfig? ccfg = null)
{
var channel = await _cache.GetOrFetchChannel(_rest, msg.Message.Guild ?? 0, msg.Message.Channel);
// Everything below is rendered for a third party, hence ByNonOwner.
var ctx = LookupContext.ByNonOwner;
var serverMsg = await _rest.GetMessageOrNull(msg.Message.Channel, msg.Message.Mid);
// Need this whole dance to handle cases where:
// - the user is deleted (userInfo == null)
// - the bot's no longer in the server we're querying (channel == null)
// - the member is no longer in the server we're querying (memberInfo == null)
// TODO: optimize ordering here a bit with new cache impl; and figure what happens if bot leaves server -> channel still cached -> hits this bit and 401s?
GuildMemberPartial memberInfo = null;
User userInfo = null;
if (channel != null)
{
GuildMember member = null;
try
{
member = await _rest.GetGuildMember(channel.GuildId!.Value, msg.Message.Sender);
}
catch (ForbiddenException)
{
// no permission, couldn't fetch, oh well
}
if (member != null)
// Don't do an extra request if we already have this info from the member lookup
userInfo = member.User;
// intentionally unconditional: member is only null here when the fetch failed
memberInfo = member;
}
if (userInfo == null)
userInfo = await _cache.GetOrFetchUser(_rest, msg.Message.Sender);
// Calculate string displayed under "Sent by"
string userStr;
if (showContent && memberInfo != null && memberInfo.Nick != null)
// NOTE(review): the bold marker sits before the newline ("**\n Username:**") —
// confirm this is the intended Discord markdown and not a misplaced "**"
userStr = $"**\n Username:** {userInfo.NameAndMention()}\n** Nickname:** {memberInfo.Nick}";
else if (userInfo != null) userStr = userInfo.NameAndMention();
else userStr = $"*(deleted user {msg.Message.Sender})*";
var content = serverMsg?.Content?.NormalizeLineEndSpacing();
if (content == null || !showContent)
content = "*(message contents deleted or inaccessible)*";
var systemStr = msg.System == null
? "*(deleted or unknown system)*"
: msg.System.NameFor(ctx) != null ? $"{msg.System.NameFor(ctx)} (`{msg.System.DisplayHid(ccfg)}`)" : $"`{msg.System.DisplayHid(ccfg)}`";
var memberStr = msg.Member == null
? "*(deleted member)*"
: $"{msg.Member.NameFor(ctx)} (`{msg.Member.DisplayHid(ccfg)}`)";
// Account roles are only listed when content may be shown.
var roles = memberInfo?.Roles?.ToList();
var rolesContent = "";
if (roles != null && roles.Count > 0 && showContent)
{
var guild = await _cache.GetGuild(channel.GuildId!.Value);
var rolesString = string.Join(", ", (roles
.Select(id =>
{
var role = Array.Find(guild.Roles, r => r.Id == id);
if (role != null)
return role;
// role may have been deleted since the member's role list was cached
return new Role { Name = "*(unknown role)*", Position = 0 };
}))
.OrderByDescending(role => role.Position)
.Select(role => role.Name));
rolesContent = $"**Account Roles ({roles.Count})**\n{rolesString}";
}
MessageComponent authorData = new MessageComponent()
{
Type = ComponentType.Text,
Content = $"**System:** {systemStr}\n**Member:** {memberStr}\n**Sent by:** {userStr}\n\n{rolesContent}"
};
var avatarURL = msg.Member?.AvatarFor(ctx).TryGetCleanCdnUrl();
// Wrap the author text in a Section with a Thumbnail accessory only when the
// member actually has an avatar URL; otherwise use the bare text component.
MessageComponent header = (avatarURL == "" || avatarURL == null) ? authorData : new MessageComponent()
{
Type = ComponentType.Section,
Components = [authorData],
Accessory = new MessageComponent()
{
Type = ComponentType.Thumbnail,
Media = new ComponentMedia()
{
Url = avatarURL
}
}
};
List<MessageComponent> body = [
new MessageComponent()
{
Type = ComponentType.Separator,
// Spacing = 2 — presumably the larger separator padding; confirm against Discord's component docs
Spacing = 2
}
];
if (content != "")
{
body.Add(new MessageComponent()
{
Type = ComponentType.Text,
Content = content
});
}
if (showContent)
{
if (serverMsg != null)
{
// Re-attach the original message's attachments as a media gallery.
var media = new List<ComponentMediaItem>();
foreach (Message.Attachment attachment in serverMsg?.Attachments)
{
var url = attachment.Url;
if (url != null && url != "")
media.Add(new ComponentMediaItem()
{
Media = new ComponentMedia()
{
Url = url
}
});
}
if (media.Count > 0)
body.Add(new MessageComponent()
{
Type = ComponentType.MediaGallery,
Items = media.ToArray()
});
}
}
MessageComponent footer = new MessageComponent()
{
Type = ComponentType.Text,
Content = $"-# Original Message ID: {msg.Message.OriginalMid} · <t:{DiscordUtils.SnowflakeToTimestamp(msg.Message.Mid)}:f>"
};
return [
new MessageComponent()
{
Type = ComponentType.Container,
Components = [
header,
..body
]
},
footer
];
}
public async Task<Embed> CreateMessageInfoEmbed(FullMessage msg, bool showContent, SystemConfig? ccfg = null)
{
var channel = await _cache.GetOrFetchChannel(_rest, msg.Message.Guild ?? 0, msg.Message.Channel);
@ -852,6 +1004,106 @@ public class EmbedService
return eb.Build();
}
/// <summary>
/// Builds the Components-v2 reply for an "author" message-info lookup: a plain
/// mention line followed by a container showing the sender's tag and account id,
/// with an avatar thumbnail when one is available.
/// </summary>
/// <param name="user">The Discord account that sent the message, or null if deleted.</param>
/// <param name="msg">The proxied message being queried.</param>
public async Task<MessageComponent[]> CreateAuthorMessageComponents(User? user, FullMessage msg)
{
    MessageComponent authorInfo;

    // fix: was $"Deleted user ${…}" — the stray "$" was rendered literally
    // (the fallback text further down already used plain "{msg.Message.Sender}")
    var author = user != null
        ? $"{user.Username}#{user.Discriminator}"
        : $"Deleted user {msg.Message.Sender}";
    var avatarUrl = user?.AvatarUrl();
    var authorString = $"{author}\n**ID: **`{msg.Message.Sender}`";

    // Attach a thumbnail accessory only when we actually have an avatar URL;
    // IsNullOrEmpty also guards against a null URL, which `!= ""` let through.
    if (user != null && !string.IsNullOrEmpty(avatarUrl))
    {
        authorInfo = new MessageComponent()
        {
            Type = ComponentType.Section,
            Components = [
                new MessageComponent()
                {
                    Type = ComponentType.Text,
                    Content = authorString
                }
            ],
            Accessory = new MessageComponent()
            {
                Type = ComponentType.Thumbnail,
                Media = new ComponentMedia()
                {
                    Url = avatarUrl
                }
            }
        };
    }
    else
    {
        authorInfo = new MessageComponent()
        {
            Type = ComponentType.Text,
            Content = authorString
        };
    }

    MessageComponent container = new MessageComponent()
    {
        Type = ComponentType.Container,
        Components = [
            authorInfo,
        ]
    };

    return [
        new MessageComponent()
        {
            Type = ComponentType.Text,
            Content = user != null ? $"{user.Mention()} ({user.Id})" : $"*(deleted user {msg.Message.Sender})*"
        },
        container
    ];
}
/// <summary>
/// Builds the Components-v2 reply for a message-info lookup on one of the bot's
/// own command-response messages: a container with a header, a separator and the
/// (possibly redacted) message content, followed by a small footer line.
/// </summary>
/// <param name="msg">The stored command-message row being queried.</param>
/// <param name="showContent">Whether the original message content may be shown.</param>
public async Task<MessageComponent[]> CreateCommandMessageInfoMessageComponents(Core.CommandMessage msg, bool showContent)
{
    // Fetch the content only when the caller may see it; otherwise (or when the
    // message is gone) fall back to a placeholder.
    var content = "*(command message deleted or inaccessible)*";
    if (showContent)
    {
        var original = await _rest.GetMessageOrNull(msg.Channel, msg.OriginalMid);
        if (original != null)
            content = original.Content;
    }

    var header = new MessageComponent()
    {
        Type = ComponentType.Text,
        Content = $"### Command response message\n**Original message:** https://discord.com/channels/{msg.Guild}/{msg.Channel}/{msg.OriginalMid}\n**Sent By:** <@{msg.Sender}>"
    };
    var separator = new MessageComponent()
    {
        Type = ComponentType.Separator,
    };
    var contentBlock = new MessageComponent()
    {
        Type = ComponentType.Text,
        Content = content
    };

    var container = new MessageComponent()
    {
        Type = ComponentType.Container,
        Components = [header, separator, contentBlock]
    };
    var footer = new MessageComponent()
    {
        Type = ComponentType.Text,
        Content = $"-# Original Message ID: {msg.OriginalMid} · <t:{DiscordUtils.SnowflakeToTimestamp(msg.OriginalMid)}:f>"
    };

    return [container, footer];
}
public async Task<Embed> CreateCommandMessageInfoEmbed(Core.CommandMessage msg, bool showContent)
{
var content = "*(command message deleted or inaccessible)*";

View file

@ -39,6 +39,9 @@ public static class DiscordUtils
// Discord epoch is 2015-01-01T00:00:00Z; the upper 42 bits of a snowflake are
// milliseconds since that epoch (hence the `>> 22`).
public static Instant SnowflakeToInstant(ulong snowflake) =>
Instant.FromUtc(2015, 1, 1, 0, 0, 0) + Duration.FromMilliseconds(snowflake >> 22);
// Unix timestamp in *seconds* for a snowflake, e.g. for Discord `<t:...>` markdown.
public static ulong SnowflakeToTimestamp(ulong snowflake) =>
((ulong)Instant.FromUtc(2015, 1, 1, 0, 0, 0).ToUnixTimeMilliseconds() + (snowflake >> 22)) / 1000;
// Inverse of SnowflakeToInstant: the smallest snowflake for the given instant.
public static ulong InstantToSnowflake(Instant time) =>
(ulong)(time - Instant.FromUtc(2015, 1, 1, 0, 0, 0)).TotalMilliseconds << 22;

View file

@ -5,6 +5,7 @@ using Autofac;
using Myriad.Cache;
using Myriad.Gateway;
using Myriad.Rest;
using Myriad.Rest.Types;
using Myriad.Types;
using PluralKit.Core;
@ -76,6 +77,17 @@ public class InteractionContext
});
}
/// <summary>
/// Responds to the interaction with a Components-v2 message. The reply is always
/// ephemeral and carries the IsComponentsV2 flag; when no allowed-mentions are
/// given, a fresh (empty) AllowedMentions is sent instead.
/// </summary>
/// <param name="components">Top-level components of the reply.</param>
/// <param name="mentions">Allowed mentions for the reply; optional.</param>
public async Task Reply(MessageComponent[] components = null, AllowedMentions? mentions = null)
{
await Respond(InteractionResponse.ResponseType.ChannelMessageWithSource,
new InteractionApplicationCommandCallbackData
{
Components = components,
Flags = Message.MessageFlags.Ephemeral | Message.MessageFlags.IsComponentsV2,
AllowedMentions = mentions ?? new AllowedMentions()
});
}
public async Task Defer()
{
await Respond(InteractionResponse.ResponseType.DeferredChannelMessageWithSource,

View file

@ -14,6 +14,7 @@ fred = { workspace = true }
lazy_static = { workspace = true }
metrics = { workspace = true }
reqwest = { workspace = true }
sea-query = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sqlx = { workspace = true }
@ -28,3 +29,4 @@ serde_urlencoded = "0.7.1"
tower = "0.4.13"
tower-http = { version = "0.5.2", features = ["catch-panic"] }
subtle = "2.6.1"
sea-query-sqlx = { version = "0.8.0-rc.8", features = ["sqlx-postgres", "with-chrono"] }

View file

@ -0,0 +1,211 @@
use axum::{
Extension, Json,
extract::{Json as ExtractJson, State},
response::IntoResponse,
};
use pk_macros::api_endpoint;
use sea_query::{Expr, ExprTrait, PostgresQueryBuilder};
use sea_query_sqlx::SqlxBinder;
use serde_json::{Value, json};
use pluralkit_models::{PKGroup, PKGroupPatch, PKMember, PKMemberPatch, PKSystem};
use crate::{
ApiContext,
auth::AuthState,
error::{
GENERIC_AUTH_ERROR, NOT_OWN_GROUP, NOT_OWN_MEMBER, PKError, TARGET_GROUP_NOT_FOUND,
TARGET_MEMBER_NOT_FOUND,
},
};
#[derive(serde::Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BulkActionRequestFilter {
All,
Ids { ids: Vec<String> },
Connection { id: String },
}
#[derive(serde::Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum BulkActionRequest {
Member {
filter: BulkActionRequestFilter,
patch: PKMemberPatch,
},
Group {
filter: BulkActionRequestFilter,
patch: PKGroupPatch,
},
}
/// POST /api/v2/bulk — apply one member or group patch to many rows at once.
///
/// Targets are selected by the request's filter:
/// - `all`: every member/group in the authed system
/// - `ids`: an explicit list of hids/uuids (all must exist and be owned)
/// - `connection`: all members of a given group / all groups of a given member
///
/// Responds with `{ "updated": <row count> }`.
#[api_endpoint]
pub async fn bulk(
    Extension(auth): Extension<AuthState>,
    State(ctx): State<ApiContext>,
    ExtractJson(req): ExtractJson<BulkActionRequest>,
) -> Json<Value> {
    let Some(system_id) = auth.system_id() else {
        return Err(GENERIC_AUTH_ERROR);
    };

    // row type for group_members join-table lookups
    #[derive(sqlx::FromRow)]
    struct GroupMemberEntry {
        member_id: i32,
        group_id: i32,
    }

    // row type for queries selecting only `id`: sqlx's derived FromRow requires
    // every struct field to exist as a result column, so `select id from …` must
    // be decoded into this single-field struct
    #[allow(dead_code)]
    #[derive(sqlx::FromRow)]
    struct OnlyIder {
        id: i32,
    }

    match req {
        BulkActionRequest::Member { filter, mut patch } => {
            // reject fields that may not be bulk-patched before touching the db
            patch.validate_bulk();
            if patch.errors().len() > 0 {
                return Err(PKError::from_validation_errors(patch.errors()));
            }

            let ids: Vec<i32> = match filter {
                BulkActionRequestFilter::All => {
                    // fix: previously decoded into a three-field row type while
                    // selecting only `id`, which fails at runtime (ColumnNotFound)
                    let ids: Vec<OnlyIder> =
                        sqlx::query_as("select id from members where system = $1")
                            .bind(system_id as i64)
                            .fetch_all(&ctx.db)
                            .await?;
                    ids.iter().map(|v| v.id).collect()
                }
                BulkActionRequestFilter::Ids { ids } => {
                    // fix: `any($1::array)` is not valid postgres (`array` is not a
                    // type name); plain `any($1)` with the text[] bind matches the
                    // group branch below
                    let members: Vec<PKMember> = sqlx::query_as(
                        "select * from members where hid = any($1) or uuid::text = any($1)",
                    )
                    .bind(&ids)
                    .fetch_all(&ctx.db)
                    .await?;

                    // todo: better errors
                    if members.len() != ids.len() {
                        return Err(TARGET_MEMBER_NOT_FOUND);
                    }
                    if members.iter().any(|m| m.system != system_id) {
                        return Err(NOT_OWN_MEMBER);
                    }

                    members.iter().map(|m| m.id).collect()
                }
                BulkActionRequestFilter::Connection { id } => {
                    // patch every member connected to the given group
                    let Some(group): Option<PKGroup> =
                        sqlx::query_as("select * from groups where hid = $1 or uuid::text = $1")
                            .bind(id)
                            .fetch_optional(&ctx.db)
                            .await?
                    else {
                        return Err(TARGET_GROUP_NOT_FOUND);
                    };
                    if group.system != system_id {
                        return Err(NOT_OWN_GROUP);
                    }

                    let entries: Vec<GroupMemberEntry> =
                        sqlx::query_as("select * from group_members where group_id = $1")
                            .bind(group.id)
                            .fetch_all(&ctx.db)
                            .await?;
                    entries.iter().map(|v| v.member_id).collect()
                }
            };

            let (q, pms) = patch
                .to_sql()
                .table("members") // todo: this should be in the model definition
                .and_where(Expr::col("id").is_in(ids))
                .returning_col("id")
                .build_sqlx(PostgresQueryBuilder);

            let res: Vec<OnlyIder> = sqlx::query_as_with(&q, pms).fetch_all(&ctx.db).await?;
            Ok(Json(json! {{ "updated": res.len() }}))
        }
        BulkActionRequest::Group { filter, mut patch } => {
            // mirror of the member branch above, with member/group roles swapped
            patch.validate_bulk();
            if patch.errors().len() > 0 {
                return Err(PKError::from_validation_errors(patch.errors()));
            }

            let ids: Vec<i32> = match filter {
                BulkActionRequestFilter::All => {
                    let ids: Vec<OnlyIder> =
                        sqlx::query_as("select id from groups where system = $1")
                            .bind(system_id as i64)
                            .fetch_all(&ctx.db)
                            .await?;
                    ids.iter().map(|v| v.id).collect()
                }
                BulkActionRequestFilter::Ids { ids } => {
                    let groups: Vec<PKGroup> = sqlx::query_as(
                        "select * from groups where hid = any($1) or uuid::text = any($1)",
                    )
                    .bind(&ids)
                    .fetch_all(&ctx.db)
                    .await?;

                    // todo: better errors
                    if groups.len() != ids.len() {
                        return Err(TARGET_GROUP_NOT_FOUND);
                    }
                    if groups.iter().any(|m| m.system != system_id) {
                        return Err(NOT_OWN_GROUP);
                    }

                    groups.iter().map(|m| m.id).collect()
                }
                BulkActionRequestFilter::Connection { id } => {
                    // patch every group the given member is connected to
                    let Some(member): Option<PKMember> =
                        sqlx::query_as("select * from members where hid = $1 or uuid::text = $1")
                            .bind(id)
                            .fetch_optional(&ctx.db)
                            .await?
                    else {
                        return Err(TARGET_MEMBER_NOT_FOUND);
                    };
                    if member.system != system_id {
                        return Err(NOT_OWN_MEMBER);
                    }

                    let entries: Vec<GroupMemberEntry> =
                        sqlx::query_as("select * from group_members where member_id = $1")
                            .bind(member.id)
                            .fetch_all(&ctx.db)
                            .await?;
                    entries.iter().map(|v| v.group_id).collect()
                }
            };

            let (q, pms) = patch
                .to_sql()
                .table("groups") // todo: this should be in the model definition
                .and_where(Expr::col("id").is_in(ids))
                .returning_col("id")
                .build_sqlx(PostgresQueryBuilder);

            let res: Vec<OnlyIder> = sqlx::query_as_with(&q, pms).fetch_all(&ctx.db).await?;
            Ok(Json(json! {{ "updated": res.len() }}))
        }
    }
}

View file

@ -1,2 +1,3 @@
pub mod bulk;
pub mod private;
pub mod system;

View file

@ -2,6 +2,7 @@ use axum::{
http::StatusCode,
response::{IntoResponse, Response},
};
use pluralkit_models::ValidationError;
use std::fmt;
// todo: model parse errors
@ -11,6 +12,8 @@ pub struct PKError {
pub json_code: i32,
pub message: &'static str,
pub errors: Vec<ValidationError>,
pub inner: Option<anyhow::Error>,
}
@ -30,6 +33,21 @@ impl Clone for PKError {
json_code: self.json_code,
message: self.message,
inner: None,
errors: self.errors.clone(),
}
}
}
// can't `impl From<Vec<ValidationError>>`
// because "upstream crate may add a new impl" >:(
impl PKError {
pub fn from_validation_errors(errs: Vec<ValidationError>) -> Self {
Self {
message: "Error parsing JSON model",
json_code: 40001,
errors: errs,
response_code: StatusCode::BAD_REQUEST,
inner: None,
}
}
}
@ -50,14 +68,19 @@ impl IntoResponse for PKError {
if let Some(inner) = self.inner {
tracing::error!(?inner, "error returned from handler");
}
crate::util::json_err(
self.response_code,
serde_json::to_string(&serde_json::json!({
let json = if self.errors.len() > 0 {
serde_json::json!({
"message": self.message,
"code": self.json_code,
}))
.unwrap(),
)
"errors": self.errors,
})
} else {
serde_json::json!({
"message": self.message,
"code": self.json_code,
})
};
crate::util::json_err(self.response_code, serde_json::to_string(&json).unwrap())
}
}
@ -78,9 +101,17 @@ macro_rules! define_error {
json_code: $json_code,
message: $message,
inner: None,
errors: vec![],
};
};
}
define_error! { GENERIC_AUTH_ERROR, StatusCode::UNAUTHORIZED, 0, "401: Missing or invalid Authorization header" }
define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
define_error! { GENERIC_SERVER_ERROR, StatusCode::INTERNAL_SERVER_ERROR, 0, "500: Internal Server Error" }
define_error! { NOT_OWN_MEMBER, StatusCode::FORBIDDEN, 30006, "Target member is not part of your system." }
define_error! { NOT_OWN_GROUP, StatusCode::FORBIDDEN, 30007, "Target group is not part of your system." }
define_error! { TARGET_MEMBER_NOT_FOUND, StatusCode::BAD_REQUEST, 40010, "Target member not found." }
define_error! { TARGET_GROUP_NOT_FOUND, StatusCode::BAD_REQUEST, 40011, "Target group not found." }

View file

@ -115,6 +115,8 @@ fn router(ctx: ApiContext) -> Router {
.route("/v2/messages/{message_id}", get(rproxy))
.route("/v2/bulk", post(endpoints::bulk::bulk))
.route("/private/bulk_privacy/member", post(rproxy))
.route("/private/bulk_privacy/group", post(rproxy))
.route("/private/discord/callback", post(rproxy))
@ -127,13 +129,10 @@ fn router(ctx: ApiContext) -> Router {
.route("/v2/groups/{group_id}/oembed.json", get(rproxy))
.layer(middleware::ratelimit::ratelimiter(middleware::ratelimit::do_request_ratelimited)) // this sucks
.layer(axum::middleware::from_fn(middleware::ignore_invalid_routes::ignore_invalid_routes))
.layer(axum::middleware::from_fn(middleware::logger::logger))
.layer(axum::middleware::from_fn_with_state(ctx.clone(), middleware::params::params))
.layer(axum::middleware::from_fn_with_state(ctx.clone(), middleware::auth::auth))
.layer(axum::middleware::from_fn(middleware::logger::logger))
.layer(axum::middleware::from_fn(middleware::cors::cors))
.layer(tower_http::catch_panic::CatchPanicLayer::custom(util::handle_panic))

View file

@ -76,5 +76,10 @@ pub async fn auth(State(ctx): State<ApiContext>, mut req: Request, next: Next) -
req.extensions_mut()
.insert(AuthState::new(authed_system_id, authed_app_id, internal));
next.run(req).await
let mut res = next.run(req).await;
res.extensions_mut()
.insert(AuthState::new(authed_system_id, authed_app_id, internal));
res
}

View file

@ -12,9 +12,10 @@ const MIN_LOG_TIME: u128 = 2_000;
pub async fn logger(request: Request, next: Next) -> Response {
let method = request.method().clone();
let headers = request.headers().clone();
let remote_ip = header_or_unknown(request.headers().get("X-PluralKit-Client-IP"));
let user_agent = header_or_unknown(request.headers().get("User-Agent"));
let remote_ip = header_or_unknown(headers.get("X-PluralKit-Client-IP"));
let user_agent = header_or_unknown(headers.get("User-Agent"));
let extensions = request.extensions().clone();
@ -24,10 +25,6 @@ pub async fn logger(request: Request, next: Next) -> Response {
.map(|v| v.as_str().to_string())
.unwrap_or("unknown".to_string());
let auth = extensions
.get::<AuthState>()
.expect("should always have AuthState");
let uri = request.uri().clone();
let request_span = span!(
@ -43,15 +40,24 @@ pub async fn logger(request: Request, next: Next) -> Response {
let response = next.run(request).instrument(request_span).await;
let elapsed = start.elapsed().as_millis();
let system_id = auth
.system_id()
.map(|v| v.to_string())
.unwrap_or("none".to_string());
let rext = response.extensions().clone();
let auth = rext.get::<AuthState>();
let app_id = auth
.app_id()
.map(|v| v.to_string())
.unwrap_or("none".to_string());
let system_id = if let Some(auth) = auth {
auth.system_id()
.map(|v| v.to_string())
.unwrap_or("none".to_string())
} else {
"none".to_string()
};
let app_id = if let Some(auth) = auth {
auth.app_id()
.map(|v| v.to_string())
.unwrap_or("none".to_string())
} else {
"none".to_string()
};
counter!(
"pluralkit_api_requests",
@ -73,6 +79,14 @@ pub async fn logger(request: Request, next: Next) -> Response {
.record(elapsed as f64 / 1_000_f64);
info!(
status = response.status().as_str(),
method = method.to_string(),
endpoint,
elapsed,
user_agent,
remote_ip,
system_id,
app_id,
"{} handled request for {} {} in {}ms",
response.status(),
method,

View file

@ -0,0 +1,14 @@
[package]
name = "app-commands"
version = "0.1.0"
edition = "2024"
[dependencies]
libpk = { path = "../libpk" }
anyhow = { workspace = true }
futures = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
twilight-http = { workspace = true }
twilight-model = { workspace = true }
twilight-util = { workspace = true }

View file

@ -0,0 +1,41 @@
use twilight_model::{
application::command::{Command, CommandType},
guild::IntegrationApplication,
};
use twilight_util::builder::command::CommandBuilder;
#[libpk::main]
async fn main() -> anyhow::Result<()> {
let discord = twilight_http::Client::builder()
.token(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.bot_token
.clone(),
)
.build();
let interaction = discord.interaction(twilight_model::id::Id::new(
libpk::config
.discord
.as_ref()
.expect("missing discord config")
.client_id
.clone()
.get(),
));
let commands = vec![
// message commands
// description must be empty string
CommandBuilder::new("\u{2753} Message info", "", CommandType::Message).build(),
CommandBuilder::new("\u{274c} Delete message", "", CommandType::Message).build(),
CommandBuilder::new("\u{1f514} Ping author", "", CommandType::Message).build(),
];
interaction.set_global_commands(&commands).await?;
Ok(())
}

View file

@ -85,8 +85,14 @@ fn parse_field(field: syn::Field) -> ModelField {
panic!("must have json name to be publicly patchable");
}
if f.json.is_some() && f.is_privacy {
panic!("cannot set custom json name for privacy field");
if f.is_privacy && f.json.is_none() {
f.json = Some(syn::Expr::Lit(syn::ExprLit {
attrs: vec![],
lit: syn::Lit::Str(syn::LitStr::new(
f.name.clone().to_string().as_str(),
proc_macro2::Span::call_site(),
)),
}))
}
f
@ -122,17 +128,17 @@ pub fn macro_impl(
let fields: Vec<ModelField> = fields
.iter()
.filter(|f| !matches!(f.patch, ElemPatchability::None))
.filter(|f| f.is_privacy || !matches!(f.patch, ElemPatchability::None))
.cloned()
.collect();
let patch_fields = mk_patch_fields(fields.clone());
let patch_from_json = mk_patch_from_json(fields.clone());
let patch_validate = mk_patch_validate(fields.clone());
let patch_validate_bulk = mk_patch_validate_bulk(fields.clone());
let patch_to_json = mk_patch_to_json(fields.clone());
let patch_to_sql = mk_patch_to_sql(fields.clone());
return quote! {
let code = quote! {
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct #tname {
#tfields
@ -146,31 +152,42 @@ pub fn macro_impl(
#to_json
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Default)]
pub struct #patchable_name {
#patch_fields
errors: Vec<crate::ValidationError>,
}
impl #patchable_name {
pub fn from_json(input: String) -> Self {
#patch_from_json
}
pub fn validate(self) -> bool {
pub fn validate(&mut self) {
#patch_validate
}
pub fn errors(&self) -> Vec<crate::ValidationError> {
self.errors.clone()
}
pub fn validate_bulk(&mut self) {
#patch_validate_bulk
}
pub fn to_sql(self) -> sea_query::UpdateStatement {
// sea_query::Query::update()
#patch_to_sql
use sea_query::types::*;
let mut patch = &mut sea_query::Query::update();
#patch_to_sql
patch.clone()
}
pub fn to_json(self) -> serde_json::Value {
#patch_to_json
}
}
}
.into();
};
// panic!("{:#?}", code.to_string());
return code.into();
}
fn mk_tfields(fields: Vec<ModelField>) -> TokenStream {
@ -225,7 +242,7 @@ fn mk_tto_json(fields: Vec<ModelField>) -> TokenStream {
.filter_map(|f| {
if f.is_privacy {
let tname = f.name.clone();
let tnamestr = f.name.clone().to_string();
let tnamestr = f.json.clone();
Some(quote! {
#tnamestr: self.#tname,
})
@ -280,13 +297,48 @@ fn mk_patch_fields(fields: Vec<ModelField>) -> TokenStream {
.collect()
}
fn mk_patch_validate(_fields: Vec<ModelField>) -> TokenStream {
quote! { true }
}
fn mk_patch_from_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}
fn mk_patch_to_sql(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
// Generate the body of the patch type's `validate_bulk()`.
//
// The bulk endpoint only allows *clearing* fields: for every publicly
// patchable `Option<_>` field that is present in the request, the generated
// check pushes a ValidationError when the supplied value is non-null.
// Non-Option fields and non-public fields contribute no generated code.
fn mk_patch_validate_bulk(fields: Vec<ModelField>) -> TokenStream {
    // iterate over all nullable patchable fields other than privacy
    // add an error if any field is set to a value other than null
    fields
        .iter()
        .map(|f| {
            // Skip fields whose declared type is a path that is not `Option`.
            // NOTE(review): a non-path type would fall through this guard and
            // be treated as nullable — confirm all model field types are
            // plain path types.
            if let syn::Type::Path(path) = &f.ty && let Some(inner) = path.path.segments.last() && inner.ident != "Option" {
                return quote! {};
            }
            let name = f.name.clone();
            if matches!(f.patch, ElemPatchability::Public) {
                // Safe unwrap: parse_field panics if a publicly patchable
                // field has no json name.
                let json = f.json.clone().unwrap();
                quote! {
                    // Patch fields are Option<Option<T>>: outer Some means
                    // "present in the request", inner Some means "non-null".
                    if let Some(val) = self.#name.clone() && val.is_some() {
                        self.errors.push(ValidationError::simple(#json, "Only null values are supported in bulk endpoint"));
                    }
                }
            } else {
                quote! {}
            }
        })
        .collect()
}
// Generate the body of the patch type's `to_sql()`: for each field that is
// set (outer Option is Some), append a column assignment to the sea_query
// UPDATE statement (`patch`) being built by the surrounding generated code.
fn mk_patch_to_sql(fields: Vec<ModelField>) -> TokenStream {
    fields
        .iter()
        .filter_map(|f| {
            // Privacy fields are included even though their patchability is
            // None — they are settable through the nested "privacy" object.
            if !matches!(f.patch, ElemPatchability::None) || f.is_privacy {
                let name = f.name.clone();
                // The column name is the Rust field name, not the JSON name.
                let column = f.name.to_string();
                Some(quote! {
                    // NOTE(review): relies on `&str` satisfying IntoIden and
                    // on the field value converting Into<sea_query::Value>
                    // (Option<T> included) — confirm against the pinned
                    // sea-query version.
                    if let Some(value) = self.#name {
                        patch = patch.value(#column, value);
                    }
                })
            } else {
                None
            }
        })
        .collect()
}
fn mk_patch_to_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }

View file

@ -6,7 +6,7 @@ edition = "2024"
[dependencies]
chrono = { workspace = true, features = ["serde"] }
pk_macros = { path = "../macros" }
sea-query = "0.32.1"
sea-query = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true, features = ["preserve_order"] }
# in theory we want to default-features = false for sqlx

132
crates/models/src/group.rs Normal file
View file

@ -0,0 +1,132 @@
use pk_macros::pk_model;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_json::Value;
use uuid::Uuid;
use crate::{PrivacyLevel, SystemId, ValidationError};
// todo: fix
pub type GroupId = i32;
// Database model for a group. The `#[pk_model]` proc macro generates the row
// struct, JSON serialization, and the `PKGroupPatch` type used by the PATCH
// endpoints. Plain `//` comments are used here so the macro's attribute
// parsing never sees them.
#[pk_model]
struct Group {
    id: GroupId,

    // Short human-facing id; only patchable through internal paths.
    #[json = "hid"]
    #[private_patchable]
    hid: String,

    #[json = "uuid"]
    uuid: Uuid,

    // TODO fix
    #[json = "system"]
    system: SystemId,

    // Required: patchable but never nullable (enforced in the custom
    // Deserialize impl below).
    #[json = "name"]
    #[privacy = name_privacy]
    #[patchable]
    name: String,

    #[json = "display_name"]
    #[patchable]
    display_name: Option<String>,

    #[json = "color"]
    #[patchable]
    color: Option<String>,

    #[json = "icon"]
    #[patchable]
    icon: Option<String>,

    // NOTE(review): JSON key here is "banner_image" but the PKGroupPatch
    // deserializer below reads the "banner" key — confirm which key the
    // public API contract uses.
    #[json = "banner_image"]
    #[patchable]
    banner_image: Option<String>,

    #[json = "description"]
    #[privacy = description_privacy]
    #[patchable]
    description: Option<String>,

    #[json = "created"]
    created: DateTime<Utc>,

    // Privacy fields: json name defaults to the field name when omitted.
    #[privacy]
    name_privacy: PrivacyLevel,
    #[privacy]
    description_privacy: PrivacyLevel,
    #[privacy]
    banner_privacy: PrivacyLevel,
    #[privacy]
    icon_privacy: PrivacyLevel,
    #[privacy]
    list_privacy: PrivacyLevel,
    #[privacy]
    metadata_privacy: PrivacyLevel,
    #[privacy]
    visibility: PrivacyLevel,
}
// Hand-rolled Deserialize for group patches.
//
// Invalid values never fail deserialization; instead they are recorded in
// `patch.errors` so the API layer can report them all at once.
// Nullable patch fields are tri-state Option<Option<T>>:
//   None            -> key absent, leave unchanged
//   Some(None)      -> key explicitly null, clear the column
//   Some(Some(v))   -> set to v
impl<'de> Deserialize<'de> for PKGroupPatch {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let mut patch: PKGroupPatch = Default::default();
        let value: Value = Value::deserialize(deserializer)?;

        // Name is required: a string sets it, explicit null is an error,
        // any other type is silently ignored here.
        if let Some(v) = value.get("name") {
            if let Some(name) = v.as_str() {
                patch.name = Some(name.to_string());
            } else if v.is_null() {
                patch.errors.push(ValidationError::simple(
                    "name",
                    "Group name cannot be set to null.",
                ));
            }
        }

        // Expands to the tri-state parse described above; non-string,
        // non-null values record an "is invalid" error.
        macro_rules! parse_string_simple {
            ($k:expr) => {
                match value.get($k) {
                    None => None,
                    Some(Value::Null) => Some(None),
                    Some(Value::String(s)) => Some(Some(s.clone())),
                    _ => {
                        patch.errors.push(ValidationError::new($k));
                        None
                    }
                }
            };
        }

        patch.display_name = parse_string_simple!("display_name");
        patch.description = parse_string_simple!("description");
        patch.icon = parse_string_simple!("icon");
        // NOTE(review): reads key "banner" but the model field's json
        // attribute is "banner_image" — confirm the intended API key.
        patch.banner_image = parse_string_simple!("banner");
        // Colors are normalized to lowercase before storage.
        patch.color = parse_string_simple!("color").map(|v| v.map(|t| t.to_lowercase()));

        // Privacy settings arrive in a nested "privacy" object; missing keys
        // are left unchanged, null/""/"private" mean Private, "public" means
        // Public, anything else records an error.
        if let Some(privacy) = value.get("privacy").and_then(Value::as_object) {
            macro_rules! parse_privacy {
                ($v:expr) => {
                    match privacy.get($v) {
                        None => None,
                        Some(Value::Null) => Some(PrivacyLevel::Private),
                        Some(Value::String(s)) if s == "" || s == "private" => {
                            Some(PrivacyLevel::Private)
                        }
                        Some(Value::String(s)) if s == "public" => Some(PrivacyLevel::Public),
                        _ => {
                            patch.errors.push(ValidationError::new($v));
                            None
                        }
                    }
                };
            }

            patch.name_privacy = parse_privacy!("name_privacy");
            patch.description_privacy = parse_privacy!("description_privacy");
            patch.banner_privacy = parse_privacy!("banner_privacy");
            patch.icon_privacy = parse_privacy!("icon_privacy");
            patch.list_privacy = parse_privacy!("list_privacy");
            patch.metadata_privacy = parse_privacy!("metadata_privacy");
            patch.visibility = parse_privacy!("visibility");
        }

        Ok(patch)
    }
}

View file

@ -9,6 +9,8 @@ macro_rules! model {
model!(system);
model!(system_config);
model!(member);
model!(group);
#[derive(serde::Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
@ -31,3 +33,30 @@ impl From<i32> for PrivacyLevel {
}
}
}
impl From<PrivacyLevel> for sea_query::Value {
    /// Map a privacy level onto its stored integer representation
    /// (1 = public, 2 = private), wrapped as a non-null SQL integer.
    fn from(level: PrivacyLevel) -> sea_query::Value {
        let raw = match level {
            PrivacyLevel::Public => 1,
            PrivacyLevel::Private => 2,
        };
        sea_query::Value::Int(Some(raw))
    }
}
// A single validation failure collected while deserializing/validating a
// patch; serialized via serde for inclusion in API error responses.
#[derive(serde::Serialize, Debug, Clone)]
pub enum ValidationError {
    // `key` is the offending JSON field; `value` is a human-readable message.
    Simple { key: String, value: String },
}

impl ValidationError {
    // Generic "is invalid" error for a field. Not `pub`: constructors are
    // reachable only from this module and its child model modules.
    fn new(key: &str) -> Self {
        Self::simple(key, "is invalid")
    }

    // Error with a custom message for a field.
    fn simple(key: &str, value: &str) -> Self {
        Self::Simple {
            key: key.to_string(),
            value: value.to_string(),
        }
    }
}

208
crates/models/src/member.rs Normal file
View file

@ -0,0 +1,208 @@
use pk_macros::pk_model;
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use uuid::Uuid;
use crate::{PrivacyLevel, SystemId, ValidationError};
// todo: fix
pub type MemberId = i32;
// A proxy tag pair for a member; maps to the postgres composite type
// `proxy_tag`. Either side may be absent (prefix-only or suffix-only tags).
#[derive(Clone, Debug, Serialize, Deserialize, sqlx::Type)]
#[sqlx(type_name = "proxy_tag")]
pub struct ProxyTag {
    pub prefix: Option<String>,
    pub suffix: Option<String>,
}
// Database model for a member. The `#[pk_model]` proc macro generates the
// row struct, JSON serialization, and the `PKMemberPatch` type used by the
// PATCH endpoints. Plain `//` comments are used so the macro's attribute
// parsing never sees them.
#[pk_model]
struct Member {
    id: MemberId,

    // Short human-facing id; only patchable through internal paths.
    #[json = "hid"]
    #[private_patchable]
    hid: String,

    #[json = "uuid"]
    uuid: Uuid,

    // TODO fix
    #[json = "system"]
    system: SystemId,

    #[json = "color"]
    #[patchable]
    color: Option<String>,

    #[json = "webhook_avatar_url"]
    #[patchable]
    webhook_avatar_url: Option<String>,

    #[json = "avatar_url"]
    #[patchable]
    avatar_url: Option<String>,

    // NOTE(review): JSON key here is "banner_image" but the PKMemberPatch
    // deserializer below reads the "banner" key — confirm the API contract.
    #[json = "banner_image"]
    #[patchable]
    banner_image: Option<String>,

    // Required: patchable but never nullable (enforced in the custom
    // Deserialize impl below).
    #[json = "name"]
    #[privacy = name_privacy]
    #[patchable]
    name: String,

    #[json = "display_name"]
    #[patchable]
    display_name: Option<String>,

    // Stored as a string, not a date type.
    #[json = "birthday"]
    #[patchable]
    birthday: Option<String>,

    #[json = "pronouns"]
    #[privacy = pronoun_privacy]
    #[patchable]
    pronouns: Option<String>,

    #[json = "description"]
    #[privacy = description_privacy]
    #[patchable]
    description: Option<String>,

    // Patchability disabled pending sea_query support for the composite
    // proxy_tag type (see the deserializer below).
    #[json = "proxy_tags"]
    // #[patchable]
    proxy_tags: Vec<ProxyTag>,

    #[json = "keep_proxy"]
    #[patchable]
    keep_proxy: bool,

    #[json = "tts"]
    #[patchable]
    tts: bool,

    // NOTE(review): NaiveDateTime here, while Group uses DateTime<Utc> for
    // `created` — confirm whether this difference is intentional.
    #[json = "created"]
    created: NaiveDateTime,

    #[json = "message_count"]
    #[private_patchable]
    message_count: i32,

    #[json = "last_message_timestamp"]
    #[private_patchable]
    last_message_timestamp: Option<NaiveDateTime>,

    #[json = "allow_autoproxy"]
    #[patchable]
    allow_autoproxy: bool,

    // Privacy fields: json name defaults to the field name unless overridden,
    // as with member_visibility -> "visibility" here.
    #[privacy]
    #[json = "visibility"]
    member_visibility: PrivacyLevel,
    #[privacy]
    description_privacy: PrivacyLevel,
    #[privacy]
    banner_privacy: PrivacyLevel,
    #[privacy]
    avatar_privacy: PrivacyLevel,
    #[privacy]
    name_privacy: PrivacyLevel,
    #[privacy]
    birthday_privacy: PrivacyLevel,
    #[privacy]
    pronoun_privacy: PrivacyLevel,
    #[privacy]
    metadata_privacy: PrivacyLevel,
    #[privacy]
    proxy_privacy: PrivacyLevel,
}
// Hand-rolled Deserialize for member patches.
//
// Invalid values never fail deserialization; they are recorded in
// `patch.errors` so the API layer can report them all at once.
// Nullable patch fields are tri-state Option<Option<T>>:
//   None            -> key absent, leave unchanged
//   Some(None)      -> key explicitly null, clear the column
//   Some(Some(v))   -> set to v
impl<'de> Deserialize<'de> for PKMemberPatch {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let mut patch: PKMemberPatch = Default::default();
        let value: Value = Value::deserialize(deserializer)?;

        // Name is required: a string sets it, explicit null is an error,
        // any other type is silently ignored here.
        if let Some(v) = value.get("name") {
            if let Some(name) = v.as_str() {
                patch.name = Some(name.to_string());
            } else if v.is_null() {
                patch.errors.push(ValidationError::simple(
                    "name",
                    "Member name cannot be set to null.",
                ));
            }
        }

        // Expands to the tri-state parse described above; non-string,
        // non-null values record an "is invalid" error.
        macro_rules! parse_string_simple {
            ($k:expr) => {
                match value.get($k) {
                    None => None,
                    Some(Value::Null) => Some(None),
                    Some(Value::String(s)) => Some(Some(s.clone())),
                    _ => {
                        patch.errors.push(ValidationError::new($k));
                        None
                    }
                }
            };
        }

        // Colors are normalized to lowercase before storage.
        patch.color = parse_string_simple!("color").map(|v| v.map(|t| t.to_lowercase()));
        patch.display_name = parse_string_simple!("display_name");
        patch.avatar_url = parse_string_simple!("avatar_url");
        // NOTE(review): reads key "banner" but the model field's json
        // attribute is "banner_image" — confirm the intended API key.
        patch.banner_image = parse_string_simple!("banner");
        // Accepted as a plain string without date validation for now.
        patch.birthday = parse_string_simple!("birthday"); // fix
        patch.pronouns = parse_string_simple!("pronouns");
        patch.description = parse_string_simple!("description");

        // Booleans: only a JSON boolean updates the field; null or wrong
        // types are ignored without recording an error.
        if let Some(keep_proxy) = value.get("keep_proxy").and_then(Value::as_bool) {
            patch.keep_proxy = Some(keep_proxy);
        }
        if let Some(tts) = value.get("tts").and_then(Value::as_bool) {
            patch.tts = Some(tts);
        }

        // todo: legacy import handling
        // todo: fix proxy_tag type in sea_query
        // if let Some(proxy_tags) = value.get("proxy_tags").and_then(Value::as_array) {
        //     patch.proxy_tags = Some(
        //         proxy_tags
        //             .iter()
        //             .filter_map(|tag| {
        //                 tag.as_object().map(|tag_obj| {
        //                     let prefix = tag_obj
        //                         .get("prefix")
        //                         .and_then(Value::as_str)
        //                         .map(|s| s.to_string());
        //                     let suffix = tag_obj
        //                         .get("suffix")
        //                         .and_then(Value::as_str)
        //                         .map(|s| s.to_string());
        //                     ProxyTag { prefix, suffix }
        //                 })
        //             })
        //             .collect(),
        //     )
        // }

        // Privacy settings arrive in a nested "privacy" object; missing keys
        // are left unchanged, null/""/"private" mean Private, "public" means
        // Public, anything else records an error.
        if let Some(privacy) = value.get("privacy").and_then(Value::as_object) {
            macro_rules! parse_privacy {
                ($v:expr) => {
                    match privacy.get($v) {
                        None => None,
                        Some(Value::Null) => Some(PrivacyLevel::Private),
                        Some(Value::String(s)) if s == "" || s == "private" => {
                            Some(PrivacyLevel::Private)
                        }
                        Some(Value::String(s)) if s == "public" => Some(PrivacyLevel::Public),
                        _ => {
                            patch.errors.push(ValidationError::new($v));
                            None
                        }
                    }
                };
            }

            patch.member_visibility = parse_privacy!("visibility");
            patch.name_privacy = parse_privacy!("name_privacy");
            patch.description_privacy = parse_privacy!("description_privacy");
            patch.banner_privacy = parse_privacy!("banner_privacy");
            patch.avatar_privacy = parse_privacy!("avatar_privacy");
            patch.birthday_privacy = parse_privacy!("birthday_privacy");
            patch.pronoun_privacy = parse_privacy!("pronoun_privacy");
            patch.proxy_privacy = parse_privacy!("proxy_privacy");
            patch.metadata_privacy = parse_privacy!("metadata_privacy");
        }

        Ok(patch)
    }
}

View file

@ -5,7 +5,7 @@ Because PluralKit deletes messages as part of proxying, this can often clutter u
## Bots with PluralKit support
Some moderation bots have official PluralKit support, and properly handle excluding proxy deletes, as well as adding PK-specific information to relevant log messages:
- [**Catalogger**](https://catalogger.starshines.xyz/docs)
- [**Catalogger**](https://catalogger.app)
- [**Aero**](https://aero.bot/)
- [**CoreBot**](https://discord.gg/GAAj6DDrCJ)
- [**Quark**](https://quark.bot)

View file

@ -1,4 +0,0 @@
/commands.json
*.pyc
__pycache__/

View file

@ -1,23 +0,0 @@
# PluralKit "application command" helpers
## Adding new commands
Edit the `COMMAND_LIST` global in `commands.py`, making sure that any
command names that are specified in that file match up with the
command names used in the bot code (which will generally be in the list
in `PluralKit.Bot/ApplicationCommandMeta/ApplicationCommandList.cs`).
TODO: add helpers for slash commands to this
## Dumping application command JSON
Run `python3 commands.py` to get a JSON dump of the available application
commands - this is in a format that can be sent to Discord as a `PUT` to
`/applications/{clientId}/commands`.
## Updating Discord's list of application commands
From the root of the repository (where your `pluralkit.conf` resides),
run `python3 ./scripts/app-commands/update.py`. This will **REPLACE**
any existing application commands that Discord knows about, with the
updated list.

View file

@ -1,10 +0,0 @@
from common import *
COMMAND_LIST = [
MessageCommand("\U00002753 Message info"),
MessageCommand("\U0000274c Delete message"),
MessageCommand("\U0001f514 Ping author"),
]
if __name__ == "__main__":
print(__import__('json').dumps(COMMAND_LIST))

View file

@ -1 +0,0 @@
from .types import MessageCommand

View file

@ -1,7 +0,0 @@
class MessageCommand(dict):
    """A Discord message (context-menu) application command payload.

    Subclasses dict so instances serialize directly to the JSON shape
    Discord expects: {"type": 3, "name": <name>}.
    """

    # Discord application command type 3 = MESSAGE (context menu).
    COMMAND_TYPE = 3

    def __init__(self, name):
        super().__init__()
        self["type"] = type(self).COMMAND_TYPE
        self["name"] = name

View file

@ -1,70 +0,0 @@
from common import *
from commands import COMMAND_LIST
import io
import os
import sys
import json
from pathlib import Path
from urllib import request
from urllib.error import URLError
DISCORD_API_BASE = "https://discord.com/api/v10"
def get_config():
    """Load the bot's Token/ClientId, preferring environment variables.

    Returns a dict with at least "Token" and "ClientId" keys, or None when
    neither the environment nor pluralkit.conf provides them.
    """
    found = {}
    for key in ("Token", "ClientId"):
        # Both "PluralKit:Bot:X" and "PluralKit__Bot__X" forms are accepted;
        # the '__' form wins when both are set because it is checked last.
        for separator in (':', '__'):
            candidate = separator.join(("PluralKit", "Bot", key))
            if candidate in os.environ:
                found[key] = os.environ[candidate]
    if "Token" in found and "ClientId" in found:
        return found

    # Fall back to the JSON config file in the current working directory.
    config_file = Path(os.getcwd()) / "pluralkit.conf"
    if config_file.exists():
        with open(str(config_file), 'r') as handle:
            parsed = json.load(handle)
        if 'PluralKit' in parsed and 'Bot' in parsed['PluralKit']:
            return parsed['PluralKit']['Bot']
    return None
def main():
    """Push COMMAND_LIST to Discord, replacing all global application commands.

    Returns a process exit code: 0 on success, 1 on a failed request.
    Raises RuntimeError when credentials cannot be loaded.
    """
    # Local import: only this function needs it, keeping the module-level
    # import list unchanged.
    from urllib.error import HTTPError

    config = get_config()
    if config is None:
        # BUG FIX: the original raised ArgumentError, which is not a builtin
        # and would itself crash with NameError.
        raise RuntimeError("config was not loaded")
    if 'Token' not in config or 'ClientId' not in config:
        raise RuntimeError("config is missing 'Token' or 'ClientId'")

    data = json.dumps(COMMAND_LIST)

    url = DISCORD_API_BASE + f"/applications/{config['ClientId']}/commands"
    req = request.Request(url, method='PUT', data=data.encode('utf-8'))
    req.add_header("Content-Type", "application/json")
    req.add_header("Authorization", f"Bot {config['Token']}")
    req.add_header("User-Agent", "PluralKit (app-commands updater; https://pluralkit.me)")

    try:
        with request.urlopen(req) as resp:
            if resp.status == 200:
                print("Update successful!")
                return 0
            # BUG FIX: a non-200 response previously fell through and
            # returned None (exit code 0); report it as a failure instead.
            print(f"[!!!] Update not successful: status {resp.status}", file=sys.stderr)
            sys.stderr.flush()
            return 1
    except HTTPError as err:
        # HTTPError carries a status code and a readable body.
        print(f"[!!!] Update not successful: status {err.code}", file=sys.stderr)
        print(f"[!!!] Response body below:\n", file=sys.stderr)
        print(err.read(), file=sys.stderr)
        sys.stderr.flush()
        return 1
    except URLError as err:
        # BUG FIX: a plain URLError (e.g. DNS failure) has no .status or
        # .read(); the original accessed both and raised AttributeError.
        print(f"[!!!] Update not successful: {err.reason}", file=sys.stderr)
        sys.stderr.flush()
        return 1
if __name__ == "__main__":
sys.exit(main())

View file

@ -1,3 +0,0 @@
#!/bin/sh
# Dump the compose-managed postgres database to stdout: runs pg_dump inside
# the `db` container (as the postgres user) of the repo's docker-compose setup.
docker-compose -f "$(dirname $0)/../docker-compose.yml" exec -T -u postgres db pg_dump postgres

View file

@ -1,15 +0,0 @@
#!/bin/sh
# Usage: rclone-db.sh <remote>:<path>
# eg. rclone-db.sh b2:pluralkit
FILENAME=pluralkit-$(date -u +"%Y-%m-%dT%H:%M:%S").sql.gz
echo Dumping database to /tmp/$FILENAME...
$(dirname $0)/dump-db.sh | gzip > /tmp/$FILENAME
echo Transferring to remote $1...
rclone -P copy /tmp/$FILENAME $1
echo Cleaning up...
rm /tmp/$FILENAME

View file

@ -1,5 +0,0 @@
#!/bin/sh
# Runs a local database in the background listening on port 5432, deleting itself once stopped
# Requires Docker. May need sudo if your user isn't in the `docker` group.
docker run --rm --detach --publish 5432:5432 -e POSTGRES_HOST_AUTH_METHOD=trust postgres:alpine