chore: clean up compile warnings

commit 89d6481ddb (parent 16ce67e02c)
Author: alyssa
Date: 2025-01-02 02:37:15 +00:00

16 changed files with 37 additions and 32 deletions

Cargo.lock (generated)

@@ -186,7 +186,6 @@ dependencies = [
  "sha2",
  "sqlx",
  "thiserror",
- "time",
  "tokio",
  "tracing",
  "uuid",
@@ -1702,7 +1701,6 @@ dependencies = [
  "serde",
  "serde_json",
  "sqlx",
- "time",
  "tokio",
  "tracing",
  "tracing-subscriber",

----------------------------------------

@@ -2,6 +2,7 @@
 members = [
     "./crates/*"
 ]
+resolver = "2"

 [workspace.dependencies]
 anyhow = "1"
@@ -19,7 +20,6 @@ serde = { version = "1.0.196", features = ["derive"] }
 serde_json = "1.0.117"
 signal-hook = "0.3.17"
 sqlx = { version = "0.8.2", features = ["runtime-tokio", "postgres", "time", "macros", "uuid"] }
-time = "0.3.34"
 tokio = { version = "1.36.0", features = ["full"] }
 tracing = "0.1.40"
 tracing-subscriber = { version = "0.3.16", features = ["env-filter", "json"] }

----------------------------------------

@@ -165,6 +165,8 @@ public class HttpDiscordCache: IDiscordCache
     //     return hres;
     // }

+    // it's fine
+    #pragma warning disable CS8603
     public async Task<IEnumerable<Channel>> GetGuildChannels(ulong guildId)
     {
         var hres = await QueryCache<IEnumerable<Channel>>($"/guilds/{guildId}/channels", guildId);

----------------------------------------

@@ -70,7 +70,7 @@ public class ShardConnection: IAsyncDisposable
             var (_, packet) = await _serializer.ReadPacket(_client);
             return packet;
         }
-        catch (Exception e)
+        catch (Exception)
         {
             // these are never useful
             // _logger.Error(e, "Shard {ShardId}: Error reading from WebSocket");

----------------------------------------

@@ -159,7 +159,7 @@ public class DiscordApiClient
     public Task<Channel> CreateDm(ulong recipientId) =>
         _client.Post<Channel>("/users/@me/channels", ("CreateDM", default), new CreateDmRequest(recipientId))!;

-    public Task<RefreshedUrlsResponse> RefreshUrls(string[] urls) =>
+    public Task<RefreshedUrlsResponse?> RefreshUrls(string[] urls) =>
         _client.Post<RefreshedUrlsResponse>("/attachments/refresh-urls", ("RefreshUrls", default), new RefreshUrlsRequest(urls));

     private static string EncodeEmoji(Emoji emoji) =>

----------------------------------------

@@ -1,6 +1,8 @@
 use axum::http::StatusCode;
 use std::fmt;

+// todo
+#[allow(dead_code)]
 #[derive(Debug)]
 pub struct PKError {
     pub response_code: StatusCode,
@@ -16,6 +18,7 @@ impl fmt::Display for PKError {
 impl std::error::Error for PKError {}

+#[allow(unused_macros)]
 macro_rules! define_error {
     ( $name:ident, $response_code:expr, $json_code:expr, $message:expr ) => {
         const $name: PKError = PKError {
@@ -26,4 +29,4 @@
     };
 }

-define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
+// define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
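
For context on the attribute changes above: Rust's #[allow(...)] attributes silence a specific lint for the annotated item only, which is the usual way to keep intentionally-unused code around without compile warnings. A minimal sketch of the two lints touched here, with illustrative item names not taken from the codebase:

    // dead_code fires on items that are never used; allowing it on one
    // item keeps the warning enabled for the rest of the crate.
    #[allow(dead_code)]
    struct KeptForLater {
        field: u32,
    }

    // unused_macros is the macro equivalent, for macros with no call sites yet.
    #[allow(unused_macros)]
    macro_rules! not_called_yet {
        () => {};
    }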

----------------------------------------

@@ -20,6 +20,7 @@ pub fn header_or_unknown(header: Option<&HeaderValue>) -> &str {
     }
 }

+#[allow(dead_code)]
 pub fn wrapper<F>(handler: F) -> impl Fn() -> axum::response::Response
 where
     F: Fn() -> anyhow::Result<Value>,

----------------------------------------

@@ -15,7 +15,6 @@ futures = { workspace = true }
 reqwest = { workspace = true }
 serde = { workspace = true }
 sqlx = { workspace = true }
-time = { workspace = true }
 tokio = { workspace = true }
 tracing = { workspace = true }
 uuid = { workspace = true }

----------------------------------------

@@ -1,5 +1,5 @@
 mod hash;
-mod migrate;
+// mod migrate;
 mod process;
 mod pull;
 mod store;

----------------------------------------

@@ -1,7 +1,7 @@
 use image::{DynamicImage, ImageFormat};
 use std::borrow::Cow;
 use std::io::Cursor;
-use time::Instant;
+use std::time::Instant;
 use tracing::{debug, error, info, instrument};

 use crate::{hash::Hash, ImageKind, PKAvatarError};
@@ -100,10 +100,10 @@ pub fn process(data: &[u8], kind: ImageKind) -> Result<ProcessOutput, PKAvatarEr
         "{}: lossy size {}K (parse: {} ms, decode: {} ms, resize: {} ms, encode: {} ms)",
         encoded.hash,
         encoded.data.len() / 1024,
-        (time_after_parse - time_before).whole_milliseconds(),
-        (time_after_decode - time_after_parse).whole_milliseconds(),
-        (time_after_resize - time_after_decode).whole_milliseconds(),
-        (time_after - time_after_resize).whole_milliseconds(),
+        (time_after_parse - time_before).as_millis(),
+        (time_after_decode - time_after_parse).as_millis(),
+        (time_after_resize - time_after_decode).as_millis(),
+        (time_after - time_after_resize).as_millis(),
     );

     debug!(
@@ -198,7 +198,7 @@ fn process_gif_inner(
         hash,
         original_data.buffer().len() / 1024,
         data.len() / 1024,
-        (time_after - time_before).whole_milliseconds(),
+        (time_after - time_before).as_millis(),
         frame_count
     );
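
The hunks above swap the time crate's Instant for std::time::Instant. Subtracting two std Instants yields a std::time::Duration, whose whole-millisecond accessor is as_millis() (returning u128), whereas the time crate's Duration exposes whole_milliseconds() (returning i128); hence the mechanical renames. A minimal sketch of the std idiom, with illustrative names:

    use std::time::Instant;

    fn main() {
        let before = Instant::now();
        std::thread::sleep(std::time::Duration::from_millis(5)); // stand-in for real work
        // Instant - Instant gives a std::time::Duration.
        let elapsed = Instant::now() - before;
        println!("took {} ms", elapsed.as_millis());
    }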

----------------------------------------

@@ -1,14 +1,14 @@
-use std::time::Duration;
 use std::{str::FromStr, sync::Arc};

 use crate::PKAvatarError;
 use anyhow::Context;
-use reqwest::{Client, ClientBuilder, StatusCode, Url};
-use time::Instant;
+use reqwest::{Client, StatusCode, Url};
+use std::time::Instant;
 use tracing::{error, instrument};

 const MAX_SIZE: u64 = 8 * 1024 * 1024;

+#[allow(dead_code)]
 pub struct PullResult {
     pub data: Vec<u8>,
     pub content_type: String,
@@ -85,16 +85,16 @@ pub async fn pull(
             "{}: {} (headers: {}ms, body: {}ms)",
             status,
             &trimmed_url,
-            headers_time.whole_milliseconds(),
-            body_time.whole_milliseconds()
+            headers_time.as_millis(),
+            body_time.as_millis()
         );
     } else {
         tracing::info!(
             "{}: {} (headers: {}ms, body: {}ms)",
             status,
             &trimmed_url,
-            headers_time.whole_milliseconds(),
-            body_time.whole_milliseconds()
+            headers_time.as_millis(),
+            body_time.as_millis()
         );
     };
@@ -105,6 +105,7 @@ pub async fn pull(
     })
 }

+#[allow(dead_code)]
 #[derive(Debug)]
 pub struct ParsedUrl {
     pub channel_id: u64,

----------------------------------------

@@ -54,6 +54,7 @@ struct DispatchRequest {
     test: Option<String>,
 }

+#[allow(dead_code)]
 #[derive(Debug)]
 enum DispatchResponse {
     OK,

----------------------------------------

@@ -12,7 +12,6 @@ sentry = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 sqlx = { workspace = true }
-time = { workspace = true }
 tokio = { workspace = true }
 tracing = { workspace = true }
 tracing-subscriber = { workspace = true}

----------------------------------------

@@ -1,6 +1,5 @@
 use serde::{Deserialize, Serialize};
-use sqlx::FromRow;
-use time::OffsetDateTime;
+use sqlx::{types::chrono::NaiveDateTime, FromRow};
 use uuid::Uuid;

 #[derive(FromRow)]
@@ -12,7 +11,7 @@ pub struct ImageMeta {
     pub file_size: i32,
     pub width: i32,
     pub height: i32,
-    pub uploaded_at: Option<OffsetDateTime>,
+    pub uploaded_at: Option<NaiveDateTime>,
     pub original_url: Option<String>,
     pub original_attachment_id: Option<i64>,
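
This model swaps time::OffsetDateTime for chrono::NaiveDateTime via sqlx's re-export. With sqlx's "chrono" feature enabled (added to this crate's Cargo.toml at the end of this diff), NaiveDateTime decodes from Postgres TIMESTAMP (without time zone) columns; DateTime<Utc> would be the pick for TIMESTAMPTZ. A hedged sketch, with an illustrative struct name:

    use sqlx::types::chrono::NaiveDateTime;

    // sqlx's "chrono" feature maps NaiveDateTime to Postgres TIMESTAMP;
    // Option<_> covers a nullable column.
    #[derive(sqlx::FromRow)]
    struct UploadRow {
        uploaded_at: Option<NaiveDateTime>,
    }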

----------------------------------------

@@ -112,7 +112,7 @@ pub fn pk_model(
     let tfields = mk_tfields(fields.clone());
     let from_json = mk_tfrom_json(fields.clone());
-    let from_sql = mk_tfrom_sql(fields.clone());
+    let _from_sql = mk_tfrom_sql(fields.clone());
     let to_json = mk_tto_json(fields.clone());

     let fields: Vec<ModelField> = fields
@@ -182,10 +182,10 @@ fn mk_tfields(fields: Vec<ModelField>) -> TokenStream {
         })
         .collect()
 }

-fn mk_tfrom_json(fields: Vec<ModelField>) -> TokenStream {
+fn mk_tfrom_json(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
 }

-fn mk_tfrom_sql(fields: Vec<ModelField>) -> TokenStream {
+fn mk_tfrom_sql(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
 }

 fn mk_tto_json(fields: Vec<ModelField>) -> TokenStream {
@@ -248,12 +248,12 @@ fn mk_patch_fields(fields: Vec<ModelField>) -> TokenStream {
 fn mk_patch_validate(_fields: Vec<ModelField>) -> TokenStream {
     quote! { true }
 }

-fn mk_patch_from_json(fields: Vec<ModelField>) -> TokenStream {
+fn mk_patch_from_json(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
 }

-fn mk_patch_to_sql(fields: Vec<ModelField>) -> TokenStream {
+fn mk_patch_to_sql(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
 }

-fn mk_patch_to_json(fields: Vec<ModelField>) -> TokenStream {
+fn mk_patch_to_json(_fields: Vec<ModelField>) -> TokenStream {
     quote! { unimplemented!(); }
 }
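
The renames above use the standard Rust convention for unused bindings: a leading underscore on a variable or parameter tells the compiler the value is intentionally unused, silencing the unused_variables warning without changing the function's signature or behavior. A minimal illustration with hypothetical names:

    fn build_stub(_fields: Vec<String>) -> usize {
        // _fields is kept for signature compatibility but not (yet) read;
        // the underscore prefix suppresses the unused-variable lint.
        0
    }

    fn main() {
        // Unused locals get the same treatment.
        let _result = build_stub(vec!["name".to_string()]);
    }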

----------------------------------------

@@ -9,5 +9,7 @@ model_macros = { path = "../model_macros" }
 sea-query = "0.32.1"
 serde = { workspace = true }
 serde_json = { workspace = true, features = ["preserve_order"] }
-sqlx = { workspace = true, default-features = false, features = ["chrono"] }
+# in theory we want to default-features = false for sqlx
+# but cargo doesn't seem to support this
+sqlx = { workspace = true, features = ["chrono"] }
 uuid = { workspace = true }