chore: clean up compile warnings

This commit is contained in:
alyssa 2025-01-02 02:37:15 +00:00
parent 16ce67e02c
commit 89d6481ddb
16 changed files with 37 additions and 32 deletions

View file

@ -1,6 +1,8 @@
use axum::http::StatusCode;
use std::fmt;
// TODO: remove the #[allow(dead_code)] below once PKError's fields are actually read
#[allow(dead_code)]
#[derive(Debug)]
pub struct PKError {
pub response_code: StatusCode,
@ -16,6 +18,7 @@ impl fmt::Display for PKError {
impl std::error::Error for PKError {}
#[allow(unused_macros)]
macro_rules! define_error {
( $name:ident, $response_code:expr, $json_code:expr, $message:expr ) => {
const $name: PKError = PKError {
@ -26,4 +29,4 @@ macro_rules! define_error {
};
}
define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }
// define_error! { GENERIC_BAD_REQUEST, StatusCode::BAD_REQUEST, 0, "400: Bad Request" }

View file

@ -20,6 +20,7 @@ pub fn header_or_unknown(header: Option<&HeaderValue>) -> &str {
}
}
#[allow(dead_code)]
pub fn wrapper<F>(handler: F) -> impl Fn() -> axum::response::Response
where
F: Fn() -> anyhow::Result<Value>,

View file

@ -15,7 +15,6 @@ futures = { workspace = true }
reqwest = { workspace = true }
serde = { workspace = true }
sqlx = { workspace = true }
time = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
uuid = { workspace = true }

View file

@ -1,5 +1,5 @@
mod hash;
mod migrate;
// mod migrate;
mod process;
mod pull;
mod store;

View file

@ -1,7 +1,7 @@
use image::{DynamicImage, ImageFormat};
use std::borrow::Cow;
use std::io::Cursor;
use time::Instant;
use std::time::Instant;
use tracing::{debug, error, info, instrument};
use crate::{hash::Hash, ImageKind, PKAvatarError};
@ -100,10 +100,10 @@ pub fn process(data: &[u8], kind: ImageKind) -> Result<ProcessOutput, PKAvatarEr
"{}: lossy size {}K (parse: {} ms, decode: {} ms, resize: {} ms, encode: {} ms)",
encoded.hash,
encoded.data.len() / 1024,
(time_after_parse - time_before).whole_milliseconds(),
(time_after_decode - time_after_parse).whole_milliseconds(),
(time_after_resize - time_after_decode).whole_milliseconds(),
(time_after - time_after_resize).whole_milliseconds(),
(time_after_parse - time_before).as_millis(),
(time_after_decode - time_after_parse).as_millis(),
(time_after_resize - time_after_decode).as_millis(),
(time_after - time_after_resize).as_millis(),
);
debug!(
@ -198,7 +198,7 @@ fn process_gif_inner(
hash,
original_data.buffer().len() / 1024,
data.len() / 1024,
(time_after - time_before).whole_milliseconds(),
(time_after - time_before).as_millis(),
frame_count
);

View file

@ -1,14 +1,14 @@
use std::time::Duration;
use std::{str::FromStr, sync::Arc};
use crate::PKAvatarError;
use anyhow::Context;
use reqwest::{Client, ClientBuilder, StatusCode, Url};
use time::Instant;
use reqwest::{Client, StatusCode, Url};
use std::time::Instant;
use tracing::{error, instrument};
const MAX_SIZE: u64 = 8 * 1024 * 1024;
#[allow(dead_code)]
pub struct PullResult {
pub data: Vec<u8>,
pub content_type: String,
@ -85,16 +85,16 @@ pub async fn pull(
"{}: {} (headers: {}ms, body: {}ms)",
status,
&trimmed_url,
headers_time.whole_milliseconds(),
body_time.whole_milliseconds()
headers_time.as_millis(),
body_time.as_millis()
);
} else {
tracing::info!(
"{}: {} (headers: {}ms, body: {}ms)",
status,
&trimmed_url,
headers_time.whole_milliseconds(),
body_time.whole_milliseconds()
headers_time.as_millis(),
body_time.as_millis()
);
};
@ -105,6 +105,7 @@ pub async fn pull(
})
}
#[allow(dead_code)]
#[derive(Debug)]
pub struct ParsedUrl {
pub channel_id: u64,

View file

@ -54,6 +54,7 @@ struct DispatchRequest {
test: Option<String>,
}
#[allow(dead_code)]
#[derive(Debug)]
enum DispatchResponse {
OK,

View file

@ -12,7 +12,6 @@ sentry = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sqlx = { workspace = true }
time = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true}

View file

@ -1,6 +1,5 @@
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use time::OffsetDateTime;
use sqlx::{types::chrono::NaiveDateTime, FromRow};
use uuid::Uuid;
#[derive(FromRow)]
@ -12,7 +11,7 @@ pub struct ImageMeta {
pub file_size: i32,
pub width: i32,
pub height: i32,
pub uploaded_at: Option<OffsetDateTime>,
pub uploaded_at: Option<NaiveDateTime>,
pub original_url: Option<String>,
pub original_attachment_id: Option<i64>,

View file

@ -112,7 +112,7 @@ pub fn pk_model(
let tfields = mk_tfields(fields.clone());
let from_json = mk_tfrom_json(fields.clone());
let from_sql = mk_tfrom_sql(fields.clone());
let _from_sql = mk_tfrom_sql(fields.clone());
let to_json = mk_tto_json(fields.clone());
let fields: Vec<ModelField> = fields
@ -182,10 +182,10 @@ fn mk_tfields(fields: Vec<ModelField>) -> TokenStream {
})
.collect()
}
fn mk_tfrom_json(fields: Vec<ModelField>) -> TokenStream {
fn mk_tfrom_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}
fn mk_tfrom_sql(fields: Vec<ModelField>) -> TokenStream {
fn mk_tfrom_sql(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}
fn mk_tto_json(fields: Vec<ModelField>) -> TokenStream {
@ -248,12 +248,12 @@ fn mk_patch_fields(fields: Vec<ModelField>) -> TokenStream {
fn mk_patch_validate(_fields: Vec<ModelField>) -> TokenStream {
quote! { true }
}
fn mk_patch_from_json(fields: Vec<ModelField>) -> TokenStream {
fn mk_patch_from_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}
fn mk_patch_to_sql(fields: Vec<ModelField>) -> TokenStream {
fn mk_patch_to_sql(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}
fn mk_patch_to_json(fields: Vec<ModelField>) -> TokenStream {
fn mk_patch_to_json(_fields: Vec<ModelField>) -> TokenStream {
quote! { unimplemented!(); }
}

View file

@ -9,5 +9,7 @@ model_macros = { path = "../model_macros" }
sea-query = "0.32.1"
serde = { workspace = true }
serde_json = { workspace = true, features = ["preserve_order"] }
sqlx = { workspace = true, default-features = false, features = ["chrono"] }
# in theory we want to set default-features = false for sqlx,
# but cargo doesn't seem to support this
sqlx = { workspace = true, features = ["chrono"] }
uuid = { workspace = true }