chore(rust): correctly format values in errors

commit 7737850afb (parent 347add8998)
Author: alyssa
Date:   2025-05-17 15:05:37 +00:00

15 changed files with 54 additions and 62 deletions
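
The pattern adopted throughout this commit is tracing's structured field capture: rather than interpolating the error into the message with a format string, the error is recorded as a field (the `?error` shorthand logs the value via its Debug impl), so subscribers can emit and filter it as structured data instead of parsing it out of the message text. A minimal before/after sketch, assuming only the `tracing` crate (the function and variable names here are illustrative, not from the commit):

    use tracing::error;

    fn sketch(err: std::io::Error) {
        // before: the error is flattened into the message string
        error!("failed to run job: {:?}", err);

        // after: the error is a separate structured field, recorded via Debug;
        // `?error` is shorthand for `error = ?error`
        let error = err;
        error!(?error, "failed to run job");
    }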

@@ -47,8 +47,8 @@ async fn rproxy(
         .rproxy_client
         .request(req)
         .await
-        .map_err(|err| {
-            error!("failed to serve reverse proxy to dotnet-api: {:?}", err);
+        .map_err(|error| {
+            error!(?error, "failed to serve reverse proxy to dotnet-api");
             StatusCode::BAD_GATEWAY
         })?
         .into_response())

@@ -52,7 +52,7 @@ pub fn ratelimiter<F, T>(f: F) -> FromFnLayer<F, Option<RedisPool>, T> {
             .await
             {
                 Ok(_) => info!("connected to redis for request rate limiting"),
-                Err(err) => error!("could not load redis script: {}", err),
+                Err(error) => error!(?error, "could not load redis script"),
             }
         } else {
             error!("could not wait for connection to load redis script!");
@@ -212,8 +212,8 @@ pub async fn do_request_ratelimited(
                     return response;
                 }
-                Err(err) => {
-                    tracing::error!("error getting ratelimit info: {}", err);
+                Err(error) => {
+                    tracing::error!(?error, "error getting ratelimit info");
                     return json_err(
                         StatusCode::INTERNAL_SERVER_ERROR,
                         r#"{"message": "500: internal server error", "code": 0}"#.to_string(),

@@ -11,7 +11,7 @@ pub fn header_or_unknown(header: Option<&HeaderValue>) -> &str {
        match value.to_str() {
            Ok(v) => v,
            Err(err) => {
-                error!("failed to parse header value {:#?}: {:#?}", value, err);
+                error!(?err, ?value, "failed to parse header value");
                "failed to parse"
            }
        }
@@ -34,11 +34,7 @@ where
                .unwrap(),
            ),
            None => {
-                error!(
-                    "error in handler {}: {:#?}",
-                    std::any::type_name::<F>(),
-                    error
-                );
+                error!(?error, "error in handler {}", std::any::type_name::<F>(),);
                json_err(
                    StatusCode::INTERNAL_SERVER_ERROR,
                    r#"{"message": "500: Internal Server Error", "code": 0}"#.to_string(),
@@ -48,8 +44,8 @@ where
    }
}
 
-pub fn handle_panic(err: Box<dyn std::any::Any + Send + 'static>) -> axum::response::Response {
-    error!("caught panic from handler: {:#?}", err);
+pub fn handle_panic(error: Box<dyn std::any::Any + Send + 'static>) -> axum::response::Response {
+    error!(?error, "caught panic from handler");
    json_err(
        StatusCode::INTERNAL_SERVER_ERROR,
        r#"{"message": "500: Internal Server Error", "code": 0}"#.to_string(),

@@ -38,8 +38,8 @@ async fn real_main() -> anyhow::Result<()> {
            tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
            match cleanup_job(pool.clone(), bucket.clone()).await {
                Ok(()) => {}
-                Err(err) => {
-                    error!("failed to run avatar cleanup job: {}", err);
+                Err(error) => {
+                    error!(?error, "failed to run avatar cleanup job");
                    // sentry
                }
            }

@@ -232,26 +232,11 @@ async fn real_main() -> anyhow::Result<()> {
    Ok(())
}
 
-struct AppError(anyhow::Error);
-
#[derive(Serialize)]
struct ErrorResponse {
    error: String,
}
 
-impl IntoResponse for AppError {
-    fn into_response(self) -> Response {
-        error!("error handling request: {}", self.0);
-        (
-            StatusCode::INTERNAL_SERVER_ERROR,
-            Json(ErrorResponse {
-                error: self.0.to_string(),
-            }),
-        )
-            .into_response()
-    }
-}
-
impl IntoResponse for PKAvatarError {
    fn into_response(self) -> Response {
        let status_code = match self {
@@ -278,12 +263,3 @@ impl IntoResponse for PKAvatarError {
            .into_response()
    }
}
-
-impl<E> From<E> for AppError
-where
-    E: Into<anyhow::Error>,
-{
-    fn from(err: E) -> Self {
-        Self(err.into())
-    }
-}

@@ -129,9 +129,9 @@ pub async fn worker(worker_id: u32, state: Arc<AppState>) {
            Ok(()) => {}
            Err(e) => {
                error!(
-                    "error in migrate worker {}: {}",
-                    worker_id,
-                    e.source().unwrap_or(&e)
+                    error = e.source().unwrap_or(&e),
+                    ?worker_id,
+                    "error in migrate worker",
                );
                tokio::time::sleep(Duration::from_secs(5)).await;
            }

@@ -84,7 +84,7 @@ pub fn process(data: &[u8], kind: ImageKind) -> Result<ProcessOutput, PKAvatarEr
    } else {
        reader.decode().map_err(|e| {
            // print the ugly error, return the nice error
-            error!("error decoding image: {}", e);
+            error!(error = format!("{e:#?}"), "error decoding image");
            PKAvatarError::ImageFormatError(e)
        })?
    };

@@ -41,7 +41,11 @@ pub async fn pull(
            }
        }
-        error!("network error for {}: {}", parsed_url.full_url, s);
+        error!(
+            url = parsed_url.full_url,
+            error = s,
+            "network error pulling image"
+        );
        PKAvatarError::NetworkErrorString(s)
    })?;
    let time_after_headers = Instant::now();
@@ -82,7 +86,22 @@ pub async fn pull(
        .map(|x| x.to_string());
    let body = response.bytes().await.map_err(|e| {
-        error!("network error for {}: {}", parsed_url.full_url, e);
+        // terrible
+        let mut s = format!("{}", e);
+        if let Some(src) = e.source() {
+            let _ = write!(s, ": {}", src);
+            let mut err = src;
+            while let Some(src) = err.source() {
+                let _ = write!(s, ": {}", src);
+                err = src;
+            }
+        }
+        error!(
+            url = parsed_url.full_url,
+            error = s,
+            "network error pulling image"
+        );
        PKAvatarError::NetworkError(e)
    })?;
    if body.len() != size as usize {

@@ -86,11 +86,11 @@ async fn dispatch(
    let uri = match req.url.parse::<Uri>() {
        Ok(v) if v.scheme_str() == Some("https") && v.host().is_some() => v,
        Err(error) => {
-            error!(?error, "failed to parse uri {}", req.url);
+            error!(?error, uri = req.url, "failed to parse uri");
            return DispatchResponse::BadData.to_string();
        }
        _ => {
-            error!("uri {} is invalid", req.url);
+            error!(uri = req.url, "uri is invalid");
            return DispatchResponse::BadData.to_string();
        }
    };

@@ -124,7 +124,7 @@ pub async fn runner(
                    .increment(1);
                if let Err(error) = shard_state.socket_closed(shard_id).await {
-                    error!("failed to update shard state for socket closure: {error}");
+                    error!(?error, "failed to update shard state for socket closure");
                }
                continue;
@@ -145,7 +145,7 @@ pub async fn runner(
                    continue;
                }
                Err(error) => {
-                    error!("shard {shard_id} failed to parse gateway event: {error}");
+                    error!(?error, ?shard_id, "failed to parse gateway event");
                    continue;
                }
            };

@@ -78,8 +78,8 @@ async fn request_inner(redis: RedisPool, concurrency: u32, shard_id: u32, tx: on
            Ok(None) => {
                // not allowed yet, waiting
            }
-            Err(e) => {
-                error!(shard_id, bucket, "error getting shard allowance: {}", e)
+            Err(error) => {
+                error!(?error, ?shard_id, ?bucket, "error getting shard allowance")
            }
        }

@@ -129,7 +129,7 @@ async fn real_main() -> anyhow::Result<()> {
                }
            }
            Err(error) => {
-                error!(error = ?error, "failed to request event target");
+                error!(?error, "failed to request event target");
            }
        }
    }

@@ -42,8 +42,8 @@ async fn real_main() -> anyhow::Result<()> {
            tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
            match run_job(db.clone(), client.clone()).await {
                Ok(()) => {}
-                Err(err) => {
-                    error!("failed to run messages gdpr job: {:?}", err);
+                Err(error) => {
+                    error!(?error, "failed to run messages gdpr job");
                }
            }
        }
@@ -131,8 +131,10 @@ async fn run_job(pool: sqlx::PgPool, discord: Arc<twilight_http::Client>) -> any
                }
                _ => {
                    error!(
-                        "got unknown error deleting message {}: status={status}, code={code}",
-                        message.mid
+                        ?status,
+                        ?code,
+                        message_id = message.mid,
+                        "got unknown error deleting message",
                    );
                }
            }

@@ -81,12 +81,12 @@ macro_rules! main {
                .build()
                .unwrap()
                .block_on(async {
-                    if let Err(err) = libpk::init_metrics() {
-                        tracing::error!("failed to init metrics collector: {err}");
+                    if let Err(error) = libpk::init_metrics() {
+                        tracing::error!(?error, "failed to init metrics collector");
                    };
                    tracing::info!("hello world");
-                    if let Err(err) = real_main().await {
-                        tracing::error!("failed to run service: {err}");
+                    if let Err(error) = real_main().await {
+                        tracing::error!(?error, "failed to run service");
                    };
                });
                Ok(())

@@ -74,8 +74,7 @@
            info!("running {}", $desc);
            let before = std::time::Instant::now();
            if let Err(error) = $fn(ctx).await {
-                error!("failed to run {}: {}", $desc, error);
-                // sentry
+                error!(?error, "failed to run {}", $desc);
            }
            let duration = before.elapsed();
            info!("ran {} in {duration:?}", $desc);