mirror of https://github.com/PluralKit/PluralKit.git
synced 2026-02-04 13:06:50 +00:00
chore: bump rust edition to 2024
Some checks failed
Build dashboard Docker image / dashboard docker build (push) Has been cancelled
Build and push Docker image / .net docker build (push) Has been cancelled
.net checks / run .net tests (push) Has been cancelled
.net checks / dotnet-format (push) Has been cancelled
Build and push Rust service Docker images / rust docker build (push) Has been cancelled
rust checks / cargo fmt (push) Has been cancelled
This commit is contained in:
parent 68a8a9d67e
commit f848e260db
38 changed files with 64 additions and 66 deletions
@@ -4,7 +4,7 @@ WORKDIR /build
 
 RUN apk add rustup build-base
 # todo: arm64 target
-RUN rustup-init --default-host x86_64-unknown-linux-musl --default-toolchain nightly-2024-08-20 --profile default -y
+RUN rustup-init --default-host x86_64-unknown-linux-musl --default-toolchain nightly-2025-08-22 --profile default -y
 
 ENV PATH=/root/.cargo/bin:$PATH
 ENV RUSTFLAGS='-C link-arg=-s'
@@ -1,7 +1,7 @@
 [package]
 name = "api"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 pluralkit_models = { path = "../models" }
@@ -4,9 +4,10 @@ use fred::interfaces::*;
 use libpk::state::ShardState;
 use pk_macros::api_endpoint;
 use serde::Deserialize;
-use serde_json::{json, Value};
+use serde_json::{Value, json};
 use std::collections::HashMap;
 
+#[allow(dead_code)]
 #[derive(Deserialize)]
 #[serde(rename_all = "PascalCase")]
 struct ClusterStats {
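Most of the source-level churn in this commit is import re-sorting like the serde_json change above: under edition 2024, cargo fmt's matching style edition sorts the names inside a use group so that ASCII-uppercase-initial items (types, traits, constants) come before lowercase module paths. A minimal sketch of the new ordering, using a throwaway example rather than code from this repository:

// Under the 2024 style edition, `Value` sorts before `json`, so rustfmt
// rewrites `use serde_json::{json, Value};` into the form below.
use serde_json::{Value, json};

fn main() {
    let v: Value = json!({ "edition": 2024 });
    println!("{v}");
}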
@@ -1,11 +1,11 @@
-use axum::{extract::State, response::IntoResponse, Extension, Json};
+use axum::{Extension, Json, extract::State, response::IntoResponse};
 use pk_macros::api_endpoint;
-use serde_json::{json, Value};
+use serde_json::{Value, json};
 use sqlx::Postgres;
 
 use pluralkit_models::{PKSystem, PKSystemConfig, PrivacyLevel};
 
-use crate::{auth::AuthState, error::fail, ApiContext};
+use crate::{ApiContext, auth::AuthState, error::fail};
 
 #[api_endpoint]
 pub async fn get_system_settings(
@@ -1,16 +1,14 @@
-#![feature(let_chains)]
-
 use auth::{AuthState, INTERNAL_APPID_HEADER, INTERNAL_SYSTEMID_HEADER};
 use axum::{
+    Extension, Router,
     body::Body,
     extract::{Request as ExtractRequest, State},
     http::Uri,
     response::{IntoResponse, Response},
     routing::{delete, get, patch, post},
-    Extension, Router,
 };
 use hyper_util::{
-    client::legacy::{connect::HttpConnector, Client},
+    client::legacy::{Client, connect::HttpConnector},
     rt::TokioExecutor,
 };
 use tracing::info;
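The dropped #![feature(let_chains)] attribute above (and in several other crates later in this diff) is no longer needed: let chains are stable when compiling with edition 2024 (stabilized in Rust 1.88). A small illustrative sketch, not taken from this repository:

fn main() {
    let a = Some(2);
    let b = Some(5);
    // An if-let chain: compiles on stable Rust under edition 2024 without
    // any feature attribute.
    if let Some(x) = a
        && let Some(y) = b
        && x < y
    {
        println!("{x} < {y}");
    }
}

The gateway crate below still keeps other feature gates (if_let_guard, duration_constructors), so a pinned nightly toolchain remains, but the let_chains gate itself is obsolete.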
@@ -10,7 +10,7 @@ use subtle::ConstantTimeEq;
 use tracing::error;
 
 use crate::auth::AuthState;
-use crate::{util::json_err, ApiContext};
+use crate::{ApiContext, util::json_err};
 
 pub async fn auth(State(ctx): State<ApiContext>, mut req: Request, next: Next) -> Response {
     let mut authed_system_id: Option<i32> = None;
@@ -2,7 +2,7 @@ use std::time::Instant;
 
 use axum::{extract::MatchedPath, extract::Request, middleware::Next, response::Response};
 use metrics::{counter, histogram};
-use tracing::{info, span, warn, Instrument, Level};
+use tracing::{Instrument, Level, info, span, warn};
 
 use crate::{auth::AuthState, util::header_or_unknown};
 
@@ -6,11 +6,11 @@ use axum::{
     routing::url_params::UrlParams,
 };
 
-use sqlx::{types::Uuid, Postgres};
+use sqlx::{Postgres, types::Uuid};
 use tracing::error;
 
 use crate::auth::AuthState;
-use crate::{util::json_err, ApiContext};
+use crate::{ApiContext, util::json_err};
 use pluralkit_models::PKSystem;
 
 // move this somewhere else
@@ -31,7 +31,7 @@ pub async fn params(State(ctx): State<ApiContext>, mut req: Request, next: Next)
                 StatusCode::BAD_REQUEST,
                 r#"{"message":"400: Bad Request","code": 0}"#.to_string(),
             )
-                .into()
+                .into();
         }
     };
 
@@ -3,7 +3,7 @@ use axum::{
     http::{HeaderValue, StatusCode},
     response::IntoResponse,
 };
-use serde_json::{json, to_string, Value};
+use serde_json::{Value, json, to_string};
 use tracing::error;
 
 pub fn header_or_unknown(header: Option<&HeaderValue>) -> &str {
@@ -1,7 +1,7 @@
 [package]
 name = "avatars"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [[bin]]
 name = "avatar_cleanup"
@@ -8,10 +8,10 @@ use anyhow::Context;
 use axum::extract::State;
 use axum::routing::get;
 use axum::{
+    Json, Router,
     http::StatusCode,
     response::{IntoResponse, Response},
     routing::post,
-    Json, Router,
 };
 use libpk::_config::AvatarsConfig;
 use libpk::db::repository::avatars as db;
@@ -153,7 +153,7 @@ async fn verify(
     )
     .await?;
 
-    let encoded = process::process_async(result.data, req.kind).await?;
+    process::process_async(result.data, req.kind).await?;
 
     Ok(())
 }
@@ -4,7 +4,7 @@ use std::io::Cursor;
 use std::time::Instant;
 use tracing::{debug, error, info, instrument};
 
-use crate::{hash::Hash, ImageKind, PKAvatarError};
+use crate::{ImageKind, PKAvatarError, hash::Hash};
 
 const MAX_DIMENSION: u32 = 4000;
 
@@ -62,7 +62,7 @@ pub async fn pull(
     let size = match response.content_length() {
         None => return Err(PKAvatarError::MissingHeader("Content-Length")),
         Some(size) if size > MAX_SIZE => {
-            return Err(PKAvatarError::ImageFileSizeTooLarge(size, MAX_SIZE))
+            return Err(PKAvatarError::ImageFileSizeTooLarge(size, MAX_SIZE));
         }
         Some(size) => size,
     };
@@ -162,7 +162,7 @@ pub fn parse_url(url: &str) -> anyhow::Result<ParsedUrl> {
                 attachment_id: 0,
                 filename: "".to_string(),
                 full_url: url.to_string(),
-            })
+            });
         }
         _ => anyhow::bail!("not a discord cdn url"),
     }
@@ -1,7 +1,7 @@
 [package]
 name = "dispatch"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 anyhow = { workspace = true }
@@ -1,7 +1,7 @@
 use std::time::Instant;
 
 use axum::{extract::MatchedPath, extract::Request, middleware::Next, response::Response};
-use tracing::{info, span, warn, Instrument, Level};
+use tracing::{Instrument, Level, info, span, warn};
 
 // log any requests that take longer than 2 seconds
 // todo: change as necessary
@@ -5,17 +5,16 @@ use hickory_client::{
     rr::{DNSClass, Name, RData, RecordType},
     udp::UdpClientStream,
 };
-use reqwest::{redirect::Policy, StatusCode};
+use reqwest::{StatusCode, redirect::Policy};
 use std::{
     net::{Ipv4Addr, SocketAddr, SocketAddrV4},
     sync::Arc,
     time::Duration,
 };
 use tokio::{net::UdpSocket, sync::RwLock};
-use tracing::{debug, error, info};
-use tracing_subscriber::EnvFilter;
+use tracing::{debug, error};
 
-use axum::{extract::State, http::Uri, routing::post, Json, Router};
+use axum::{Json, Router, extract::State, http::Uri, routing::post};
 
 mod logger;
 
@@ -128,7 +127,7 @@ async fn dispatch(
 
     match res {
         Ok(res) if res.status() != 200 => {
-            return DispatchResponse::InvalidResponseCode(res.status()).to_string()
+            return DispatchResponse::InvalidResponseCode(res.status()).to_string();
         }
         Err(error) => {
             error!(?error, url = req.url.clone(), "failed to fetch");
@@ -1,7 +1,7 @@
 [package]
 name = "gateway"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 anyhow = { workspace = true }
@@ -1,18 +1,18 @@
 use axum::{
+    Router,
     extract::{ConnectInfo, Path, State},
     http::StatusCode,
     response::{IntoResponse, Response},
     routing::{delete, get, post},
-    Router,
 };
 use libpk::runtime_config::RuntimeConfig;
 use serde_json::{json, to_string};
 use tracing::{error, info};
-use twilight_model::id::{marker::ChannelMarker, Id};
+use twilight_model::id::{Id, marker::ChannelMarker};
 
 use crate::{
     discord::{
-        cache::{dm_channel, DiscordCache, DM_PERMISSIONS},
+        cache::{DM_PERMISSIONS, DiscordCache, dm_channel},
         gateway::cluster_config,
         shard_state::ShardStateManager,
     },
@@ -4,18 +4,18 @@ use serde::Serialize;
 use std::{collections::HashMap, sync::Arc};
 use tokio::sync::RwLock;
 use twilight_cache_inmemory::{
+    InMemoryCache, ResourceType,
     model::CachedMember,
     permission::{MemberRoles, RootError},
     traits::CacheableChannel,
-    InMemoryCache, ResourceType,
 };
 use twilight_gateway::Event;
 use twilight_model::{
     channel::{Channel, ChannelType},
     guild::{Guild, Member, Permissions},
     id::{
-        marker::{ChannelMarker, GuildMarker, MessageMarker, UserMarker},
         Id,
+        marker::{ChannelMarker, GuildMarker, MessageMarker, UserMarker},
     },
 };
 use twilight_util::permission_calculator::PermissionCalculator;
@@ -6,17 +6,17 @@ use std::sync::Arc;
 use tokio::sync::mpsc::Sender;
 use tracing::{error, info, warn};
 use twilight_gateway::{
-    create_iterator, CloseFrame, ConfigBuilder, Event, EventTypeFlags, Message, Shard, ShardId,
+    CloseFrame, ConfigBuilder, Event, EventTypeFlags, Message, Shard, ShardId, create_iterator,
 };
 use twilight_model::gateway::{
+    Intents,
     payload::outgoing::update_presence::UpdatePresencePayload,
     presence::{Activity, ActivityType, Status},
-    Intents,
 };
 
 use crate::{
-    discord::identify_queue::{self, RedisQueue},
     RUNTIME_CONFIG_KEY_EVENT_TARGET,
+    discord::identify_queue::{self, RedisQueue},
 };
 
 use super::cache::DiscordCache;
@@ -3,7 +3,7 @@
 // - interaction: (custom_id where not_includes "help-menu")
 
 use std::{
-    collections::{hash_map::Entry, HashMap},
+    collections::{HashMap, hash_map::Entry},
     net::{IpAddr, SocketAddr},
     time::Duration,
 };
@@ -15,8 +15,8 @@ use twilight_gateway::Event;
 use twilight_model::{
     application::interaction::InteractionData,
     id::{
-        marker::{ChannelMarker, MessageMarker, UserMarker},
         Id,
+        marker::{ChannelMarker, MessageMarker, UserMarker},
     },
 };
 
@@ -103,7 +103,13 @@ impl EventAwaiter {
                 }
             }
         }
-        info!("ran event_awaiter cleanup loop, took {}us, {} reactions, {} messages, {} interactions", Instant::now().duration_since(now).as_micros(), counts.0, counts.1, counts.2);
+        info!(
+            "ran event_awaiter cleanup loop, took {}us, {} reactions, {} messages, {} interactions",
+            Instant::now().duration_since(now).as_micros(),
+            counts.0,
+            counts.1,
+            counts.2
+        );
     }
 }
 
@@ -4,7 +4,7 @@ use axum::{
     extract::MatchedPath, extract::Request, http::StatusCode, middleware::Next, response::Response,
 };
 use metrics::{counter, histogram};
-use tracing::{info, span, warn, Instrument, Level};
+use tracing::{Instrument, Level, info, span, warn};
 
 // log any requests that take longer than 2 seconds
 // todo: change as necessary
@@ -1,4 +1,3 @@
-#![feature(let_chains)]
 #![feature(if_let_guard)]
 #![feature(duration_constructors)]
 
@@ -10,7 +9,7 @@ use libpk::{runtime_config::RuntimeConfig, state::ShardStateEvent};
 use reqwest::{ClientBuilder, StatusCode};
 use std::{sync::Arc, time::Duration, vec::Vec};
 use tokio::{
-    signal::unix::{signal, SignalKind},
+    signal::unix::{SignalKind, signal},
     sync::mpsc::channel,
     task::JoinSet,
 };
@@ -1,7 +1,7 @@
 [package]
 name = "gdpr_worker"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 libpk = { path = "../libpk" }
@@ -1,12 +1,10 @@
-#![feature(let_chains)]
-
 use sqlx::prelude::FromRow;
 use std::{sync::Arc, time::Duration};
 use tracing::{error, info, warn};
 use twilight_http::api_error::{ApiError, GeneralApiError};
 use twilight_model::id::{
-    marker::{ChannelMarker, MessageMarker},
     Id,
+    marker::{ChannelMarker, MessageMarker},
 };
 
 // create table messages_gdpr_jobs (mid bigint not null references messages(mid) on delete cascade, channel bigint not null);
@@ -1,7 +1,7 @@
 [package]
 name = "libpk"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 anyhow = { workspace = true }
@@ -3,7 +3,7 @@ use lazy_static::lazy_static;
 use serde::Deserialize;
 use std::sync::Arc;
 
-use twilight_model::id::{marker::UserMarker, Id};
+use twilight_model::id::{Id, marker::UserMarker};
 
 #[derive(Clone, Deserialize, Debug)]
 pub struct ClusterSettings {
@@ -151,11 +151,11 @@ lazy_static! {
         // hacks
         if let Ok(var) = std::env::var("NOMAD_ALLOC_INDEX")
             && std::env::var("pluralkit__discord__cluster__total_nodes").is_ok() {
-            std::env::set_var("pluralkit__discord__cluster__node_id", var);
+            unsafe { std::env::set_var("pluralkit__discord__cluster__node_id", var); }
         }
         if let Ok(var) = std::env::var("STATEFULSET_NAME_FOR_INDEX")
             && std::env::var("pluralkit__discord__cluster__total_nodes").is_ok() {
-            std::env::set_var("pluralkit__discord__cluster__node_id", var.split("-").last().unwrap());
+            unsafe { std::env::set_var("pluralkit__discord__cluster__node_id", var.split("-").last().unwrap()); }
        }
 
         Arc::new(Config::builder()
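The new unsafe blocks above reflect a Rust 2024 change: std::env::set_var (and remove_var) are unsafe functions in the new edition, because mutating the process environment can race with other threads reading it. A minimal sketch of the required pattern, with made-up names rather than this repository's configuration keys:

fn main() {
    // SAFETY: runs before any other threads are spawned, so nothing can be
    // reading the environment concurrently.
    unsafe {
        std::env::set_var("EXAMPLE_NODE_ID", "0");
    }
    assert_eq!(std::env::var("EXAMPLE_NODE_ID").as_deref(), Ok("0"));
}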
@@ -52,7 +52,7 @@ pub async fn remove_deletion_queue(pool: &PgPool, attachment_id: u64) -> anyhow:
 
 pub async fn pop_queue(
     pool: &PgPool,
-) -> anyhow::Result<Option<(Transaction<Postgres>, ImageQueueEntry)>> {
+) -> anyhow::Result<Option<(Transaction<'_, Postgres>, ImageQueueEntry)>> {
     let mut tx = pool.begin().await?;
     let res: Option<ImageQueueEntry> = sqlx::query_as("delete from image_queue where itemid = (select itemid from image_queue order by itemid for update skip locked limit 1) returning *")
         .fetch_optional(&mut *tx).await?;
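The '_ added above writes out the previously elided lifetime on Transaction, keeping it visible in the signature that the returned transaction borrows from pool. A simplified sketch with stand-in types (not sqlx's real definitions):

// Stand-in types, only to illustrate the signature style.
struct Pool;
#[allow(dead_code)]
struct Transaction<'c>(&'c Pool);

// `Transaction<'_>` spells out the borrow of `pool` instead of hiding the
// lifetime parameter entirely.
fn begin(pool: &Pool) -> Transaction<'_> {
    Transaction(pool)
}

fn main() {
    let pool = Pool;
    let _tx = begin(&pool);
}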
@@ -1,7 +1,7 @@
 use serde::{Deserialize, Serialize};
 use sqlx::{
-    types::chrono::{DateTime, Utc},
     FromRow,
+    types::chrono::{DateTime, Utc},
 };
 use uuid::Uuid;
 
@@ -1,9 +1,8 @@
-#![feature(let_chains)]
 use std::net::SocketAddr;
 
 use metrics_exporter_prometheus::PrometheusBuilder;
 use sentry::IntoDsn;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
+use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitExt};
 
 use sentry_tracing::event_from_event;
 
@@ -1,7 +1,7 @@
 [package]
 name = "pk_macros"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [lib]
 proc-macro = true
@@ -1,7 +1,7 @@
 use quote::quote;
-use syn::{parse_macro_input, FnArg, ItemFn, Pat};
+use syn::{FnArg, ItemFn, Pat, parse_macro_input};
 
-fn pretty_print(ts: &proc_macro2::TokenStream) -> String {
+fn _pretty_print(ts: &proc_macro2::TokenStream) -> String {
     let file = syn::parse_file(&ts.to_string()).unwrap();
     prettyplease::unparse(&file)
 }
@@ -1,6 +1,6 @@
 use proc_macro2::{Span, TokenStream};
 use quote::quote;
-use syn::{parse_macro_input, DeriveInput, Expr, Ident, Meta, Type};
+use syn::{DeriveInput, Expr, Ident, Meta, Type, parse_macro_input};
 
 #[derive(Clone, Debug)]
 enum ElemPatchability {
@@ -1,7 +1,7 @@
 [package]
 name = "migrate"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 libpk = { path = "../libpk" }
@@ -1,5 +1,3 @@
-#![feature(let_chains)]
-
 use tracing::info;
 
 include!(concat!(env!("OUT_DIR"), "/data.rs"));
|
|||
[package]
|
||||
name = "pluralkit_models"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
|
|
|
|||
|
|
@@ -18,7 +18,7 @@ pub enum PrivacyLevel {
 }
 
 // this sucks, put it somewhere else
-use sqlx::{postgres::PgTypeInfo, Database, Decode, Postgres, Type};
+use sqlx::{Database, Decode, Postgres, Type, postgres::PgTypeInfo};
 use std::error::Error;
 _util::fake_enum_impls!(PrivacyLevel);
 
@@ -1,7 +1,7 @@
 [package]
 name = "scheduled_tasks"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 libpk = { path = "../libpk" }