From 3171dc6c63280dc0f383f56f0198b9c33d9d3357 Mon Sep 17 00:00:00 2001
From: Tristan Druyen
Date: Mon, 9 Sep 2024 15:03:22 +0200
Subject: [PATCH] Remove unused pkg

---
 Cargo.lock                                |  4 ----
 Cargo.toml                                |  4 +---
 clippy.yml                                |  2 ++
 leptos_stub/src/main.rs                   |  1 -
 llama_cpp_spec_wrapper/Cargo.toml         | 12 ------------
 llama_cpp_spec_wrapper/src/main.rs        |  3 ---
 llama_forge_rs/src/api/backend_process.rs |  1 +
 7 files changed, 4 insertions(+), 23 deletions(-)
 create mode 100644 clippy.yml
 delete mode 100644 llama_cpp_spec_wrapper/Cargo.toml
 delete mode 100644 llama_cpp_spec_wrapper/src/main.rs

diff --git a/Cargo.lock b/Cargo.lock
index 843fa41..f125565 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4059,10 +4059,6 @@ version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2a385b1be4e5c3e362ad2ffa73c392e53f031eaa5b7d648e64cd87f27f6063d7"
 
-[[package]]
-name = "llama_cpp_spec_wrapper"
-version = "0.1.1"
-
 [[package]]
 name = "llama_forge_rs"
 version = "0.1.1"
diff --git a/Cargo.toml b/Cargo.toml
index 2612e6d..9426cad 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,8 +10,6 @@ opt-level = 1
 opt-level = 1
 [profile.dev.package.leptos_stub]
 opt-level = 1
-[profile.dev.package.llama_cpp_spec_wrapper]
-opt-level = 1
 
 
 [profile.release]
@@ -29,7 +27,7 @@ lto = "fat"
 panic = "abort"
 
 [workspace]
-members = ["llama_forge_rs", "leptos_stub", "frozen_llama", "llama_cpp_spec_wrapper"]
+members = ["llama_forge_rs", "leptos_stub", "frozen_llama"]
 resolver = "2"
 
 [workspace.package]
diff --git a/clippy.yml b/clippy.yml
new file mode 100644
index 0000000..02f00a0
--- /dev/null
+++ b/clippy.yml
@@ -0,0 +1,2 @@
+too-many-arguments-threshold = 20
+enum-variant-name-threshold = 10
diff --git a/leptos_stub/src/main.rs b/leptos_stub/src/main.rs
index 5f2736e..1827efa 100644
--- a/leptos_stub/src/main.rs
+++ b/leptos_stub/src/main.rs
@@ -1,4 +1,3 @@
-
 #[cfg(feature = "ssr")]
 #[tokio::main]
 async fn main() {
diff --git a/llama_cpp_spec_wrapper/Cargo.toml b/llama_cpp_spec_wrapper/Cargo.toml
deleted file mode 100644
index e9339f6..0000000
--- a/llama_cpp_spec_wrapper/Cargo.toml
+++ /dev/null
@@ -1,12 +0,0 @@
-[package]
-name = "llama_cpp_spec_wrapper"
-authors.workspace = true
-description.workspace = true
-license.workspace = true
-publish.workspace = true
-readme.workspace = true
-repository.workspace = true
-version.workspace = true
-edition.workspace = true
-
-[dependencies]
diff --git a/llama_cpp_spec_wrapper/src/main.rs b/llama_cpp_spec_wrapper/src/main.rs
deleted file mode 100644
index e7a11a9..0000000
--- a/llama_cpp_spec_wrapper/src/main.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-fn main() {
-    println!("Hello, world!");
-}
diff --git a/llama_forge_rs/src/api/backend_process.rs b/llama_forge_rs/src/api/backend_process.rs
index aca39fb..fcbdbd3 100644
--- a/llama_forge_rs/src/api/backend_process.rs
+++ b/llama_forge_rs/src/api/backend_process.rs
@@ -20,6 +20,7 @@ pub struct BackendProcess {
 /// - `WaitingForStop`: The process is waiting to be stopped.
 /// - `Finished`: The process has completed its execution successfully.
 /// - `Failed`: The process has failed or encountered an error during execution.
+///
 /// This enum is used to keep track of the state of a process in a backend application, allowing for proper management and control over the process lifecycle.
 #[derive(
     Default, PartialEq, Debug, Clone, Serialize, Deserialize, sqlx::Type, strum::EnumString,