[[bin]]
name = "server_runner"
path = "src/llm_backends/llama_cpp/bin/server_runner.rs"
[dependencies.anyhow]
version = "1.0.86"
[dependencies.async-openai]
version = "0.23.4"
[dependencies.backoff]
features = ["tokio"]
version = "0.4.0"
[dependencies.bytes]
version = "1.5.0"
[dependencies.clap]
version = "4.5.4"
[dependencies.clust]
version = "0.9.0"
[dependencies.derive_builder]
version = "0.20.0"
# NOTE(review): the `dotenv` crate is unmaintained; consider migrating to the
# maintained fork `dotenvy` (drop-in API for most uses) — verify call sites first.
[dependencies.dotenv]
version = "0.15.0"
[dependencies.llm_utils]
version = "0.0.8"
[dependencies.reqwest]
version = "0.12.4"
[dependencies.serde]
features = ["derive"]
version = "1.0.202"
[dependencies.serde_json]
version = "1.0.117"
[dependencies.thiserror]
version = "1.0.60"
[dependencies.tokio]
version = "1.37.0"
# NOTE(review): tokio-test is a testing utility; moved to dev-dependencies so it
# is not pulled in by downstream consumers. Confirm no non-test code imports it.
[dev-dependencies.tokio-test]
version = "0.4.4"
[dependencies.tracing]
version = "0.1.40"
[dependencies.tracing-appender]
version = "0.2.3"
[dependencies.tracing-subscriber]
features = ["json"]
version = "0.3.18"
[dependencies.url]
version = "2.5.2"
[dev-dependencies.serial_test]
version = "3.1.1"
[package]
authors = ["Shelby Jenkins"]
categories = ["api-bindings", "asynchronous"]
description = "llm_client: An Interface for Deterministic Signals from Probabilistic LLM Vibes"
edition = "2021"
exclude = ["src/llm_backends/llama_cpp/llama_cpp/**/*"]
homepage = "https://github.com/shelbyJenkins/llm_client"
keywords = ["anthropic", "gguf", "llama-cpp", "llm", "openai"]
license = "MIT"
name = "llm_client"
readme = "README.md"
repository = "https://github.com/shelbyJenkins/llm_client"
version = "0.0.4"