[dependencies]
anyhow = "1.0.89"
colorful = "0.3.2"
indenter = "0.3.3"
indoc = "2.0.5"
llm_devices = "0.0.2"
llm_interface = "0.0.2"
llm_models = "0.0.1"
llm_prompt = "0.0.1"
llm_utils = "0.0.11"
thiserror = "1.0.64"
tokio = "1.40.0"
tracing = "0.1.40"
url = "2.5.2"
[dev-dependencies]
serde = "1.0.210"
serde_json = "1.0.128"
serial_test = "3.1.1"
tokio = { version = "1.40.0", features = ["macros", "test-util"] }
[[example]]
name = "basic_completion"
path = "examples/basic_completion.rs"
[[example]]
name = "basic_primitive"
path = "examples/basic_primitive.rs"
[[example]]
name = "decision"
path = "examples/decision.rs"
[[example]]
name = "device_config"
path = "examples/device_config.rs"
[[example]]
name = "extract_urls"
path = "examples/extract_urls.rs"
[[example]]
name = "reason"
path = "examples/reason.rs"
[features]
all = ["llama_cpp_backend", "mistral_rs_backend"]
default = ["llama_cpp_backend"]
llama_cpp_backend = ["llm_interface/llama_cpp_backend"]
mistral_rs_backend = ["llm_interface/mistral_rs_backend"]
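# The library target, consumed downstream as the `llm_client` crate.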
[lib]
name = "llm_client"
path = "src/lib.rs"
[package]
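# Automatic target discovery is turned off; the library, examples, and test
# harness are each declared explicitly elsewhere in this manifest.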
autobenches = false
autobins = false
autoexamples = false
autotests = false
build = false
categories = ["api-bindings", "asynchronous"]
description = "The Easiest Rust Interface for Local LLMs, and an Interface for Deterministic Signals from Probabilistic LLM Vibes"
edition = "2021"
homepage = "https://github.com/shelbyJenkins/llm_client"
keywords = ["anthropic", "gguf", "llama-cpp", "llm", "openai"]
license = "MIT"
name = "llm_client"
readme = "README.md"
repository = "https://github.com/shelbyJenkins/llm_client"
version = "0.0.6"
[[test]]
name = "it"
path = "tests/it/main.rs"