# rllama 0.3.0
#
# Pure Rust implementation of the LLaMA family of models, executable.
[[bench]]
name = "benchmark"
path = "src/benches/benchmark.rs"
# Criterion supplies its own main(), so libtest's harness is disabled.
harness = false

[[bin]]
name = "rllama"
# `src/main.rs` is Cargo's default binary path; kept explicit here.
path = "src/main.rs"

# Protobuf tooling used at build time — presumably by a build script
# that generates Rust code from .proto files (TODO confirm build.rs).
[build-dependencies]
protobuf-codegen = "3.2"
protobuf-parse = "3.2"
[dependencies]
approx = "0.5"
clap = { version = "4.1", features = ["derive"] }
colored = "2"
embedded-profiling = "0.3"
half = "2.2"
indicatif = "0.17"
num-complex = "0.4"
# Optional: only pulled in via the `opencl` feature (see [features]).
ocl = { version = "0.19", optional = true }
protobuf = "3.2"
rand = "0.8"
rayon = "1.7"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "1.0"

[dev-dependencies]
criterion = "0.4"

[features]
# Enables the OpenCL backend by activating the optional `ocl` dependency.
opencl = ["ocl"]

[lib]
# `src/lib.rs` is Cargo's default library path; kept explicit here.
path = "src/lib.rs"

[package]
name = "rllama"
version = "0.3.0"
authors = ["Mikko Juola"]
categories = ["command-line-utilities"]
# NOTE(review): `documentation` points at the repository rather than
# docs.rs (https://docs.rs/rllama) — confirm this is intentional.
documentation = "https://github.com/Noeda/rllama"
edition = "2021"
homepage = "https://github.com/Noeda/rllama"
keywords = ["llama", "machine-learning"]
license = "AGPL-3.0"
readme = "README.md"
repository = "https://github.com/Noeda/rllama"
description = "Pure Rust implementation of LLaMA-family of models, executable"

[profile.release]
# Keep debug symbols in optimized builds — presumably for usable
# profiles/backtraces (the crate depends on embedded-profiling);
# confirm before stripping.
debug = true