[build-dependencies.anyhow]
features = ["backtrace"]
version = "1"
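# bindgen_cuda is invoked from build.rs to compile the flash-attention CUDA kernels.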
[build-dependencies.bindgen_cuda]
version = "0.1.1"
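# The core dependency: candle-core, renamed to `candle` within this crate, with
# its `cuda` feature enabled since the flash-attention kernels run on CUDA only.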
[dependencies.candle]
features = ["cuda"]
package = "candle-core"
version = "0.8.0"
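# `half` provides the f16/bf16 element types the kernels operate on; the
# `num-traits` feature adds numeric trait implementations for those types.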
[dependencies.half]
features = ["num-traits"]
version = "2.3.1"
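# anyhow is pulled in again for error handling in the integration tests.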
[dev-dependencies.anyhow]
features = ["backtrace"]
version = "1"
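# Explicit library target; note that the hyphens in the package name become
# underscores in the library name.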
[lib]
name = "candle_flash_attn"
path = "src/lib.rs"
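# Package metadata. The auto* flags disable cargo's automatic target discovery,
# so only the [[test]] target declared explicitly below gets built.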
[package]
name = "candle-flash-attn"
version = "0.8.0"
edition = "2021"
description = "Flash attention layer for the candle ML framework."
repository = "https://github.com/huggingface/candle"
readme = "README.md"
keywords = ["blas", "tensor", "machine-learning"]
categories = ["science"]
license = "MIT OR Apache-2.0"
build = "build.rs"
autobins = false
autoexamples = false
autotests = false
autobenches = false
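# The single integration test target, declared by hand because autotests is
# disabled above.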
[[test]]
name = "flash_attn_tests"
path = "tests/flash_attn_tests.rs"
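# A minimal sketch of how a downstream crate might depend on this one
# (hypothetical consumer manifest, not part of this file):
#
#   [dependencies]
#   candle-core = { version = "0.8.0", features = ["cuda"] }
#   candle-flash-attn = "0.8.0"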