jieba-rs 0.7.1

The Jieba Chinese Word Segmentation Implemented in Rust
Documentation: https://docs.rs/jieba-rs
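
A minimal usage sketch, based on the crate's README for the 0.7 series (Jieba::new relies on the bundled dictionary provided by the default-dict feature, which is enabled by default):

use jieba_rs::Jieba;

fn main() {
    let jieba = Jieba::new();
    // Second argument toggles HMM-based discovery of words not in the dictionary.
    let words = jieba.cut("我们中出了一个叛徒", false);
    assert_eq!(words, vec!["我们", "中", "出", "了", "一个", "叛徒"]);
}
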
[package]
name = "jieba-rs"
version = "0.7.1"
authors = ["messense <messense@icloud.com>", "Paul Meng <me@paulme.ng>"]
categories = ["text-processing"]
description = "The Jieba Chinese Word Segmentation Implemented in Rust"
keywords = ["nlp", "chinese", "segmentation"]
license = "MIT"
readme = "README.md"
repository = "https://github.com/messense/jieba-rs"
edition = "2021"

[package.metadata.docs.rs]
all-features = true

[dev-dependencies]
codspeed-criterion-compat = "2.4.1"
rand = "0.8"
wasm-bindgen-test = "0.3.0"

[target.'cfg(unix)'.dev-dependencies]
jemallocator = "0.5.0"
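
jemallocator is pulled in only as a dev-dependency on Unix targets. A typical way a benchmark or example opts into it is the global_allocator attribute; a sketch:

// Use jemalloc as the global allocator on Unix builds (sketch; only applies
// where the jemallocator dev-dependency is available).
#[cfg(unix)]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;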

[[bench]]
name = "jieba_benchmark"
harness = false
required-features = ["tfidf", "textrank"]
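
With harness = false, Criterion supplies its own main function instead of the built-in test harness. A skeleton of such a benchmark, assuming the codspeed-criterion-compat drop-in Criterion API (names here are illustrative, not the crate's actual benchmark):

use codspeed_criterion_compat::{criterion_group, criterion_main, Criterion};
use jieba_rs::Jieba;

// Benchmark plain segmentation with Criterion's bench_function API.
fn bench_cut(c: &mut Criterion) {
    let jieba = Jieba::new();
    c.bench_function("cut", |b| b.iter(|| jieba.cut("我们中出了一个叛徒", false)));
}

criterion_group!(benches, bench_cut);
criterion_main!(benches);

Because of required-features, the bench target only builds when both features are enabled, e.g. cargo bench --features tfidf,textrank.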

[dependencies]
jieba-macros = { version = "0.7.1", path = "./jieba-macros" }
cedarwood = "0.4"
derive_builder = { version = "0.20.0", optional = true }
fxhash = "0.2.1"
include-flate = "0.3.0"
lazy_static = "1.0"
ordered-float = { version = "4.0", optional = true }
phf = "0.11"
regex = "1.0"

[features]
default = ["default-dict"]
default-dict = []
tfidf = ["dep:ordered-float", "dep:derive_builder"]
textrank = ["dep:ordered-float", "dep:derive_builder"]
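
The tfidf and textrank features gate keyword extraction and pull in ordered-float and derive_builder only when enabled. A sketch of TF-IDF extraction with the tfidf feature on; the type and method names (TfIdf, KeywordExtract, extract_keywords) follow the 0.7-era README and may differ slightly between releases:

use jieba_rs::{Jieba, KeywordExtract, TfIdf};

fn main() {
    let jieba = Jieba::new();
    // TF-IDF extractor with its bundled IDF dictionary (API sketch; exact names may vary across 0.7.x).
    let extractor = TfIdf::default();
    // Top 3 keywords, restricted to the given part-of-speech tags.
    let keywords = extractor.extract_keywords(
        &jieba,
        "今天纽约的天气真好啊，京华大酒店的张尧经理吃了一只北京烤鸭",
        3,
        vec![String::from("ns"), String::from("n"), String::from("vn"), String::from("v")],
    );
    println!("{:?}", keywords);
}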

[workspace]
members = [".", "capi", "jieba-macros", "examples/weicheng"]