use self::ChatbotClient::*;
use super::{ChatbotService, OpenAiChatCompletion};
use toml::Table;
use zino_core::{bail, error::Error, extension::TomlTableExt, Map};

/// Supported chatbot clients.
#[non_exhaustive]
pub(super) enum ChatbotClient {
    /// OpenAI chat completion client.
    OpenAi(OpenAiChatCompletion),
}

/// A chatbot connector for a specific service.
pub struct Chatbot {
    /// Chatbot service name, e.g. `openai`.
    service: String,
    /// Chatbot name.
    name: String,
    /// Underlying client.
    client: ChatbotClient,
}

impl Chatbot {
    /// Creates a new instance with the service, name, and client.
    #[inline]
    pub(super) fn new(
        service: impl Into<String>,
        name: impl Into<String>,
        client: ChatbotClient,
    ) -> Self {
        Self {
            service: service.into(),
            name: name.into(),
            client,
        }
    }

    /// Attempts to create a new chatbot for the specified service
    /// with the given configuration.
    pub fn try_new(service: &str, config: &Table) -> Result<Chatbot, Error> {
        match service {
            "openai" => OpenAiChatCompletion::try_new_chatbot(config),
            _ => {
                bail!("chatbot service `{}` is unsupported", service);
            }
        }
    }

    /// Returns the service name.
    #[inline]
    pub fn service(&self) -> &str {
        self.service.as_str()
    }

    /// Returns the chatbot name.
    #[inline]
    pub fn name(&self) -> &str {
        self.name.as_str()
    }
}

impl ChatbotService for Chatbot {
    fn try_new_chatbot(config: &Table) -> Result<Chatbot, Error> {
        // Fall back to a placeholder so that `try_new` reports an unsupported service.
        let service = config.get_str("service").unwrap_or("unknown");
        Self::try_new(service, config)
    }

    fn model(&self) -> &str {
        match &self.client {
            OpenAi(chat_completion) => chat_completion.model(),
        }
    }

    async fn try_send(&self, message: String, options: Option<Map>) -> Result<Vec<String>, Error> {
        match &self.client {
            OpenAi(chat_completion) => chat_completion.try_send(message, options).await,
        }
    }
}
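
#[cfg(test)]
mod tests {
    use super::*;
    use toml::Table;

    // A minimal sketch added for illustration (not part of the original module):
    // `Chatbot::try_new` should reject any service name other than "openai",
    // since the catch-all match arm bails with an "unsupported" error.
    #[test]
    fn rejects_unsupported_service() {
        let config = Table::new();
        assert!(Chatbot::try_new("unsupported", &config).is_err());
    }
}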