// gemini_ai/content_gen/memory.rs
use crate::{decode_gemini, format::memory_schema, GeminiContentGen, Memorys};
2#[cfg(feature = "sync")]
3use std::{
4 fs::{read_to_string, OpenOptions},
5 io::Write,
6};
7
8#[cfg(feature = "async")]
9use async_std::fs::{read_to_string, OpenOptions};
10
11use super::content::gemini;
12
/// Runs one memory-backed generation turn, persisting the conversation to a
/// local file (`conversation.txt` for [`Memorys::File`], `conversation.json`
/// for [`Memorys::Json`]).
///
/// Returns the raw Gemini response body on success, or an error description
/// if the response could not be decoded.
#[cfg(feature = "sync")]
pub fn memory(memory: Memorys, user: &GeminiContentGen) -> String {
    // `text` is a borrowed field; read it directly instead of cloning the
    // entire request struct just to move the field out.
    let user_text = user.text;
    match memory {
        Memorys::File => file_store_retrive(user, user_text, "txt"),
        Memorys::Json => file_store_retrive(user, user_text, "json"),
    }
}
21
/// Async variant of the memory-backed generation turn: persists the
/// conversation to `conversation.txt` ([`Memorys::File`]) or
/// `conversation.json` ([`Memorys::Json`]) and returns the raw Gemini
/// response body (or an error description on decode failure).
#[cfg(feature = "async")]
pub async fn memory<'b>(memory: Memorys, user: &GeminiContentGen<'b>) -> String {
    // `text` is a borrowed field; read it directly instead of cloning the
    // entire request struct just to move the field out.
    let user_text = user.text;
    match memory {
        Memorys::File => file_store_retrive(user, user_text, "txt").await,
        Memorys::Json => file_store_retrive(user, user_text, "json").await,
    }
}
/// Formats one conversation turn as a JSON-object line for the conversation
/// log, e.g. `{"text":"user: hello"},\r\n`.
///
/// The speaker label (`model`) and the message body are escaped so that
/// embedded quotes, backslashes, and control characters cannot corrupt the
/// stored JSON structure (the previous version interpolated them verbatim,
/// producing invalid JSON whenever the model reply contained a `"`).
fn responses(model: &str, response: &str) -> String {
    // Minimal JSON string escaping: backslash first, then the characters that
    // would otherwise terminate or break the quoted value.
    fn escape(s: &str) -> String {
        let mut out = String::with_capacity(s.len());
        for c in s.chars() {
            match c {
                '\\' => out.push_str("\\\\"),
                '"' => out.push_str("\\\""),
                '\n' => out.push_str("\\n"),
                '\r' => out.push_str("\\r"),
                '\t' => out.push_str("\\t"),
                _ => out.push(c),
            }
        }
        out
    }
    format!("{{\"text\":\"{}: {}\"}},\r\n", escape(model), escape(response))
}
34
/// Appends the user's prompt to `conversation.{mode}`, replays the whole log
/// as context through the Gemini API, appends the model's reply parts to the
/// same file, and returns the raw API response body.
///
/// Returns the decode error's description if the API response cannot be
/// parsed.
///
/// # Panics
/// Panics if the conversation log cannot be opened, written, or read
/// (consistent with the surrounding code's `unwrap` style; previously the
/// `write_all` results were silently dropped, losing turns on I/O failure).
#[cfg(feature = "sync")]
fn file_store_retrive(user: &GeminiContentGen, user_text: &str, mode: &str) -> String {
    let path = format!("conversation.{}", mode);
    let mut local_store = OpenOptions::new()
        .append(true)
        .create(true)
        .open(&path)
        .unwrap();
    // Record the user's turn before re-reading the file so the history sent
    // to the model includes it.
    local_store
        .write_all(responses("user", user_text).as_bytes())
        .expect("failed to append user turn to conversation log");
    let history = read_to_string(&path).unwrap();
    let schema = memory_schema(user_text, &history, user.max_len);
    // Renamed from `gemini` to avoid shadowing the imported `gemini` function.
    let response = gemini(schema, &user.env_variable, user.model, "application/json");
    match decode_gemini(&response) {
        Err(err) => err.to_string(),
        Ok(content) => {
            // Persist every generated part so the next turn sees the reply.
            for candidate in content.candidates {
                for part in candidate.content.parts {
                    local_store
                        .write_all(responses("output", part.text.trim()).as_bytes())
                        .expect("failed to append model turn to conversation log");
                }
            }
            // Return the raw (undecoded) API response body.
            response
        }
    }
}
63
/// Async variant of the conversation-log helper: appends the user's prompt to
/// `conversation.{mode}`, replays the whole log as context through the Gemini
/// API, appends the model's reply parts, and returns the raw API response
/// body.
///
/// On decode failure this returns the error's description, matching the sync
/// implementation (it previously returned the raw body and left `err`
/// unused).
///
/// # Panics
/// Panics if the conversation log cannot be opened, written, or read
/// (consistent with the surrounding code's `unwrap` style; previously the
/// `write_all` results were silently dropped, losing turns on I/O failure).
#[cfg(feature = "async")]
async fn file_store_retrive<'a>(
    user: &GeminiContentGen<'a>,
    user_text: &str,
    mode: &str,
) -> String {
    use async_std::io::WriteExt;

    let path = format!("conversation.{}", mode);
    let mut local_store = OpenOptions::new()
        .append(true)
        .create(true)
        .open(&path)
        .await
        .unwrap();
    // Record the user's turn before re-reading the file so the history sent
    // to the model includes it.
    local_store
        .write_all(responses("user", user_text).as_bytes())
        .await
        .expect("failed to append user turn to conversation log");
    let history = read_to_string(&path).await.unwrap();
    let schema = memory_schema(user_text, &history, user.max_len);
    // Renamed from `gemini` to avoid shadowing the imported `gemini` function.
    let response = gemini(schema, &user.env_variable, user.model, "application/json").await;
    match decode_gemini(&response) {
        Err(err) => err.to_string(),
        Ok(content) => {
            // Persist every generated part so the next turn sees the reply.
            for candidate in content.candidates {
                for part in candidate.content.parts {
                    local_store
                        .write_all(responses("output", part.text.trim()).as_bytes())
                        .await
                        .expect("failed to append model turn to conversation log");
                }
            }
            // Return the raw (undecoded) API response body.
            response
        }
    }
}