aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--README.md21
-rw-r--r--config/categorize-LLM.json10
-rw-r--r--src/app/init.rs23
-rw-r--r--src/app/modules.rs3
-rw-r--r--src/app/modules/chat.rs9
-rw-r--r--src/app/modules/code.rs0
-rw-r--r--src/app/modules/resume.rs9
-rw-r--r--src/app/modules/wikipedia.rs6
-rw-r--r--src/ui/init.rs15
9 files changed, 66 insertions, 30 deletions
diff --git a/README.md b/README.md
index 31f2181..c944045 100644
--- a/README.md
+++ b/README.md
@@ -15,9 +15,10 @@ be added to this AI.
### Dependencies
This project is written in Rust, so you will need `rustc` and `cargo`.
-Moreover, you will need a LLM API, currently only works with local
-[ollama](https://github.com/ollama/ollama) API.
-
+- An [ollama](https://github.com/ollama/ollama) API for the LLM
+- A [kiwix](https://kiwix.org/en/applications/) web server and API for getting data from a zim
+  file (Wikipedia).
+
### Building & Running
To build and run this project you will need to install all the dependencies used:
@@ -30,7 +31,18 @@ Once that is done, just
```bash
cargo run
```
-and there you go !
+and there you go for the UI!
+
+Ollama must be running, and a model (like llama3.2) must be downloaded.
+Configuration files will probably need some modifications, so check every one of
+them.
+
+For Wikipedia data, run a kiwix webserver on your zim file and modify the
+configuration in `wiki/wiki.json`.
+
+```bash
+kiwix-serve -p 1025 -i 127.0.0.1 MYZIMFILE.zim
+```
## Screenshots
@@ -45,7 +57,6 @@ and there you go !
## TODO
-- Color change if it's an user or the LLM (dunno how to do it in this code base)
- Connect & try LLM / tools
## Inspiration and reason
diff --git a/config/categorize-LLM.json b/config/categorize-LLM.json
index fd488c8..80fb74e 100644
--- a/config/categorize-LLM.json
+++ b/config/categorize-LLM.json
@@ -1,23 +1,23 @@
{
"url": "http://127.0.0.1:11434/api/chat",
"model": "llama3.2",
- "system_prompt": "You are a categorizer. Your role is to categorize in which category the last message fit the most 'chat' for simple conversation with the LLM, 'code' for code completion or request about technical subject around programming, 'wikipedia' for research of factual information, don't create new categories",
+ "system_prompt": "You are a strict categorizer. You must classify the user's last message into exactly one of the following categories: 'chat', 'code', or 'wikipedia'. Do not invent new categories. If the message doesn't clearly fit one, choose the closest matching category. Output only one of the allowed values. Never generate or suggest any category outside: 'chat', 'code', 'wikipedia'",
"tools": [
{
"type": "function",
"function": {
"name": "categorize_message",
- "description": "Classify the last message into a category (chat, code, wikipedia)",
+          "description": "You are a strict categorizer. You must classify the user's last message into exactly one of the following categories: 'chat', 'code', or 'wikipedia'. Do not invent new categories. If the message doesn't clearly fit one, choose the closest matching category. Output only one of the allowed values. Never generate or suggest any category outside: 'chat', 'code', 'wikipedia'",
"parameters": {
"type" : "object",
"properties": {
- "category": {
+ "category_choice": {
"type": "string",
- "description": "The category in which the message fit the most e.g. 'chat', 'code', 'wikipedia'",
+ "description": "The category in which the message fit the most e.g. 'chat', 'code', 'wikipedia' only, don't create new categories",
"enum": ["chat", "code", "wikipedia"]
}
},
- "required": ["category"]
+ "required": ["category_choice"]
}
}
}
diff --git a/src/app/init.rs b/src/app/init.rs
index 0930319..f2b1ee3 100644
--- a/src/app/init.rs
+++ b/src/app/init.rs
@@ -1,5 +1,5 @@
use crate::app::llm::{Message, MessageType, LLM};
-use crate::app::modules::wikipedia::ask_wiki;
+use crate::app::modules::{wikipedia, resume, chat, code};
use crate::helper::init::warn;
use uuid::Uuid;
use tokio::runtime::Builder;
@@ -8,23 +8,15 @@ pub struct App {
pub messages: Vec<Message>, // History of recorded message
pub conv_id: Uuid, // ID for retrieving and saving the history of messag
categorize_llm: LLM,
- chat_llm: LLM, // Configuration for the LLM that chat with you
- resume_llm: LLM, // Configuration for the LLM that resume conversation
}
impl App {
pub fn new() -> App {
- let categorize_llm = LLM::new("config/categorize-LLM.json");
App {
- messages: vec![Message::new(
- MessageType::SYSTEM,
- categorize_llm.system_prompt.clone(),
- )],
+ messages: Vec::new(),
conv_id: Uuid::new_v4(),
- categorize_llm,
- chat_llm: LLM::new("config/chat-LLM.json"),
- resume_llm: LLM::new("config/resume-LLM.json"),
+ categorize_llm: LLM::new("config/categorize-LLM.json"),
}
}
@@ -47,7 +39,7 @@ impl App {
match result {
Ok(msg) => {
- let categorie = msg[0]["function"]["arguments"]["category"].clone();
+ let categorie = msg[0]["function"]["arguments"]["category_choice"].clone();
self.ask(&categorie.to_string().replace("\"", ""));
},
Err(e) => self.append_message(e.to_string(), MessageType::ASSISTANT),
@@ -62,11 +54,11 @@ impl App {
let result = runtime.block_on(async {
if mode == "resume" {
- self.resume_llm.ask(&self.messages).await
+ resume::resume_conv(self.messages.clone()).await
} else if mode == "wikipedia" {
- ask_wiki(&self.messages).await
+ wikipedia::ask_wiki(&self.messages).await
} else {
- self.chat_llm.ask(&self.messages).await
+ chat::ask_chat(self.messages.clone()).await
}
});
@@ -82,7 +74,6 @@ impl App {
}
pub fn resume_conv(&mut self) {
- self.append_message(self.resume_llm.system_prompt.to_string(), MessageType::USER);
self.ask("resume");
}
}
diff --git a/src/app/modules.rs b/src/app/modules.rs
index 622d63c..d25441d 100644
--- a/src/app/modules.rs
+++ b/src/app/modules.rs
@@ -1 +1,4 @@
+pub mod chat;
+pub mod code;
+pub mod resume;
pub mod wikipedia;
diff --git a/src/app/modules/chat.rs b/src/app/modules/chat.rs
new file mode 100644
index 0000000..ee00c98
--- /dev/null
+++ b/src/app/modules/chat.rs
@@ -0,0 +1,9 @@
+use crate::app::llm::{LLM, Message, MessageType};
+
+pub async fn ask_chat(mut messages: Vec<Message>) -> Result<String, Box<dyn std::error::Error>> {
+ let chat_llm = LLM::new("config/chat-LLM.json");
+ messages.push(Message::new(MessageType::USER, chat_llm.system_prompt.to_string()));
+
+ let result: String = chat_llm.ask(&messages).await?;
+ Ok(result)
+}
diff --git a/src/app/modules/code.rs b/src/app/modules/code.rs
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/app/modules/code.rs
diff --git a/src/app/modules/resume.rs b/src/app/modules/resume.rs
new file mode 100644
index 0000000..057314c
--- /dev/null
+++ b/src/app/modules/resume.rs
@@ -0,0 +1,9 @@
+use crate::app::llm::{LLM, Message, MessageType};
+
+pub async fn resume_conv(mut messages: Vec<Message>) -> Result<String, Box<dyn std::error::Error>> {
+ let resume_llm = LLM::new("config/resume-LLM.json");
+ messages.push(Message::new(MessageType::USER, resume_llm.system_prompt.to_string()));
+
+ let result: String = resume_llm.ask(&messages).await?;
+ Ok(result)
+}
diff --git a/src/app/modules/wikipedia.rs b/src/app/modules/wikipedia.rs
index 5864df4..86177ae 100644
--- a/src/app/modules/wikipedia.rs
+++ b/src/app/modules/wikipedia.rs
@@ -76,7 +76,7 @@ async fn find_get_best_article(articles: Vec<String>, user_query: &String, best_
let messages = vec![
Message::new(MessageType::SYSTEM, best_llm.system_prompt.clone()),
- Message::new(MessageType::USER, format!("The user's query is: {}. Here are the headings:\n{}\n\nPlease select the most relevant heading. Output the heading **only** and nothing else.", user_query, articles_headings))];
+ Message::new(MessageType::USER, format!("The user's query is: {}. Here are the headings:\n{}\n\nPlease select the most relevant heading. Output the heading only and nothing else.", user_query, articles_headings))];
let best_article = best_llm.ask(&messages).await?;
// wiki query get article content & parse
@@ -98,9 +98,9 @@ fn extract_text_from_tags(html: &str) -> String {
// Trouver le premier groupe capturé non vide (parmi cap[1] à cap[4])
(1..=4)
.filter_map(|i| cap.get(i))
- .map(|m| m.as_str()) // &str
+ .map(|m| m.as_str())
.flat_map(|s| s.split_whitespace())
- .collect::<Vec<_>>() // Vec<&str>
+ .collect::<Vec<_>>()
})
.collect::<Vec<_>>() // collect words
.join(" "); // join with spaces
diff --git a/src/ui/init.rs b/src/ui/init.rs
index afd686a..b789f38 100644
--- a/src/ui/init.rs
+++ b/src/ui/init.rs
@@ -1,4 +1,5 @@
use crate::app::init::App;
+use crate::app::llm::MessageType;
use crate::ui::inputfield::{BoxData, InputField, InputMode};
use color_eyre::Result;
use ratatui::{
@@ -179,7 +180,19 @@ impl Ui {
let size = msg.chars().take(available_width_message as usize).count();
let text = Text::from(msg);
- for line in text {
+ for mut line in text {
+ match m.role {
+ MessageType::USER => {
+ line.style = Style::default().fg(Color::Yellow);
+ }
+ MessageType::ASSISTANT => {
+ line.style = Style::default().fg(Color::Cyan);
+ }
+ MessageType::SYSTEM => {
+ line.style = Style::default().fg(Color::Red);
+ }
+ }
+
messages.push_line(line.clone());
let line_count =
(line.to_string().chars().count() as f64 / size as f64).ceil() as usize;
ArKa projects. All rights to me, and your next child right arm.