Mirror of https://github.com/TECHNOFAB11/pay-respects.git (synced 2026-02-02 07:35:10 +01:00)
feat: experimental AI support
commit 5d7624563d
parent 070343a5e2

6 changed files with 1312 additions and 0 deletions
Cargo.lock (generated): 1180 lines
File diff suppressed because it is too large.
Cargo.toml
@@ -22,7 +22,9 @@ rust-i18n = "3"
 regex-lite = "0.1"
 
 toml = { version = "0.8", optional = true }
+serde_json = { version = "1.0", optional = true }
 serde = { version = "1.0", features = ["derive"], optional = true }
+reqwest = { version = "0.12", features = ["blocking", "json"], optional = true }
 
 pay-respects-parser = "0.2.5"
 # pay-respects-parser = { path = "../pay-respects-parser" }
@@ -30,6 +32,7 @@ pay-respects-parser = "0.2.5"
 
 [features]
 runtime-rules = ["dep:serde", "dep:toml"]
+request-ai = ["dep:serde", "dep:serde_json", "dep:reqwest"]
 
 [profile.release]
 strip = true
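A minimal sketch, not part of this commit, of what the new `request-ai` feature flag does: only when the crate is built with the feature (for example `cargo build --features request-ai`) are the optional serde_json and reqwest dependencies compiled in, and only then does feature-gated code exist. The function name `ai_backend_status` is made up for illustration.

    // Hypothetical illustration of the cfg gate introduced by the new feature flag.
    #[cfg(feature = "request-ai")]
    fn ai_backend_status() -> &'static str {
        "request-ai enabled: serde_json and reqwest are available"
    }

    #[cfg(not(feature = "request-ai"))]
    fn ai_backend_status() -> &'static str {
        "request-ai disabled: no AI request code is compiled"
    }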
i18n locale file
@@ -182,6 +182,18 @@ ja = "コマンドが見つかりません。"
 ko = "명령을 찾을 수 없습니다."
 zh = "找不到命令。"
 
+[ai-suggestion]
+en = "Suggestion from AI"
+es = "Sugerencia de la IA"
+de = "Vorschlag von KI"
+fr = "Suggestion de l'IA"
+it = "Proposta dall'IA"
+pt = "Sugestão da IA"
+ru = "Предложение от ИИ"
+ja = "AIからの提案"
+ko = "AI 제안"
+zh = "AI 建议"
+
 [confirm]
 en = "Execute suggestion?"
 es = "¿Ejecutar sugerencia?"
src/main.rs
@@ -30,6 +30,9 @@ mod replaces;
 #[cfg(feature = "runtime-rules")]
 mod runtime_rules;
 
+#[cfg(feature = "request-ai")]
+mod requests;
+
 #[macro_use]
 extern crate rust_i18n;
 i18n!("i18n", fallback = "en", minify_key = true);
@@ -40,6 +43,11 @@ fn main() {
 	let locale = get_locale().unwrap_or("en_US".to_string());
 	rust_i18n::set_locale(&locale[0..2]);
 
+	#[cfg(feature = "request-ai")]
+	{
+		std::env::set_var("_PR_LOCALE", &locale);
+	}
+
 	args::handle_args();
 
 	let shell = match std::env::var("_PR_SHELL") {
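The gated block above only hands the detected locale to the new module through an environment variable. A small sketch of that handoff (the variable name `_PR_LOCALE` and the `en_US` fallback come from this commit; the demo function itself is hypothetical):

    // main sets _PR_LOCALE; requests::ai_suggestion later reads it back so the
    // prompt can ask the model to write its note in the user's language.
    fn locale_handoff_demo() {
        std::env::set_var("_PR_LOCALE", "de_DE");
        let user_locale = std::env::var("_PR_LOCALE").unwrap_or("en_US".to_string());
        assert_eq!(user_locale, "de_DE");
    }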
src/requests.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
+use std::collections::HashMap;
+
+use serde::{Deserialize, Serialize};
+use serde_json::{Result, Value};
+use reqwest::blocking::Client;
+
+#[derive(Serialize, Deserialize)]
+struct Input {
+	role: String,
+	content: String,
+}
+
+#[derive(Serialize, Deserialize)]
+struct Messages {
+	messages: Vec<Input>,
+	model: String,
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct AISuggest {
+	pub command: String,
+	pub note: String,
+}
+
+pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
+	let mut map = HashMap::new();
+	map.insert("last_command", last_command);
+	map.insert("error_msg", error_msg);
+
+	let api_key = match std::env::var("_PR_AI_API_KEY") {
+		Ok(key) => Some(key),
+		Err(_) => option_env!("_DEV_PR_AI_API_KEY").map(|key| key.to_string())
+	};
+
+	let api_key = match api_key {
+		Some(key) => key,
+		None => {
+			return None;
+		}
+	};
+
+	let request_url = match std::env::var("_PR_AI_URL") {
+		Ok(url) => url,
+		Err(_) => "https://api.groq.com/openai/v1/chat/completions".to_string()
+	};
+	let model = match std::env::var("_PR_AI_MODEL") {
+		Ok(model) => model,
+		Err(_) => "llama3-8b-8192".to_string()
+	};
+
+	let user_locale = std::env::var("_PR_LOCALE").unwrap_or("en_US".to_string());
+
+	let ai_prompt = format!(r#"
+You are a programmer trying to run a command in your shell. You run the command `{last_command}` and get the following error message: `{error_msg}`. What command should you run next to fix the error?
+
+Answer in the following JSON format without any extra text:
+```
+{{"command":"your suggestion","note":"why you think this command will fix the error"}}
+```
+
+User locale is: {user_locale}, please make sure to provide the note in the same language.
+
+If you don't know the answer or can't provide a good suggestion, please reply the command field with `None` and provide a note explaining why you can't provide a suggestion
+"#);
+
+	let messages = Messages {
+		messages: vec![Input {
+			role: "user".to_string(),
+			content: ai_prompt.to_string(),
+		}],
+		model,
+	};
+
+	let client = Client::new();
+	let res = client.post(&request_url)
+		.header("Authorization", format!("Bearer {}", api_key))
+		.header("Content-Type", "application/json")
+		.json(&messages)
+		.send();
+
+	let res = match res {
+		Ok(res) => res,
+		Err(_) => {
+			return None;
+		}
+	};
+
+	let res = &res.text().unwrap();
+	let json: Value = serde_json::from_str(res).unwrap();
+	let content = &json["choices"][0]["message"]["content"];
+
+	let suggestion: AISuggest = serde_json::from_str(content.as_str().unwrap()).unwrap();
+	Some(suggestion)
+}
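A hedged usage sketch for the new module, not part of the commit: with `_PR_AI_API_KEY` set (the key and the failing command below are placeholders), `ai_suggestion` performs the blocking request and returns None on any failure, so a caller can simply match on the result. `_PR_AI_URL` and `_PR_AI_MODEL` are optional overrides for the Groq endpoint and llama3-8b-8192 defaults shown above.

    // Hypothetical caller; assumes it lives in the same crate so crate::requests resolves.
    #[cfg(feature = "request-ai")]
    fn try_ai_demo() {
        std::env::set_var("_PR_AI_API_KEY", "placeholder-key"); // illustration only
        std::env::set_var("_PR_LOCALE", "en_US");

        // Returns None if the key is missing, the request fails, or the reply cannot be parsed.
        if let Some(suggest) = crate::requests::ai_suggestion("gti status", "gti: command not found") {
            eprintln!("{}: {}", suggest.command, suggest.note);
        }
    }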
src/suggestions.rs
@@ -56,6 +56,21 @@ pub fn suggest_command(shell: &str, last_command: &str, error_msg: &str) -> Opti
 		}
 	}
 
+	#[cfg(feature = "request-ai")]{
+		use crate::requests::ai_suggestion;
+		let suggest = ai_suggestion(last_command, error_msg);
+		if let Some(suggest) = suggest {
+			eprintln!("{}: {}\n", t!("ai-suggestion").bold().blue(), suggest.note);
+			let command = suggest.command;
+			if command != "None" {
+				if PRIVILEGE_LIST.contains(&split_command[0].as_str()) {
+					return Some(format!("{} {}", split_command[0], command));
+				}
+				return Some(command);
+			}
+		}
+	}
+
 	None
 }
 
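One detail of the block above worth spelling out: when the original command began with an entry from PRIVILEGE_LIST (a privilege wrapper such as sudo), the AI suggestion is re-prefixed with that wrapper before being returned. A standalone illustration with assumed values (the vector below is made up; in suggest_command, split_command is derived from the user's failed command):

    // Hedged illustration of the re-prefix branch; values are assumptions, not from the commit.
    fn reprefix_demo() {
        let split_command = vec!["sudo".to_string(), "apt".to_string(), "updat".to_string()];
        let command = "apt update".to_string();
        // mirrors `return Some(format!("{} {}", split_command[0], command));`
        let resuggested = format!("{} {}", split_command[0], command);
        assert_eq!(resuggested, "sudo apt update");
    }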