From 8233ab723d0aa5656af4d7d9be662a2b0159a057 Mon Sep 17 00:00:00 2001
From: iff
Date: Sun, 6 Apr 2025 17:53:27 +0200
Subject: [PATCH] feat: reasoning AI models

---
 CHANGELOG.md                      |  2 ++
 module-request-ai/i18n/i18n.toml  | 11 ++++++++++
 module-request-ai/src/main.rs     |  9 ++++++--
 module-request-ai/src/requests.rs | 34 +++++++++++++++++++++++++------
 4 files changed, 48 insertions(+), 8 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 281f56a..424fc40 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Added
 
+- Support reasoning AI models (responses can take more than 20 seconds)
+- Allow adding additional prompts, e.g. for role-playing
 - `exe_contains` condition to check if the command contains the argument
 
 ### Fixed
diff --git a/module-request-ai/i18n/i18n.toml b/module-request-ai/i18n/i18n.toml
index b56227c..52d86c3 100644
--- a/module-request-ai/i18n/i18n.toml
+++ b/module-request-ai/i18n/i18n.toml
@@ -12,3 +12,14 @@
 ja = "AIからの提案"
 ko = "AI 제안"
 zh = "AI 建议"
+[ai-thinking]
+en = "AI is thinking..."
+es = "La IA está pensando..."
+de = "KI denkt nach..."
+fr = "L'IA réfléchit..."
+it = "L'IA sta pensando..."
+pt = "A IA está pensando..."
+ru = "ИИ думает..."
+ja = "AIが考えています..."
+ko = "AI가 생각 중입니다..."
+zh = "AI正在思考..."
diff --git a/module-request-ai/src/main.rs b/module-request-ai/src/main.rs
index 8192f43..d18d086 100644
--- a/module-request-ai/src/main.rs
+++ b/module-request-ai/src/main.rs
@@ -48,11 +48,16 @@ fn main() -> Result<(), std::io::Error> {
 	}
 	let suggest = ai_suggestion(&command, &error);
 	if let Some(suggest) = suggest {
+		if let Some(thinking) = suggest.think {
+			let note = format!("{}:", t!("ai-thinking")).bold().blue();
+			let thinking = fill(&thinking, termwidth());
+			eprintln!("{}{}", note, thinking);
+		}
 		let warn = format!("{}:", t!("ai-suggestion")).bold().blue();
-		let note = fill(&suggest.note, termwidth());
+		let note = fill(&suggest.suggestion.note, termwidth());
 		eprintln!("{}\n{}\n", warn, note);
 
-		let suggestions = suggest.commands;
+		let suggestions = suggest.suggestion.commands;
 		for suggestion in suggestions {
 			print!("{}<_PR_BR>", suggestion);
 		}
diff --git a/module-request-ai/src/requests.rs b/module-request-ai/src/requests.rs
index ae8c4c5..3f31651 100644
--- a/module-request-ai/src/requests.rs
+++ b/module-request-ai/src/requests.rs
@@ -28,7 +28,12 @@ pub struct AISuggest {
 	pub note: String,
 }
 
-pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
+pub struct AIResponse {
+	pub suggestion: AISuggest,
+	pub think: Option<String>,
+}
+
+pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AIResponse> {
 	if std::env::var("_PR_AI_DISABLE").is_ok() {
 		return None;
 	}
@@ -148,6 +153,7 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
 		};
 		res = String::from_utf8(out).unwrap();
 	}
+
 	let json: Value = {
 		let json = serde_json::from_str(&res);
 		if let Ok(json) = json {
@@ -159,12 +165,26 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
 	};
 
 	let content = &json["choices"][0]["message"]["content"];
+	let mut str = content
+		.as_str()
+		.expect("AI module: Failed to get content from response")
+		.trim()
+		.to_string();
+
+	let think = if str.starts_with("<think>") {
+		let start_len = "<think>".len();
+		let end_len = "</think>".len();
+		let end = str.find("</think>").unwrap() + end_len;
+		let think = str[start_len..end - end_len].to_string();
+		str = str[end..].to_string();
+		Some(think)
+	} else {
+		None
+	};
 
 	let suggestion: AISuggest = {
 		let str = {
-			let str = content.as_str();
-			str?;
-			str.expect("AI module: Failed to get content from response")
+			str.trim()
 				.trim_start_matches("```")
 				.trim_start_matches("json")
 				.trim_end_matches("```")
@@ -176,7 +196,9 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
 		}
 		json.unwrap()
 	};
-	Some(suggestion)
+
+	let response = AIResponse { suggestion, think };
+	Some(response)
 }
 
 impl Conf {
@@ -215,7 +237,7 @@ impl Conf {
 			if let Some(model) = option_env!("_DEF_PR_AI_MODEL") {
 				model.to_string()
 			} else {
-				"llama3-70b-8192".to_string()
+				"qwen-2.5-32b".to_string()
 			}
 		}
 	};
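Note on the parsing added in requests.rs (a reviewer-style sketch, not part of the patch): reasoning models prefix their reply with a <think>...</think> block, which this change splits off into AIResponse.think so main.rs can print it under the new "ai-thinking" label before the remaining text is trimmed and deserialized into AISuggest. Below is a minimal, self-contained illustration of that splitting step, assuming the reply is already available as a plain string; split_think and the sample reply are hypothetical, not code from the module.

```rust
// Sketch only: mirrors the <think> handling added in requests.rs.
// `split_think` is an assumed helper name for illustration.
fn split_think(reply: &str) -> (Option<String>, String) {
	let reply = reply.trim();
	if let Some(rest) = reply.strip_prefix("<think>") {
		if let Some(end) = rest.find("</think>") {
			// Everything inside the tags is the model's reasoning...
			let think = rest[..end].to_string();
			// ...and everything after the closing tag is the answer proper.
			let answer = rest[end + "</think>".len()..].trim_start().to_string();
			return (Some(think), answer);
		}
	}
	// No reasoning block: the whole reply is the answer.
	(None, reply.to_string())
}

fn main() {
	// Hypothetical reply from a reasoning model.
	let raw = "<think>The user probably wants `git status`.</think>\n{\"note\":\"typo\",\"commands\":[\"git status\"]}";
	let (think, answer) = split_think(raw);
	println!("reasoning: {:?}", think);
	println!("answer: {}", answer);
}
```

Keeping the reasoning separate from the JSON answer lets the existing AISuggest deserialization path stay unchanged while still surfacing the model's chain of thought to the user.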