Mirror of https://github.com/TECHNOFAB11/pay-respects.git, synced 2025-12-12 06:20:09 +01:00
docs: AI configuration
parent c9b9af1728
commit c354fe014e
4 changed files with 80 additions and 19 deletions
@@ -1,8 +1,8 @@
 use std::collections::HashMap;
-use reqwest::blocking::Client;
 use serde::{Deserialize, Serialize};
 use serde_json::{Result, Value};
+use reqwest::blocking::Client;
 
 #[derive(Serialize, Deserialize)]
 struct Input {
@@ -50,16 +50,17 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
 	let request_url = match std::env::var("_PR_AI_URL") {
 		Ok(url) => url,
-		Err(_) => "https://api.groq.com/openai/v1/chat/completions".to_string()
+		Err(_) => "https://api.groq.com/openai/v1/chat/completions".to_string(),
 	};
 	let model = match std::env::var("_PR_AI_MODEL") {
 		Ok(model) => model,
-		Err(_) => "llama3-8b-8192".to_string()
+		Err(_) => "llama3-8b-8192".to_string(),
 	};
 
 	let user_locale = std::env::var("_PR_LOCALE").unwrap_or("en_US".to_string());
 
-	let ai_prompt = format!(r#"
+	let ai_prompt = format!(
+		r#"
 You are a programmer trying to run a command in your shell. You run the command `{last_command}` and get the following error message: `{error_msg}`. What command should you run next to fix the error?
 
 Answer in the following JSON format without any extra text:
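
The hunk above shows where the AI backend picks up its configuration: everything comes from environment variables. `_PR_AI_URL` selects an OpenAI-compatible chat completions endpoint (defaulting to Groq's), `_PR_AI_MODEL` picks the model (defaulting to `llama3-8b-8192`), and `_PR_LOCALE` controls the language of the returned note (defaulting to `en_US`). As a minimal sketch, the same lookup could be written with `unwrap_or_else` instead of explicit `match` blocks; the behaviour is identical, only the idiom differs:

use std::env;

// Sketch only: same environment variables and defaults as in the diff above,
// expressed with unwrap_or_else instead of match.
fn ai_config() -> (String, String, String) {
	let request_url = env::var("_PR_AI_URL")
		.unwrap_or_else(|_| "https://api.groq.com/openai/v1/chat/completions".to_string());
	let model = env::var("_PR_AI_MODEL").unwrap_or_else(|_| "llama3-8b-8192".to_string());
	let user_locale = env::var("_PR_LOCALE").unwrap_or_else(|_| "en_US".to_string());
	(request_url, model, user_locale)
}
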
@@ -70,7 +71,8 @@ Answer in the following JSON format without any extra text:
 User locale is: {user_locale}, please make sure to provide the note in the same language.
 
 If you don't know the answer or can't provide a good suggestion, please reply the command field with `None` and provide a note explaining why you can't provide a suggestion
-"#);
+"#
+	);
 
 	let messages = Messages {
 		messages: vec![Input {
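
The prompt asks the model to answer with nothing but JSON holding a suggested command and a note, with the command set to `None` when no fix can be offered. A hedged sketch of how such a reply could be deserialized with serde; the field names `command` and `note` are inferred from the prompt wording, not copied from the project's actual `AISuggest` type:

use serde::Deserialize;

// Hypothetical reply shape inferred from the prompt text; the real struct may differ.
#[derive(Deserialize)]
struct AiReply {
	command: String, // "None" when the model cannot suggest a fix
	note: String,    // explanation, expected in the user's locale
}

fn parse_reply(body: &str) -> Option<AiReply> {
	// Malformed or unexpected JSON simply yields None.
	serde_json::from_str(body).ok()
}
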
@@ -79,14 +81,15 @@ If you don't know the answer or can't provide a good suggestion, please reply th
 		}],
 		model,
 	};
 
 	let client = Client::new();
-	let res = client.post(&request_url)
+	let res = client
+		.post(&request_url)
 		.header("Authorization", format!("Bearer {}", api_key))
 		.header("Content-Type", "application/json")
 		.json(&messages)
 		.send();
 
 	let res = match res {
 		Ok(res) => res,
 		Err(_) => {
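
The request itself is a plain JSON POST with a bearer token, sent through reqwest's blocking client, and any transport error is handled by the following `match`. A standalone sketch of the same call pattern; the endpoint, token, and body type here are placeholders, and it assumes reqwest is built with the `blocking` and `json` features:

use reqwest::blocking::Client;
use serde::Serialize;

// Placeholder request body, for illustration only.
#[derive(Serialize)]
struct Body {
	model: String,
}

fn post_json(url: &str, api_key: &str, body: &Body) -> Option<String> {
	let client = Client::new();
	let res = client
		.post(url)
		.header("Authorization", format!("Bearer {}", api_key))
		.header("Content-Type", "application/json")
		.json(body) // serializes the struct into the request body
		.send()
		.ok()?; // network errors become None, mirroring the match on Err in the diff
	res.text().ok()
}
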
@@ -19,9 +19,7 @@ pub fn runtime_match(
 	shell: &str,
 ) -> Option<String> {
 	let file = get_rule(executable);
-	if file.is_none() {
-		return None;
-	}
+	file.as_ref()?;
 
 	let file = std::fs::read_to_string(file.unwrap()).unwrap();
 	let rule: Rule = toml::from_str(&file).unwrap();
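
Elsewhere in the change, an explicit `is_none()` check followed by `return None` is replaced by the `?` operator: `file.as_ref()?` propagates `None` out of the `Option`-returning function without moving the value, so the later `file.unwrap()` still compiles. A small self-contained illustration of the idiom (the function and data are made up):

// Hypothetical example of the as_ref()? early-return idiom used above.
fn first_char(path: Option<String>) -> Option<char> {
	// Returns None from the whole function if `path` is None; otherwise this is
	// a no-op that does not consume `path`.
	path.as_ref()?;
	// Safe to unwrap: the early return above ruled out None.
	path.unwrap().chars().next()
}

fn main() {
	assert_eq!(first_char(Some("rule.toml".to_string())), Some('r'));
	assert_eq!(first_char(None), None);
}
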
@@ -56,7 +56,8 @@ pub fn suggest_command(shell: &str, last_command: &str, error_msg: &str) -> Opti
 		}
 	}
 
-	#[cfg(feature = "request-ai")]{
+	#[cfg(feature = "request-ai")]
+	{
 		use crate::requests::ai_suggestion;
 		let suggest = ai_suggestion(last_command, error_msg);
 		if let Some(suggest) = suggest {
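
The AI fallback in `suggest_command` is compiled only when the crate is built with the `request-ai` feature; the change here just moves the braced block onto its own line. A sketch of the same cfg-on-a-block pattern in isolation; the feature name matches the diff, but the function and its body are illustrative and not taken from the project:

// Hypothetical illustration of gating a trailing fallback block behind a Cargo feature.
fn suggest(last_command: &str) -> Option<String> {
	if last_command.is_empty() {
		return None;
	}

	#[allow(unused_mut)] // `mut` is only needed when the feature is enabled
	let mut suggestion: Option<String> = None;

	// This block is compiled only with `--features request-ai`.
	#[cfg(feature = "request-ai")]
	{
		suggestion = Some(format!("ai suggestion for `{last_command}`"));
	}

	suggestion
}

fn main() {
	println!("{:?}", suggest("pacman -S foo"));
}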