feat: additional AI configuration

This commit is contained in:
iff 2024-11-20 21:16:14 +01:00
parent 927e7d4263
commit b4e432cfd8
3 changed files with 19 additions and 15 deletions

View file

@ -63,7 +63,7 @@ Please follow the instruction for your shell:
> - `_PR_LAST_COMMAND`: The last command.
>
> pay-respects echoes back, if applicable, a `cd` command that can be evaluated by the current working shell.
>
> General example:
> ```shell
> eval $(_PR_SHELL=sh _PR_LAST_COMMAND="git comit" pay-respects)
@ -115,12 +115,12 @@ Compiled binaries can be found at [GitHub releases](https://github.com/iffse/pay
<summary>Compile from source (any OS/architecture)</summary>
> This installation requires you to have Cargo (the Rust package manager) installed.
>
> Install from [crates.io](https://crates.io/); features are optional:
> ```shell
> cargo install pay-respects --features=runtime-rules,request-ai
> ```
>
> Clone from git and install, suitable for adding custom compile-time rules:
> ```
> git clone --depth 1 https://github.com/iffse/pay-respects
@ -187,10 +187,12 @@ If it's useful to you, **please share this project and spread the word**. Also c
> Configuration is done via environment variables:
>
> - `_PR_AI_API_KEY`: Your own API key. Set it to an empty string to disable AI integration (`""`)
> - `_PR_AI_API_KEY`: Your own API key
> - `_PR_AI_URL`: URL used. Defaults to `https://api.groq.com/openai/v1/chat/completions`
> - `_PR_AI_MODEL`: Model used. Defaults to `llama3-8b-8192`
>
> - `_PR_AI_DISABLE`: Setting to any value disables AI integration
> - `_PR_AI_LOCALE`: Locale in which the AI explains the suggestion. Defaults to user system locale
> Compile time variables:
>
> - `_DEF_PR_AI_API_KEY`: Default API key, included at compile time

View file

@ -45,7 +45,9 @@ fn main() {
#[cfg(feature = "request-ai")]
{
std::env::set_var("_PR_LOCALE", &locale);
if std::env::var("_PR_AI_LOCALE").is_err() {
std::env::set_var("_PR_AI_LOCALE", &locale);
}
}
args::handle_args();

View file

@ -23,6 +23,10 @@ pub struct AISuggest {
}
pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
if std::env::var("_PR_AI_DISABLE").is_ok() {
return None;
}
let error_msg = if error_msg.len() > 300 {
&error_msg[..300]
} else {
@ -70,23 +74,19 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
Err(_) => "llama3-8b-8192".to_string(),
};
let user_locale = std::env::var("_PR_LOCALE").unwrap_or("en-US".to_string());
let set_locale = if user_locale != "en-US" {
format!(
"Provide the note in the language for the locale {}\n",
user_locale
)
let user_locale = std::env::var("_PR_AI_LOCALE").unwrap_or("en-US".to_string());
let set_locale = if !user_locale.starts_with("en") {
format!(". Use language for locale {}", user_locale)
} else {
"".to_string()
};
let ai_prompt = format!(
r#"
You run the command `{last_command}` and get the following error message: `{error_msg}`. What would you run next? Answer in the following JSON format without any extra text:
The command `{last_command}` returns the following error message: `{error_msg}`. Provide a command to fix it. Answer in the following JSON format without any extra text:
```
{{"command":"suggestion","note":"why it may fix the error"}}
{{"command":"suggestion","note":"why it may fix the error{set_locale}"}}
```
{set_locale}If you can't provide a good suggestion, reply the command field with `None` and a explanation in note
"#
);