docs: AI configuration

This commit is contained in:
iff 2024-11-19 16:19:22 +01:00
parent c9b9af1728
commit c354fe014e
4 changed files with 80 additions and 19 deletions

View file

@ -5,6 +5,7 @@ Typed a wrong command? Pay Respects will try to correct your wrong console comma
- 🚀 **Blazing fast suggestion**: You won't notice any delay for asking suggestions!
- ✏️ **Easy to write rules**: You don't need to know Rust. The rules are written in a TOML file that is simple to work with and can be evaluated to Rust code upon compilation! Optional runtime user defined rules can be enabled starting from 0.5!
- 🎯 **Accurate results**: Suggestions must pass several conditions in order to be prompted to the user, no `sudo` suggestions when you are using `doas`!
- 🤖 **AI Support**: AI comes in aid when there is no rule for your error!
- 🪶 **Tiny binary size**: Not even 1MB!
![pacman-fix](img/pacman-fix.png)
@ -115,9 +116,9 @@ Compiled binaries can be found at [GitHub releases](https://github.com/iffse/pay
> This installation requires you to have Cargo (the Rust package manager) installed.
>
> Install from [crates.io](https://crates.io/), `runtime-rules` is optional:
> Install from [crates.io](https://crates.io/), features are optional:
> ```shell
> cargo install pay-respects --features=runtime-rules
> cargo install pay-respects --features=runtime-rules,request-ai
> ```
>
> Clone from git and install, suitable for adding custom compile-time rules:
@ -133,6 +134,64 @@ Compiled binaries can be found at [GitHub releases](https://github.com/iffse/pay
See [writing rules](./rules.md) for how to write rules.
## AI Integration
AI suggestions should work out of the box unless the rate limit has been reached. Bring your own API keys to avoid it.
If it's useful to you, **please share this project and spread the word**. Also consider making a donation to keep its public usage alive:
<div>
<a
href="https://liberapay.com/iff/donate"
target="_blank"
rel="noreferrer"
><img
src="https://liberapay.com/assets/widgets/donate.svg"
alt="Donate using Liberapay"
/></a
>
<a href="https://ko-fi.com/iffse" target="_blank" rel="noreferrer"
><img
src="https://www.vectorlogo.zone/logos/ko-fi/ko-fi-ar21.svg"
alt="Donate using Ko-fi"
style="height: 30px;"
/></a
>
<br />
<a href="https://iffse.eu.org/stripe" target="_blank" rel="noreferrer"
><img
src="https://cdn.brandfolder.io/KGT2DTA4/at/8vbr8k4mr5xjwk4hxq4t9vs/Stripe_wordmark_-_blurple.svg"
alt="Donate using Stripe"
style="height: 30px;"
/></a
>
<a
href="https://www.paypal.com/donate/?hosted_button_id=QN7Z7ZHRAAFZL"
target="_blank"
rel="noreferrer"
><img
src="https://upload.wikimedia.org/wikipedia/commons/b/b5/PayPal.svg"
alt="Donate using PayPal"
style="height: 25px; margin-bottom: 3px;"
/></a
>
</div>
<details>
<summary>AI and API Configuration</summary>
> Configuration is done via environment variables:
>
> - `_PR_AI_API_KEY`: Your own API key
> - `_PR_AI_URL`: URL used. Defaults to `https://api.groq.com/openai/v1/chat/completions`
> - `_PR_AI_MODEL`: Model used. Defaults to `llama3-8b-8192`
>
> Compile time variables:
>
> - `_DEF_PR_AI_API_KEY`: Default API key, included at compile time
>
> </details>
## Contributing
Current option to write rules should cover most of the cases.

View file

@ -1,8 +1,8 @@
use std::collections::HashMap;
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use serde_json::{Result, Value};
use reqwest::blocking::Client;
#[derive(Serialize, Deserialize)]
struct Input {
@ -50,16 +50,17 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
let request_url = match std::env::var("_PR_AI_URL") {
Ok(url) => url,
Err(_) => "https://api.groq.com/openai/v1/chat/completions".to_string()
Err(_) => "https://api.groq.com/openai/v1/chat/completions".to_string(),
};
let model = match std::env::var("_PR_AI_MODEL") {
Ok(model) => model,
Err(_) => "llama3-8b-8192".to_string()
Err(_) => "llama3-8b-8192".to_string(),
};
let user_locale = std::env::var("_PR_LOCALE").unwrap_or("en_US".to_string());
let ai_prompt = format!(r#"
let ai_prompt = format!(
r#"
You are a programmer trying to run a command in your shell. You run the command `{last_command}` and get the following error message: `{error_msg}`. What command should you run next to fix the error?
Answer in the following JSON format without any extra text:
@ -70,7 +71,8 @@ Answer in the following JSON format without any extra text:
User locale is: {user_locale}, plese make sure to provide the note in the same language.
If you don't know the answer or can't provide a good suggestion, please reply the command field with `None` and provide a note explaining why you can't provide a suggestion
"#);
"#
);
let messages = Messages {
messages: vec![Input {
@ -81,7 +83,8 @@ If you don't know the answer or can't provide a good suggestion, please reply th
};
let client = Client::new();
let res = client.post(&request_url)
let res = client
.post(&request_url)
.header("Authorization", format!("Bearer {}", api_key))
.header("Content-Type", "application/json")
.json(&messages)

View file

@ -19,9 +19,7 @@ pub fn runtime_match(
shell: &str,
) -> Option<String> {
let file = get_rule(executable);
if file.is_none() {
return None;
}
file.as_ref()?;
let file = std::fs::read_to_string(file.unwrap()).unwrap();
let rule: Rule = toml::from_str(&file).unwrap();

View file

@ -56,7 +56,8 @@ pub fn suggest_command(shell: &str, last_command: &str, error_msg: &str) -> Opti
}
}
#[cfg(feature = "request-ai")]{
#[cfg(feature = "request-ai")]
{
use crate::requests::ai_suggestion;
let suggest = ai_suggestion(last_command, error_msg);
if let Some(suggest) = suggest {