feat: more default configuration for ai module

This commit is contained in:
iff 2024-12-12 03:37:58 +01:00
parent 7a5357e2b1
commit 600ad10275
2 changed files with 24 additions and 9 deletions

View file

@ -195,17 +195,20 @@ An API key is included with the source. It should always work unless I can no lo
> Configuration is done via environment variables:
>
> - `_PR_AI_API_KEY`: Your own API key
> - `_PR_AI_URL`: URL used. Defaults to pay-respects' own URL
> - Any OpenAI compatible URL can be used, e.g.:
> - `https://api.openai.com/v1/chat/completions` (Note: OpenAI's ChatGPT is very slow)
> - `https://api.groq.com/openai/v1/chat/completions`
> - `_PR_AI_MODEL`: Model used. Defaults to `llama3-8b-8192` for fast response
> - `_PR_AI_URL`: URL used. Any OpenAI compatible URL can be used, e.g.:
> - `https://api.openai.com/v1/chat/completions` (Note: OpenAI's ChatGPT is very slow)
> - `https://api.groq.com/openai/v1/chat/completions`
> - `_PR_AI_MODEL`: Model used
> - `_PR_AI_DISABLE`: Setting to any value disables AI integration
> - `_PR_AI_LOCALE`: Locale in which the AI explains the suggestion. Defaults to user system locale
> Compile time variables:
> Compile time variables: Default values for the respective variables above when not set
>
> - `_DEF_PR_AI_API_KEY`: Default API key, included in compile-time
> - `_DEF_PR_AI_API_KEY`
> - `_DEF_PR_AI_URL`
> - `_DEF_PR_AI_MODEL`
>
> If the default values are not provided, pay-respects' own values will be used. Your request will be filtered to avoid abusive usage. The request will then be forwarded to an LLM provider that will not use your data for training.
>
> </details>

View file

@ -65,11 +65,23 @@ pub fn ai_suggestion(last_command: &str, error_msg: &str) -> Option<AISuggest> {
let request_url = match std::env::var("_PR_AI_URL") {
Ok(url) => url,
Err(_) => "https://iff.envs.net/completions.py".to_string(),
Err(_) => {
if let Some(url) = option_env!("_DEF_PR_AI_URL") {
url.to_string()
} else {
"https://iff.envs.net/completions.py".to_string()
}
}
};
let model = match std::env::var("_PR_AI_MODEL") {
Ok(model) => model,
Err(_) => "llama3-8b-8192".to_string(),
Err(_) => {
if let Some(model) = option_env!("_DEF_PR_AI_MODEL") {
model.to_string()
} else {
"llama3-8b-8192".to_string()
}
}
};
let user_locale = {