1515
# Print an error banner in red to stderr and return non-zero.
# $1 - message text appended after the "Autocomplete.sh - " prefix.
# NOTE: this runs inside bash completion context, where `exit` would be
# far too severe (it can tear down the caller's shell session), so the
# function returns 1 and lets the caller decide how to proceed.
error_exit() {
  echo -e "\e[31mAutocomplete.sh - $1\e[0m" >&2
  return 1
}
2021
2122echo_error () {
@@ -30,9 +31,8 @@ echo_green() {
# Global Variables & Model Definitions #
# ##############################################################################

export ACSH_VERSION=0.4.4

# Supported models live in one associative array keyed "provider: model".
# Values are JSON blobs read later by the request builder (endpoint,
# per-token costs, model id, provider).
unset _autocomplete_modellist
declare -A _autocomplete_modellist
# OpenAI models
@@ -46,24 +46,21 @@ _autocomplete_modellist['anthropic: claude-3-7-sonnet-20250219']='{ "completion_
# Anthropic models
_autocomplete_modellist['anthropic: claude-3-5-sonnet-20241022']='{ "completion_cost":0.0000150, "prompt_cost":0.0000030, "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5-sonnet-20241022", "provider": "anthropic" }'
_autocomplete_modellist['anthropic: claude-3-5-haiku-20241022']='{ "completion_cost":0.0000040, "prompt_cost":0.0000008, "endpoint": "https://api.anthropic.com/v1/messages", "model": "claude-3-5-haiku-20241022", "provider": "anthropic" }'
# Groq models
_autocomplete_modellist['groq: llama3-8b-8192']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama3-8b-8192", "provider": "groq" }'
_autocomplete_modellist['groq: llama3-70b-8192']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama3-70b-8192", "provider": "groq" }'
_autocomplete_modellist['groq: llama-3.3-70b-versatile']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.3-70b-versatile", "provider": "groq" }'
_autocomplete_modellist['groq: llama-3.1-8b-instant']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.1-8b-instant", "provider": "groq" }'
_autocomplete_modellist['groq: llama-guard-3-8b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-guard-3-8b", "provider": "groq" }'
_autocomplete_modellist['groq: mixtral-8x7b-32768']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "mixtral-8x7b-32768", "provider": "groq" }'
_autocomplete_modellist['groq: gemma2-9b-it']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "gemma2-9b-it", "provider": "groq" }'
# Groq preview models
_autocomplete_modellist['groq: mistral-saba-24b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "mistral-saba-24b", "provider": "groq" }'
_autocomplete_modellist['groq: qwen-2.5-coder-32b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "qwen-2.5-coder-32b", "provider": "groq" }'
_autocomplete_modellist['groq: deepseek-r1-distill-qwen-32b']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "deepseek-r1-distill-qwen-32b", "provider": "groq" }'
_autocomplete_modellist['groq: deepseek-r1-distill-llama-70b-specdec']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "deepseek-r1-distill-llama-70b-specdec", "provider": "groq" }'
_autocomplete_modellist['groq: llama-3.3-70b-specdec']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.3-70b-specdec", "provider": "groq" }'
_autocomplete_modellist['groq: llama-3.2-1b-preview']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.2-1b-preview", "provider": "groq" }'
_autocomplete_modellist['groq: llama-3.2-3b-preview']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "https://api.groq.com/openai/v1/chat/completions", "model": "llama-3.2-3b-preview", "provider": "groq" }'
# Ollama model
_autocomplete_modellist['ollama: codellama']='{ "completion_cost":0.0000000, "prompt_cost":0.0000000, "endpoint": "http://localhost:11434/api/chat", "model": "codellama", "provider": "ollama" }'
6966
@@ -79,7 +76,7 @@ _get_terminal_info() {
7976 * Operating system: \$ OSTYPE=$OSTYPE
8077 * Shell: \$ BASH=$BASH
8178 * Terminal type: \$ TERM=$TERM
82- * Hostname: \$ HOSTNAME= $HOSTNAME "
79+ * Hostname: \$ HOSTNAME"
8380 echo " $terminal_info "
8481}
8582
@@ -145,15 +142,19 @@ _get_recent_files() {
145142 find . -maxdepth 1 -type f -exec ls -ld {} + | sort -r | head -n " $FILE_LIMIT "
146143}
147144
# Best-effort capture of a command's --help output.
# $1 - the user's full command line; only its first word is executed.
# Outputs the help text on stdout, or a short note when --help fails
# in a way that aborts the brace group.
# NOTE(review): the previous heredoc wrapper added nothing and, as
# pasted (space-indented EOF terminator), could never terminate; a
# plain command substitution is equivalent and robust.
_get_help_message() {
  local COMMAND HELP_INFO
  COMMAND=$(echo "$1" | awk '{print $1}')
  HELP_INFO=""
  {
    set +e
    # '|| true' keeps a failing --help from tripping 'set -e'.
    HELP_INFO=$("$COMMAND" --help 2>&1 || true)
    set -e
  } || HELP_INFO="'$COMMAND --help' not available"
  echo "$HELP_INFO"
}
159160
@@ -341,7 +342,8 @@ openai_completion() {
341342 user_input=${*:- $default_user_input }
342343
343344 if [[ -z " $ACSH_ACTIVE_API_KEY " && ${ACSH_PROVIDER^^} != " OLLAMA" ]]; then
344- error_exit " ACSH_ACTIVE_API_KEY not set. Please set it with: export ${ACSH_PROVIDER^^} _API_KEY=<your-api-key>"
345+ echo_error " ACSH_ACTIVE_API_KEY not set. Please set it with: export ${ACSH_PROVIDER^^} _API_KEY=<your-api-key>"
346+ return
345347 fi
346348 api_key=" ${ACSH_ACTIVE_API_KEY:- $OPENAI_API_KEY } "
347349 payload=$( _build_payload " $user_input " )
@@ -350,15 +352,15 @@ openai_completion() {
350352 attempt=1
351353 while [ $attempt -le $max_attempts ]; do
352354 if [[ " ${ACSH_PROVIDER^^} " == " ANTHROPIC" ]]; then
353- response=$( curl -s -m " $timeout " -w " \n%{http_code}" " $endpoint " \
355+ response=$( \ c url -s -m " $timeout " -w " \n%{http_code}" " $endpoint " \
354356 -H " content-type: application/json" \
355357 -H " anthropic-version: 2023-06-01" \
356358 -H " x-api-key: $api_key " \
357359 --data " $payload " )
358360 elif [[ " ${ACSH_PROVIDER^^} " == " OLLAMA" ]]; then
359- response=$( curl -s -m " $timeout " -w " \n%{http_code}" " $endpoint " --data " $payload " )
361+ response=$( \ c url -s -m " $timeout " -w " \n%{http_code}" " $endpoint " --data " $payload " )
360362 else
361- response=$( curl -s -m " $timeout " -w " \n%{http_code}" " $endpoint " \
363+ response=$( \ c url -s -m " $timeout " -w " \n%{http_code}" " $endpoint " \
362364 -H " Content-Type: application/json" \
363365 -H " Authorization: Bearer $api_key " \
364366 -d " $payload " )
@@ -716,12 +718,16 @@ acsh_load_config() {
716718 if [[ -z " $ACSH_OLLAMA_API_KEY " && -n " $LLM_API_KEY " ]]; then
717719 export ACSH_OLLAMA_API_KEY=" $LLM_API_KEY "
718720 fi
721+ # If the custom API key was set, map it to OLLAMA if needed.
722+ if [[ -z " $ACSH_OLLAMA_API_KEY " && -n " $ACSH_CUSTOM_API_KEY " ]]; then
723+ export ACSH_OLLAMA_API_KEY=" $ACSH_CUSTOM_API_KEY "
724+ fi
719725 case " ${ACSH_PROVIDER:- openai} " in
720726 " openai" ) export ACSH_ACTIVE_API_KEY=" $ACSH_OPENAI_API_KEY " ;;
721727 " anthropic" ) export ACSH_ACTIVE_API_KEY=" $ACSH_ANTHROPIC_API_KEY " ;;
722728 " groq" ) export ACSH_ACTIVE_API_KEY=" $ACSH_GROQ_API_KEY " ;;
723729 " ollama" ) export ACSH_ACTIVE_API_KEY=" $ACSH_OLLAMA_API_KEY " ;;
724- * ) error_exit " Unknown provider: $ACSH_PROVIDER " ;;
730+ * ) echo_error " Unknown provider: $ACSH_PROVIDER " ;;
725731 esac
726732 else
727733 echo " Configuration file not found: $config_file "
@@ -787,10 +793,15 @@ remove_command() {
787793 autocomplete_script=$( command -v autocomplete)
788794 if [ -n " $autocomplete_script " ]; then
789795 echo " Autocomplete script is at: $autocomplete_script "
790- read -r -p " Remove the autocomplete script? (y/n): " confirm
791- if [[ $confirm == " y" ]]; then
796+ if [ " $1 " == " -y" ]; then
792797 rm " $autocomplete_script "
793798 echo " Removed: $autocomplete_script "
799+ else
800+ read -r -p " Remove the autocomplete script? (y/n): " confirm
801+ if [[ $confirm == " y" ]]; then
802+ rm " $autocomplete_script "
803+ echo " Removed: $autocomplete_script "
804+ fi
794805 fi
795806 fi
796807 echo " Uninstallation complete."
0 commit comments