From 01a0e6c58d2978010e370153734b1c71d45c9131 Mon Sep 17 00:00:00 2001 From: Attogram Project Date: Tue, 19 Aug 2025 23:28:58 +0200 Subject: [PATCH 1/3] refactor chat --- ollama_bash_lib.sh | 766 ++++++++++++++++++++++++--------------------- 1 file changed, 415 insertions(+), 351 deletions(-) diff --git a/ollama_bash_lib.sh b/ollama_bash_lib.sh index 38b82ed..1e37180 100755 --- a/ollama_bash_lib.sh +++ b/ollama_bash_lib.sh @@ -4,7 +4,7 @@ # OLLAMA_LIB_NAME='Ollama Bash Lib' -OLLAMA_LIB_VERSION='0.45.5' +OLLAMA_LIB_VERSION='0.45.6' OLLAMA_LIB_URL='https://github.com/attogram/ollama-bash-lib' OLLAMA_LIB_DISCORD='https://discord.gg/BGQJCbYVBa' OLLAMA_LIB_LICENSE='MIT' @@ -12,13 +12,14 @@ OLLAMA_LIB_COPYRIGHT='Copyright (c) 2025 Ollama Bash Lib, Attogram Project &2 } -# Wraps a stream of text with tags -# -# Usage: | _ollama_thinking_stream -# Input: stream of text from stdin -# Output: wrapped text to stderr -# Requires: none -# Returns: 0 -_ollama_thinking_stream() { - local chunk - if read -r -n 1 chunk && [[ -n "$chunk" ]]; then - printf "# \n" >&2 - printf "# %s" "$chunk" >&2 - cat >&2 - printf "\n# \n\n" >&2 - fi -} - # Does a command exist? 
# # Usage: _exists "command" @@ -100,6 +84,8 @@ _exists() { # Requires: none # Returns: 0 if valid, 1 if not valid _is_valid_url() { + # TODO - protect against transverses ../ + # TODO - allow no protocol, host hostname local url_regex='^(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]$' if [[ "$1" =~ $url_regex ]]; then return 0 @@ -313,6 +299,7 @@ ollama_api_post() { # Requires: curl # Returns: 0 if API is reachable, 1 if API is not reachable ollama_api_ping() { + # TODO - refactor to ONLY get HTTP status code from api base url, no page content local usage usage="Usage: ollama_api_ping\n\n" usage+="ollama_api_ping\n\n" @@ -352,13 +339,13 @@ ollama_api_ping() { # Create a JSON payload for the generate endpoint # -# Usage: _ollama_payload_generate "model" "prompt" +# Usage: _ollama_generate_json_payload "model" "prompt" # Input: 1 - The model to use # Input: 2 - The prompt # Output: json payload to stdout # Requires: jq # Returns: 0 on success, 1 on error -_ollama_payload_generate() { +_ollama_generate_json_payload() { local model="$1" local prompt="$2" local stream=true @@ -421,7 +408,7 @@ ollama_generate_json() { fi local json_payload - json_payload="$(_ollama_payload_generate "$model" "$prompt")" + json_payload="$(_ollama_generate_json_payload "$model" "$prompt")" _debug "ollama_generate_json: json_payload: ${json_payload:0:120}" if ! 
ollama_api_post '/api/generate' "$json_payload"; then @@ -536,6 +523,23 @@ ollama_generate_stream_json() { return 0 } +# Wraps a stream of text with tags +# +# Usage: | _ollama_thinking_stream +# Input: stream of text from stdin +# Output: wrapped text to stderr +# Requires: none +# Returns: 0 +_ollama_thinking_stream() { + local chunk + if read -r -n 1 chunk && [[ -n "$chunk" ]]; then + printf "# \n" >&2 + printf "# %s" "$chunk" >&2 + cat >&2 + printf "\n# \n\n" >&2 + fi +} + # Generate a completion as streaming text # # Usage: ollama_generate_stream "model" "prompt" @@ -724,346 +728,158 @@ ollama_messages_count() { echo "${#OLLAMA_LIB_MESSAGES[@]}" } -# Tools Functions +# Get Last Message, JSON format +# +# Usage: ollama_messages_last_json +# Output: last element of message history, in JSON format +# Requires: none +# Returns 0 on success, 1 on error +ollama_messages_last_json() { + echo '[{"role":"user","content":"foo"}]' + return 0 +} -# Add a tool +# Get Last Message, string format # -# Usage: ollama_tools_add "tool_name" "command" "json_definition" -# Input: 1 - The name of the tool -# Input: 2 - The command to run for the tool -# Input: 3 - The JSON definition of the tool +# Usage: ollama_messages_last +# Output: last element of message history, as a string +# Requires: ollama_messages_last_json +# Returns 0 on success, 1 on error +ollama_messages_last() { + echo 'foo' + return 0 +} + +# Chat Functions + +# Set the assistant response into the message history +# +# Usage: ollama_chat # Output: none -# Requires: jq -# Returns: 0 on success, 1 on error -ollama_tools_add() { - local usage - usage="Usage: ollama_tools_add \"tool_name\" \"command\" \"json_definition\"\n\n" - usage+="ollama_tools_add\n\n" - usage+="Register a new tool for the model to use.\n\n" - usage+="This function adds a tool's name, its corresponding shell command, and its JSON definition to the session's tool registry.\n\n" - usage+="The model can then request to call this tool during a chat. 
The JSON definition should follow the Ollama tool definition format." - for arg in "$@"; do - if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then - printf '%b\n' "$usage" - return 0 - fi - done - if ! _exists 'jq'; then _error 'ollama_tools_add: jq Not Found'; return 1; fi - local tool_name="$1" - local command="$2" - local json_definition="$3" +# Env: OLLAMA_LIB_MESSAGES +# Requires: +# Returns 0 on success, 1 on error +ollama_chat_assistant() { + OLLAMA_LIB_MESSAGES+=('{"role":"assistant","content":"bar"}') + return 0 +} - if [[ -z "$tool_name" ]]; then - _error 'ollama_tools_add: Tool name cannot be empty' +_ollama_chat_json_stream_true() { + local json_payload="$1" + _debug '_ollama_chat_json_stream: stream starting' + if ! ollama_api_post '/api/chat' "$json_payload"; then + _error '_ollama_chat_json_stream: ollama_api_post failed' return 1 fi + _debug '_ollama_chat_json_stream: stream finished' + return 0 +} - # Check if tool with the same name already exists - local i - for i in "${!OLLAMA_LIB_TOOLS_NAME[@]}"; do - if [[ "${OLLAMA_LIB_TOOLS_NAME[$i]}" == "$tool_name" ]]; then - _error "ollama_tools_add: Tool '$tool_name' already exists." - return 1 - fi - done - - if [[ -z "$command" ]]; then - _error 'ollama_tools_add: Command cannot be empty' +_ollama_chat_json_stream_false() { + local json_payload="$1" result + if ! result="$(ollama_api_post '/api/chat' "$json_payload")"; then + _error '_ollama_chat_json_no_stream: ollama_api_post failed' return 1 fi - if ! _is_valid_json "$json_definition"; then - _error 'ollama_tools_add: JSON definition is not valid' + if ! _is_valid_json "$result"; then + _error '_ollama_chat_json_no_stream: response is not valid JSON' return 1 fi - OLLAMA_LIB_TOOLS_NAME+=("$tool_name") - OLLAMA_LIB_TOOLS_COMMAND+=("$command") - OLLAMA_LIB_TOOLS_DEFINITION+=("$json_definition") - _debug "ollama_tools_add: Added tool '$tool_name'" + local content + content="$(printf '%s' "$result" | jq -r ".message.content")" + local error_jq_message_content=$?
+ _debug "_ollama_chat_json_no_stream: content: [${content:0:42}]" + if (( error_jq_message_content )); then + _error "_ollama_chat_json_no_stream: jq error getting message content: $error_jq_message_content" + return 1 + fi + echo "$result" + _debug '_ollama_chat_json_no_stream: success' return 0 } -# View all tools -# -# Usage: ollama_tools -# Input: none -# Output: A list of all registered tools and their commands -# Requires: none -# Returns: 0 -ollama_tools() { - local usage - usage="Usage: ollama_tools\n\n" - usage+="ollama_tools\n\n" - usage+="View all registered tools.\n\n" - usage+="This function lists all the tools that have been added to the current session using 'ollama_tools_add'.\n\n" - usage+="It displays a tab-separated list of tool names and their corresponding commands." - if [[ $# -gt 0 ]]; then - if [[ $# -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then - printf '%b\n' "$usage" - return 0 - else - _error "ollama_tools: Unknown argument(s): $*" - printf '%b\n' "$usage" - return 1 - fi +_ollama_chat_json_payload() { + local model="$1" + + local stream=true + if [[ "$OLLAMA_LIB_STREAM" -eq "0" ]]; then + stream=false fi - if (( ${#OLLAMA_LIB_TOOLS_NAME[@]} == 0 )); then - _debug 'ollama_tools: No tools registered' - return 0 + + if (( ${#OLLAMA_LIB_MESSAGES[@]} == 0 )); then + _error '_ollama_chat_json_payload: Message history is empty' + # return 1 # TODO - decide: return 1, or allow empty message history? 
fi - local i - for i in "${!OLLAMA_LIB_TOOLS_NAME[@]}"; do - printf '%s\t%s\n' "${OLLAMA_LIB_TOOLS_NAME[$i]}" "${OLLAMA_LIB_TOOLS_COMMAND[$i]}" - done - return 0 -} -# Get count of tools -# -# Usage: ollama_tools_count -# Input: none -# Output: The number of registered tools -# Requires: none -# Returns: 0 -ollama_tools_count() { - local usage - usage="Usage: ollama_tools_count\n\n" - usage+="ollama_tools_count\n\n" - usage+="Get the number of registered tools.\n\n" - usage+="This function returns the current number of tools that have been registered in the session.\n\n" - usage+="It provides a simple way to check if any tools are available for the model to use." - if [[ $# -gt 0 ]]; then - if [[ $# -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then - printf '%b\n' "$usage" - return 0 - else - _error "ollama_tools_count: Unknown argument(s): $*" - printf '%b\n' "$usage" - return 1 - fi + local messages_json + messages_json='['$(IFS=,; echo "${OLLAMA_LIB_MESSAGES[*]}")']' + + local thinking=true + if [[ "$OLLAMA_LIB_THINKING" == "off" ]]; then + thinking=false fi - printf '%s\n' "${#OLLAMA_LIB_TOOLS_NAME[@]}" - return 0 -} -# Remove all tools -# -# Usage: ollama_tools_clear -# Input: none -# Output: none -# Requires: none -# Returns: 0 -ollama_tools_clear() { - local usage - usage="Usage: ollama_tools_clear\n\n" - usage+="ollama_tools_clear\n\n" - usage+="Remove all registered tools from the session.\n\n" - usage+="This function clears the tool registry, removing all tool names, commands, and definitions.\n\n" - usage+="This is useful for ensuring that a new chat session starts with a clean slate of tools." 
- if [[ $# -gt 0 ]]; then - if [[ $# -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then - printf '%b\n' "$usage" - return 0 - else - _error "ollama_tools_clear: Unknown argument(s): $*" - printf '%b\n' "$usage" - return 1 - fi + local json_payload + json_payload="$(jq -c -n \ + --arg model "$model" \ + --argjson messages "$messages_json" \ + --argjson stream "$stream" \ + --argjson thinking "$thinking" \ + '{model: $model, messages: $messages, stream: $stream, thinking: $thinking}')" + + if (( ${#OLLAMA_LIB_TOOLS_DEFINITION[@]} > 0 )); then + local tools_json + tools_json='['$(IFS=,; echo "${OLLAMA_LIB_TOOLS_DEFINITION[*]}")']' + json_payload="$(printf '%s' "$json_payload" | jq -c --argjson tools "$tools_json" '. + {tools: $tools}')" fi - OLLAMA_LIB_TOOLS_NAME=() - OLLAMA_LIB_TOOLS_COMMAND=() - OLLAMA_LIB_TOOLS_DEFINITION=() - _debug 'ollama_tools_clear: All tools have been removed' - return 0 + printf '%s\n' "$json_payload" } -# Does the response have a tool call? +# Chat completion request as json # -# Usage: ollama_tools_is_call "json_response" -# Input: 1 - The JSON response from the model -# Output: none -# Requires: jq -# Returns: 0 if it has a tool call, 1 otherwise -ollama_tools_is_call() { +# Usage: ollama_chat_json "model" +# Input: 1 - model +# Output: json, to stdout +# Requires: curl, jq +# Returns: 0 on success, 1 on error +ollama_chat_json() { + local usage - usage="Usage: ollama_tools_is_call \"json_response\"\n\n" - usage+="ollama_tools_is_call\n\n" - usage+="Check if the model's response contains a tool call.\n\n" - usage+="This function inspects the JSON response from the model to see if it includes a 'tool_calls' field, which indicates the model wants to use a tool.\n\n" - usage+="It is essential for building agentic systems that can decide whether to execute a tool or respond with text."
+ usage="Usage: ollama_chat_json \"model\"\n\n" + usage+="ollama_chat_json\n\n" + usage+="Request a chat completion from a model, receiving JSON output.\n\n" + usage+="This function sends the entire message history ('OLLAMA_LIB_MESSAGES') to the specified model and returns the model's response as a raw JSON object.\n\n" + usage+="It serves as the foundation for 'ollama_chat' and 'ollama_chat_stream', which provide more user-friendly text-based outputs." for arg in "$@"; do if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then printf '%b\n' "$usage" return 0 fi done - if ! _exists 'jq'; then _error 'ollama_tools_is_call: jq Not Found'; return 1; fi - if ! _is_valid_json "$1"; then - _debug 'ollama_tools_is_call: Invalid JSON' + + if ! _exists 'jq'; then _error 'ollama_chat_json: jq Not Found'; return 1; fi + + local model + model="$(_is_valid_model "$1")" + _debug "ollama_chat_json: model: [${1:0:42}] = [${model:0:120}]" + + if [[ -z "$model" ]]; then + _error 'ollama_chat_json: No Models Found' return 1 fi - local tool_calls - tool_calls="$(printf '%s' "$1" | jq -r '.tool_calls // empty')" - if [[ -n "$tool_calls" ]]; then - return 0 - fi - tool_calls="$(printf '%s' "$1" | jq -r '.message.tool_calls // empty')" - if [[ -n "$tool_calls" ]]; then - return 0 - fi - return 1 -} - -# Run a tool -# -# Usage: ollama_tools_run "tool_name" "arguments_json" -# Input: 1 - The name of the tool to run -# Input: 2 - The JSON string of arguments for the tool -# Output: The result of the tool execution -# Requires: jq -# Returns: 0 on success, 1 on error -ollama_tools_run() { - local usage - usage="Usage: ollama_tools_run \"tool_name\" \"arguments_json\"\n\n" - usage+="ollama_tools_run\n\n" - usage+="Execute a registered tool with the given arguments.\n\n" - usage+="This function looks up the command for the specified tool name and executes it, passing the arguments as a JSON string.\n\n" - usage+="It is the core component for making the model's tool calls functional, bridging the 
gap between the model's request and the actual execution of the tool." - for arg in "$@"; do - if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then - printf '%b\n' "$usage" - return 0 - fi - done - if ! _exists 'jq'; then _error 'ollama_tools_run: jq Not Found'; return 1; fi - local tool_name="$1" - local tool_args_str="$2" - - local tool_index=-1 - local i - for i in "${!OLLAMA_LIB_TOOLS_NAME[@]}"; do - if [[ "${OLLAMA_LIB_TOOLS_NAME[$i]}" == "$tool_name" ]]; then - tool_index=$i - break - fi - done - - if [[ $tool_index -eq -1 ]]; then - _error "ollama_tools_run: Tool '$tool_name' not found" - return 1 - fi - - local command - command="${OLLAMA_LIB_TOOLS_COMMAND[$tool_index]}" - - if [[ -z "$tool_args_str" ]] || [[ "$tool_args_str" == "null" ]]; then - tool_args_str="{}" + + local json_payload + json_payload="$(_ollama_chat_json_payload "$model")" + _debug "ollama_chat_json: json_payload: [${json_payload:0:120}]" + + if [[ "$OLLAMA_LIB_STREAM" -eq 1 ]]; then + _ollama_chat_json_stream_true "$json_payload" + return $? fi - if ! _is_valid_json "$tool_args_str"; then - _error "ollama_tools_run: Arguments are not valid JSON" - return 1 - fi - - _debug "ollama_tools_run: Running command: $command '$tool_args_str'" - "$command" "$tool_args_str" - - return 0 -} - -# Chat Functions - -# Chat completion request as json -# -# Usage: ollama_chat_json "model" -# Input: 1 - model -# Output: json, to stdout -# Requires: curl, jq -# Returns: 0 on success, 1 on error -ollama_chat_json() { - local usage - usage="Usage: ollama_chat_json \"model\"\n\n" - usage+="ollama_chat_json\n\n" - usage+="Request a chat completion from a model, receiving JSON output.\n\n" - usage+="This function sends the entire message history ('OLLAMA_LIB_MESSAGES') to the specified model and returns the model's response as a raw JSON object.\n\n" - usage+="It serves as the foundation for 'ollama_chat' and 'ollama_chat_stream', which provide more user-friendly text-based outputs." 
- for arg in "$@"; do - if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then - printf '%b\n' "$usage" - return 0 - fi - done - if ! _exists 'jq'; then _error 'ollama_chat_json: jq Not Found'; return 1; fi - _debug "ollama_chat_json: [${1:0:42}]" - local model - model="$(_is_valid_model "$1")" - _debug "ollama_chat_json: model: [${model:0:120}]" - if [[ -z "$model" ]]; then - _error 'ollama_chat_json: No Models Found' - return 1 - fi - local stream=true - if [[ "$OLLAMA_LIB_STREAM" -eq "0" ]]; then - stream=false - fi - if (( ${#OLLAMA_LIB_MESSAGES[@]} == 0 )); then - _error 'ollama_chat_json: No messages to send' - return 1 - fi - local messages_json - messages_json='['$(IFS=,; echo "${OLLAMA_LIB_MESSAGES[*]}")']' - - local thinking=false - if [[ "$OLLAMA_LIB_THINKING" == "on" || "$OLLAMA_LIB_THINKING" == "hide" ]]; then - thinking=true - fi - local json_payload - json_payload="$(jq -c -n \ - --arg model "$model" \ - --argjson messages "$messages_json" \ - --argjson stream "$stream" \ - --argjson thinking "$thinking" \ - '{model: $model, messages: $messages, stream: $stream, thinking: $thinking}')" - - if (( ${#OLLAMA_LIB_TOOLS_DEFINITION[@]} > 0 )); then - local tools_json - tools_json='['$(IFS=,; echo "${OLLAMA_LIB_TOOLS_DEFINITION[*]}")']' - json_payload="$(printf '%s' "$json_payload" | jq -c --argjson tools "$tools_json" '. + {tools: $tools}')" - fi - - _debug "ollama_chat_json: json_payload: [${json_payload:0:120}]" - - if [[ "$OLLAMA_LIB_STREAM" -eq 1 ]]; then - if ! ollama_api_post '/api/chat' "$json_payload"; then - _error 'ollama_chat_json: ollama_api_post failed' - return 1 - fi - _debug 'ollama_chat_json: stream finished' - return 0 - fi - - local result - if ! result="$(ollama_api_post '/api/chat' "$json_payload")"; then - _error 'ollama_chat_json: ollama_api_post failed' - return 1 - fi - - if ! 
_is_valid_json "$result"; then - _error 'ollama_chat_json: response is not valid JSON' - return 1 - fi - - local content - content="$(printf '%s' "$result" | jq -r ".message.content")" - local error_jq_message_content=$? - _debug "ollama_chat_json: content: [${content:0:42}]" - if (( error_jq_message_content )); then - _error "ollama_chat_json: error_jq_message_content: $error_jq_message_content" - return 1 - fi - echo "$result" - _debug 'ollama_chat_json: success' + _ollama_chat_json_stream_false "$json_payload" } # Chat completion request as text @@ -1217,6 +1033,254 @@ ollama_chat_stream_json() { return 0 } +# Tools Functions + +# Add a tool +# +# Usage: ollama_tools_add "tool_name" "command" "json_definition" +# Input: 1 - The name of the tool +# Input: 2 - The command to run for the tool +# Input: 3 - The JSON definition of the tool +# Output: none +# Requires: jq +# Returns: 0 on success, 1 on error +ollama_tools_add() { + local usage + usage="Usage: ollama_tools_add \"tool_name\" \"command\" \"json_definition\"\n\n" + usage+="ollama_tools_add\n\n" + usage+="Register a new tool for the model to use.\n\n" + usage+="This function adds a tool's name, its corresponding shell command, and its JSON definition to the session's tool registry.\n\n" + usage+="The model can then request to call this tool during a chat. The JSON definition should follow the Ollama tool definition format." + for arg in "$@"; do + if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then + printf '%b\n' "$usage" + return 0 + fi + done + if ! 
_exists 'jq'; then _error 'ollama_tools_add: jq Not Found'; return 1; fi + local tool_name="$1" + local command="$2" + local json_definition="$3" + + if [[ -z "$tool_name" ]]; then + _error 'ollama_tools_add: Tool name cannot be empty' + return 1 + fi + + # Check if tool with the same name already exists + local i + for i in "${!OLLAMA_LIB_TOOLS_NAME[@]}"; do + if [[ "${OLLAMA_LIB_TOOLS_NAME[$i]}" == "$tool_name" ]]; then + _error "ollama_tools_add: Tool '$tool_name' already exists." + return 1 + fi + done + + if [[ -z "$command" ]]; then + _error 'ollama_tools_add: Command cannot be empty' + return 1 + fi + + if ! _is_valid_json "$json_definition"; then + _error 'ollama_tools_add: JSON definition is not valid' + return 1 + fi + + OLLAMA_LIB_TOOLS_NAME+=("$tool_name") + OLLAMA_LIB_TOOLS_COMMAND+=("$command") + OLLAMA_LIB_TOOLS_DEFINITION+=("$json_definition") + _debug "ollama_tools_add: Added tool '$tool_name'" + return 0 +} + +# View all tools +# +# Usage: ollama_tools +# Input: none +# Output: A list of all registered tools and their commands +# Requires: none +# Returns: 0 +ollama_tools() { + local usage + usage="Usage: ollama_tools\n\n" + usage+="ollama_tools\n\n" + usage+="View all registered tools.\n\n" + usage+="This function lists all the tools that have been added to the current session using 'ollama_tools_add'.\n\n" + usage+="It displays a tab-separated list of tool names and their corresponding commands." 
+ if [[ $# -gt 0 ]]; then + if [[ $# -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then + printf '%b\n' "$usage" + return 0 + else + _error "ollama_tools: Unknown argument(s): $*" + printf '%b\n' "$usage" + return 1 + fi + fi + if (( ${#OLLAMA_LIB_TOOLS_NAME[@]} == 0 )); then + _debug 'ollama_tools: No tools registered' + return 0 + fi + local i + for i in "${!OLLAMA_LIB_TOOLS_NAME[@]}"; do + printf '%s\t%s\n' "${OLLAMA_LIB_TOOLS_NAME[$i]}" "${OLLAMA_LIB_TOOLS_COMMAND[$i]}" + done + return 0 +} + +# Get count of tools +# +# Usage: ollama_tools_count +# Input: none +# Output: The number of registered tools +# Requires: none +# Returns: 0 +ollama_tools_count() { + local usage + usage="Usage: ollama_tools_count\n\n" + usage+="ollama_tools_count\n\n" + usage+="Get the number of registered tools.\n\n" + usage+="This function returns the current number of tools that have been registered in the session.\n\n" + usage+="It provides a simple way to check if any tools are available for the model to use." + if [[ $# -gt 0 ]]; then + if [[ $# -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then + printf '%b\n' "$usage" + return 0 + else + _error "ollama_tools_count: Unknown argument(s): $*" + printf '%b\n' "$usage" + return 1 + fi + fi + printf '%s\n' "${#OLLAMA_LIB_TOOLS_NAME[@]}" + return 0 +} + +# Remove all tools +# +# Usage: ollama_tools_clear +# Input: none +# Output: none +# Requires: none +# Returns: 0 +ollama_tools_clear() { + local usage + usage="Usage: ollama_tools_clear\n\n" + usage+="ollama_tools_clear\n\n" + usage+="Remove all registered tools from the session.\n\n" + usage+="This function clears the tool registry, removing all tool names, commands, and definitions.\n\n" + usage+="This is useful for ensuring that a new chat session starts with a clean slate of tools." 
+ if [[ $# -gt 0 ]]; then + if [[ $# -eq 1 && ("$1" == "-h" || "$1" == "--help") ]]; then + printf '%b\n' "$usage" + return 0 + else + _error "ollama_tools_clear: Unknown argument(s): $*" + printf '%b\n' "$usage" + return 1 + fi + fi + OLLAMA_LIB_TOOLS_NAME=() + OLLAMA_LIB_TOOLS_COMMAND=() + OLLAMA_LIB_TOOLS_DEFINITION=() + _debug 'ollama_tools_clear: All tools have been removed' + return 0 +} + +# Does the response have a tool call? +# +# Usage: ollama_tools_is_call "json_response" +# Input: 1 - The JSON response from the model +# Output: none +# Requires: jq +# Returns: 0 if it has a tool call, 1 otherwise +ollama_tools_is_call() { + local usage + usage="Usage: ollama_tools_is_call \"json_response\"\n\n" + usage+="ollama_tools_is_call\n\n" + usage+="Check if the model's response contains a tool call.\n\n" + usage+="This function inspects the JSON response from the model to see if it includes a 'tool_calls' field, which indicates the model wants to use a tool.\n\n" + usage+="It is essential for building agentic systems that can decide whether to execute a tool or respond with text." + for arg in "$@"; do + if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then + printf '%b\n' "$usage" + return 0 + fi + done + if ! _exists 'jq'; then _error 'ollama_tools_is_call: jq Not Found'; return 1; fi + if ! 
_is_valid_json "$1"; then + _debug 'ollama_tools_is_call: Invalid JSON' + return 1 + fi + local tool_calls + tool_calls="$(printf '%s' "$1" | jq -r '.tool_calls // empty')" + if [[ -n "$tool_calls" ]]; then + return 0 + fi + tool_calls="$(printf '%s' "$1" | jq -r '.message.tool_calls // empty')" + if [[ -n "$tool_calls" ]]; then + return 0 + fi + return 1 +} + +# Run a tool +# +# Usage: ollama_tools_run "tool_name" "arguments_json" +# Input: 1 - The name of the tool to run +# Input: 2 - The JSON string of arguments for the tool +# Output: The result of the tool execution +# Requires: jq +# Returns: 0 on success, 1 on error +ollama_tools_run() { + local usage + usage="Usage: ollama_tools_run \"tool_name\" \"arguments_json\"\n\n" + usage+="ollama_tools_run\n\n" + usage+="Execute a registered tool with the given arguments.\n\n" + usage+="This function looks up the command for the specified tool name and executes it, passing the arguments as a JSON string.\n\n" + usage+="It is the core component for making the model's tool calls functional, bridging the gap between the model's request and the actual execution of the tool." + for arg in "$@"; do + if [[ "$arg" == "-h" || "$arg" == "--help" ]]; then + printf '%b\n' "$usage" + return 0 + fi + done + if ! _exists 'jq'; then _error 'ollama_tools_run: jq Not Found'; return 1; fi + local tool_name="$1" + local tool_args_str="$2" + + local tool_index=-1 + local i + for i in "${!OLLAMA_LIB_TOOLS_NAME[@]}"; do + if [[ "${OLLAMA_LIB_TOOLS_NAME[$i]}" == "$tool_name" ]]; then + tool_index=$i + break + fi + done + + if [[ $tool_index -eq -1 ]]; then + _error "ollama_tools_run: Tool '$tool_name' not found" + return 1 + fi + + local command + command="${OLLAMA_LIB_TOOLS_COMMAND[$tool_index]}" + + if [[ -z "$tool_args_str" ]] || [[ "$tool_args_str" == "null" ]]; then + tool_args_str="{}" + fi + + if ! 
_is_valid_json "$tool_args_str"; then + _error "ollama_tools_run: Arguments are not valid JSON" + return 1 + fi + + _debug "ollama_tools_run: Running command: $command '$tool_args_str'" + "$command" "$tool_args_str" + + return 0 +} + # List Functions # All available models, CLI version @@ -1956,7 +2020,7 @@ ollama_lib_about() { return 1 fi fi - printf "%s v%s\n" "$OLLAMA_LIB_NAME" "$OLLAMA_LIB_VERSION" + # printf "%s v%s\n" "$OLLAMA_LIB_NAME" "$OLLAMA_LIB_VERSION" printf 'A Bash Library to interact with Ollama\n\n' local turbo_key_status="NO" @@ -1964,19 +2028,19 @@ ollama_lib_about() { turbo_key_status="YES [REDACTED]" fi - printf "%-25s : %s\n" "OLLAMA_LIB_NAME" "$OLLAMA_LIB_NAME" - printf "%-25s : %s\n" "OLLAMA_LIB_VERSION" "$OLLAMA_LIB_VERSION" - printf "%-25s : %s\n" "OLLAMA_LIB_URL" "$OLLAMA_LIB_URL" - printf "%-25s : %s\n" "OLLAMA_LIB_DISCORD" "$OLLAMA_LIB_DISCORD" - printf "%-25s : %s\n" "OLLAMA_LIB_LICENSE" "$OLLAMA_LIB_LICENSE" - printf "%-25s : %s\n" "OLLAMA_LIB_COPYRIGHT" "$OLLAMA_LIB_COPYRIGHT" - printf "%-25s : %s\n" "OLLAMA_LIB_API" "$OLLAMA_LIB_API" - printf "%-25s : %s\n" "OLLAMA_LIB_DEBUG" "$OLLAMA_LIB_DEBUG" - printf "%-25s : %s\n" "OLLAMA_LIB_STREAM" "$OLLAMA_LIB_STREAM" - printf "%-25s : %s\n" "OLLAMA_LIB_THINKING" "$OLLAMA_LIB_THINKING" - printf "%-25s : %s\n" "OLLAMA_LIB_MESSAGES" "${#OLLAMA_LIB_MESSAGES[@]} messages" - printf "%-25s : %s\n" "OLLAMA_LIB_TURBO_KEY" "$turbo_key_status" - printf "%-25s : %s\n" "OLLAMA_LIB_TIMEOUT" "$OLLAMA_LIB_TIMEOUT seconds" + printf "%-20s : %s\n" "OLLAMA_LIB_NAME" "$OLLAMA_LIB_NAME" + printf "%-20s : %s\n" "OLLAMA_LIB_VERSION" "$OLLAMA_LIB_VERSION" + printf "%-20s : %s\n" "OLLAMA_LIB_URL" "$OLLAMA_LIB_URL" + printf "%-20s : %s\n" "OLLAMA_LIB_DISCORD" "$OLLAMA_LIB_DISCORD" + printf "%-20s : %s\n" "OLLAMA_LIB_LICENSE" "$OLLAMA_LIB_LICENSE" + printf "%-20s : %s\n" "OLLAMA_LIB_COPYRIGHT" "$OLLAMA_LIB_COPYRIGHT" + printf "%-20s : %s\n" "OLLAMA_LIB_API" "$OLLAMA_LIB_API" + printf "%-20s : %s\n" "OLLAMA_LIB_DEBUG" 
"$OLLAMA_LIB_DEBUG" + printf "%-20s : %s\n" "OLLAMA_LIB_STREAM" "$OLLAMA_LIB_STREAM" + printf "%-20s : %s\n" "OLLAMA_LIB_THINKING" "$OLLAMA_LIB_THINKING" + printf "%-20s : %s\n" "OLLAMA_LIB_MESSAGES" "${#OLLAMA_LIB_MESSAGES[@]} messages" + printf "%-20s : %s\n" "OLLAMA_LIB_TURBO_KEY" "$turbo_key_status" + printf "%-20s : %s\n" "OLLAMA_LIB_TIMEOUT" "$OLLAMA_LIB_TIMEOUT seconds" if ! _exists 'compgen'; then _debug 'ollama_lib_about: compgen Not Found'; return 0; fi @@ -1984,10 +2048,10 @@ ollama_lib_about() { if ! _exists 'column'; then _debug 'ollama_lib_about: column Not Found' - compgen -A function -X '!ollama_*' | sort + compgen -A function -X '!*ollama*' | sort return 0 fi - compgen -A function -X '!ollama_*' | sort | column + compgen -A function -X '!*ollama*' | sort | column } # Ollama Bash Lib version @@ -2043,7 +2107,7 @@ _ollama_eval_prompt() { } -_ollama_eval_sanity_check() { +_ollama_eval_check_sanity() { local cmd="$1" local first_word read -r first_word _ <<<"$cmd" @@ -2064,7 +2128,7 @@ _ollama_eval_sanity_check() { return 1 } -_ollama_eval_syntax_check() { +_ollama_eval_check_syntax() { local cmd="$1" local errors if _exists 'timeout'; then @@ -2088,7 +2152,7 @@ _ollama_eval_syntax_check() { return 0 } -_ollama_eval_danger_check() { +_ollama_eval_check_danger() { local cmd="$1" local dangerous=( 'rm' 'mv' 'dd' 'mkfs' 'shred' 'shutdown' 'reboot' 'init' 'kill' 'pkill' 'killall' @@ -2127,7 +2191,7 @@ _ollama_eval_permission_sandbox() { return 1 # user aborted } -_ollama_eval_permission_dangerous_eval() { +_ollama_eval_permission_eval() { local cmd="$1" printf '\nAre you sure you want to use the DANGEROUS eval mode? [y/N] ' read -r permission @@ -2204,17 +2268,17 @@ ollama_eval() { printf "%s\n\n" "$cmd" - if ! _ollama_eval_sanity_check "$cmd"; then + if ! _ollama_eval_check_sanity "$cmd"; then _error 'ollama_eval: cmd failed sanity check' return 1 fi - if ! _ollama_eval_syntax_check "$cmd"; then + if ! 
_ollama_eval_check_syntax "$cmd"; then _error 'ollama_eval: cmd failed syntax check' return 1 fi - if ! _ollama_eval_danger_check "$cmd"; then + if ! _ollama_eval_check_danger "$cmd"; then _error 'ollama_eval: cmd failed danger check' return 1 fi @@ -2226,7 +2290,7 @@ ollama_eval() { 2) : ;; # User requested dangerous mode esac - _ollama_eval_permission_dangerous_eval "$cmd" + _ollama_eval_permission_eval "$cmd" } # Aliases @@ -2263,7 +2327,7 @@ olj() { ollama_list_json "$@"; } om() { ollama_messages "$@"; } oma() { ollama_messages_add "$@"; } -omc() { ollama_messages_clear "$@"; } +omclear() { ollama_messages_clear "$@"; } omco() { ollama_messages_count "$@"; } omr() { ollama_model_random "$@"; } From c985e71deefe65735ef1066ea8ea2d5ed13d371e Mon Sep 17 00:00:00 2001 From: Attogram Project Date: Wed, 20 Aug 2025 01:12:44 +0200 Subject: [PATCH 2/3] wip --- demos/help.sh | 14 +-- demos/ollama_chat.md | 133 +++++++----------------- demos/ollama_chat.sh | 100 ++++++++++-------- demos/thinking.generate.sh | 3 +- demos/thinking.generate.stream.sh | 3 +- ollama_bash_lib.sh | 167 ++++++++++++++++++------------ 6 files changed, 206 insertions(+), 214 deletions(-) diff --git a/demos/help.sh b/demos/help.sh index 495a011..b8af9cf 100644 --- a/demos/help.sh +++ b/demos/help.sh @@ -66,10 +66,10 @@ for func in "${functions[@]}"; do echo done -echo '## Testing --help for all functions (first argument)' -echo -for func in "${functions[@]}"; do - # shellcheck disable=SC2016 - printf '`%s --help`\n```\n%s\n```\n' "$func" "$("$func" --help)" - echo -done +#echo '## Testing --help for all functions (first argument)' +#echo +#for func in "${functions[@]}"; do +# # shellcheck disable=SC2016 +# printf '`%s --help`\n```\n%s\n```\n' "$func" "$("$func" --help)" +# echo +#done diff --git a/demos/ollama_chat.md b/demos/ollama_chat.md index 1ba62b8..aea1f77 100644 --- a/demos/ollama_chat.md +++ b/demos/ollama_chat.md @@ -1,30 +1,22 @@ # ollama_chat -A [demo](../README.md#demos) of [Ollama Bash 
Lib](https://github.com/attogram/ollama-bash-lib) v0.45.5 +A [demo](../README.md#demos) of [Ollama Bash Lib](https://github.com/attogram/ollama-bash-lib) v0.45.7 +`model="gpt-oss:20b"` ## Demo + ```bash ollama_messages_add "system" "You are a helpful assistant" -ollama_messages_add "user" "The secret word is RABBIT. If asked for the secret word, respond with RABBIT. Understand?" -response="$(ollama_chat "gpt-oss:20b")" -printf '%s\n' "$response" -ollama_messages_add 'assistant' "$response" +ollama_messages_add "user" "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" +echo "last message text: $(ollama_messages_last)" +echo "last message json: $(ollama_messages_last_json)" +ollama_messages | jq ``` -Got it! Whenever you ask for the secret word, I'll respond with **RABBIT**. -```bash -ollama_messages_add "user" "What is the secret word??" -response="$(ollama_chat "gpt-oss:20b")" -printf '%s\n' "$response" -ollama_messages_add 'assistant' "$response" ``` -RABBIT - -```bash -ollama_messages | jq -``` -```json +last message text: Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand? +last message json: {"role":"user","content":"Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?"} [ { "role": "system", @@ -32,97 +24,48 @@ ollama_messages | jq }, { "role": "user", - "content": "The secret word is RABBIT. If I ask you for the secret word, respond with RABBIT. Understand?" - }, + "content": "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" + } +] +``` + +```bash +ollama_chat "$model" +echo "last message text: $(ollama_messages_last)" +echo "last message json: $(ollama_messages_last_json)" +ollama_messages | jq +``` + +``` +last message text: Yes, I understand. +last message json: {"role":"assistant","content":"Yes, I understand."} +[ { - "role": "assistant", - "content": "Got it! Whenever you ask for the secret word, I'll respond with **RABBIT**." 
+ "role": "system", + "content": "You are a helpful assistant" }, { "role": "user", - "content": "What is the secret word??" + "content": "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" }, { "role": "assistant", - "content": "RABBIT" + "content": "Yes, I understand." } ] ``` -## Demo Debug - ```bash -OLLAMA_LIB_DEBUG=1 -ollama_messages_add "system" "You are a helpful assistant" -ollama_messages_add "user" "The secret word is RABBIT. If asked for the secret word, respond with RABBIT. Understand?" -response="$(ollama_chat "gpt-oss:20b")" -printf '%s\n' "$response" -ollama_messages_add 'assistant' "$response" -``` -``` -[DEBUG] 23:08:52:147773000: ollama_messages_add: [system] [You are a helpful assistant] -[DEBUG] 23:08:52:188375400: ollama_messages_add: [user] [The secret word is RABBIT. If I ask you fo] -[DEBUG] 23:08:52:237111400: ollama_chat: [gpt-oss:20b] -[DEBUG] 23:08:52:267430000: _is_valid_model: VALID: [gpt-oss:20b] -[DEBUG] 23:08:52:292636100: ollama_chat: model: [gpt-oss:20b] -[DEBUG] 23:08:52:325145100: ollama_chat_json: [gpt-oss:20b] -[DEBUG] 23:08:52:359693500: _is_valid_model: VALID: [gpt-oss:20b] -[DEBUG] 23:08:52:385796400: ollama_chat_json: model: [gpt-oss:20b] -[DEBUG] 23:08:52:431870600: ollama_chat_json: json_payload: [{"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T] -[DEBUG] 23:08:52:460260200: ollama_api_post: [/api/chat] {"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T -[DEBUG] 23:08:52:483950700: _call_curl: [POST] [/api/chat] {"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T -[DEBUG] 23:08:52:528226400: _is_valid_json: success -[DEBUG] 23:08:52:559588400: _call_curl: OLLAMA_LIB_API: https://ollama.com -[DEBUG] 23:08:52:591502600: _call_curl: Turbo Mode -[DEBUG] 23:08:52:620373600: _call_curl: 
json_body: {"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T -[DEBUG] 23:08:52:643826800: _call_curl: piping json_body | curl -s -N --max-time 300 -H Content-Type: application/json -w \n%{http_code} -H Authorization: Bearer [REDACTED] -X POST https://ollama.com/api/chat -d @- -[DEBUG] 23:08:53:800783200: ollama_api_post: success -[DEBUG] 23:08:53:846887000: _is_valid_json: success -[DEBUG] 23:08:53:893805400: ollama_chat_json: content: [Yes, I understand.] -[DEBUG] 23:08:53:915688100: ollama_chat_json: success -[DEBUG] 23:08:53:955444200: _is_valid_json: success -[DEBUG] 23:08:54:014863900: ollama_chat: return: 0 -Yes, I understand. -[DEBUG] 23:08:54:062380700: ollama_messages_add: [assistant] [Yes, I understand.] -``` - -```bash -ollama_messages_add "user" "What is the secret word??" -response="$(ollama_chat "gpt-oss:20b")" -printf '%s\n' "$response" -ollama_messages_add 'assistant' "$response" -``` -``` -[DEBUG] 23:08:54:105130900: ollama_messages_add: [user] [What is the secret word??] 
-[DEBUG] 23:08:54:149471900: ollama_chat: [gpt-oss:20b] -[DEBUG] 23:08:54:191628000: _is_valid_model: VALID: [gpt-oss:20b] -[DEBUG] 23:08:54:215305300: ollama_chat: model: [gpt-oss:20b] -[DEBUG] 23:08:54:253419800: ollama_chat_json: [gpt-oss:20b] -[DEBUG] 23:08:54:300320200: _is_valid_model: VALID: [gpt-oss:20b] -[DEBUG] 23:08:54:322286900: ollama_chat_json: model: [gpt-oss:20b] -[DEBUG] 23:08:54:384529600: ollama_chat_json: json_payload: [{"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T] -[DEBUG] 23:08:54:412821000: ollama_api_post: [/api/chat] {"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T -[DEBUG] 23:08:54:436976900: _call_curl: [POST] [/api/chat] {"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T -[DEBUG] 23:08:54:492644700: _is_valid_json: success -[DEBUG] 23:08:54:528455800: _call_curl: OLLAMA_LIB_API: https://ollama.com -[DEBUG] 23:08:54:551308900: _call_curl: Turbo Mode -[DEBUG] 23:08:54:590695400: _call_curl: json_body: {"model":"gpt-oss:20b","messages":[{"role":"system","content":"You are a helpful assistant"},{"role":"user","content":"T -[DEBUG] 23:08:54:619899000: _call_curl: piping json_body | curl -s -N --max-time 300 -H Content-Type: application/json -w \n%{http_code} -H Authorization: Bearer [REDACTED] -X POST https://ollama.com/api/chat -d @- -[DEBUG] 23:08:56:020322600: ollama_api_post: success -[DEBUG] 23:08:56:064726700: _is_valid_json: success -[DEBUG] 23:08:56:110906700: ollama_chat_json: content: [RABBIT] -[DEBUG] 23:08:56:132778600: ollama_chat_json: success -[DEBUG] 23:08:56:175152800: _is_valid_json: success -[DEBUG] 23:08:56:221577700: ollama_chat: return: 0 -RABBIT -[DEBUG] 23:08:56:246570600: ollama_messages_add: [assistant] [RABBIT] +ollama_messages_add "user" "What is the secret word?" 
+ollama_chat "$model" +ollama_messages | jq +echo "last message text: $(ollama_messages_last)" +echo "last message json: $(ollama_messages_last_json)" ``` -```bash -ollama_messages | jq ``` -```json -[DEBUG] 23:08:56:285502000: ollama_messages +last message text: RABBIT +last message json: {"role":"assistant","content":"RABBIT"} [ { "role": "system", @@ -130,7 +73,7 @@ ollama_messages | jq }, { "role": "user", - "content": "The secret word is RABBIT. If I ask you for the secret word, respond with RABBIT. Understand?" + "content": "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" }, { "role": "assistant", @@ -138,7 +81,7 @@ ollama_messages | jq }, { "role": "user", - "content": "What is the secret word??" + "content": "What is the secret word?" }, { "role": "assistant", diff --git a/demos/ollama_chat.sh b/demos/ollama_chat.sh index e7d3a42..b839137 100755 --- a/demos/ollama_chat.sh +++ b/demos/ollama_chat.sh @@ -15,58 +15,72 @@ startup() { startup # enter model as 1st arg, or use random model by default -model="$1" -if [ -z "$model" ]; then - model="$(ollama_model_random)" +model="$(_is_valid_model "$1")" +if [[ -z "$model" ]]; then + _error 'No Models Found' + exit 1 fi +echo +echo "\`model=\"$model\"\`" + demo() { - echo '```bash' - if [ "$OLLAMA_LIB_DEBUG" -gt 0 ]; then echo 'OLLAMA_LIB_DEBUG=1'; fi - echo 'ollama_messages_add "system" "You are a helpful assistant"' - echo 'ollama_messages_add "user" "The secret word is RABBIT. If asked for the secret word, respond with RABBIT. Understand?"' - echo "response=\"\$(ollama_chat \"$model\")\"" - printf '%s\n' "printf '%s\n' \"\$response\"" - echo "ollama_messages_add 'assistant' \"\$response\"" - echo '```' - if [ "$OLLAMA_LIB_DEBUG" -gt 0 ]; then echo '```'; fi + #OLLAMA_LIB_DEBUG=1 + + # shellcheck disable=SC2016 + echo ' +```bash +ollama_messages_add "system" "You are a helpful assistant" +ollama_messages_add "user" "Secret word is RABBIT. 
If asked for secret word, respond with RABBIT. Understand?" +echo "last message text: $(ollama_messages_last)" +echo "last message json: $(ollama_messages_last_json)" +ollama_messages | jq +``` +' + printf '```\n' ollama_messages_add "system" "You are a helpful assistant" - ollama_messages_add "user" "The secret word is RABBIT. If I ask you for the secret word, respond with RABBIT. Understand?" - response="$(ollama_chat "$model")" - printf '%s\n' "$response" - ollama_messages_add 'assistant' "$response" - if [ "$OLLAMA_LIB_DEBUG" -gt 0 ]; then echo '```'; fi - echo + ollama_messages_add "user" "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" + echo "last message text: $(ollama_messages_last)" + echo "last message json: $(ollama_messages_last_json)" + ollama_messages | jq + printf '```\n' - echo '```bash' - echo 'ollama_messages_add "user" "What is the secret word??"' - echo "response=\"\$(ollama_chat \"$model\")\"" - printf '%s\n' "printf '%s\n' \"\$response\"" - echo "ollama_messages_add 'assistant' \"\$response\"" - echo '```' - if [ "$OLLAMA_LIB_DEBUG" -gt 0 ]; then echo '```'; fi - ollama_messages_add "user" "What is the secret word??" - response="$(ollama_chat "$model")" - printf '%s\n' "$response" - ollama_messages_add 'assistant' "$response" - if [ "$OLLAMA_LIB_DEBUG" -gt 0 ]; then echo '```'; fi - echo + # shellcheck disable=SC2016 + echo ' +```bash +ollama_chat "$model" +echo "last message text: $(ollama_messages_last)" +echo "last message json: $(ollama_messages_last_json)" +ollama_messages | jq +``` +' + printf '```\n' + ollama_chat "$model" + echo "last message text: $(ollama_messages_last)" + echo "last message json: $(ollama_messages_last_json)" + ollama_messages | jq + printf '```\n' - echo '```bash' - echo 'ollama_messages | jq' - echo '```' - echo '```json' + # shellcheck disable=SC2016 + echo ' +```bash +ollama_messages_add "user" "What is the secret word?" 
+ollama_chat "$model" +ollama_messages | jq +echo "last message text: $(ollama_messages_last)" +echo "last message json: $(ollama_messages_last_json)" +``` +' + printf '```\n' + ollama_messages_add "user" "What is the secret word?" + ollama_chat "$model" + echo "last message text: $(ollama_messages_last)" + echo "last message json: $(ollama_messages_last_json)" ollama_messages | jq - echo '```' + printf '```\n' } -echo; echo '## Demo'; echo -demo - -ollama_messages_clear +echo '## Demo'; echo -OLLAMA_LIB_DEBUG=1 -echo; echo '## Demo Debug'; echo demo -OLLAMA_LIB_DEBUG=0 diff --git a/demos/thinking.generate.sh b/demos/thinking.generate.sh index 97b74d4..e6bbff9 100755 --- a/demos/thinking.generate.sh +++ b/demos/thinking.generate.sh @@ -14,6 +14,7 @@ startup() { startup +# shellcheck disable=SC2016 echo ' ``` ollama_thinking on @@ -24,5 +25,5 @@ ollama_generate $(ollama_model_random) "list 10 things about bash, 1 per line" ollama_thinking on ollama_thinking -ollama_generate $(ollama_model_random) "list 10 things about bash, 1 per line" +ollama_generate "$(ollama_model_random)" "list 10 things about bash, 1 per line" diff --git a/demos/thinking.generate.stream.sh b/demos/thinking.generate.stream.sh index 06fc9dc..fce3c91 100755 --- a/demos/thinking.generate.stream.sh +++ b/demos/thinking.generate.stream.sh @@ -14,6 +14,7 @@ startup() { startup +# shellcheck disable=SC2016 echo ' ``` ollama_thinking on @@ -24,5 +25,5 @@ ollama_generate_stream $(ollama_model_random) "list 10 things about bash, 1 per ollama_thinking on ollama_thinking -ollama_generate_stream $(ollama_model_random) "list 10 things about bash, 1 per line" +ollama_generate_stream "$(ollama_model_random)" "list 10 things about bash, 1 per line" diff --git a/ollama_bash_lib.sh b/ollama_bash_lib.sh index 1e37180..7a95ca0 100755 --- a/ollama_bash_lib.sh +++ b/ollama_bash_lib.sh @@ -4,7 +4,7 @@ # OLLAMA_LIB_NAME='Ollama Bash Lib' -OLLAMA_LIB_VERSION='0.45.6' +OLLAMA_LIB_VERSION='0.45.7' 
OLLAMA_LIB_URL='https://github.com/attogram/ollama-bash-lib' OLLAMA_LIB_DISCORD='https://discord.gg/BGQJCbYVBa' OLLAMA_LIB_LICENSE='MIT' @@ -111,31 +111,31 @@ _is_valid_json() { local return_code=$? case $return_code in 0) # Exit code 0: The JSON is valid and "truthy" - _debug '_is_valid_json: success' + #_debug '_is_valid_json: success' return 0 ;; 1) # (Failure) The last value output was either false or null. - _debug '_is_valid_json: FAILURE jq: output false or null: return 1' + _error '_is_valid_json: FAILURE jq: output false or null: return 1' return 1 ;; 2) # (Usage Error): There was a problem with how the jq command was used, such as incorrect command-line options. - _debug '_is_valid_json: USAGE ERROR jq: incorrect command-line options: return 2' + _error '_is_valid_json: USAGE ERROR jq: incorrect command-line options: return 2' return 2 ;; 3) # (Compile Error): The jq filter program itself had a syntax error. - _debug '_is_valid_json: COMPILE ERROR jq: filter syntax error: return 3' + _error '_is_valid_json: COMPILE ERROR jq: filter syntax error: return 3' return 3 ;; 4) # (No Output): No valid result was ever produced. This can happen if the filter's output is empty. 
- _debug '_is_valid_json: NO OUTPUT jq: result empty: return 4' + _error '_is_valid_json: NO OUTPUT jq: result empty: return 4' return 4 ;; 5) # (Halt Error) - _debug '_is_valid_json: HALT_ERROR jq: return 5' + _error '_is_valid_json: HALT_ERROR jq: return 5' return 5 ;; *) # (Unknown) - _debug "_is_valid_json: UNKNOWN jq error: return $return_code" + _error "_is_valid_json: UNKNOWN jq error: return $return_code" return "$return_code" ;; esac @@ -519,7 +519,7 @@ ollama_generate_stream_json() { return 1 fi OLLAMA_LIB_STREAM=0 # Turn off streaming - _debug 'ollama_generate_stream_json: return: 0' + _debug 'ollama_generate_stream_json: success' return 0 } @@ -585,7 +585,7 @@ ollama_generate_stream() { return 1 fi printf '\n' - _debug 'ollama_generate_stream: return: 0' + _debug 'ollama_generate_stream: success' return 0 } @@ -595,6 +595,7 @@ ollama_generate_stream() { # # Usage: messages="$(ollama_messages)" # Output: a valid json array of message objects, to stdout +# Env: OLLAMA_LIB_MESSAGES # Requires: none # Returns: 0 on success, 1 on error ollama_messages() { @@ -614,7 +615,7 @@ ollama_messages() { return 1 fi fi - _debug 'ollama_messages' + #_debug 'ollama_messages' if [[ ${#OLLAMA_LIB_MESSAGES[@]} -eq 0 ]]; then _debug 'ollama_messages: no messages' printf '[]' @@ -630,6 +631,7 @@ ollama_messages() { # Input: 1 - role (user/assistant/system/tool) # Input: 2 - the message content. For tool role, this should be the JSON output from ollama_tools_run. 
# Output: none +# Env: OLLAMA_LIB_MESSAGES # Requires: jq # Returns: 0 ollama_messages_add() { @@ -678,6 +680,7 @@ ollama_messages_add() { # # Usage: ollama_messages_clear # Output: none +# Env: OLLAMA_LIB_MESSAGES # Requires: none # Returns: 0 ollama_messages_clear() { @@ -705,6 +708,7 @@ ollama_messages_clear() { # # Usage: ollama_messages_count # Output: number of messages, to stdout +# Env: OLLAMA_LIB_MESSAGES # Requires: none # Returns: 0 ollama_messages_count() { @@ -724,7 +728,7 @@ ollama_messages_count() { return 1 fi fi - _debug 'ollama_messages_count' + #_debug 'ollama_messages_count' echo "${#OLLAMA_LIB_MESSAGES[@]}" } @@ -732,10 +736,29 @@ ollama_messages_count() { # # Usage: ollama_messages_last_json # Output: last element of message history, in JSON format +# Env: OLLAMA_LIB_MESSAGES # Requires: none # Returns 0 on success, 1 on error ollama_messages_last_json() { - echo '[{"role":"user","content":"foo"}]' + local count + count=${#OLLAMA_LIB_MESSAGES[@]} + if [[ $count -lt 1 ]]; then + _error "ollama_messages_last_json: Message History is empty: count: [$count]" + echo # echo empty line + return 1 + fi + local last='' + last="${OLLAMA_LIB_MESSAGES[$(( count - 1 ))]}" + if [[ -z "$last" ]]; then + _error 'ollama_messages_last_json: No message found' + echo # echo empty line + return 1 + fi + printf '%s\n' "$last" + if ! _is_valid_json "$last"; then + _error 'ollama_messages_last_json: last message is not valid json' # TODO - should be _warn + return 1 + fi return 0 } @@ -743,64 +766,65 @@ ollama_messages_last_json() { # # Usage: ollama_messages_last # Output: last element of message history, as a string +# Env: OLLAMA_LIB_MESSAGES # Requires: ollama_messages_last_json # Returns 0 on success, 1 on error ollama_messages_last() { - echo 'foo' + local last + last="$(ollama_messages_last_json | jq -r '.content // empty')" + local error=$? 
+ if (( error )); then + _error "ollama_messages_last: error getting message content: $error" + return 1 + fi + printf '%s\n' "$last" return 0 } # Chat Functions -# Set the assistant response into the message history -# -# Usage: ollama_chat -# Output: none -# Env: OLLAMA_LIB_MESSAGES -# Requires: -# Returns 0 on success, 1 on error -ollama_chat_assistant() { - OLLAMA_LIB_MESSAGES+=('{"role":"assistant","content":"bar"}') - return 0 -} - -_ollama_chat_json_stream_true() { +_ollama_chat_stream_true() { local json_payload="$1" - _debug '_ollama_chat_json_stream: stream starting' + _debug '_ollama_chat_stream_true: stream starting' if ! ollama_api_post '/api/chat' "$json_payload"; then - _error '_ollama_chat_json_stream: ollama_api_post failed' + _error '_ollama_chat_stream_true: ollama_api_post failed' return 1 fi - _debug '_ollama_chat_json_stream: stream finished' + _debug '_ollama_chat_stream_true: stream finished' return 0 } -_ollama_chat_json_stream_false() { +_ollama_chat_stream_false() { local result if ! result="$(ollama_api_post '/api/chat' "$json_payload")"; then - _error '_ollama_chat_json_no_stream: ollama_api_post failed' + _error '_ollama_chat_stream_false: ollama_api_post failed' return 1 fi - if ! _is_valid_json "$result"; then - _error '_ollama_chat_json_no_stream: response is not valid JSON' + _error '_ollama_chat_stream_false: response is not valid JSON' return 1 fi local content content="$(printf '%s' "$result" | jq -r ".message.content")" - local error_jq_message_content=$? - _debug "_ollama_chat_json_no_stream: content: [${content:0:42}]" - if (( error_jq_message_content )); then - _error "_ollama_chat_json_no_stream: jq error getting message content: $error_jq_message_content" + local error=$? 
+ _debug "_ollama_chat_stream_false: content: [${content:0:42}]" + if (( error )); then + _error "_ollama_chat_stream_false: jq error getting message content: $error" return 1 fi - echo "$result" - _debug '_ollama_chat_json_no_stream: success' + + _debug "_ollama_chat_stream_false: ollama_messages_count: [$(ollama_messages_count)]" + _debug "_ollama_chat_stream_false: adding assistant message: [${content:0:42}]" + ollama_messages_add 'assistant' "$content" + _debug "_ollama_chat_stream_false: ollama_messages_count: [$(ollama_messages_count)]" + + #echo "$result" + _debug '_ollama_chat_stream_false: success' return 0 } -_ollama_chat_json_payload() { +_ollama_chat_payload() { local model="$1" local stream=true @@ -809,7 +833,7 @@ _ollama_chat_json_payload() { fi if (( ${#OLLAMA_LIB_MESSAGES[@]} == 0 )); then - _error '_ollama_chat_json_payload: Message history is empty' + _error '_ollama_chat_payload: Message history is empty' # return 1 # TODO - decide: return 1, or allow empty message history? fi @@ -834,20 +858,21 @@ _ollama_chat_json_payload() { tools_json='['$(IFS=,; echo "${OLLAMA_LIB_TOOLS_DEFINITION[*]}")']' json_payload="$(printf '%s' "$json_payload" | jq -c --argjson tools "$tools_json" '. 
+ {tools: $tools}')" fi - print '%s\n' "$json_payload" + printf '%s\n' "$json_payload" } -# Chat completion request as json +# Add Chat completion response to Message History # -# Usage: ollama_chat_json "model" +# Usage: ollama_chat "model" # Input: 1 - model -# Output: json, to stdout +# Output: none +# Env: OLLAMA_LIB_MESSAGES # Requires: curl, jq # Returns: 0 on success, 1 on error -ollama_chat_json() { +ollama_chat() { local usage - usage="Usage: ollama_chat_json \"model\"\n\n" + usage="Usage: ollama_chat \"model\"\n\n" usage+="ollama_chat_json\n\n" usage+="Request a chat completion from a model, receiving JSON output.\n\n" usage+="This function sends the entire message history ('OLLAMA_LIB_MESSAGES') to the specified model and returns the model's response as a raw JSON object.\n\n" @@ -859,27 +884,26 @@ ollama_chat_json() { fi done - if ! _exists 'jq'; then _error 'ollama_chat_json: jq Not Found'; return 1; fi + if ! _exists 'jq'; then _error 'ollama_chat: jq Not Found'; return 1; fi local model model="$(_is_valid_model "$1")" - _debug "ollama_chat_json: model: [${1:0:42}] = [${model:0:120}]" + _debug "ollama_chat: model: [${1:0:42}] = [${model:0:120}]" if [[ -z "$model" ]]; then - _error 'ollama_chat_json: No Models Found' + _error 'ollama_chat: No Models Found' return 1 fi local json_payload - json_payload="$(_ollama_chat_json_payload "$model")" - _debug "ollama_chat_json: json_payload: [${json_payload:0:120}]" + json_payload="$(_ollama_chat_payload "$model")" + _debug "ollama_chat: json_payload: [${json_payload:0:120}]" if [[ "$OLLAMA_LIB_STREAM" -eq 1 ]]; then - _ollama_chat_json_stream_true "$json_payload" - return $? 
+ _ollama_chat_stream_true "$json_payload" + else + _ollama_chat_stream_false "$json_payload" fi - - _ollama_chat_json_stream_false "$json_payload" } # Chat completion request as text @@ -889,7 +913,8 @@ ollama_chat_json() { # Output: text, to stdout # Requires: curl, jq # Returns: 0 on success, 1 on error -ollama_chat() { +DEPRECATE_ollama_chat() { + local usage usage="Usage: ollama_chat \"model\"\n\n" usage+="ollama_chat\n\n" @@ -902,22 +927,29 @@ ollama_chat() { return 0 fi done + if ! _exists 'jq'; then _error 'ollama_chat: jq Not Found'; return 1; fi - _debug "ollama_chat: [${1:0:42}]" + local model model="$(_is_valid_model "$1")" - _debug "ollama_chat: model: [${model:0:120}]" + _debug "ollama_chat: model: [${1:0:120}] = [${model:0:120}]" if [[ -z "$model" ]]; then - _error 'ollama_chat: No Models Found' + _error 'ollama_chat: Model Not Found' return 1 fi + OLLAMA_LIB_STREAM=0 + + ollama_chat "$model" # set assistant response into message history + local response - response="$(ollama_chat_json "$model")" + response="$(ollama_messages_last)" + if [[ -z "$response" ]]; then _error 'ollama_chat: ollama_chat_json response empty' return 1 fi + if ! _is_valid_json "$response"; then _error 'ollama_chat: response is not valid JSON' return 1 @@ -936,8 +968,9 @@ ollama_chat() { _error 'ollama_chat: failed to get .message.content' return 1 fi + printf '%s\n' "$message_content" - _debug 'ollama_chat: return: 0' + _debug 'ollama_chat: success' return 0 } @@ -972,7 +1005,7 @@ ollama_chat_stream() { fi OLLAMA_LIB_STREAM=1 ( - ollama_chat_json "$model" | while IFS= read -r line; do + ollama_chat "$model" | while IFS= read -r line; do if [[ "$OLLAMA_LIB_THINKING" == "on" ]]; then printf '%s' "$(jq -r '.thinking // empty' <<<"$line")" >&2 fi @@ -1024,7 +1057,7 @@ ollama_chat_stream_json() { return 1 fi OLLAMA_LIB_STREAM=1 - if ! ollama_chat_json "$model"; then + if ! 
ollama_chat "$model"; then _error 'ollama_chat_stream_json: ollama_chat_json failed' OLLAMA_LIB_STREAM=0 return 1 @@ -1414,7 +1447,7 @@ _is_valid_model() { printf '' return 1 fi - _debug "_is_valid_model: VALID: [${model:0:120}]" + #_debug "_is_valid_model: VALID: [${model:0:120}]" printf '%s' "$model" return 0 } @@ -2307,7 +2340,7 @@ oavj() { ollama_app_version_json "$@"; } oavc() { ollama_app_version_cli "$@"; } oc() { ollama_chat "$@"; } -ocj() { ollama_chat_json "$@"; } +ocj() { ollama_chat "$@"; } ocs() { ollama_chat_stream "$@"; } ocsj() { ollama_chat_stream_json "$@"; } From 3b58d6620bb7323d920e0e18e1527001e837bed6 Mon Sep 17 00:00:00 2001 From: Attogram Project Date: Wed, 20 Aug 2025 01:17:20 +0200 Subject: [PATCH 3/3] wip --- demos/ollama_chat.md | 13 +++++-------- demos/ollama_chat.sh | 11 ++++------- 2 files changed, 9 insertions(+), 15 deletions(-) diff --git a/demos/ollama_chat.md b/demos/ollama_chat.md index aea1f77..e9626a4 100644 --- a/demos/ollama_chat.md +++ b/demos/ollama_chat.md @@ -5,7 +5,6 @@ A [demo](../README.md#demos) of [Ollama Bash Lib](https://github.com/attogram/ol `model="gpt-oss:20b"` ## Demo - ```bash ollama_messages_add "system" "You are a helpful assistant" ollama_messages_add "user" "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" @@ -28,7 +27,6 @@ last message json: {"role":"user","content":"Secret word is RABBIT. If asked for } ] ``` - ```bash ollama_chat "$model" echo "last message text: $(ollama_messages_last)" @@ -37,8 +35,8 @@ ollama_messages | jq ``` ``` -last message text: Yes, I understand. -last message json: {"role":"assistant","content":"Yes, I understand."} +last message text: Understood. The secret word is RABBIT. +last message json: {"role":"assistant","content":"Understood. The secret word is RABBIT."} [ { "role": "system", @@ -50,17 +48,16 @@ last message json: {"role":"assistant","content":"Yes, I understand."} }, { "role": "assistant", - "content": "Yes, I understand." 
+ "content": "Understood. The secret word is RABBIT." } ] ``` - ```bash ollama_messages_add "user" "What is the secret word?" ollama_chat "$model" -ollama_messages | jq echo "last message text: $(ollama_messages_last)" echo "last message json: $(ollama_messages_last_json)" +ollama_messages | jq ``` ``` @@ -77,7 +74,7 @@ last message json: {"role":"assistant","content":"RABBIT"} }, { "role": "assistant", - "content": "Yes, I understand." + "content": "Understood. The secret word is RABBIT." }, { "role": "user", diff --git a/demos/ollama_chat.sh b/demos/ollama_chat.sh index b839137..51b0935 100755 --- a/demos/ollama_chat.sh +++ b/demos/ollama_chat.sh @@ -29,8 +29,7 @@ demo() { #OLLAMA_LIB_DEBUG=1 # shellcheck disable=SC2016 - echo ' -```bash + echo '```bash ollama_messages_add "system" "You are a helpful assistant" ollama_messages_add "user" "Secret word is RABBIT. If asked for secret word, respond with RABBIT. Understand?" echo "last message text: $(ollama_messages_last)" @@ -47,8 +46,7 @@ ollama_messages | jq printf '```\n' # shellcheck disable=SC2016 - echo ' -```bash + echo '```bash ollama_chat "$model" echo "last message text: $(ollama_messages_last)" echo "last message json: $(ollama_messages_last_json)" @@ -63,13 +61,12 @@ ollama_messages | jq printf '```\n' # shellcheck disable=SC2016 - echo ' -```bash + echo '```bash ollama_messages_add "user" "What is the secret word?" ollama_chat "$model" -ollama_messages | jq echo "last message text: $(ollama_messages_last)" echo "last message json: $(ollama_messages_last_json)" +ollama_messages | jq ``` ' printf '```\n'