From ca35365e4274bdf02f553d4794303b06f591a814 Mon Sep 17 00:00:00 2001 From: CanbiZ <47820557+MickLesk@users.noreply.github.com> Date: Wed, 30 Apr 2025 10:10:03 +0200 Subject: [PATCH] Refactor: Ollama & Adding to Website (#4147) * Refactor: Ollama & Adding to Website * VED -> VE * Update ollama.sh * Update ollama-install.sh * Update ollama.sh --- ct/ollama.sh | 54 +++++++++++++++++++++--------- frontend/public/json/ollama.json | 35 ++++++++++++++++++++ install/ollama-install.sh | 57 ++++++++++++++++++-------------- 3 files changed, 107 insertions(+), 39 deletions(-) create mode 100644 frontend/public/json/ollama.json diff --git a/ct/ollama.sh b/ct/ollama.sh index 41ac5d0b9..f12274483 100644 --- a/ct/ollama.sh +++ b/ct/ollama.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func) # Copyright (c) 2021-2025 tteck -# Author: tteck | Co-Author: havardthom +# Author: havardthom | Co-Author: MickLesk (CanbiZ) # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE # Source: https://ollama.com/ @@ -9,9 +9,9 @@ APP="Ollama" var_tags="${var_tags:-ai}" var_cpu="${var_cpu:-4}" var_ram="${var_ram:-4096}" -var_disk="${var_disk:-24}" +var_disk="${var_disk:-35}" var_os="${var_os:-ubuntu}" -var_version="${var_version:-22.04}" +var_version="${var_version:-24.04}" header_info "$APP" variables @@ -19,18 +19,42 @@ color catch_errors function update_script() { - header_info - check_container_storage - check_container_resources - if [[ ! -d /opt/ollama ]]; then - msg_error "No ${APP} Installation Found!" - exit - fi - msg_info "Updating ${APP}" - $STD apt-get update - $STD apt-get -y upgrade - msg_ok "Updated Successfully" + header_info + check_container_storage + check_container_resources + if [[ ! -d /usr/local/lib/ollama ]]; then + msg_error "No Ollama Installation Found!" 
exit + fi + RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}') + if [[ ! -f /opt/Ollama_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/Ollama_version.txt)" ]]; then + if [[ ! -f /opt/Ollama_version.txt ]]; then + touch /opt/Ollama_version.txt + fi + msg_info "Stopping Services" + systemctl stop ollama + msg_ok "Services Stopped" + + TMP_TAR=$(mktemp --suffix=.tgz) + curl -fL# -o "${TMP_TAR}" "https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz" + msg_info "Updating Ollama to ${RELEASE}" + tar -xzf "${TMP_TAR}" -C /usr/local/lib/ollama + ln -sf /usr/local/lib/ollama/bin/ollama /usr/local/bin/ollama + echo "${RELEASE}" >/opt/Ollama_version.txt + msg_ok "Updated Ollama to ${RELEASE}" + + msg_info "Starting Services" + systemctl start ollama + msg_ok "Started Services" + + msg_info "Cleaning Up" + rm -f "${TMP_TAR}" + msg_ok "Cleaned" + msg_ok "Updated Successfully" + else + msg_ok "No update required. 
Ollama is already at ${RELEASE}" + fi + exit } start @@ -40,4 +64,4 @@ description msg_ok "Completed Successfully!\n" echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" echo -e "${INFO}${YW} Access it using the following URL:${CL}" -echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:14434${CL}" \ No newline at end of file +echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:11434${CL}" diff --git a/frontend/public/json/ollama.json b/frontend/public/json/ollama.json new file mode 100644 index 000000000..8447e9de5 --- /dev/null +++ b/frontend/public/json/ollama.json @@ -0,0 +1,35 @@ +{ + "name": "Ollama", + "slug": "ollama", + "categories": [ + 20 + ], + "date_created": "2025-04-28", + "type": "ct", + "updateable": true, + "privileged": false, + "interface_port": 11434, + "documentation": "https://github.com/ollama/ollama/tree/main/docs", + "config_path": "/usr/local/lib/ollama", + "website": "https://ollama.com/", + "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/ollama.svg", + "description": "Ollama is a tool that allows you to run large language models locally on your own computer. This means you can experiment with and use these AI models without needing an internet connection or relying on cloud-based services. It simplifies the process of managing and running these models, offering a way to keep your data private and potentially work faster. 
You can use Ollama to create local chatbots, conduct AI research, develop privacy-focused AI applications, and integrate AI into existing systems.", + "install_methods": [ + { + "type": "default", + "script": "ct/ollama.sh", + "resources": { + "cpu": 4, + "ram": 4096, + "hdd": 35, + "os": "Ubuntu", + "version": "24.04" + } + } + ], + "default_credentials": { + "username": null, + "password": null + }, + "notes": [] +} diff --git a/install/ollama-install.sh b/install/ollama-install.sh index fd0baacd5..b04500a51 100644 --- a/install/ollama-install.sh +++ b/install/ollama-install.sh @@ -1,8 +1,7 @@ #!/usr/bin/env bash # Copyright (c) 2021-2025 tteck -# Author: tteck -# Co-Author: havardthom +# Author: havardthom | Co-Author: MickLesk (CanbiZ) # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE # Source: https://ollama.com/ @@ -16,24 +15,10 @@ update_os msg_info "Installing Dependencies" $STD apt-get install -y \ - gpg \ - git \ build-essential \ - pkg-config \ - cmake + pkg-config msg_ok "Installed Dependencies" -msg_info "Installing Golang" -set +o pipefail -temp_file=$(mktemp) -golang_tarball=$(curl -fsSL https://go.dev/dl/ | grep -oP 'go[\d\.]+\.linux-amd64\.tar\.gz' | head -n 1) -curl -fsSL "https://golang.org/dl/${golang_tarball}" -o "$temp_file" -tar -C /usr/local -xzf "$temp_file" -ln -sf /usr/local/go/bin/go /usr/local/bin/go -rm -f "$temp_file" -set -o pipefail -msg_ok "Installed Golang" - msg_info "Setting up Intel® Repositories" mkdir -p /etc/apt/keyrings curl -fsSL https://repositories.intel.com/gpu/intel-graphics.key | gpg --dearmor -o /etc/apt/keyrings/intel-graphics.gpg @@ -59,11 +44,35 @@ $STD apt-get install -y --no-install-recommends intel-basekit-2024.1 msg_ok "Installed Intel® oneAPI Base Toolkit" msg_info "Installing Ollama (Patience)" -$STD git clone https://github.com/ollama/ollama.git /opt/ollama -cd /opt/ollama -$STD go generate ./... -$STD go build . 
-msg_ok "Installed Ollama" +RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}') +OLLAMA_INSTALL_DIR="/usr/local/lib/ollama" +BINDIR="/usr/local/bin" +mkdir -p $OLLAMA_INSTALL_DIR +OLLAMA_URL="https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz" +TMP_TAR="/tmp/ollama.tgz" +echo -e "\n" +if curl -fL# -o "$TMP_TAR" "$OLLAMA_URL"; then + if tar -xzf "$TMP_TAR" -C "$OLLAMA_INSTALL_DIR"; then + ln -sf "$OLLAMA_INSTALL_DIR/bin/ollama" "$BINDIR/ollama" + echo "${RELEASE}" >/opt/Ollama_version.txt + msg_ok "Installed Ollama ${RELEASE}" + else + msg_error "Extraction failed – archive corrupt or incomplete" + exit 1 + fi +else + msg_error "Download failed – $OLLAMA_URL not reachable" + exit 1 +fi + +msg_info "Creating ollama User and Group" +if ! id ollama >/dev/null 2>&1; then + useradd -r -s /usr/sbin/nologin -U -m -d /usr/share/ollama ollama +fi +$STD usermod -aG render ollama || true +$STD usermod -aG video ollama || true +$STD usermod -aG ollama $(id -u -n) +msg_ok "Created ollama User and adjusted Groups" msg_info "Creating Service" cat </etc/systemd/system/ollama.service @@ -73,7 +82,7 @@ After=network-online.target [Service] Type=exec -ExecStart=/opt/ollama/ollama serve +ExecStart=/usr/local/bin/ollama serve Environment=HOME=$HOME Environment=OLLAMA_INTEL_GPU=true Environment=OLLAMA_HOST=0.0.0.0 @@ -95,4 +104,4 @@ customize msg_info "Cleaning up" $STD apt-get -y autoremove $STD apt-get -y autoclean -msg_ok "Cleaned" \ No newline at end of file +msg_ok "Cleaned"