
Refactor: Ollama & Adding to Website (#4147)

* Refactor: Ollama & Adding to Website

* VED -> VE

* Update ollama.sh

* Update ollama-install.sh

* Update ollama.sh
CanbiZ committed 2025-04-30 10:10:03 +02:00 (via GitHub)
parent 8e83943498
commit ca35365e42
3 changed files with 107 additions and 39 deletions

ct/ollama.sh

@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
 # Copyright (c) 2021-2025 tteck
-# Author: tteck | Co-Author: havardthom
+# Author: havardthom | Co-Author: MickLesk (CanbiZ)
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://ollama.com/

@@ -9,9 +9,9 @@ APP="Ollama"
 var_tags="${var_tags:-ai}"
 var_cpu="${var_cpu:-4}"
 var_ram="${var_ram:-4096}"
-var_disk="${var_disk:-24}"
+var_disk="${var_disk:-35}"
 var_os="${var_os:-ubuntu}"
-var_version="${var_version:-22.04}"
+var_version="${var_version:-24.04}"

 header_info "$APP"
 variables

@@ -19,18 +19,42 @@ color
 catch_errors

 function update_script() {
   header_info
   check_container_storage
   check_container_resources
-  if [[ ! -d /opt/ollama ]]; then
-    msg_error "No ${APP} Installation Found!"
+  if [[ ! -d /usr/local/lib/ollama ]]; then
+    msg_error "No Ollama Installation Found!"
     exit
   fi
-  msg_info "Updating ${APP}"
-  $STD apt-get update
-  $STD apt-get -y upgrade
-  msg_ok "Updated Successfully"
+
+  RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}')
+  if [[ ! -f /opt/Ollama_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/Ollama_version.txt)" ]]; then
+    if [[ ! -f /opt/Ollama_version.txt ]]; then
+      touch /opt/Ollama_version.txt
+    fi
+    msg_info "Stopping Services"
+    systemctl stop ollama
+    msg_ok "Services Stopped"
+
+    TMP_TAR=$(mktemp --suffix=.tgz)
+    curl -fL# -o "${TMP_TAR}" "https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz"
+    msg_info "Updating Ollama to ${RELEASE}"
+    tar -xzf "${TMP_TAR}" -C /usr/local/lib/ollama
+    ln -sf /usr/local/lib/ollama/bin/ollama /usr/local/bin/ollama
+    echo "${RELEASE}" >/opt/Ollama_version.txt
+    msg_ok "Updated Ollama to ${RELEASE}"
+
+    msg_info "Starting Services"
+    systemctl start ollama
+    msg_ok "Started Services"
+
+    msg_info "Cleaning Up"
+    rm -f "${TMP_TAR}"
+    msg_ok "Cleaned"
+    msg_ok "Updated Successfully"
+  else
+    msg_ok "No update required. Ollama is already at ${RELEASE}"
+  fi
   exit
 }

 start

@@ -40,4 +64,4 @@ description
 msg_ok "Completed Successfully!\n"
 echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
 echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:14434${CL}"
+echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:11434${CL}"

ollama.json (new website metadata file)

@@ -0,0 +1,35 @@
{
  "name": "Ollama",
  "slug": "ollama",
  "categories": [
    20
  ],
  "date_created": "2025-04-28",
  "type": "ct",
  "updateable": true,
  "privileged": false,
  "interface_port": 11434,
  "documentation": "https://github.com/ollama/ollama/tree/main/docs",
  "config_path": "/usr/local/lib/ollama",
  "website": "https://ollama.com/",
  "logo": "https://raw.githubusercontent.com/selfhst/icons/refs/heads/main/svg/ollama.svg",
  "description": "Ollama is a tool that allows you to run large language models locally on your own computer. This means you can experiment with and use these AI models without needing an internet connection or relying on cloud-based services. It simplifies the process of managing and running these models, offering a way to keep your data private and potentially work faster. You can use Ollama to create local chatbots, conduct AI research, develop privacy-focused AI applications, and integrate AI into existing systems.",
  "install_methods": [
    {
      "type": "default",
      "script": "ct/ollama.sh",
      "resources": {
        "cpu": 4,
        "ram": 4096,
        "hdd": 35,
        "os": "Ubuntu",
        "version": "24.04"
      }
    }
  ],
  "default_credentials": {
    "username": null,
    "password": null
  },
  "notes": []
}
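
The interface_port above is Ollama's default API port, which the access URL in ct/ollama.sh was corrected to match (11434 instead of 14434). A quick smoke test once the container is up, as a sketch (replace <container-ip> with the LXC's address):

    # Ollama serves a plain HTTP API on 11434; these endpoints are cheap health checks
    curl -s http://<container-ip>:11434/api/version
    curl -s http://<container-ip>:11434/api/tags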

install/ollama-install.sh

@@ -1,8 +1,7 @@
 #!/usr/bin/env bash

 # Copyright (c) 2021-2025 tteck
-# Author: tteck
-# Co-Author: havardthom
+# Author: havardthom | Co-Author: MickLesk (CanbiZ)
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://ollama.com/

@@ -16,24 +15,10 @@ update_os
 msg_info "Installing Dependencies"
 $STD apt-get install -y \
-  gpg \
-  git \
   build-essential \
-  pkg-config \
-  cmake
+  pkg-config
 msg_ok "Installed Dependencies"

-msg_info "Installing Golang"
-set +o pipefail
-temp_file=$(mktemp)
-golang_tarball=$(curl -fsSL https://go.dev/dl/ | grep -oP 'go[\d\.]+\.linux-amd64\.tar\.gz' | head -n 1)
-curl -fsSL "https://golang.org/dl/${golang_tarball}" -o "$temp_file"
-tar -C /usr/local -xzf "$temp_file"
-ln -sf /usr/local/go/bin/go /usr/local/bin/go
-rm -f "$temp_file"
-set -o pipefail
-msg_ok "Installed Golang"
-
 msg_info "Setting up Intel® Repositories"
 mkdir -p /etc/apt/keyrings
 curl -fsSL https://repositories.intel.com/gpu/intel-graphics.key | gpg --dearmor -o /etc/apt/keyrings/intel-graphics.gpg

@@ -59,11 +44,35 @@ $STD apt-get install -y --no-install-recommends intel-basekit-2024.1
 msg_ok "Installed Intel® oneAPI Base Toolkit"

 msg_info "Installing Ollama (Patience)"
-$STD git clone https://github.com/ollama/ollama.git /opt/ollama
-cd /opt/ollama
-$STD go generate ./...
-$STD go build .
-msg_ok "Installed Ollama"
+RELEASE=$(curl -fsSL https://api.github.com/repos/ollama/ollama/releases/latest | grep "tag_name" | awk -F '"' '{print $4}')
+OLLAMA_INSTALL_DIR="/usr/local/lib/ollama"
+BINDIR="/usr/local/bin"
+mkdir -p $OLLAMA_INSTALL_DIR
+OLLAMA_URL="https://github.com/ollama/ollama/releases/download/${RELEASE}/ollama-linux-amd64.tgz"
+TMP_TAR="/tmp/ollama.tgz"
+echo -e "\n"
+if curl -fL# -o "$TMP_TAR" "$OLLAMA_URL"; then
+  if tar -xzf "$TMP_TAR" -C "$OLLAMA_INSTALL_DIR"; then
+    ln -sf "$OLLAMA_INSTALL_DIR/bin/ollama" "$BINDIR/ollama"
+    echo "${RELEASE}" >/opt/Ollama_version.txt
+    msg_ok "Installed Ollama ${RELEASE}"
+  else
+    msg_error "Extraction failed archive corrupt or incomplete"
+    exit 1
+  fi
+else
+  msg_error "Download failed $OLLAMA_URL not reachable"
+  exit 1
+fi
+
+msg_info "Creating ollama User and Group"
+if ! id ollama >/dev/null 2>&1; then
+  useradd -r -s /usr/sbin/nologin -U -m -d /usr/share/ollama ollama
+fi
+$STD usermod -aG render ollama || true
+$STD usermod -aG video ollama || true
+$STD usermod -aG ollama $(id -u -n)
+msg_ok "Created ollama User and adjusted Groups"

 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/ollama.service

@@ -73,7 +82,7 @@ After=network-online.target

 [Service]
 Type=exec
-ExecStart=/opt/ollama/ollama serve
+ExecStart=/usr/local/bin/ollama serve
 Environment=HOME=$HOME
 Environment=OLLAMA_INTEL_GPU=true
 Environment=OLLAMA_HOST=0.0.0.0

@@ -95,4 +104,4 @@ customize
 msg_info "Cleaning up"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"