From 4a36528cc7a1b2192f930491ed1a66e719f7143d Mon Sep 17 00:00:00 2001
From: Bazunov Andrew Igorevich <35404933+viltskaa@users.noreply.github.com>
Date: Wed, 25 Sep 2024 12:35:39 +0400
Subject: [PATCH] Complete docker compose

---
 bazunov_andrew_lab_1/.gitignore         |  2 +
 bazunov_andrew_lab_1/README.md          | 14 ++++++
 bazunov_andrew_lab_1/docker-compose.yml | 61 +++++++++++++++++++++++++
 3 files changed, 77 insertions(+)
 create mode 100644 bazunov_andrew_lab_1/.gitignore
 create mode 100644 bazunov_andrew_lab_1/README.md
 create mode 100644 bazunov_andrew_lab_1/docker-compose.yml

diff --git a/bazunov_andrew_lab_1/.gitignore b/bazunov_andrew_lab_1/.gitignore
new file mode 100644
index 0000000..a1d0536
--- /dev/null
+++ b/bazunov_andrew_lab_1/.gitignore
@@ -0,0 +1,2 @@
+ollama
+/ollama
\ No newline at end of file
diff --git a/bazunov_andrew_lab_1/README.md b/bazunov_andrew_lab_1/README.md
new file mode 100644
index 0000000..712ccd9
--- /dev/null
+++ b/bazunov_andrew_lab_1/README.md
@@ -0,0 +1,14 @@
+docker-compose up -d
+
+docker-compose ps
+
+docker-compose down
+
+docker-compose down
+docker-compose up -d
+
+docker-compose logs
+docker-compose logs ollama
+
+docker-compose pull
+docker-compose up -d --build
\ No newline at end of file
diff --git a/bazunov_andrew_lab_1/docker-compose.yml b/bazunov_andrew_lab_1/docker-compose.yml
new file mode 100644
index 0000000..74b1f11
--- /dev/null
+++ b/bazunov_andrew_lab_1/docker-compose.yml
@@ -0,0 +1,61 @@
+services:
+  gitea:
+    image: gitea/gitea:latest
+    container_name: gitea
+    ports:
+      - "3000:3000"  # Expose the Gitea web UI on the host
+    volumes:
+      - data:/data
+    environment:
+      USER_UID: 1000
+      USER_GID: 1000
+
+  ollama:
+    image: ollama/ollama:latest  # Ollama LLM server image
+    container_name: ollama
+    restart: always
+    ports:
+      - "7869:11434"  # Host port 7869 -> Ollama API port 11434 (quoted per Compose best practice)
+    pull_policy: always
+    tty: true
+    volumes:
+      - .:/code
+      - ./ollama/ollama:/root/.ollama  # Persistent storage for Ollama models/data
+    environment:
+      - OLLAMA_KEEP_ALIVE=24h
+      - OLLAMA_HOST=0.0.0.0  # Bind the Ollama API to all interfaces inside the container
+    networks:
+      - ollama-docker
+    command: ["serve"]  # Run Ollama in server mode
+
+  ollama-webui:
+    image: ghcr.io/open-webui/open-webui:main  # Open WebUI frontend for Ollama
+    container_name: ollama-webui
+    restart: unless-stopped
+    volumes:
+      - ./ollama/ollama-webui:/app/backend/data
+    ports:
+      - "8080:8080"  # Web UI port on the host
+    environment:  # https://docs.openwebui.com/getting-started/env-configuration#default_models
+      - OLLAMA_BASE_URLS=http://host.docker.internal:7869  # comma separated ollama hosts
+      - ENV=dev
+      - WEBUI_AUTH=False
+      - WEBUI_NAME=Viltskaa AI
+      - WEBUI_URL=http://localhost:8080
+      - WEBUI_SECRET_KEY=t0p-s3cr3t
+    depends_on:
+      - ollama
+    extra_hosts:
+      - host.docker.internal:host-gateway
+    networks:
+      - ollama-docker
+
+networks:
+  ollama-docker:
+    external: false
+
+volumes:
+  ollama:
+    driver: local
+  data:
+    driver: local
\ No newline at end of file