From ead126fdb493375178950da7c0ca56cf164bb7bf Mon Sep 17 00:00:00 2001 From: L4RM4ND <21357789+l4rm4nd@users.noreply.github.com> Date: Tue, 23 Apr 2024 12:40:10 +0200 Subject: [PATCH] add llms --- README.md | 10 +++++++ examples/ollama-ui/README.md | 30 +++++++++++++++++++++ examples/ollama-ui/docker-compose.yml | 39 +++++++++++++++++++++++++++ examples/serge/README.md | 7 +++++ examples/serge/docker-compose.yml | 26 ++++++++++++++++++ 5 files changed, 112 insertions(+) create mode 100644 examples/ollama-ui/README.md create mode 100644 examples/ollama-ui/docker-compose.yml create mode 100644 examples/serge/README.md create mode 100644 examples/serge/docker-compose.yml diff --git a/README.md b/README.md index a2a3a6b..8007ff7 100644 --- a/README.md +++ b/README.md @@ -61,6 +61,7 @@ docker compose up - [Games and Control Panels](#games-and-control-servers) - [Genealogy](#genealogy) - [Identity Management - Single Sign-On (SSO) & LDAP](#identity-management---single-sign-on-sso--ldap) +- [LLM & AI](#llm--ai) - [Miscellaneous](#miscellaneous) - [Money, Budgeting & Management](#money-budgeting--management) - [Note-taking & Editors](#note-taking--editors) @@ -122,6 +123,15 @@ A [proxy](https://en.wikipedia.org/wiki/Proxy_server) is a server application th - [Keycloak](https://github.com/keycloak/keycloak-containers/tree/main/docker-compose-examples) - Keycloak is an open-source Identity and Access Management (IAM) solution for modern applications and services. - [lldap](examples/lldap) - lldap is a lightweight authentication server that provides an opinionated, simplified LDAP interface for authentication. It integrates with many backends, from KeyCloak to Authelia to Nextcloud and more. 
+### Large Language Models & AI + +**[`^ back to top ^`](#-project-list)** + +A [Large Language Model (LLM)](https://en.wikipedia.org/wiki/Large_language_model) is a language model notable for its ability to achieve general-purpose language generation and other natural language processing tasks such as classification. LLMs can be used for text generation, a form of generative [AI](https://en.wikipedia.org/wiki/Artificial_intelligence), by taking an input text and repeatedly predicting the next token or word. + +- [Ollama + Open WebUI](examples/ollama-ui) - Get up and running with Llama 3, Mistral, Gemma, and other large language models using Ollama. Using an interactive, user-friendly WebUI via Open WebUI (formerly known as Ollama WebUI). +- [Serge](examples/serge) - A web interface for chatting with Alpaca through llama.cpp. Fully dockerized, with an easy to use API. + ### Virtual Private Network (VPN) & Remote Access **[`^ back to top ^`](#-project-list)** diff --git a/examples/ollama-ui/README.md b/examples/ollama-ui/README.md new file mode 100644 index 0000000..3a10b78 --- /dev/null +++ b/examples/ollama-ui/README.md @@ -0,0 +1,30 @@ +# References + +- https://github.com/ollama/ollama +- https://hub.docker.com/r/ollama/ollama +- https://github.com/open-webui/open-webui + +# Notes + +You should spawn ollama first and download the respective LLM models: + +```` +# spawn ollama +docker compose up -d ollama + +# download an llm model +docker exec ollama ollama run llama3:8b +```` + +Afterwards, we can spawn Open WebUI and register our first user account: + +```` +# spawn ui +docker compose up -d ui +```` + +Finally, we may want to disable open user registration for Open WebUI by uncommenting the env `ENABLE_SIGNUP` variable and restarting the container. + +> [!TIP] +> +> You likely want to pass a GPU into the Ollama container. Please read [this](https://hub.docker.com/r/ollama/ollama). 
diff --git a/examples/ollama-ui/docker-compose.yml b/examples/ollama-ui/docker-compose.yml new file mode 100644 index 0000000..694b7b1 --- /dev/null +++ b/examples/ollama-ui/docker-compose.yml @@ -0,0 +1,39 @@ +services: + + ui: + image: ghcr.io/open-webui/open-webui:main + container_name: ollama-ui + restart: always + ports: + - 8080 + expose: + - 8080 + volumes: + - ${DOCKER_VOLUME_STORAGE:-/mnt/docker-volumes}/ollama/open-webui:/app/backend/data + environment: + #- "ENABLE_SIGNUP=false" + - "OLLAMA_BASE_URL=http://ollama:11434" + #networks: + # - proxy + #labels: + # - traefik.enable=true + # - traefik.docker.network=proxy + # - traefik.http.routers.ollama-ui.rule=Host(`ai.example.com`) + # - traefik.http.services.ollama-ui.loadbalancer.server.port=8080 + # # Optional part for traefik middlewares + # - traefik.http.routers.ollama-ui.middlewares=local-ipwhitelist@file,authelia@docker + + ollama: + image: ollama/ollama:latest + container_name: ollama + restart: always + expose: + - 11434 + volumes: + - ${DOCKER_VOLUME_STORAGE:-/mnt/docker-volumes}/ollama/data:/root/.ollama + #networks: + # - proxy + +#networks: +# proxy: +# external: true diff --git a/examples/serge/README.md b/examples/serge/README.md new file mode 100644 index 0000000..6c436a8 --- /dev/null +++ b/examples/serge/README.md @@ -0,0 +1,7 @@ +# References + +- https://github.com/serge-chat/serge + +# Notes + +TBD diff --git a/examples/serge/docker-compose.yml b/examples/serge/docker-compose.yml new file mode 100644 index 0000000..2dbe26c --- /dev/null +++ b/examples/serge/docker-compose.yml @@ -0,0 +1,26 @@ +services: + + serge: + image: ghcr.io/serge-chat/serge:main + container_name: serge + restart: unless-stopped + ports: + - 8008 + expose: + - 8008 + volumes: + - ${DOCKER_VOLUME_STORAGE:-/mnt/docker-volumes}/serge/weights:/usr/src/app/weights + - ${DOCKER_VOLUME_STORAGE:-/mnt/docker-volumes}/serge/datadb:/data/db/ + #networks: + # - proxy + #labels: + # - traefik.enable=true + # - 
traefik.docker.network=proxy + # - traefik.http.routers.serge.rule=Host(`serge.example.com`) + # - traefik.http.services.serge.loadbalancer.server.port=8008 + # # Optional part for traefik middlewares + # - traefik.http.routers.serge.middlewares=local-ipwhitelist@file,authelia@docker + +#networks: +# proxy: +# external: true \ No newline at end of file