services:
  # inference API served by uvicorn with hot reload (development mode)
  inference:
    build:
      context: .
      dockerfile: .docker/python/Dockerfile
    command: uvicorn src.app:app --reload --host 0.0.0.0 --port 8000
    # run as a non-root user (defaults to 1000:1000) so bind-mounted files keep host ownership
    user: "${APP_UID:-1000}:${APP_GID:-1000}"
    volumes:
      - ./:/app:cached

  # Ollama service (disabled; uncomment to enable)
  # ollama:
  #   build:
  #     context: .docker/ollama
  #     dockerfile: Dockerfile
  #   volumes:
  #     - ollama-data:/root/.ollama
  #   environment:
  #     OLLAMA_MODELS: ${OLLAMA_MODELS}
  #     OLLAMA_MODEL: ${OLLAMA_MODEL}
  #   healthcheck:
  #     test: ["CMD", "ollama", "ps"]
  #     interval: 15s
  #     retries: 5
  #     start_period: 5s
  #     timeout: 3s

  # ChromaDB vector store
  chroma:
    image: chromadb/chroma
    volumes:
      - chroma-data:/data

volumes:
  ollama-data: {}
  chroma-data: {}

networks:
  default:
    name: rag
    # the "rag" network is external and must exist before the stack is started
    external: true
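
# Quick-start sketch (an assumed workflow: the external "rag" network does not exist yet
# and APP_UID/APP_GID are exported or left at their 1000 defaults):
#   docker network create rag
#   docker compose up --build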