x-app: &app
  build:
    context: .
    dockerfile: .docker/python/Dockerfile
  environment:
    - PYTHONPATH=/app
    - SENTENCE_TRANSFORMERS_HOME=/app/.cache
  volumes:
    - ./:/app:rw,cached
  depends_on:
    - chroma
  networks:
    default:
    rag:
      aliases:
        - inference

services:
  inference:
    <<: *app
    # command: uvicorn app:app --app-dir src --reload --host 0.0.0.0 --port 8000
    command: uvicorn src.app:app --reload --reload-dir src --host 0.0.0.0 --port 8000
    ports:
      - "8000:8000"

  # ollama:
  #   build:
  #     context: .docker/ollama
  #     dockerfile: Dockerfile
  #   volumes:
  #     - ollama-data:/root/.ollama
  #   environment:
  #     OLLAMA_MODELS: ${OLLAMA_MODELS}
  #     OLLAMA_MODEL: ${OLLAMA_MODEL}
  #   healthcheck:
  #     test: ["CMD", "ollama", "ps"]
  #     interval: 15s
  #     retries: 5
  #     start_period: 5s
  #     timeout: 3s

  chroma:
    image: chromadb/chroma
    volumes:
      - chroma-data:/data
    ports:
      - "8008:8000"

  cli:
    <<: *app

volumes:
  ollama-data: {}
  chroma-data: {}

networks:
  default: {}
  rag:
    name: rag
    external: true
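
# Usage sketch (assumed workflow, not part of the original file; service and network
# names are taken from the definitions above):
#   docker network create rag                 # "rag" is declared as an external network, so it must exist first
#   docker compose up -d chroma inference     # app on localhost:8000, Chroma on localhost:8008
#   docker compose run --rm cli sh            # one-off shell in the app image (assumes sh is available)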