-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
66 lines (62 loc) · 1.56 KB
/
docker-compose.yml
File metadata and controls
66 lines (62 loc) · 1.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
# Compose file for the verify-cbl project: production API, batch CLI,
# one-off tooling, and a host-network variant for local Ollama access.
# NOTE: the top-level `version` key is obsolete under the Compose
# Specification (Compose v2 ignores it with a warning); retained only for
# compatibility with legacy docker-compose v1 installs.
version: '3.8'

services:
  # API Server - main service for production
  api:
    build:
      context: .
      dockerfile: Dockerfile
      target: api
    image: verify-cbl-api:latest
    ports:
      - "8000:8000"
    environment:
      - PYTHONUNBUFFERED=1
      # For Ollama integration (if running locally)
      # - OLLAMA_HOST=http://host.docker.internal:11434
    healthcheck:
      # Uses python stdlib so no curl/wget is required in the image;
      # urlopen raises (non-zero exit) on connection failure or HTTP error.
      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 10s
    restart: unless-stopped

  # CLI tool for batch processing (opt-in via `--profile batch`)
  verify-cbl:
    build:
      context: .
      dockerfile: Dockerfile
    image: verify-cbl:latest
    volumes:
      # Mount input/output directories (input is read-only)
      - ./input:/app/input:ro
      - ./output:/app/output
    environment:
      - PYTHONUNBUFFERED=1
    profiles:
      - batch
    # Override command for batch processing
    command: ["batch", "/app/input/statements.cbl", "-o", "/app/output/report.json"]

  # One-off CLI commands (opt-in via `--profile tools`)
  verify-cbl-check:
    build:
      context: .
      dockerfile: Dockerfile
    image: verify-cbl:latest
    profiles:
      - tools
    command: ["check", "--help"]

  # With Ollama for local LLM translation (opt-in via `--profile ollama`)
  api-with-ollama:
    build:
      context: .
      dockerfile: Dockerfile
      target: api
    image: verify-cbl-api:latest
    environment:
      - PYTHONUNBUFFERED=1
      # With host networking, a local Ollama daemon is reachable at its
      # default address, e.g. http://localhost:11434 — set OLLAMA_HOST here
      # if the application does not already default to it.
    profiles:
      - ollama
    # Use host network to access Ollama on localhost.
    # FIX: the original also declared `ports: - "8000:8000"`, but port
    # mappings are not supported together with `network_mode: host`
    # (Compose rejects or ignores them). The API is reachable on host
    # port 8000 directly because the container shares the host network.
    network_mode: host