-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcompose.yml
More file actions
151 lines (143 loc) · 3.62 KB
/
compose.yml
File metadata and controls
151 lines (143 loc) · 3.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
# Named networks: `internet` for externally reachable services,
# `data` for service <-> database traffic.
networks:
  internet: {}
  data: {}
services:
  # FastAPI application server (development mode: --reload, single worker).
  camps_fastapi:
    build: ./camps_fastapi/app
    command: uvicorn app.main:app --reload --workers 1 --host 0.0.0.0 --port 8000
    container_name: camps-fastapi
    develop:
      watch:
        - action: sync
          path: ./camps_fastapi/app
          target: /app
    # NOTE(review): the bind mount targets /usr/src/app while the watch sync
    # above targets /app — confirm which path the image actually runs from.
    volumes:
      - ./camps_fastapi/app:/usr/src/app
    ports:
      # Quoted: Compose port mappings should be strings, never bare scalars.
      - "8000:8000"
    environment:
      # NOTE(review): host `postgres` and db `stack` do not match the
      # camps_pgvector_dev service below (db camps-pgvector-dev, user admin) —
      # verify this URL resolves on the `data` network.
      - DATABASE_URL=postgresql+asyncpg://postgres:postgres@postgres:5432/stack
    depends_on:
      - camps_pgvector_dev
    networks:
      - data
      - internet
camps_cuda:
image: nvidia/cuda:12.3.1-base-ubuntu20.04
container_name: camps-cuda
command: nvidia-smi
deploy:
resources:
reservations:
devices:
- capabilities: ["utility"]
count: all
camps_ollama_model:
volumes:
- ./camps_ollama_model/ollama:/root/.ollama
- ./camps_ollama_model:/model_files
container_name: camps-ollama-model
pull_policy: always
tty: true
restart: unless-stopped
image: ollama/ollama:latest
# command: ollama run deepseek-r1:671b
# entrypoint: ["/bin/sh", "/model_files/run_ollama.sh"]
ports:
- 11435:11434
environment:
- OLLAMA_KEEP_ALIVE=24h
networks:
- data
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]
camps_ollama:
volumes:
- ./ollama/ollama:/root/.ollama
# - ./model_files:/model_files
container_name: camps-ollama
pull_policy: always
tty: true
restart: unless-stopped
image: ollama/ollama:latest
# command: ollama run deepseek-r1:671b
# entrypoint: ["/bin/sh", "/model_files/run_ollama.sh"]
ports:
- 11434:11434
environment:
- OLLAMA_KEEP_ALIVE=24h
networks:
- data
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]
camps_ash_dev:
build:
context: ./store
dockerfile: ./gpu.Dockerfile
container_name: camps-ash-dev
develop:
watch:
- action: sync
path: ./camps_ash
target: /app
# ignore:
# - deps/
# - action: rebuild
# path: package.json
deploy:
resources:
reservations:
devices:
- capabilities: ["utility"]
count: all
# volumes:
# - type: bind
# source: ./folkbot
# target: /app
# - docker-composer-elixir-mix:/root/.mix
# - hex:/root/.hex
networks:
- internet
- data
depends_on:
- camps_pgvector_dev
ports:
- "4600:4004"
environment:
- NVIDIA_VISIBLE_DEVICES=all
- DATABASE_URL=ecto://admin:cohort9theory+Defy@pgvector/folkbot_prod
- SECRET_KEY_BASE="HzhOtUKuruFSjI5Gbl1PjfN68Red9VZ6YDZ9wY687hXnw3RwqwROWTv5JBpxLSwj
# command: sleep infinity
command:
- /app/gpu.sh
camps_pgvector_dev:
image: pgvector/pgvector:pg17
container_name: camps-pgvector-dev
ports:
- 5632:5432
networks:
- data
restart: always
environment:
- POSTGRES_DB=camps-pgvector-dev
- POSTGRES_USER=admin
- POSTGRES_PASSWORD=bow_ModernQuoteDearGenre&Venus
volumes:
- ./pgvector-init:/docker-entrypoint-initdb.d/
- postgres_data:/var/lib/postgresql
#- ./folkbot_prod.sql:/docker-entrypoint-initdb.d/folkbot_prod.sql
# configs:
# - source: pgvector-init/folkbot_prod.sql
# target: /docker-entrypoint-initdb.d/init.sql
# Named volume persisting the PostgreSQL data directory across restarts.
volumes:
  postgres_data: