/
fabfile.py
203 lines (156 loc) · 5.87 KB
/
fabfile.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
"""
This is a collection of useful utility functions when working with docker on different environments.
In order to use these functions, install fabric on your local machine with::
pip install fabric
Please note: Fabric is a remote code execution tool, NOT a remote configuration tool. While you can copy files
from here to there, it is not a good replacement for salt or ansible in this regard.
There is a function called `production` where you need to fill in the details about your production machine(s).
You can then run::
fab production status
to get the status of your stack
To list all available commands, run::
fab -l
"""
import time
from fabric.api import * # noqa
from fabric.api import cd, env, lcd
from fabric.colors import blue
from fabric.operations import local as lrun
from fabric.operations import put, run
from rich import print
def local():
    """
    Work on the local environment.

    Configures Fabric's ``env`` so subsequent tasks run on this machine
    with the development compose file.
    """
    settings = {
        "compose_file": "docker-compose.yml",
        "compose_version": "v1",
        "project_dir": ".",
        "run": lrun,
        "cd": lcd,
    }
    for key, value in settings.items():
        setattr(env, key, value)
def production():
    """
    Work on the production environment.

    Configures Fabric's ``env`` so subsequent tasks run over SSH on the
    production host(s) with the production compose file.
    """
    # List the IP addresses or domain names of your production boxes here.
    env.hosts = ["165.22.184.193"]
    # Remote login user; see `env.run` below if you don't log in as root.
    env.user = "root"
    env.compose_file = "docker-compose.prod.yml"
    env.compose_version = "v2"
    # Project directory where the code lives on the remote machine.
    env.project_dir = "/code/djangopackages"
    # If you don't log in as root, replace with `env.run = sudo`.
    env.run = run
    env.cd = cd
def setup():
    """
    Provision Docker CE on a fresh Ubuntu host.

    Adds Docker's apt repository and signing key, installs docker-ce, and
    enables the service. Intended to be run once against a new box.
    """
    commands = (
        "apt update",
        "apt install apt-transport-https ca-certificates curl software-properties-common",
        "curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg",
        'echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null',
        "apt update",
        "apt-cache policy docker-ce",
        "apt install docker-ce",
        "systemctl status docker",
        "systemctl enable docker.service",
    )
    for command in commands:
        env.run(command)
def copy_secrets():
    """
    Copy secret files from the local machine to the remote project dir.

    Each filename in ``secrets`` is uploaded to ``env.project_dir`` on the
    current host via ``put``.

    :return: None
    """
    secrets = [
        ".env-production",
    ]
    for secret in secrets:
        # Build the remote path by hand; the remote is always POSIX, so a
        # plain "/" join is correct regardless of the local OS.
        remote_path = "/".join([env.project_dir, secret])
        # f-string replaces the dated str.format() call for consistency
        # with the rest of this file; output is byte-identical.
        print(blue(f"Copying {secret} to {remote_path} on {env.host}"))
        put(secret, remote_path)
def rollback(commit="HEAD~1"):
"""
Rollback to a previous commit and build the stack
:param commit: Commit you want to roll back to. Default is the previous commit
"""
with env.cd(env.project_dir):
env.run(f"git checkout {commit}")
deploy()
def build_and_restart(service):
    """Rebuild the image for *service*, recreate its container, and restart it."""
    actions = (
        "build {} --parallel --progress plain",
        "create {}",
        "stop {}",
        "start {}",
    )
    for action in actions:
        docker_compose(action.format(service))
def clearsessions():
    """
    Clear old database sessions.

    Runs Django's ``clearsessions`` management command in a one-off
    django-a container.
    """
    command = "run django-a python manage.py clearsessions"
    with env.cd(env.project_dir):
        docker_compose(command)
def cron():
    """Run the periodic import management commands in one-off django-a containers."""
    management_commands = (
        "import_classifiers",
        "import_products",
        "import_releases",
    )
    with env.cd(env.project_dir):
        for name in management_commands:
            docker_compose(f"run django-a python manage.py {name}")
def deploy(clearsessions: bool = False, stash: bool = False):
    """
    Pull the latest changes from main, rebuild and restart the stack.

    Rolls the two django containers one at a time (django-a, then
    django-b) so the site stays up during the deploy.

    :param clearsessions: when True, clear expired database sessions
        before deploying.
    :param stash: when True, ``git stash`` local changes before pulling
        and pop them back afterwards.
    """
    # copy_secrets()
    with env.cd(env.project_dir):
        # Clear old database sessions
        if clearsessions:
            docker_compose("run django-a python manage.py clearsessions")
        # Stash existing changes so `git pull` cannot conflict with them
        if stash:
            env.run("git stash")
        # Pull the latest code
        env.run("git pull origin main")
        # Restore the previously stashed changes
        if stash:
            env.run("git stash pop")
        # turn maintenance mode on
        # maintenance_mode_on("django-a")
        # Build and roll our primary Docker container
        build_and_restart("django-a")
        # Give django-a time to come up before touching the rest of the stack
        print("[yellow]waiting 10 seconds[/yellow]")
        time.sleep(10)
        # just to make sure they are on
        # docker_compose("start postgres")
        docker_compose("start redis")
        # print("[yellow]waiting 10 seconds[/yellow]")
        # time.sleep(10)
        # Build and roll our secondary Docker container
        build_and_restart("django-b")
        # Restart the django-q background task worker
        docker_compose("stop django-q")
        docker_compose("start django-q")
        # collectstatic
        collectstatic("django-a")
        # turn maintenance mode off
        # maintenance_mode_off("django-a")
def collectstatic(service):
    """Collect Django static files inside the running *service* container."""
    command = f"exec {service} python manage.py collectstatic --no-input -v 1"
    docker_compose(command)
def maintenance_mode_on(service):
    """Turn maintenance mode on inside the running *service* container."""
    command = f"exec {service} python manage.py maintenance_mode on"
    docker_compose(command)
def maintenance_mode_off(service):
    """Turn maintenance mode off inside the running *service* container."""
    command = f"exec {service} python manage.py maintenance_mode off"
    docker_compose(command)
def purge_cache(service):
    """Purge the whole Cloudflare cache for djangopackages.org via cli4 in *service*."""
    command = (
        f"exec {service} cli4 --delete purge_everything=true "
        "/zones/:djangopackages.org/purge_cache"
    )
    docker_compose(command)
def docker_compose(command, old=True):
    """
    Run a compose command inside the project directory.

    Picks the binary from ``env.compose_version``: ``docker compose`` (v2)
    or the legacy ``docker-compose`` (v1), always against
    ``env.compose_file``.

    :param command: compose sub-command and arguments, e.g. ``"start redis"``.
    :param old: DEPRECATED and unused — the compose version is selected via
        ``env.compose_version``. Kept only so existing callers that pass it
        keep working.
    :return: the result of ``env.run`` for the executed command.
    """
    with env.cd(env.project_dir):
        if env.compose_version == "v2":
            return env.run(f"docker compose -f {env.compose_file} {command}")
        return env.run(f"docker-compose -f {env.compose_file} {command}")