-
Notifications
You must be signed in to change notification settings - Fork 935
476 lines (448 loc) · 19.8 KB
/
ci.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
###
# Initially copied from
# https://github.com/actions/starter-workflows/blob/main/ci/python-package.yml
#
# Original comment follows.
###
###
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
###
###
# Important notes on GitHub actions:
#
# - We only get 2,000 free minutes a month
# - We only get 500MB of artifact storage
# - Cache storage is limited to 7 days and 5GB.
# - macOS minutes are 10x as expensive as Linux minutes
# - windows minutes are twice as expensive.
#
# So keep those workflows light.
#
# In December 2020, github only supports x86/64. If we wanted to test
# gevent on other architectures, we might be able to use docker
# emulation, but there's no native support.
#
# Another major downside: You can't just re-run the job for one part
# of the matrix. So if a transient test failure hits, say, 3.8, then
# every version of Python must run again to get a clean run. That's bad.
# https://github.community/t/ability-to-rerun-just-a-single-job-in-a-workflow/17234/65
name: gevent testing
# Triggers the workflow on push or pull request events
on: [push, pull_request]
# Limiting to particular branches might be helpful to conserve minutes.
#on:
#  push:
#    branches: [ $default-branch ]
#  pull_request:
#    branches: [ $default-branch ]
env:
  # Weirdly, this has to be a top-level key, not ``defaults.env``
  PYTHONHASHSEED: 8675309
  PYTHONUNBUFFERED: 1
  PYTHONDONTWRITEBYTECODE: 1
  PIP_UPGRADE_STRATEGY: eager
  # Don't get warnings about Python 2 support being deprecated. We
  # know. The env var works for pip 20.
  PIP_NO_PYTHON_VERSION_WARNING: 1
  PIP_NO_WARN_SCRIPT_LOCATION: 1
  GEVENTSETUP_EV_VERIFY: 1
  # Disable some warnings produced by libev especially and also some Cython generated code.
  # These are shared between GCC and clang so it must be a minimal set.
  # TODO: Figure out how to set env vars per platform without resorting to inline scripting.
  # Note that changing the value of these variables invalidates configure caches
  CFLAGS: -O3 -pipe -Wno-strict-aliasing -Wno-comment
  CPPFLAGS: -DEV_VERIFY=1
  # Uploading built wheels for releases.
  # TWINE_PASSWORD is stored encrypted in the repository secrets
  # (this note is a holdover from the Travis CI days).
  TWINE_USERNAME: __token__
  ###
  # caching
  ###
  CCACHE_DIR: ~/.ccache
  CC: "ccache gcc"
  CCACHE_NOCPP2: true
  CCACHE_SLOPPINESS: file_macro,time_macros,include_file_ctime,include_file_mtime
  CCACHE_NOHASHDIR: true
#
jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # 3.10+ needs more work: dnspython for example doesn't work
        # with it. That means for the bulk of our testing we need to
        # stick to 3.9.
        # XXX: PyPy came out of the matrix!
        # Versions are quoted so YAML treats them as strings; a bare
        # 3.10 is the float 3.1.
        python-version: ["3.12-dev", "3.8", "3.9", "3.10", "3.11"]
        os: [macos-latest, ubuntu-latest]
        # macOS minutes are 10x the cost of Linux minutes, so only run
        # the newer CPython versions there.
        exclude:
          - os: macos-latest
            python-version: "3.8"
          - os: macos-latest
            python-version: "3.9"
          # NOTE: this entry was previously the unquoted scalar 3.10,
          # which YAML parses as the float 3.1; it therefore never
          # matched the quoted matrix value '3.10' and the exclusion
          # silently did not apply.
          - os: macos-latest
            python-version: "3.10"
    steps:
      - name: checkout
        uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          # Cache pip downloads, keyed on setup.py.
          cache: 'pip'
          cache-dependency-path: setup.py
      - name: Install ccache (ubuntu)
        if: startsWith(runner.os, 'Linux')
        run: |
          sudo apt-get install -y ccache sed gcc
      - name: Install ccache (macos)
        if: startsWith(runner.os, 'macOS')
        run: |
          brew install ccache
          # clang-only warning suppression; appended here rather than in
          # the global CFLAGS because GCC doesn't accept it.
          echo CFLAGS=$CFLAGS -Wno-parentheses-equality >>$GITHUB_ENV
      - name: Set coverage status
        # coverage is too slow on PyPy. We can't submit it from macOS (see that action),
        # so don't bother taking the speed hit there either.
        if: ${{ !startsWith(matrix.python-version, 'pypy') && startsWith(runner.os, 'Linux') }}
        run: |
          echo G_USE_COV=--coverage >> $GITHUB_ENV
###
# Caching.
# This actually *restores* a cache and schedules a cleanup action
# to save the cache. So it must come before the thing we want to use
# the cache.
###
- name: Cache ~/.ccache
uses: actions/cache@v2
with:
path: ~/.ccache/
key: ${{ runner.os }}-ccache2-${{ matrix.python-version }}
- name: Cache config.cache
# Store the configure caches. Having a cache can speed up c-ares
# configure from 2-3 minutes to 20 seconds.
uses: actions/cache@v2
with:
path: deps/*/config.cache
# XXX: This should probably include a hash of each configure
# script We don't have a restore-keys that doesn't include
# the CFLAGS becouse the scripts fail to run if they get
# different CFLAGS, CC, CPPFLAGS, etc, and GHA offers no way
# to manually clear the cache. At one time, we had a
# restore-key configured, and it still seems to be used even
# without that setting here. The whole thing is being
# matched even without the CFLAGS matching. Perhaps the - is
# a generic search separator?
key: ${{ runner.os }}-${{ matrix.os }}-configcache3-${{ matrix.python-version }}-${{ env.CFLAGS }}
# Install gevent. Yes, this will create different files each time,
# leading to a fresh cache. But because of CCache stats, we had already been doing
# that (before we learned about CCACHE_NOSTATS).
# We don't install using the requirements file for speed (reduced deps) and because an editable
# install doesn't work in the cache.
# First, the build dependencies (see setup.cfg)
# so that we don't have to use build isolation and can better use the cache;
# Note that we can't use -U for cffi and greenlet on PyPy.
# The -q is because Pypy-2.7 sometimes started raising
# UnicodeEncodeError: 'ascii' codec can't encode character u'\u2588' in position 6: ordinal not in range(128)
# when downloading files. This started sometime in mid 2020. It's from
# pip's vendored progress.bar class.
- name: Install dependencies
run: |
pip install -U pip
pip install -U -q setuptools wheel twine
pip install -q -U 'cffi;platform_python_implementation=="CPython"'
pip install -q -U 'cython>=3.0b3; python_version < "3.12"' "Cython @ https://github.com/cython/cython/archive/37f4dcdc04547875e2836fda076f5707ec50e579.zip; python_version >= '3.12'"
pip install 'greenlet>=2.0.0 ;platform_python_implementation=="CPython"' 'greenlet >= 3.0a1; python_version >="3.12"'
- name: Build gevent (non-Mac)
if: ${{ ! startsWith(runner.os, 'Mac') }}
run: |
# Next, build the wheel *in place*. This helps ccache, and also lets us cache the configure
# output (pip install uses a random temporary directory, making this difficult)
python setup.py build_ext -i
python setup.py bdist_wheel
env:
# Ensure we test with assertions enabled.
# As opposed to the manylinux builds, which we distribute and
# thus only use O3 (because Ofast enables fast-math, which has
# process-wide effects), we test with Ofast here, because we
# expect that some people will compile it themselves with that setting.
CPPFLAGS: "-Ofast -UNDEBUG"
- name: Build gevent (Mac)
if: startsWith(runner.os, 'Mac')
run: |
# Next, build the wheel *in place*. This helps ccache, and also lets us cache the configure
# output (pip install uses a random temporary directory, making this difficult)
python setup.py build_ext -i
python setup.py bdist_wheel
env:
# Unlike the above, we are actually distributing these
# wheels, so they need to be built for production use.
CPPFLAGS: "-O3"
# Build for both architectures
ARCHFLAGS: "-arch x86_64 -arch arm64"
- name: Check gevent build
run: |
ls -l dist
twine check dist/*
- name: Upload gevent wheel
uses: actions/upload-artifact@v2
with:
name: gevent-${{ runner.os }}-${{ matrix.python-version }}.whl
path: dist/*whl
- name: Publish package to PyPI (mac)
# We cannot 'uses: pypa/gh-action-pypi-publish@v1.4.1' because
# that's apparently a container action, and those don't run on
# the Mac.
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') && startsWith(runner.os, 'Mac')
env:
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
run: |
twine upload --skip-existing dist/*
- name: Install gevent
run: |
WHL=$(ls dist/*whl)
pip install -U "$WHL[test]"
- name: Report environment details
run: |
python --version
python -c 'import greenlet; print(greenlet, greenlet.__version__)'
python -c 'import gevent; print(gevent.__version__)'
python -c 'from gevent._compat import get_clock_info; print(get_clock_info("perf_counter"))'
python -c 'import gevent.core; print(gevent.core.loop)'
python -c 'import gevent.ares; print(gevent.ares)'
ccache -s
- name: "Tests: Basic"
run: |
python -m gevent.tests --second-chance $G_USE_COV
# For the CPython interpreters, unless we have reason to expect
# different behaviour across the versions (e.g., as measured by coverage)
# it's sufficient to run the full suite on the current version
# and oldest version.
- name: "Tests: subproccess and FileObjectThread"
# Now, the non-default threaded file object.
# In the past, we included all test files that had a reference to 'subprocess'' somewhere in their
# text. The monkey-patched stdlib tests were specifically included here.
# However, we now always also test on AppVeyor (Windows) which only has GEVENT_FILE=thread,
# so we can save a lot of CI time by reducing the set and excluding the stdlib tests without
# losing any coverage.
env:
GEVENT_FILE: thread
run: |
python -m gevent.tests --second-chance $G_USE_COV `(cd src/gevent/tests >/dev/null && ls test__*subprocess*.py)`
- name: "Tests: c-ares resolver"
# This sometimes fails on mac.
if: (matrix.python-version == '3.11') && startsWith(runner.os, 'Linux')
env:
GEVENT_RESOLVER: ares
run: |
python -mgevent.tests --second-chance $G_USE_COV --ignore tests_that_dont_use_resolver.txt
- name: "Tests: dnspython resolver"
# This has known issues on Pypy-3.6. dnspython resolver not
# supported under anything newer than 3.10, so far.
if: (matrix.python-version == '3.9') && startsWith(runner.os, 'Linux')
env:
GEVENT_RESOLVER: dnspython
run: |
python -mgevent.tests --second-chance $G_USE_COV --ignore tests_that_dont_use_resolver.txt
- name: "Tests: leakchecks"
# Run the leaktests;
# This is incredibly important and we MUST have an environment that successfully passes
# these tests.
if: (startsWith(matrix.python-version, '3.11')) && startsWith(runner.os, 'Linux')
env:
GEVENTTEST_LEAKCHECK: 1
run: |
python -m gevent.tests --second-chance --ignore tests_that_dont_do_leakchecks.txt
- name: "Tests: PURE_PYTHON"
# No compiled cython modules on CPython, using the default backend. Get coverage here.
# We should only need to run this for a single version.
if: (matrix.python-version == '3.11') && startsWith(runner.os, 'Linux')
env:
PURE_PYTHON: 1
run: |
python -mgevent.tests --second-chance --coverage
- name: "Tests: libuv"
if: (startsWith(matrix.python-version, '3.11'))
env:
GEVENT_LOOP: libuv
run: |
python -m gevent.tests --second-chance $G_USE_COV
- name: "Tests: libev-cffi"
if: (matrix.python-version == '3.11') && startsWith(runner.os, 'Linux')
env:
GEVENT_LOOP: libev-cffi
run: |
python -m gevent.tests --second-chance $G_USE_COV
- name: Report coverage
if: ${{ !startsWith(matrix.python-version, 'pypy') }}
run: |
python -m coverage combine || true
python -m coverage report -i || true
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2
# 20230707: On macOS, this installs coveralls from homebrew.
# It then runs ``coveralls report``. But that is producing
# a usage error from ``coveralls`` (report is not recognized) Presumably the
# brew and action versions are out of sync?
if: (startsWith(runner.os, 'Linux'))
with:
flag-name: run-${{ join(matrix.*, '-') }}
parallel: true
- name: Lint
if: matrix.python-version == '3.10' && startsWith(runner.os, 'Linux')
# We only need to do this on one version.
# We do this here rather than a separate job to avoid the compilation overhead.
# 20230707: Python 3.11 crashes inside pylint/astroid on _ssl3.py;
# reverting to Python 3.10 solved that.
# TODO: Revisit this when we have caching of that part.
run: |
pip install -U pylint
python -m pylint --rcfile=.pylintrc gevent
  coveralls_finish:
    # Tell Coveralls that all the parallel per-matrix-entry reports
    # (uploaded by the 'Coveralls Parallel' step above) are complete.
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: coverallsapp/github-action@v2
        with:
          parallel-finished: true
test_no_embed:
runs-on: ${{ matrix.os }}
strategy:
matrix:
python-version: ['3.11']
os: [ubuntu-latest]
steps:
- name: checkout
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: setup.py
- name: Install ccache (ubuntu)
if: startsWith(runner.os, 'Linux')
run: |
sudo apt-get install -y ccache sed gcc
- name: Cache ~/.ccache
uses: actions/cache@v2
with:
path: ~/.ccache/
key: ${{ runner.os }}-ccache2_embed-${{ matrix.python-version }}
- name: Cache config.cache
# Store the configure caches. Having a cache can speed up c-ares
# configure from 2-3 minutes to 20 seconds.
uses: actions/cache@v2
with:
path: deps/*/config.cache
# XXX: This should probably include a hash of each configure
# script We don't have a restore-keys that doesn't include
# the CFLAGS becouse the scripts fail to run if they get
# different CFLAGS, CC, CPPFLAGS, etc, and GHA offers no way
# to manually clear the cache. At one time, we had a
# restore-key configured, and it still seems to be used even
# without that setting here. The whole thing is being
# matched even without the CFLAGS matching. Perhaps the - is
# a generic search separator?
key: ${{ runner.os }}-${{ matrix.os }}-configcache_embed-${{ matrix.python-version }}-${{ env.CFLAGS }}
- name: Install dependencies
run: |
pip install -U pip
pip install -U -q setuptools wheel twine
pip install -q -U 'cffi;platform_python_implementation=="CPython"'
pip install -q -U 'cython>=3.0b3; python_version < "3.12"' "Cython @ https://github.com/cython/cython/archive/37f4dcdc04547875e2836fda076f5707ec50e579.zip; python_version >= '3.12'"
pip install 'greenlet>=2.0.0; platform_python_implementation=="CPython"'
- name: build libs and gevent
env:
GEVENTSETUP_EMBED: 0
GEVENTSETUP_EV_VERIFY: 1
run: |
# These need to be absolute paths
export BUILD_LIBS="$HOME/.libs/"
mkdir -p $BUILD_LIBS
export LDFLAGS=-L$BUILD_LIBS/lib
export CPPFLAGS="-I$BUILD_LIBS/include"
env | sort
echo which sed? `which sed`
echo LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$BUILD_LIBS/lib >>$GITHUB_ENV
(pushd deps/libev && sh ./configure -C --prefix=$BUILD_LIBS && make install && popd)
(pushd deps/c-ares && sh ./configure -C --prefix=$BUILD_LIBS && make -j4 install && popd)
(pushd deps/libuv && ./autogen.sh && sh ./configure -C --disable-static --prefix=$BUILD_LIBS && make -j4 install && popd)
# libev builds a manpage each time, and it includes today's date, so it frequently changes.
# delete to avoid repacking the archive
rm -rf $BUILD_LIBS/share/man/
ls -l $BUILD_LIBS $BUILD_LIBS/lib $BUILD_LIBS/include
python setup.py bdist_wheel
pip uninstall -y gevent
pip install -U `ls dist/*whl`[test]
# Test that we're actually linking
# to the .so file.
objdump -p build/lib*/gevent/libev/_corecffi*so | grep "NEEDED.*libev.so"
objdump -p build/lib*/gevent/libev/corecext*so | grep "NEEDED.*libev.so"
objdump -p build/lib*/gevent/libuv/_corecffi*so | grep "NEEDED.*libuv.so"
objdump -p build/lib*/gevent/resolver/cares*so | grep "NEEDED.*libcares.so"
- name: test non-embedded
run: |
# Verify that we got non-embedded builds
python -c 'import gevent.libev.corecffi as CF; assert not CF.LIBEV_EMBED'
python -c 'import gevent.libuv.loop as CF; assert not CF.libuv.LIBUV_EMBED'
python -mgevent.tests --second-chance
manylinux:
runs-on: ubuntu-latest
# If we have 'needs: test', then these wait to start running until
# all the test matrix passes. That's good, because these take a
# long time, and they take a long time to kill if something goes
# wrong. OTOH, if one of the tests fail, and this is a release tag,
# we have to notice that and try restarting things so that the
# wheels get built and uploaded. For that reason, it's simplest to
# remove this for release branches.
needs: test
strategy:
matrix:
python-version: [3.9]
image:
- manylinux2014_aarch64
- manylinux2014_ppc64le
- manylinux2014_s390x
- manylinux2014_x86_64
- musllinux_1_1_x86_64
- musllinux_1_1_aarch64
name: ${{ matrix.image }}
steps:
- name: checkout
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache ~/.ccache
uses: actions/cache@v2
with:
path: ~/.ccache/
key: ${{ runner.os }}-ccache_${{ matrix.config[2] }}-${{ matrix.config[0] }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
with:
platforms: all
- name: Build and test gevent
env:
DOCKER_IMAGE: quay.io/pypa/${{ matrix.image }}
GEVENT_MANYLINUX_NAME: ${{ matrix.image }}
run: scripts/releases/make-manylinux
- name: Upload gevent wheels
uses: actions/upload-artifact@v2
with:
path: wheelhouse/*whl
name: ${{ matrix.config[2] }}_x86_64_wheels.zip
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@v1.4.1
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
with:
user: __token__
password: ${{ secrets.TWINE_PASSWORD }}
skip_existing: true
packages_dir: wheelhouse/
# TODO:
# * Use YAML syntax to share snippets, like the old .travis.yml did