forked from conda-forge/staged-recipes
/
create_feedstocks.py
executable file
·278 lines (233 loc) · 11.5 KB
/
create_feedstocks.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
#!/usr/bin/env python
"""
Convert all recipes into feedstocks.
This script is to be run in a TravisCI context, with all secret environment variables defined (BINSTAR_TOKEN, GH_TOKEN)
Such as:
export GH_TOKEN=$(cat ~/.conda-smithy/github.token)
"""
from __future__ import print_function
from conda_build.metadata import MetaData
from conda_smithy.github import gh_token
from contextlib import contextmanager
from datetime import datetime
from github import Github, GithubException, Team
import os.path
from random import choice
import shutil
import subprocess
import tempfile
# Enable DEBUG to run the diagnostics, without actually creating new feedstocks.
DEBUG = False

# Adjectives used to vary the description of each newly created per-feedstock
# maintainer team (picked at random in the team-creation code below).
superlative = ['awesome', 'slick', 'formidable', 'awe-inspiring', 'breathtaking',
               'magnificent', 'wonderous', 'stunning', 'astonishing', 'superb',
               'splendid', 'impressive', 'unbeatable', 'excellent', 'top', 'outstanding',
               'exalted', 'standout', 'smashing']
def list_recipes(recipe_directory_name='recipes'):
    """Yield ``(absolute_path, directory_name)`` for each recipe directory.

    Parameters
    ----------
    recipe_directory_name : str, optional
        Directory to scan for recipes. Defaults to ``'recipes'`` (relative
        to the current working directory), preserving the original behavior.

    Notes
    -----
    * A missing recipe directory yields nothing rather than raising.
    * Directories whose name starts with ``'example'`` are skipped — the
      example feedstock is only there to be helpful, not to be built.
    """
    if os.path.isdir(recipe_directory_name):
        recipes = os.listdir(recipe_directory_name)
    else:
        recipes = []

    for recipe_dir in recipes:
        # We don't list the "example" feedstock. It is an example, and is
        # there to be helpful.
        if recipe_dir.startswith('example'):
            continue
        path = os.path.abspath(os.path.join(recipe_directory_name, recipe_dir))
        yield path, recipe_dir
@contextmanager
def tmp_dir(*args, **kwargs):
    """Context manager yielding a freshly created temporary directory.

    All arguments are forwarded to ``tempfile.mkdtemp``. The directory and
    everything in it are deleted on exit, even if the body raises.
    """
    path = tempfile.mkdtemp(*args, **kwargs)
    try:
        yield path
    finally:
        shutil.rmtree(path)
def repo_exists(organization, name):
    """Return True if the GitHub repo ``organization/name`` exists.

    A 404 from the API means "no such repo" and returns False; any other
    GithubException is re-raised.
    """
    gh = Github(gh_token())
    # Use the organization provided.
    org = gh.get_organization(organization)
    try:
        org.get_repo(name)
    except GithubException as exc:
        if exc.status == 404:
            return False
        raise
    return True
def create_team(org, name, description, repo_names):
    """Create a non-secret ("closed") team with push permission on *repo_names*.

    PyGithub creates secret teams, and has no way of turning that off! :(
    So we drive the REST endpoint directly through the org's requester and
    wrap the response in a ``Team`` object ourselves.
    """
    payload = {
        "name": name,
        "description": description,
        "privacy": "closed",
        "permission": "push",
        "repo_names": repo_names,
    }
    headers, data = org._requester.requestJsonAndCheck(
        "POST", org.url + "/teams", input=payload)
    return Team.Team(org._requester, headers, data, completed=True)
def print_rate_limiting_info(gh):
    """Print remaining/total GitHub API calls and time until the limit resets.

    Note that querying this info doesn't count against our limit, so we
    should do it regularly to know when the budget will run out and to
    understand where we are spending it.
    """
    # Remaining and total API calls for the current window.
    remaining, total = gh.rate_limiting
    # Time left until the rate-limit window resets.
    reset_in = datetime.utcfromtimestamp(gh.rate_limiting_resettime) - datetime.utcnow()

    print("")
    print("GitHub API Rate Limit Info:")
    print("---------------------------")
    print("Currently remaining {remaining} out of {total}.".format(remaining=remaining, total=total))
    print("Will reset in {time}.".format(time=reset_in))
    print("")
if __name__ == '__main__':
    # Feedstocks are only actually created/pushed for merged PRs on master;
    # otherwise the script runs in "check only" mode.
    is_merged_pr = (os.environ.get('TRAVIS_BRANCH') == 'master' and os.environ.get('TRAVIS_PULL_REQUEST') == 'false')

    # Write the CI/GitHub tokens where conda-smithy expects them
    # (~/.conda-smithy/<name>.token).
    smithy_conf = os.path.expanduser('~/.conda-smithy')
    if not os.path.exists(smithy_conf):
        os.mkdir(smithy_conf)

    def write_token(name, token):
        # Persist a single token file for conda-smithy to pick up.
        with open(os.path.join(smithy_conf, name + '.token'), 'w') as fh:
            fh.write(token)
    if 'APPVEYOR_TOKEN' in os.environ:
        write_token('appveyor', os.environ['APPVEYOR_TOKEN'])
    if 'CIRCLE_TOKEN' in os.environ:
        write_token('circle', os.environ['CIRCLE_TOKEN'])
    gh = None
    if 'GH_TOKEN' in os.environ:
        write_token('github', os.environ['GH_TOKEN'])
        gh = Github(os.environ['GH_TOKEN'])
        # Get our initial rate limit info.
        print_rate_limiting_info(gh)

    owner_info = ['--organization', 'conda-forge']

    print('Calculating the recipes which need to be turned into feedstocks.')
    removed_recipes = []
    with tmp_dir('__feedstocks') as feedstocks_dir:
        feedstock_dirs = []
        for recipe_dir, name in list_recipes():
            feedstock_dir = os.path.join(feedstocks_dir, name + '-feedstock')
            os.mkdir(feedstock_dir)
            print('Making feedstock for {}'.format(name))

            subprocess.check_call(['conda', 'smithy', 'init', recipe_dir,
                                   '--feedstock-directory', feedstock_dir])
            if not is_merged_pr:
                # We just want to check that conda-smithy is doing its thing
                # without having any metadata issues.
                continue

            feedstock_dirs.append([feedstock_dir, name, recipe_dir])

            # Embed the token in the remote URL so we can push without prompts.
            subprocess.check_call(['git', 'remote', 'add', 'upstream_with_token',
                                   'https://conda-forge-manager:{}@github.com/conda-forge/{}'.format(os.environ['GH_TOKEN'],
                                                                                                     os.path.basename(feedstock_dir))],
                                  cwd=feedstock_dir)

            # Sometimes we already have the feedstock created. We need to deal
            # with that case: rebase our local work onto the existing repo.
            if repo_exists('conda-forge', os.path.basename(feedstock_dir)):
                subprocess.check_call(['git', 'fetch', 'upstream_with_token'], cwd=feedstock_dir)
                subprocess.check_call(['git', 'branch', '-m', 'master', 'old'], cwd=feedstock_dir)
                try:
                    subprocess.check_call(['git', 'checkout', '-b', 'master', 'upstream_with_token/master'], cwd=feedstock_dir)
                except subprocess.CalledProcessError:
                    # Sometimes, we have a repo, but there are no commits on
                    # it! Just catch that case.
                    # NOTE(review): '-b' 'master' is implicit string
                    # concatenation, producing the single argument '-bmaster'.
                    # git happens to accept '-b<name>', but a comma was almost
                    # certainly intended here — confirm and fix.
                    subprocess.check_call(['git', 'checkout', '-b' 'master'], cwd=feedstock_dir)
            else:
                subprocess.check_call(['conda', 'smithy', 'register-github', feedstock_dir] + owner_info)

        conda_forge = None
        teams = None
        if gh:
            # Only get the org and teams if there is stuff to add.
            if feedstock_dirs:
                conda_forge = gh.get_organization('conda-forge')
                teams = {team.name: team for team in conda_forge.get_teams()}

        # Break the previous loop to allow the TravisCI registering to take
        # place only once per function call. Without this, intermittent
        # failures to sync the TravisCI repos ensue.
        all_maintainers = set()
        for feedstock_dir, name, recipe_dir in feedstock_dirs:
            subprocess.check_call(['conda', 'smithy', 'register-ci', '--feedstock_directory', feedstock_dir] + owner_info)

            subprocess.check_call(['conda', 'smithy', 'rerender'], cwd=feedstock_dir)
            subprocess.check_call(['git', 'commit', '-am', "Re-render the feedstock after CI registration."], cwd=feedstock_dir)
            # Capture the output, as it may contain the GH_TOKEN.
            out = subprocess.check_output(['git', 'push', 'upstream_with_token', 'master'], cwd=feedstock_dir,
                                          stderr=subprocess.STDOUT)

            # Add team members as maintainers.
            if conda_forge:
                meta = MetaData(recipe_dir)
                maintainers = set(meta.meta.get('extra', {}).get('recipe-maintainers', []))
                all_maintainers.update(maintainers)
                team_name = name.lower()
                repo_name = 'conda-forge/{}'.format(os.path.basename(feedstock_dir))

                # Try to get team or create it if it doesn't exist.
                team = teams.get(team_name)
                if not team:
                    team = create_team(
                        conda_forge,
                        team_name,
                        'The {} {} contributors!'.format(choice(superlative), team_name),
                        repo_names=[repo_name]
                    )
                    teams[team_name] = team
                    current_maintainers = []
                else:
                    current_maintainers = team.get_members()

                # Add only the new maintainers to the team.
                current_maintainers_handles = set([each_maintainers.login.lower() for each_maintainers in current_maintainers])
                for new_maintainer in maintainers - current_maintainers_handles:
                    headers, data = team._requester.requestJsonAndCheck(
                        "PUT",
                        team.url + "/memberships/" + new_maintainer
                    )
                # Mention any maintainers that need to be removed (unlikely here).
                for old_maintainer in current_maintainers_handles - maintainers:
                    print("AN OLD MEMBER ({}) NEEDS TO BE REMOVED FROM {}".format(old_maintainer, repo_name))

            # Remove this recipe from the repo.
            removed_recipes.append(name)
            if is_merged_pr:
                subprocess.check_call(['git', 'rm', '-r', recipe_dir])

    # Add new conda-forge members to all-members team. Welcome! :)
    if conda_forge:
        team_name = 'all-members'
        team = teams.get(team_name)
        if not team:
            team = create_team(
                conda_forge,
                team_name,
                'All of the awesome conda-forge contributors!',
                []
            )
            teams[team_name] = team
            current_members = []
        else:
            current_members = team.get_members()

        # Add only the new members to the team.
        current_members_handles = set([each_member.login.lower() for each_member in current_members])
        for new_member in all_maintainers - current_members_handles:
            print("Adding a new member ({}) to conda-forge. Welcome! :)".format(new_member))
            headers, data = team._requester.requestJsonAndCheck(
                "PUT",
                team.url + "/memberships/" + new_member
            )

    # Commit any removed packages.
    subprocess.check_call(['git', 'status'])
    if removed_recipes:
        subprocess.check_call(['git', 'checkout', os.environ.get('TRAVIS_BRANCH')])
        # NOTE(review): because of Python precedence, .format() below binds
        # only to the '[ci skip]' literal (which has no placeholders), so the
        # '{s}' and '{}' in the first literal are never substituted and the
        # commit message contains them verbatim. The .format() call should be
        # moved outside the parenthesized concatenation — confirm and fix.
        msg = ('Removed recipe{s} ({}) after converting into feedstock{s}. '
               '[ci skip]'.format(', '.join(removed_recipes),
                                  s=('s' if len(removed_recipes) > 1 else '')))
        if is_merged_pr:
            # Capture the output, as it may contain the GH_TOKEN.
            out = subprocess.check_output(['git', 'remote', 'add', 'upstream_with_token',
                                           'https://conda-forge-manager:{}@github.com/conda-forge/staged-recipes'.format(os.environ['GH_TOKEN'])],
                                          stderr=subprocess.STDOUT)
            subprocess.check_call(['git', 'commit', '-m', msg])
            # Capture the output, as it may contain the GH_TOKEN.
            out = subprocess.check_output(['git', 'push', 'upstream_with_token', os.environ.get('TRAVIS_BRANCH')],
                                          stderr=subprocess.STDOUT)
        else:
            print('Would git commit, with the following message: \n {}'.format(msg))

    if gh:
        # Get our final rate limit info.
        print_rate_limiting_info(gh)