-
Notifications
You must be signed in to change notification settings - Fork 299
/
generate_headers.py
302 lines (233 loc) · 11.8 KB
/
generate_headers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
"""
In this module, we do some of the preparatory work that is needed to get
CoolProp ready to build. This includes setting the correct versions in the
headers, generating the fluid files, etc.
"""
from __future__ import division, print_function, unicode_literals
from datetime import datetime
import subprocess
import os
import sys
import json
import hashlib
import struct
import glob
# Keyword arguments handed to json.dumps() when writing the "verbose"
# (human-readable) variants of the combined JSON files.
json_options = dict(indent=2, sort_keys=True)
def get_hash(data):
    """Return the SHA-224 hex digest of *data*.

    Accepts either a bytes-like object or a text string; text is
    encoded as ASCII before hashing.
    """
    payload = data.encode('ascii') if isinstance(data, str) else data
    return hashlib.sha224(payload).hexdigest()
# Root of the CoolProp checkout (this file lives in <root>/dev/)
repo_root_path = os.path.normpath(os.path.join(os.path.abspath(__file__), '..', '..'))

# Load up the hashes of the data that was previously written to each file;
# they let us skip rewriting generated headers whose content has not changed.
hashes_fname = os.path.join(repo_root_path, 'dev', 'hashes.json')
if os.path.exists(hashes_fname):
    # Use a context manager so the file handle is closed deterministically
    with open(hashes_fname, 'r') as fp:
        hashes = json.load(fp)
else:
    hashes = dict()

# Each tuple:
# 0: Input file path relative to dev folder
# 1: Output file path relative to include folder
# 2: Name of variable
values = [
    ('all_fluids.json', 'all_fluids_JSON.h', 'all_fluids_JSON'),
    ('all_incompressibles.json', 'all_incompressibles_JSON.h', 'all_incompressibles_JSON'),
    ('mixtures/mixture_departure_functions.json', 'mixture_departure_functions_JSON.h', 'mixture_departure_functions_JSON'),
    ('mixtures/mixture_binary_pairs.json', 'mixture_binary_pairs_JSON.h', 'mixture_binary_pairs_JSON'),
    ('mixtures/predefined_mixtures.json', 'predefined_mixtures_JSON.h', 'predefined_mixtures_JSON'),
    ('cubics/all_cubic_fluids.json', 'all_cubics_JSON.h', 'all_cubics_JSON'),
    ('cubics/cubic_fluids_schema.json', 'cubic_fluids_schema_JSON.h', 'cubic_fluids_schema_JSON')
]
def TO_CPP(root_dir, hashes):
    """Embed each JSON data file listed in ``values`` as a C header.

    Every input file under <root_dir>/dev is validated as JSON, hex-encoded
    byte-by-byte into a ``const unsigned char`` array plus a ``std::string``
    wrapper, and written to <root_dir>/include/<outfile> — but only when the
    content hash differs from the one recorded in *hashes*.

    Parameters
    ----------
    root_dir : str
        Root of the repository checkout.
    hashes : dict
        Mapping of variable name -> SHA-224 hash of the last written
        content; updated in place for every header that gets rewritten.
    """
    def to_chunks(l, n):
        # Split list l into pieces of at most n elements (n clamped to >= 1)
        if n < 1:
            n = 1
        return [l[i:i + n] for i in range(0, len(l), n)]

    # Normalise path name
    root_dir = os.path.normpath(root_dir)

    # First we package up the JSON files
    combine_json(root_dir)

    for infile, outfile, variable in values:
        infile_path = os.path.join(root_dir, 'dev', infile)

        # Confirm that the JSON file can be loaded and doesn't have any
        # formatting problems
        with open(infile_path, 'r') as fp:
            try:
                json.load(fp)
            except ValueError:
                print('"python -mjson.tool ' + infile_path + '" returns ->', end='')
                subprocess.call('python -mjson.tool ' + infile_path, shell=True)
                raise ValueError('unable to decode file %s' % infile_path)

        # Read the raw bytes to be embedded.  NOTE: the original code bound
        # this to a local named ``json``, shadowing the module and forcing a
        # re-import on every loop iteration; use a distinct name instead.
        with open(infile_path, 'r') as fp:
            raw = fp.read().encode('ascii')

        # Convert each byte to hex and add a terminating NULL character to
        # end the string.  bytearray yields ints on both Python 2 and 3,
        # which replaces the old ord()/int() try-except dance.
        h = ["0x{:02x}".format(b) for b in bytearray(raw)] + ['0x00']

        # Break up the file into lines of 16 hex characters, then put the
        # lines back together: commas within a line, EOLs between lines
        chunks = to_chunks(h, 16)
        hex_string = ',\n'.join([', '.join(chunk) for chunk in chunks])

        # Compute the content hash once; used both for the staleness check
        # and for updating the hash cache
        content_hash = get_hash(hex_string.encode('ascii'))
        header_path = os.path.join(root_dir, 'include', outfile)

        # Rewrite the header only when it is missing or its recorded hash
        # is stale (keyed on the variable name)
        if not os.path.isfile(header_path) or hashes.get(variable) != content_hash:
            # Generate the output string
            output = '// File generated by the script dev/generate_headers.py on ' + str(datetime.now()) + '\n\n'
            output += '// JSON file encoded in binary form\n'
            output += 'const unsigned char ' + variable + '_binary[] = {\n' + hex_string + '\n};' + '\n\n'
            output += '// Combined into a single std::string \n'
            output += 'std::string {v:s}({v:s}_binary, {v:s}_binary + sizeof({v:s}_binary)/sizeof({v:s}_binary[0]));'.format(v=variable)
            # Write it to file
            with open(header_path, 'w') as f:
                f.write(output)
            # Store the hash of the data that was written to file (not
            # including the generated header comment)
            hashes[variable] = content_hash
            print(header_path + ' written to file')
        else:
            print(outfile + ' is up to date')
def version_to_file(root_dir):
    """Parse the version from CMakeLists.txt and write cpversion.h / .version.

    CMakeLists.txt is expected to contain lines like::

        set (CoolProp_VERSION_MAJOR 5)
        set (CoolProp_VERSION_MINOR 0)
        set (CoolProp_VERSION_PATCH 0)

    include/cpversion.h is only rewritten when the version hash recorded in
    the module-level ``hashes`` dict is stale; the hidden <root>/.version
    file is always rewritten (for builders that don't use a git repo).
    """
    with open(os.path.join(root_dir, 'CMakeLists.txt'), 'r') as fp:
        lines = fp.readlines()

    def _extract(tag, excluded):
        # Value of the first "set (...<tag> <value>)" line; `excluded`
        # disambiguates e.g. VERSION_MAJOR from VERSION_MINOR mentions
        line = [l for l in lines if (tag in l and excluded not in l)][0]
        return line.strip().split(tag)[1].split(')')[0].strip()

    MAJOR = _extract('VERSION_MAJOR', 'MINOR')
    MINOR = _extract('VERSION_MINOR', 'MAJOR')
    PATCH = _extract('VERSION_PATCH', 'MINOR')
    REVISION = _extract('VERSION_REVISION', 'MINOR')

    # Generate the version string, e.g. "5.0.0" plus any revision suffix
    version = '.'.join([MAJOR, MINOR, PATCH]) + REVISION

    # Compute the hash once and use it for both the comparison and the
    # update (the original compared the encoded form but stored the
    # unencoded one — same digest, but needlessly inconsistent)
    version_hash = get_hash(version.encode('ascii'))
    if hashes.get('version') != version_hash:
        hashes['version'] = version_hash
        # Format the string to be written
        string_for_file = '//Generated by the generate_headers.py script on {t:s}\n\nstatic char version [] ="{v:s}";'.format(t=str(datetime.now()), v=version)
        # The name of the file to be written into, relative to the root
        file_name = os.path.join(root_dir, 'include', 'cpversion.h')
        with open(file_name, 'w') as f:
            f.write(string_for_file)
        print('version written to file: ' + file_name)
    else:
        print('cpversion.h is up to date')

    # Always write the bare version for builds without git metadata
    hidden_file_name = os.path.join(root_dir, '.version')
    with open(hidden_file_name, 'w') as f:
        f.write(version)
    print('version written to hidden file: ' + hidden_file_name + " for use in builders that don't use git repo")
def gitrev_to_file(root_dir):
    """
    If a git repo, use git to update the gitrevision. If not a git repo, read
    the gitrevision from the gitrevision.txt file. Otherwise, fall back to
    '???'.  include/gitrevision.h is rewritten only when the revision hash
    recorded in the module-level ``hashes`` dict is stale.  Best effort: any
    CalledProcessError/OSError at the outer level is swallowed.
    """
    try:
        try:
            subprocess.check_call('git --version', shell=True)
            print('git is accessible at the command line')
            # Try to get the git revision
            p = subprocess.Popen('git rev-parse HEAD',
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 shell=True,
                                 cwd=os.path.dirname(__file__))
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            if p.returncode != 0:
                print('tried to get git revision from git, but could not (building from zip file?)')
                gitrevision_path = os.path.join(root_dir, 'dev', 'gitrevision.txt')
                if os.path.exists(gitrevision_path):
                    with open(gitrevision_path, 'r') as fp:
                        gitrev = fp.read().strip()
                else:
                    print('tried to get git revision from ' + gitrevision_path + ', but could not')
                    gitrev = '???'
            else:
                gitrev = stdout.strip()
                # A bare commit hash contains no whitespace; anything else
                # means git printed a message instead of a revision.
                # BUG FIX: the original raised with the undefined name `rev`,
                # which produced a NameError instead of this ValueError.
                if ' ' in gitrev:
                    raise ValueError('No hash returned from call to git, got ' + gitrev + ' instead')
        except subprocess.CalledProcessError:
            print('git was not found')
            gitrev = '???'

        # Include path relative to the root
        include_dir = os.path.join(root_dir, 'include')
        print('git revision is', str(gitrev))

        if hashes.get('gitrevision') != get_hash(gitrev):
            print('*** Generating gitrevision.h ***')
            gitstring = '//Generated by the generate_headers.py script on {t:s}\n\nstd::string gitrevision = "{rev:s}";'.format(t=str(datetime.now()), rev=gitrev)
            with open(os.path.join(include_dir, 'gitrevision.h'), 'w') as f:
                f.write(gitstring)
            hashes['gitrevision'] = get_hash(gitrev)
            print(os.path.join(include_dir, 'gitrevision.h') + ' written to file')
        else:
            print('gitrevision.h is up to date')
    except (subprocess.CalledProcessError, OSError):
        # Deliberate best-effort: leave any existing gitrevision.h untouched
        pass
def combine_json(root_dir):
    """Merge the per-fluid JSON files into the combined master JSON files.

    Writes, under <root_dir>/dev/:
      * all_fluids.json / all_fluids_verbose.json
        (from dev/fluids/*.json)
      * all_incompressibles.json / all_incompressibles_verbose.json
        (from dev/incompressible_liquids/json/*.json)

    Raises
    ------
    ValueError
        When any input file is not valid JSON (after dumping
        ``python -mjson.tool``'s diagnostic for it).
    """
    def _combine(src_glob, out_base):
        # Load every JSON file matched by src_glob into one list and write
        # it both pretty (<out_base>_verbose.json) and compact
        # (<out_base>.json).  The two original copy-pasted loops collapse
        # into this single helper.
        master = []
        for file in glob.glob(src_glob):
            try:
                # Load the fluid file
                with open(file, 'r') as fp:
                    master.append(json.load(fp))
            except ValueError:
                print('"python -mjson.tool ' + file + '" returns ->', end='')
                subprocess.call('python -mjson.tool ' + file, shell=True)
                raise ValueError('unable to decode file %s' % file)
        with open(out_base + '_verbose.json', 'w') as fp:
            fp.write(json.dumps(master, **json_options))
        with open(out_base + '.json', 'w') as fp:
            fp.write(json.dumps(master))

    _combine(os.path.join(root_dir, 'dev', 'fluids', '*.json'),
             os.path.join(root_dir, 'dev', 'all_fluids'))
    _combine(os.path.join(root_dir, 'dev', 'incompressible_liquids', 'json', '*.json'),
             os.path.join(root_dir, 'dev', 'all_incompressibles'))
def generate():
    """Run the whole header-generation pipeline against the repo root.

    Copies the Catch test header into include/, regenerates cpversion.h,
    gitrevision.h and the embedded-JSON headers, then persists the updated
    content hashes to dev/hashes.json so unchanged headers can be skipped
    on the next run.
    """
    import shutil
    shutil.copy2(os.path.join(repo_root_path, 'externals', 'Catch', 'single_include', 'catch.hpp'),
                 os.path.join(repo_root_path, 'include', 'catch.hpp'))
    # shutil.copy2(os.path.join(repo_root_path, 'externals','REFPROP-headers','REFPROP_lib.h'),os.path.join(repo_root_path,'include','REFPROP_lib.h'))
    version_to_file(root_dir=repo_root_path)
    gitrev_to_file(root_dir=repo_root_path)
    TO_CPP(root_dir=repo_root_path, hashes=hashes)
    # Write the hashes to a hashes JSON file (skipped when nothing was hashed)
    if hashes:
        with open(hashes_fname, 'w') as fp:
            fp.write(json.dumps(hashes))
# Script entry point: regenerate all headers when run directly.
if __name__=='__main__':
    generate()