#!/usr/bin/env python
"""
This tool provides functionality to:
* Generate a content set directory that the High Fidelity domain server
and assignment clients can process.
* Copy the models.json.gz that your entity server is using back into the src
directory.
* Package the generated content set into a releasable archive that the High
Fidelity Sandbox can download and use.
The tool expects the following directory structure in the input/src directory:
src/
assets/ # ATP server assets
...
entities/ # Entity server assets, models.json in unzipped form
models.json
domain-server/ # Domain server assets
config.json
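    content-version.txt # Content version file, copied into the build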
Building the content set will process the above directory structure and produce
a directory with a High Fidelity server compatible structure, which includes a
gzipped models file and a map.json for the assets server.
Build directory structure:
build/
assignment-client/
assets/
map.json
files/
...
entities/
models.json.gz
domain-server/
config.json
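The map.json file maps each asset's ATP path to the SHA-256 hash of its
contents; the hash is also the filename of the copy stored under assets/files.
An illustrative entry:
    {
        "/models/chair.fbx": "<sha256 hex digest of chair.fbx>"
    }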
Packaging the build will generate a gzipped tarball that can be downloaded and
extracted by the Sandbox.
Example usage:
Generate a build into the default assignment-client/domain-server (ac/ds) directory. This is useful when working with the Sandbox.
./hfcs build -o ~/AppData/Roaming/High\ Fidelity
After making modifications to entities on your local sandbox in interface,
copy the models file back:
./hfcs pull -o ~/AppData/Roaming/High\ Fidelity
Create a release:
# Assuming build does not exist
./hfcs build --bake -o ./build
./hfcs package ./build
"""
from __future__ import print_function

import argparse
import datetime
import errno
import gzip
import hashlib
import json
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import time
TEXTURE_EXTENSIONS = ('jpg', 'jpeg', 'png', 'tga')
verbose_enabled = False
def print_verbose(*args, **kwargs):
if verbose_enabled:
print(*args, **kwargs)
def is_texture_extension(extension):
return extension in TEXTURE_EXTENSIONS
def create_assets_map(file_path_pairs):
assets_map = {}
    # Each entry is (source_filepath, asset_path, filehash); the source path is
    # not needed to build the map itself.
    for _source_filepath, path, filehash in file_path_pairs:
path_parts = split(path)
assets_path = '/' + '/'.join(path_parts)
if assets_path in assets_map:
if assets_map[assets_path] == filehash:
print(" Found duplicate: {}".format(assets_path))
else:
print(" !!!! Overwriting asset: {}".format(assets_path))
assets_map[assets_path] = filehash
return assets_map
def split(path):
"""
Return a list containing the individual directories and filename (if
    included) in `path`. This is in contrast to os.path.split, which will only
split the path into 2 parts - the beginning and the last component.
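    Example: split('a/b/c.txt') => ['a', 'b', 'c.txt']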
"""
head, tail = os.path.split(path)
if tail == '':
if head == '':
return []
else:
return [head]
return split(head) + [tail]
def makedirs(path):
"""
Create directory `path`, including its parent directories if they do
not already exist. Return True if the directory did not exist and was
created, or False if it already existed.
"""
    try:
        os.makedirs(path)
        return True
    except OSError as e:
        # On Python 3 this also covers FileExistsError, which is a subclass
        # of OSError.
        if e.errno == errno.EEXIST:
            return False
        raise
def basename_without_extension(filepath):
"""
Given a filepath, return the basename without the extension.
    Example: /home/ryan/file.json.gz => file
"""
basename = os.path.basename(filepath)
dotpos = basename.find('.')
if dotpos > -1:
return basename[:dotpos]
return basename
def get_extension(filepath):
"""
Return the extension for a file, without the dot.
Example: /home/ryan/sphere.fbx => fbx
"""
dotpos = filepath.rfind('.')
if dotpos == -1:
extension = ''
else:
extension = filepath[dotpos + 1:]
return extension
def bake_file(input_filepath, output_directory):
"""
    Bake a file and return a list of info about each generated file. If the input file
can't be baked or the bake fails, None will be returned.
The file info dict will contain:
relative_path - path to the file, relative to the baked folder for this
asset. This will generally have a depth of 0 (example:
'sphere.fbx')
absolute_path - absolute path to the file
For an fbx, the returned list will usually contain a set of relative paths
like:
chair.fbx
textures/wood_diffuse.ktx
textures/wood_normal.ktx
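    Baking shells out to the `oven` tool, which must be available on the PATH.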
"""
extension = get_extension(input_filepath)
is_texture = is_texture_extension(extension)
if extension == 'fbx' or is_texture:
        # Textures are baked into a subdirectory named after the input file.
        if is_texture:
            output_directory = os.path.join(output_directory, basename_without_extension(input_filepath))
            makedirs(output_directory)
        # Run oven and discard its output; only the exit code is checked.
        with open(os.devnull, 'w') as FNULL:
            res = subprocess.call(
                ['oven', '-i', input_filepath, '-o', output_directory],
                stdout=FNULL,
                stderr=subprocess.STDOUT)
if res == 0:
input_filename = os.path.basename(input_filepath)
pos = input_filename.rfind('.')
if pos > -1:
input_filename_no_ext = input_filename[:pos]
else:
input_filename_no_ext = input_filename
baked_file_info = []
# For models, if input_filepath is something.fbx, output folder
# will be:
#
# output_filepath/something/baked/
#
# For textures, the directory is just the output_directory
#
if is_texture:
baked_directory = output_directory
else:
baked_directory = os.path.join(output_directory, input_filename_no_ext, 'baked')
for dirpath, _dirs, baked_files in os.walk(baked_directory):
relpath = os.path.relpath(dirpath, baked_directory)
for baked_file in baked_files:
rel_baked_file = os.path.normpath(os.path.join(relpath, baked_file))
baked_file_info.append({
'relative_path': rel_baked_file,
'absolute_path': os.path.join(os.getcwd(), dirpath, baked_file)
})
return baked_file_info
return None
def ask_yes_no(prompt):
    """
    Prompt the user to answer yes or no to the question in `prompt`. The
    default response is no if nothing has been entered.
    """
    # raw_input only exists on Python 2; fall back to input on Python 3.
    try:
        read_input = raw_input
    except NameError:
        read_input = input
    while True:
        resp = read_input(prompt + " (y/N) ")
        if resp in ('y', 'Y'):
            return True
        elif resp in ('', 'n', 'N'):
            return False
def remove_baked_extension(filepath):
"""
Remove the ".baked." portion of an extension from the path `filepath`.
If the filepath does not contain ".baked." in the extension, the original
path will be returned.
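    Example: chair.baked.fbx => chair.fbx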
"""
pos = filepath.rfind('.baked.')
if pos > -1:
return filepath[:pos] + filepath[pos + len('.baked'):]
return filepath
def generate_build(source_dir, output_dir, bake=False, skip_baking_skyboxes=False,
version=None):
"""
Generate a build by processing the directories and files in source_dir
and outputting the build to build_dir. if a version is specified, it will
be written to a file in the assignment-client directory.
"""
src_assets_dir = os.path.join(source_dir, 'assets')
src_entities_dir = os.path.join(source_dir, 'entities')
src_ds_dir = os.path.join(source_dir, 'domain-server')
output_ac_dir = os.path.join(output_dir, 'assignment-client')
output_assets_dir = os.path.join(output_ac_dir, 'assets')
output_assets_files_dir = os.path.join(output_assets_dir, 'files')
output_entities_dir = os.path.join(output_ac_dir, 'entities')
output_ds_dir = os.path.join(output_dir, 'domain-server')
timestr = datetime.datetime.fromtimestamp(time.time())\
.strftime('%Y-%m-%d-%H_%M_%S')
base_temp_dir = tempfile.gettempdir()
temp_dir = os.path.join(base_temp_dir, 'tut-' + timestr)
print_verbose("Temp path for baked files is ", temp_dir)
if bake:
makedirs(temp_dir)
makedirs(output_assets_dir)
makedirs(output_assets_files_dir)
makedirs(output_entities_dir)
makedirs(output_ds_dir)
# Generate models.json.gz if it doesn't exist
print(" Writing entities")
models_filepath = os.path.join(src_entities_dir, 'models.json')
output_models_filepath = os.path.join(output_entities_dir, 'models.json.gz')
should_copy_entities_to_build = False
if os.path.exists(output_models_filepath):
print(" models.json.gz in build directory already exists.")
should_copy_entities_to_build = ask_yes_no(" Do you want to replace it?")
else:
        should_copy_entities_to_build = True
# Find zone entities to determine which files are used as a skybox
skybox_asset_files = []
with open(models_filepath, 'r') as models_file:
try:
entities = json.load(models_file)
for entity in entities['Entities']:
if entity['type'] == 'Zone':
url = entity.get('skybox', {}).get('url', None)
if url is not None and url.startswith('atp:/'):
skybox_asset_files.append(url[len('atp:/'):])
        except Exception:
            print("ERROR: Failed to load models file")
            raise
print_verbose("Found skyboxes: ", ', '.join(skybox_asset_files))
# Build asset server files
print(" Writing assets")
print(" Source assets directory is: " + src_assets_dir)
print(" Copying assets to output directory")
assets_files = []
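    # Maps original skybox texture URLs (atp:/...) to the URLs of their baked
    # .ktx replacements, so the entities file can be rewritten to use them.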
skyboxes_to_update = {}
for dirpath, _dirs, files in os.walk(os.path.join(src_assets_dir)):
for filename in files:
abs_filepath = os.path.abspath(os.path.join(dirpath, filename))
asset_dir = os.path.relpath(os.path.abspath(dirpath), src_assets_dir)
asset_filepath = os.path.normpath(os.path.join(asset_dir, filename)).replace('\\', '/')
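            # Tuples of (filehash, source_filepath, asset_filepath) queued for
            # copying into the build's assets/files directory.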
asset_files_to_copy = []
needs_copy = True
if bake:
extension = get_extension(filename)
is_texture = is_texture_extension(extension)
is_skybox_texture = (is_texture and asset_filepath in skybox_asset_files)
if extension == 'fbx' or (not skip_baking_skyboxes and is_skybox_texture):
print(" Baking ", abs_filepath)
baked_files = bake_file(abs_filepath, temp_dir)
if baked_files is None:
print(" Failed to bake:", abs_filepath)
else:
for baked_file_info in baked_files:
needs_copy = False
rel_path = baked_file_info['relative_path']
abs_path = baked_file_info['absolute_path']
print_verbose('Got baked file: ', rel_path, abs_path)
asset_filepath = remove_baked_extension(rel_path)
with open(abs_path, 'rb') as f:
sha256 = hashlib.sha256()
for chunk in iter(lambda: f.read(4096), b''):
sha256.update(chunk)
filehash = sha256.hexdigest()
asset_filepath = os.path.normpath(os.path.join(asset_dir, asset_filepath))
asset_files_to_copy.append( (filehash, abs_path, asset_filepath) )
if is_skybox_texture:
rel_asset_filepath = asset_dir.replace('\\', '/')
pos = rel_path.rfind('.')
original_path = 'atp:/' + '/'.join((rel_asset_filepath, filename))
baked_path = 'atp:/' + '/'.join((rel_asset_filepath, rel_path[:pos] + '.ktx'))
print("Mapping {} to {}".format(original_path, baked_path))
skyboxes_to_update[original_path] = baked_path
if needs_copy:
asset_filepath = os.path.normpath(os.path.join(asset_dir, filename))
with open(abs_filepath, 'rb') as f:
filehash = hashlib.sha256(f.read()).hexdigest()
asset_files_to_copy.append( (filehash, abs_filepath, asset_filepath) )
for filehash, source_filepath, asset_filepath in asset_files_to_copy:
assets_files.append((source_filepath, asset_filepath, filehash))
output_filepath = os.path.join(output_assets_files_dir, filehash)
print_verbose(" Copying {} to {}".format(source_filepath, output_filepath))
shutil.copy(source_filepath, output_filepath)
print(" Copied {} assets".format(len(assets_files)))
assets_map = create_assets_map(assets_files)
output_assets_map_file = os.path.join(output_assets_dir, 'map.json')
with open(output_assets_map_file, 'w') as map_file:
json.dump(assets_map, map_file, indent=4)
    def replace_with_baked_skybox(models_data, mapping):
        """
        Rewrite Zone skybox URLs in models_data using the original-to-baked
        URL mapping.
        """
        for entity in models_data['Entities']:
            if entity['type'] == 'Zone':
                url = entity.get('skybox', {}).get('url', None)
                if url is not None and url in mapping:
                    print('Updating models file', url, 'to', mapping[url])
                    entity['skybox']['url'] = mapping[url]

    if should_copy_entities_to_build:
        print(" Creating models.json.gz")
        with open(models_filepath, 'r') as orig_file, \
                gzip.open(output_models_filepath, 'w') as gz_file:
            models_data = json.load(orig_file)
            replace_with_baked_skybox(models_data, skyboxes_to_update)
            data = json.dumps(models_data)
            gz_file.write(data.encode())
# Copy domain-server config
print(" Writing domain-server config")
    src_ds_config_filepath = os.path.join(src_ds_dir, 'config.json')
    output_ds_config_filepath = os.path.join(output_ds_dir, 'config.json')
shutil.copy(src_ds_config_filepath, output_ds_config_filepath)
# Write content version
print(" Copying content version")
src_content_version_filepath = os.path.join(source_dir, 'content-version.txt')
    output_content_version_filepath = os.path.join(output_ac_dir, 'content-version.txt')
shutil.copy(src_content_version_filepath, output_content_version_filepath)
print("Complete")
# These are the paths in an output/build directory to include in a content set
# package (.tar.gz).
PATHS_TO_INCLUDE_IN_ARCHIVE = (
'assignment-client/assets/files',
'assignment-client/assets/map.json',
'assignment-client/entities/models.json.gz',
'assignment-client/content-version.txt',
'domain-server/config.json',
)
def generate_package(input_dir, output_filepath):
print("Generating release")
if not output_filepath.endswith('.tar.gz'):
        print(' Skipping, output must end in ".tar.gz": {}'.format(output_filepath))
else:
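        # Normalize ownership metadata so the archive does not depend on the
        # user/group that produced the build.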
def tarfilter(tarinfo):
tarinfo.uid = tarinfo.gid = 0
tarinfo.uname = tarinfo.gname = 'hifi'
return tarinfo
print(" Writing archive to {}".format(output_filepath))
with tarfile.open(output_filepath, 'w:gz') as f:
for path in PATHS_TO_INCLUDE_IN_ARCHIVE:
full_path = os.path.join(input_dir, path)
print(" Adding to archive: {}".format(full_path))
f.add(full_path, path, filter=tarfilter)
print(" Complete")
def handle_generate_build(args):
source_dir = args.input_directory
output_dir = args.output_directory
print("Generating build in `{}` from `{}`".format(output_dir, source_dir))
    generate_build(source_dir, output_dir, args.bake, args.skip_baking_skyboxes,
                   version=35)
def handle_generate_package(args):
archive_path = os.path.join(os.getcwd(), args.output_filename)
generate_package(args.input_directory, archive_path)
def handle_pull_entities(args):
input_dir = args.input_directory
output_dir = args.output_directory
input_models_filepath = os.path.join(
input_dir,
'entities',
'models.json')
output_models_filepath = os.path.join(
output_dir,
'assignment-client',
'entities',
'models.json.gz')
if os.path.exists(output_models_filepath):
print("Copying {} to {}".format(output_models_filepath, input_models_filepath))
with open(input_models_filepath, 'wb') as orig_file, \
gzip.open(output_models_filepath, 'rb') as gz_file:
shutil.copyfileobj(gz_file, orig_file)
else:
print("Error: A models file could not be found at {}".format(output_models_filepath))
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="High Fidelity Content Set generator and packager.")
DEFAULT_OUTPUT_DIRECTORY = 'build'
parser.add_argument('--verbose', action='store_true')
subparsers = parser.add_subparsers()
parser_gen_build = subparsers.add_parser('build',
help='Build input directory into output directory')
parser_gen_build.set_defaults(func=handle_generate_build)
parser_gen_build.add_argument('-i', '--input_directory', default='src',
help='Directory to pull data from')
parser_gen_build.add_argument('-o', '--output_directory',
default=DEFAULT_OUTPUT_DIRECTORY)
parser_gen_build.add_argument('--bake', action='store_true',
help='Bake models and textures')
parser_gen_build.add_argument('--skip-baking-skyboxes', action='store_true',
help='If baking, do not bake skybox textures')
parser_pull_entities = subparsers.add_parser('pull',
help='Pull the models.json.gz file from an output directory back into '\
+ 'an input directory')
parser_pull_entities.set_defaults(func=handle_pull_entities)
parser_pull_entities.add_argument('-i', '--input_directory', default='src',
help='Directory to pull data from')
parser_pull_entities.add_argument('-o', '--output_directory',
default=DEFAULT_OUTPUT_DIRECTORY)
    parser_package = subparsers.add_parser('package',
        help='Generate a release from a build generated using the `build` command')
parser_package.set_defaults(func=handle_generate_package)
parser_package.add_argument('input_directory')
parser_package.add_argument('output_filename')
args = parser.parse_args(sys.argv[1:])
verbose_enabled = args.verbose
if 'func' in args:
args.func(args)
else:
parser.print_help()