"""This module handles processing of files referenced in the bitstream tags of an XML file."""
import hashlib
import json
import pickle
import shutil
import subprocess
import sys
import uuid
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from pathlib import Path, PurePath
from typing import Any, Optional, Union
import docker
import requests
from docker.models.containers import Container
from lxml import etree
from dsp_tools.models.exceptions import UserError
from dsp_tools.utils.logging import get_logger
from dsp_tools.utils.shared import http_call_with_retry
logger = get_logger(__name__, filesize_mb=100, backupcount=36)
sipi_container: Optional[Container] = None
export_moving_image_frames_script: Optional[Path] = None
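
# This module is usually invoked through the dsp-tools CLI rather than imported directly.
# A sketch of such an invocation (the exact flag names are an assumption here;
# see the dsp-tools documentation for the authoritative reference):
#
#   dsp-tools process-files --input-dir=multimedia --output-dir=tmp/out --nthreads=8 --batchsize=3000 data.xml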

def _get_export_moving_image_frames_script() -> None:
"""
Downloads the shell script that is used to extract the preview image from a video.
"""
user_folder = Path.home() / Path(".dsp-tools/fast-xmlupload")
user_folder.mkdir(parents=True, exist_ok=True)
global export_moving_image_frames_script
export_moving_image_frames_script = user_folder / "export-moving-image-frames.sh"
script_text_response = http_call_with_retry(
action=requests.get,
url="https://github.com/dasch-swiss/dsp-api/raw/main/sipi/scripts/export-moving-image-frames.sh",
)
script_text = script_text_response.text
with open(export_moving_image_frames_script, "w", encoding="utf-8") as f:
f.write(script_text)

def _determine_success_status_and_exit_code(
files_to_process: list[Path],
processed_files: list[tuple[Path, Optional[Path]]],
is_last_batch: bool,
) -> tuple[bool, int]:
"""
Based on the result of the file processing,
this function determines the success status and the exit code.
If some files of the current batch could not be processed,
the success status is false, and the exit code is 1.
If all files of the current batch were processed, the success status is true,
and the exit code is 0 if this is the last batch,
and 2 if there are more batches to process.
Args:
files_to_process: list of all paths that should have been processed (current batch)
processed_files: list of tuples of Paths. If the second Path is None, the file could not be processed.
is_last_batch: true if this is the last batch of files to process
Returns:
tuple (success status, exit_code)
"""
processed_paths = [x[1] for x in processed_files if x and x[1]]
if len(processed_paths) == len(files_to_process):
success = True
print(f"{datetime.now()}: All files ({len(files_to_process)}) of this batch were processed: Okay")
logger.info(f"All files ({len(files_to_process)}) of this batch were processed: Okay")
if is_last_batch:
exit_code = 0
print(f"{datetime.now()}: All multimedia files referenced in the XML are processed. No more batches.")
logger.info("All multimedia files referenced in the XML are processed. No more batches.")
else:
exit_code = 2
else:
success = False
ratio = f"{len(processed_paths)}/{len(files_to_process)}"
msg = f"Some files of this batch could not be processed: Only {ratio} were processed. The failed ones are:"
print(f"{datetime.now()}: ERROR: {msg}")
logger.error(msg)
for input_file, output_file in processed_files:
if not output_file:
print(f" - {input_file}")
logger.error(f" - {input_file}")
exit_code = 1
return success, exit_code
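
# Illustration of the three possible outcomes (hypothetical paths):
#   - all files processed, last batch:        (True, 0)
#   - all files processed, more batches left: (True, 2)
#   - processed_files contains e.g. (Path("b.tif"), None),
#     i.e. b.tif yielded no derivative:       (False, 1)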

def _process_files_in_parallel(
files_to_process: list[Path],
input_dir: Path,
output_dir: Path,
nthreads: Optional[int],
) -> tuple[list[tuple[Path, Optional[Path]]], list[Path]]:
"""
Creates a thread pool and executes the file processing in parallel.
If a Docker API error occurs, the SIPI container is restarted,
and the unprocessed files are returned,
so that this function can be called again with the unprocessed files.
Args:
files_to_process: a list of all paths to the files that should be processed
input_dir: the root directory of the input files
output_dir: the directory where the processed files should be written to
nthreads: number of threads to use for processing
Returns:
- a list of tuples with the original file path and the path to the processed file.
(if a file could not be processed, the second path is None)
- a list of all paths that could not be processed
(this list will only have content if a Docker API error led to a restart of the SIPI container)
"""
with ThreadPoolExecutor(max_workers=nthreads) as pool:
processing_jobs = [pool.submit(_process_file, f, input_dir, output_dir) for f in files_to_process]
orig_filepath_2_uuid: list[tuple[Path, Optional[Path]]] = []
for processed in as_completed(processing_jobs):
try:
orig_filepath_2_uuid.append(processed.result())
except docker.errors.APIError:
print(f"{datetime.now()}: ERROR: A Docker exception occurred. Cancel jobs and restart SIPI...")
logger.error("A Docker exception occurred. Cancel jobs and restart SIPI...", exc_info=True)
for job in processing_jobs:
job.cancel()
_stop_and_remove_sipi_container()
_start_sipi_container_and_mount_volumes(input_dir, output_dir)
processed_paths = [x[0] for x in orig_filepath_2_uuid]
unprocessed_paths = [x for x in files_to_process if x not in processed_paths]
return orig_filepath_2_uuid, unprocessed_paths
return orig_filepath_2_uuid, []

def _write_result_to_pkl_file(processed_files: list[tuple[Path, Optional[Path]]]) -> None:
"""
Writes the processing result to a pickle file in the working directory.
Args:
processed_files: the result of the file processing
Raises:
UserError: if the file could not be written
"""
filename = Path(f"processing_result_{datetime.now().strftime('%Y-%m-%d_%H.%M.%S.%f')}.pkl")
try:
with open(filename, "wb") as pkl_file:
pickle.dump(processed_files, pkl_file)
print(f"{datetime.now()}: The result was written to: {filename}")
except OSError:
err_msg = f"An error occurred while writing the result to the pickle file. Content of file: {processed_files}"
logger.error(err_msg, exc_info=True)
raise UserError(err_msg) from None
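
# The resulting pickle can be read back later to recover the mapping,
# e.g. (a minimal sketch; the file name is illustrative):
#   with open("processing_result_2023-01-01_00.00.00.000000.pkl", "rb") as pkl_file:
#       mapping: list[tuple[Path, Optional[Path]]] = pickle.load(pkl_file)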

def _check_input_params(
input_dir: str,
out_dir: str,
xml_file: str,
) -> tuple[Path, Path, Path]:
"""
Checks the input parameters provided by the user and transforms them into Path objects.
If the output directory doesn't exist, it is created.
Args:
input_dir: the root directory of the input files
out_dir: the output directory where the created files should be written to
xml_file: the XML file the paths are extracted from
Raises:
UserError: if one of the parameters is not valid
Returns:
A tuple with the Path objects of the input strings
"""
input_dir_path = Path(input_dir)
out_dir_path = Path(out_dir)
xml_file_path = Path(xml_file)
try:
out_dir_path.mkdir(parents=True, exist_ok=True)
except Exception: # pylint: disable=broad-exception-caught
raise UserError(f"Couldn't create directory {out_dir_path}") from None
if not input_dir_path.is_dir():
raise UserError("input_dir is not a directory")
if not xml_file_path.is_file():
raise UserError("xml_file is not a file")
return input_dir_path, out_dir_path, xml_file_path

def _get_file_paths_from_xml(xml_file: Path) -> list[Path]:
"""
Parse XML file to get all file paths.
    If the same file is referenced several times in the XML,
    it is only returned once.
    If a referenced file doesn't exist in the file system,
    an error is logged and the file is skipped.
    Args:
        xml_file: path to the XML file
    Returns:
        list of all paths in the <bitstream> tags
"""
tree: etree._ElementTree[etree._Element] = etree.parse(xml_file)
bitstream_paths: set[Path] = set()
for x in tree.iter():
if x.text and etree.QName(x).localname.endswith("bitstream"):
path = Path(x.text)
if path.is_file():
bitstream_paths.add(path)
else:
err_msg = f"'{path}' is referenced in the XML file, but it doesn't exist. Skipping..."
print(f"{datetime.now()}: ERROR: {err_msg}")
logger.error(err_msg)
return list(bitstream_paths)
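
# The paths are collected from elements like (illustrative):
#   <bitstream>multimedia/nested/subfolder/test.tif</bitstream>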

def _start_sipi_container_and_mount_volumes(
input_dir: Path,
output_dir: Path,
) -> None:
"""
    Creates and starts a Sipi container from the provided image.
    Checks first whether it already exists,
    and if so, whether it is already running.
Args:
input_dir: the root directory of the images that should be processed, is mounted into the container
output_dir: the output directory where the processed files should be written to, is mounted into the container
"""
# prepare parameters for container creation
container_name = "sipi"
volumes = [
f"{input_dir.absolute()}:/sipi/processing-input",
f"{output_dir.absolute()}:/sipi/processing-output",
]
entrypoint = ["tail", "-f", "/dev/null"]
docker_client = docker.from_env()
# get container. if it doesn't exist: create and run it
try:
container: Container = docker_client.containers.get(container_name)
except docker.errors.NotFound:
docker_client.containers.run(
image="daschswiss/sipi:3.8.1",
name=container_name,
volumes=volumes,
entrypoint=entrypoint,
detach=True,
)
container = docker_client.containers.get(container_name)
print(f"{datetime.now()}: Created and started Sipi container '{container_name}'.")
logger.info(f"Created and started Sipi container '{container_name}'.")
# the container exists. if it is not running, restart it
container_running = bool(container.attrs and container.attrs.get("State", {}).get("Running"))
if not container_running:
container.restart()
# make container globally available
global sipi_container
sipi_container = docker_client.containers.get(container_name)
print(f"{datetime.now()}: Sipi container is running.")
logger.info("Sipi container is running.")

def _stop_and_remove_sipi_container() -> None:
"""
Stop and remove the SIPI container.
"""
if not sipi_container:
return
try:
sipi_container.stop()
sipi_container.remove()
logger.info("Stopped and removed Sipi container.")
except docker.errors.APIError:
pass

def _compute_sha256(file: Path) -> Optional[str]:
"""
Calculates SHA256 checksum of a file
Args:
file: path of the file
Returns:
the calculated checksum
"""
if not file.is_file():
print(f"{datetime.now()}: ERROR: Couldn't calculate checksum for {file}, because such a file doesn't exist.")
logger.error(f"Couldn't calculate checksum for {file}, because such a file doesn't exist.")
return None
hash_sha256 = hashlib.sha256()
with open(file, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_sha256.update(chunk)
return hash_sha256.hexdigest()
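
# On Python 3.11+, the chunked loop above could be replaced by the equivalent
# one-liner hashlib.file_digest(f, "sha256").hexdigest().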

def _convert_file_with_sipi(
in_file_local_path: Path,
input_dir: Path,
out_file_local_path: Path,
output_dir: Path,
) -> bool:
"""
Converts a file by calling a locally running Sipi container.
Args:
in_file_local_path: path to input file
input_dir: the directory where the input files are located
out_file_local_path: path to output file,
e.g. tmp/in/te/internal_file_name.jp2 if the internal filename is "internal_file_name"
output_dir: the directory where the processed files are written to,
e.g. tmp/in/te/ if the internal filename is "internal_file_name"
Returns:
success status
"""
original_output_dir = output_dir.parent.parent
in_file_sipi_path = Path("processing-input") / in_file_local_path.relative_to(input_dir)
out_file_sipi_path = Path("processing-output") / out_file_local_path.relative_to(original_output_dir)
if not sipi_container:
print(f"{datetime.now()}: ERROR: Cannot convert file {in_file_local_path} with Sipi: Sipi container not found.")
logger.error(f"Cannot convert file {in_file_local_path} with Sipi: Sipi container not found.")
return False
    result = sipi_container.exec_run(f"/sipi/sipi '{in_file_sipi_path}' '{out_file_sipi_path}'")
if result.exit_code != 0:
print(f"{datetime.now()}: ERROR: Sipi conversion of {in_file_local_path} failed: {result}")
logger.error(f"Sipi conversion of {in_file_local_path} failed: {result}")
return False
return True
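
# Example of the local-to-container path translation (illustrative):
#   in_file_local_path  = multimedia/nested/test.tif  (input_dir = multimedia)
#     -> in_file_sipi_path  = processing-input/nested/test.tif
#   out_file_local_path = tmp/0b/22/<uuid>.jp2        (original_output_dir = tmp)
#     -> out_file_sipi_path = processing-output/0b/22/<uuid>.jp2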

def _create_orig_file(
in_file: Path,
internal_file_name: str,
out_dir: Path,
) -> bool:
"""
Creates the .orig file expected by the API.
Args:
in_file: the input file from which the .orig should be created
internal_file_name: the internal filename which should be used for the .orig file
out_dir: the directory where the .orig file should be written to,
e.g. tmp/in/te/ if the internal filename is "internal_file_name"
Returns:
success status
"""
orig_ext = PurePath(in_file).suffix
orig_file_full_path = Path(out_dir, f"{internal_file_name}{orig_ext}.orig")
try:
shutil.copyfile(in_file, orig_file_full_path)
logger.info(f"Created .orig file {orig_file_full_path}")
return True
except Exception: # pylint: disable=broad-exception-caught
print(f"{datetime.now()}: ERROR: Couldn't create .orig file {orig_file_full_path}")
logger.error(f"Couldn't create .orig file {orig_file_full_path}", exc_info=True)
return False

def _get_video_metadata_with_ffprobe(file_path: Path) -> Optional[dict[str, Any]]:
"""
Gets video metadata by running ffprobe
Args:
        file_path: path to the file from which the metadata should be extracted
Returns:
        the metadata of the first video stream (parsed from ffprobe's JSON output), or None on failure
"""
command_array = [
"ffprobe",
"-v",
"error",
"-select_streams",
"v:0",
"-show_entries",
"stream=width,height,bit_rate,duration,nb_frames,r_frame_rate",
"-print_format",
"json",
"-i",
str(file_path),
]
try:
result = subprocess.run(
command_array,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
check=False,
)
except Exception: # pylint: disable=broad-exception-caught
print(f"{datetime.now()}: ERROR: Exception occurred while running ffprobe for {file_path}")
logger.error(f"Exception occurred while running ffprobe for {file_path}", exc_info=True)
return None
    if result.returncode == 0:
        logger.info(f"Successfully ran ffprobe for {file_path}")
        streams = json.loads(result.stdout).get("streams", [])
        if not streams:
            logger.error(f"ffprobe found no video stream in {file_path}")
            return None
        video_metadata: dict[str, Any] = streams[0]  # get first stream
        return video_metadata
else:
print(f"{datetime.now()}: ERROR: Couldn't run ffprobe for {file_path}")
logger.error(f"Couldn't run ffprobe for {file_path}")
return None
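
# For the query above, ffprobe prints JSON of roughly this shape
# (values are illustrative):
#   {"streams": [{"width": 1920, "height": 1080, "r_frame_rate": "25/1",
#                 "bit_rate": "1205959", "duration": "12.480000", "nb_frames": "312"}]}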

def _create_sidecar_file(
orig_file: Path,
converted_file: Path,
file_category: str,
) -> bool:
"""
Creates the sidecar file for a given file. Depending on the file category, it adds category specific metadata.
Args:
orig_file: path to the original file
converted_file: path to the converted file, e.g. out_dir/in/te/internal_filename.ext
file_category: the file category, either IMAGE, VIDEO or OTHER
Returns:
true if successful, false otherwise
"""
if file_category not in ("IMAGE", "VIDEO", "OTHER"):
print(f"{datetime.now()}: ERROR: Unexpected file category {file_category}")
logger.error(f"Unexpected file category {file_category}")
return False
checksum_original = _compute_sha256(orig_file)
if not checksum_original:
return False
checksum_derivative = _compute_sha256(converted_file)
if not checksum_derivative:
return False
original_filename = PurePath(orig_file).name
internal_filename = PurePath(converted_file).name
random_part_of_filename = PurePath(converted_file).stem
original_extension = PurePath(orig_file).suffix
original_internal_filename = f"{random_part_of_filename}{original_extension}.orig"
    sidecar_dict: dict[str, Union[str, int, float]] = {
"originalFilename": original_filename,
"checksumOriginal": checksum_original,
"checksumDerivative": checksum_derivative,
"internalFilename": internal_filename,
"originalInternalFilename": original_internal_filename,
}
# add video specific metadata to sidecar file
if file_category == "VIDEO":
video_metadata = _get_video_metadata_with_ffprobe(converted_file)
if not video_metadata:
return False
sidecar_dict["width"] = video_metadata["width"]
sidecar_dict["height"] = video_metadata["height"]
sidecar_dict["duration"] = float(video_metadata["duration"])
nb_frames = int(video_metadata["nb_frames"])
duration = float(video_metadata["duration"])
fps = nb_frames / duration
sidecar_dict["fps"] = fps
sidecar_file_basename = f"{random_part_of_filename}.info"
sidecar_file = PurePath(converted_file.parent, sidecar_file_basename)
with open(sidecar_file, "w", encoding="utf-8") as f:
sidecar_json = json.dumps(sidecar_dict, indent=4)
f.write(sidecar_json)
logger.info(f"Created sidecar file {sidecar_file}")
return True
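
# A sidecar file for a video ends up looking roughly like this
# (values are illustrative):
#   {
#       "originalFilename": "test.mp4",
#       "checksumOriginal": "<sha256 of the original>",
#       "checksumDerivative": "<sha256 of the derivative>",
#       "internalFilename": "0b22570d-515f-4c3d-a6af-e42b458e7b2b.mp4",
#       "originalInternalFilename": "0b22570d-515f-4c3d-a6af-e42b458e7b2b.mp4.orig",
#       "width": 1920,
#       "height": 1080,
#       "duration": 12.48,
#       "fps": 25.0
#   }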

def _get_file_category_from_extension(file: Path) -> Optional[str]:
"""
Gets the file category of a file according to its extension.
Args:
        file: the file whose category should be determined
Returns:
the file category, either IMAGE, VIDEO or OTHER (or None)
"""
extensions: dict[str, list[str]] = dict()
extensions["image"] = [".jpg", ".jpeg", ".tif", ".tiff", ".jp2", ".png"]
extensions["video"] = [".mp4"]
extensions["archive"] = [".7z", ".gz", ".gzip", ".tar", ".tar.gz", ".tgz", ".z", ".zip"]
extensions["text"] = [".csv", ".txt", ".xml", ".xsd", ".xsl"]
extensions["document"] = [".doc", ".docx", ".pdf", ".ppt", ".pptx", ".xls", ".xlsx"]
extensions["audio"] = [".mp3", ".wav"]
if file.suffix.lower() in extensions["video"]:
category = "VIDEO"
elif file.suffix.lower() in extensions["image"]:
category = "IMAGE"
elif file.suffix.lower() in (
extensions["archive"] + extensions["text"] + extensions["document"] + extensions["audio"]
):
category = "OTHER"
else:
category = None
print(f"{datetime.now()}: ERROR: Couldn't get category for {file}")
logger.error(f"Couldn't get category for {file}")
return category
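
# Examples (illustrative): "photo.TIF" -> "IMAGE", "clip.mp4" -> "VIDEO",
# "paper.pdf" -> "OTHER", "scan.bmp" -> None (unknown extension).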

def _extract_preview_from_video(file: Path) -> bool:
"""
Extracts a preview image of a video and writes it to disk.
Args:
        file: the video file from which the preview is extracted
Returns:
true if successful, false otherwise
"""
result = subprocess.call(["/bin/bash", f"{export_moving_image_frames_script}", "-i", f"{file}"])
    return result == 0

def _process_file(
in_file: Path,
input_dir: Path,
output_dir: Path,
) -> tuple[Path, Optional[Path]]:
"""
Creates all expected derivative files and writes the output into the provided output directory.
    In case of image: .orig file, JP2 derivative, sidecar file
    In case of video: .orig file, identical derivative file, sidecar file, folder with 1 preview image
    In case of other files: .orig file, identical derivative file, sidecar file
Args:
in_file: path to input file that should be processed
input_dir: root directory of the input files
output_dir: target location where the created files are written to.
If the directory doesn't exist, it is created
Returns:
tuple consisting of the original path and the internal filename.
If there was an error, the internal filename is None.
"""
# ensure that input file exists
if not in_file.is_file():
print(f"{datetime.now()}: ERROR: '{in_file}' does not exist. Skipping...")
logger.error(f"'{in_file}' does not exist. Skipping...")
return in_file, None
# get random UUID for internal file handling, and create directory structure
internal_filename = str(uuid.uuid4())
out_dir_full = Path(output_dir, internal_filename[0:2], internal_filename[2:4])
out_dir_full.mkdir(parents=True, exist_ok=True)
# create .orig file
if not _create_orig_file(
in_file=in_file,
internal_file_name=internal_filename,
out_dir=out_dir_full,
):
return in_file, None
# convert file (create derivative) and create sidecar file based on category (image, video or other)
file_category = _get_file_category_from_extension(in_file)
if not file_category:
return in_file, None
if file_category == "OTHER":
result = _process_other_file(
in_file=in_file,
internal_filename=internal_filename,
out_dir=out_dir_full,
)
elif file_category == "IMAGE":
result = _process_image_file(
in_file=in_file,
internal_filename=internal_filename,
out_dir=out_dir_full,
input_dir=input_dir,
)
elif file_category == "VIDEO":
result = _process_video_file(
in_file=in_file,
internal_filename=internal_filename,
out_dir=out_dir_full,
)
else:
print(f"{datetime.now()}: ERROR: Unexpected file category for {in_file}: {file_category}")
logger.error(f"Unexpected file category for {in_file}: {file_category}")
return in_file, None
return result

def _process_other_file(
in_file: Path,
internal_filename: str,
out_dir: Path,
) -> tuple[Path, Optional[Path]]:
"""
Processes a file of file category OTHER.
    No real derivative is created:
    the original file is copied,
    and a sidecar file is created.
Args:
in_file: the input file that should be processed
internal_filename: the internal filename that should be used for the output file
out_dir: the output directory where the processed file should be written to,
e.g. tmp/in/te/ if the internal filename is "internal_file_name"
Returns:
a tuple of the original file path and the path to the processed file.
If there was an error, the internal filename is None.
"""
converted_file_full_path = out_dir / Path(internal_filename).with_suffix(in_file.suffix)
try:
shutil.copyfile(in_file, converted_file_full_path)
except Exception: # pylint: disable=broad-exception-caught
print(f"{datetime.now()}: ERROR: Couldn't process file of category OTHER: {in_file}")
logger.error(f"Couldn't process file of category OTHER: {in_file}", exc_info=True)
return in_file, None
if not _create_sidecar_file(
orig_file=in_file,
converted_file=converted_file_full_path,
file_category="OTHER",
):
print(f"{datetime.now()}: ERROR: Couldn't create sidecar file for: {in_file}")
logger.error(f"Couldn't create sidecar file for: {in_file}")
return in_file, None
return in_file, converted_file_full_path

def _process_image_file(
in_file: Path,
internal_filename: str,
out_dir: Path,
input_dir: Path,
) -> tuple[Path, Optional[Path]]:
"""
Processes a file of file category IMAGE
Args:
in_file: the input file that should be processed
internal_filename: the internal filename that should be used for the output file
out_dir: the output directory where the processed file should be written to,
e.g. tmp/in/te/ if the internal filename is "internal_file_name"
input_dir: root directory of the input files
Returns:
a tuple of the original file path and the path to the processed file.
If there was an error, the internal filename is None.
"""
converted_file_full_path = out_dir / Path(internal_filename).with_suffix(".jp2")
sipi_result = _convert_file_with_sipi(
in_file_local_path=in_file,
input_dir=input_dir,
out_file_local_path=converted_file_full_path,
output_dir=out_dir,
)
if not sipi_result:
print(f"{datetime.now()}: ERROR: Couldn't process file of category IMAGE: {in_file}")
logger.error(f"Couldn't process file of category IMAGE: {in_file}")
return in_file, None
if not _create_sidecar_file(
orig_file=in_file,
converted_file=converted_file_full_path,
file_category="IMAGE",
):
print(f"{datetime.now()}: ERROR: Couldn't create sidecar file for: {in_file}")
logger.error(f"Couldn't create sidecar file for: {in_file}")
return in_file, None
return in_file, converted_file_full_path

def _process_video_file(
in_file: Path,
internal_filename: str,
out_dir: Path,
) -> tuple[Path, Optional[Path]]:
"""
Processes a file of file category VIDEO
Args:
in_file: the input file that should be processed
internal_filename: the internal filename that should be used for the output file
out_dir: the output directory where the processed file should be written to,
e.g. tmp/in/te/ if the internal filename is "internal_file_name"
Returns:
a tuple of the original file path and the path to the processed file.
If there was an error, the internal filename is None.
"""
converted_file_full_path = out_dir / Path(internal_filename).with_suffix(in_file.suffix)
    # create derivative file (identical to original file)
try:
shutil.copyfile(in_file, converted_file_full_path)
except Exception: # pylint: disable=broad-exception-caught
print(f"{datetime.now()}: ERROR: Couldn't create derivate file for video '{in_file}'")
logger.error(f"Couldn't create derivate file for video '{in_file}'", exc_info=True)
return in_file, None
# create preview image
preview_result = _extract_preview_from_video(converted_file_full_path)
if not preview_result:
print(f"{datetime.now()}: ERROR: Couldn't create preview image for video '{in_file}'")
logger.error(f"Couldn't create preview image for video '{in_file}'")
return in_file, None
# create sidecar file
if not _create_sidecar_file(
orig_file=in_file,
converted_file=converted_file_full_path,
file_category="VIDEO",
):
print(f"{datetime.now()}: ERROR: Couldn't create sidecar file for video '{in_file}'")
logger.error(f"Couldn't create sidecar file for video '{in_file}'")
return in_file, None
return in_file, converted_file_full_path

def _write_processed_and_unprocessed_files_to_txt_files(
all_files: list[Path],
processed_files: list[tuple[Path, Optional[Path]]],
) -> None:
"""
Determine the files that were processed until now
(taking into account a possibly existing file 'processed_files.txt')
and write them to 'processed_files.txt'.
Determine the files that were not processed until now,
and write them to 'unprocessed_files.txt'
(possibly overwriting an existing file).
Args:
all_files: list of all paths that should be processed
processed_files: list of tuples (orig path, processed path). 2nd path is None if a file could not be processed.
"""
processed_original_paths = [x[0] for x in processed_files]
if Path("processed_files.txt").is_file():
with open("processed_files.txt", "r", encoding="utf-8") as f:
previously_processed_files = [Path(x) for x in f.read().splitlines()]
processed_original_paths = processed_original_paths + previously_processed_files
with open("processed_files.txt", "w", encoding="utf-8") as f:
f.write("\n".join([str(x) for x in processed_original_paths]))
msg = "Wrote 'processed_files.txt'"
unprocessed_original_paths = [x for x in all_files if x not in processed_original_paths]
if unprocessed_original_paths:
with open("unprocessed_files.txt", "w", encoding="utf-8") as f:
f.write("\n".join([str(x) for x in unprocessed_original_paths]))
msg += " and 'unprocessed_files.txt'"
elif Path("unprocessed_files.txt").is_file():
Path("unprocessed_files.txt").unlink()
msg += " and removed 'unprocessed_files.txt'"
print(f"{datetime.now()}: {msg}")
logger.info(msg)

def handle_interruption(
all_files: list[Path],
processed_files: list[tuple[Path, Optional[Path]]],
exception: BaseException,
) -> None:
"""
Handles an interruption of the processing.
Writes the pickle file,
and the txt files with the processed and unprocessed files,
and exits the program with exit code 1.
Args:
all_files: list of all paths that should be processed
processed_files: list of tuples (orig path, processed path). 2nd path is None if a file could not be processed.
exception: the exception that was raised
"""
msg = "ERROR while processing the files. Writing pickle file and human-readable txt files..."
print(f"{datetime.now()}: {msg}")
logger.error(msg, exc_info=exception)
_write_processed_and_unprocessed_files_to_txt_files(
all_files=all_files,
processed_files=processed_files,
)
_write_result_to_pkl_file(processed_files)
sys.exit(1)

def double_check_unprocessed_files(
all_files: list[Path],
processed_files: list[Path],
unprocessed_files: list[Path],
) -> None:
"""
Checks if the files in 'unprocessed_files.txt' are consistent with the files in 'processed_files.txt'.
Args:
all_files: list of all paths in the <bitstream> tags of the XML file
processed_files: the paths from 'processed_files.txt'
unprocessed_files: the paths from 'unprocessed_files.txt' (or all_files if there is no such file)
Raises:
UserError: if there is a file 'unprocessed_files.txt', but no file 'processed_files.txt'
UserError: if the files 'unprocessed_files.txt' and 'processed_files.txt' are inconsistent
"""
unprocessed_files_txt_exists = sorted(unprocessed_files) != sorted(all_files)
if unprocessed_files_txt_exists and not processed_files:
raise UserError("There is a file 'unprocessed_files.txt', but no file 'processed_files.txt'")
if processed_files and sorted(unprocessed_files) == sorted(all_files):
raise UserError("There is a file 'processed_files.txt', but no file 'unprocessed_files.txt'")
    if unprocessed_files_txt_exists:
        # there is an 'unprocessed_files.txt' file: check it for consistency
        unprocessed_files_from_processed_files = [x for x in all_files if x not in processed_files]
        if sorted(unprocessed_files_from_processed_files) != sorted(unprocessed_files):
            raise UserError("The files 'unprocessed_files.txt' and 'processed_files.txt' are inconsistent")

def _determine_next_batch(
all_files: list[Path],
batch_size: int,
) -> tuple[list[Path], bool]:
"""
    Looks in the current working directory for txt files that contain the already processed files and the still unprocessed files.
    If no such files are found, this run of `dsp-tools process-files` is the first one.
    In this case, the first batch_size files (or fewer, if there are fewer) are returned.
If such files are found, the already processed files are read from them,
and the next batch_size files are returned.
If all files have been processed, an empty list is returned.
Args:
all_files: list of all paths in the <bitstream> tags of the XML file
batch_size: number of files to process before Python exits
Raises:
UserError: if the files 'unprocessed_files.txt' and 'processed_files.txt' are inconsistent
Returns:
- the next batch of up to batch_size files that should be processed
(or empty list if all files have been processed)
- a boolean indicating if this is the last batch
"""
# read the already processed files
if Path("processed_files.txt").is_file():
with open("processed_files.txt", "r", encoding="utf-8") as f:
processed_files = [Path(x.strip()) for x in f.readlines()]
else:
processed_files = []
# read the still unprocessed files
if Path("unprocessed_files.txt").is_file():
with open("unprocessed_files.txt", "r", encoding="utf-8") as f:
unprocessed_files = [Path(x.strip()) for x in f.readlines()]
else:
unprocessed_files = all_files
# consistency check
double_check_unprocessed_files(
all_files=all_files,
processed_files=processed_files,
unprocessed_files=unprocessed_files,
)
# determine next batch
if len(unprocessed_files) <= batch_size:
next_batch = unprocessed_files
is_last_batch = True
else:
next_batch = unprocessed_files[:batch_size]
is_last_batch = False
# print and log
msg = (
f"Found {len(all_files)} bitstreams in the XML file, {len(unprocessed_files)} of them unprocessed. "
f"Process batch of {len(next_batch)} files..."
)
print(f"{datetime.now()}: {msg}")
logger.info(msg)
return next_batch, is_last_batch
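
# Example (illustrative): with 7 unprocessed files and batch_size=3,
# consecutive runs return batches of 3, 3 and 1 files;
# only the last call sets is_last_batch to True.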

def process_files(
input_dir: str,
output_dir: str,
xml_file: str,
nthreads: Optional[int],
batch_size: int,
) -> bool:
"""
Process the files referenced in the given XML file.
Writes the processed files
(derivative, .orig file, sidecar file, as well as the preview file for movies)
to the given output directory.
Additionally, writes a pickle file containing the mapping between the original files and the processed files,
e.g. Path('multimedia/nested/subfolder/test.tif'), Path('tmp/0b/22/0b22570d-515f-4c3d-a6af-e42b458e7b2b.jp2').
    Due to a resource leak, the Python interpreter must exit after a while.
For this reason, the processing is done in batches, each batch containing batch_size files.
After each batch, the Python interpreter exits, and the CLI command must be executed again.
It automatically resumes where it left off.
Args:
input_dir: path to the directory where the files should be read from
output_dir: path to the directory where the transformed / created files should be written to
xml_file: path to xml file containing the resources
nthreads: number of threads to use for processing
batch_size: number of files to process before Python exits
Returns:
True --> exit code 0: all multimedia files in the XML file were processed
False --> exit code 1: an error occurred while processing the current batch
Error raised --> exit code 1: an error occurred while processing the current batch
exit with code 2: Python interpreter exits after each batch
"""
# check the input parameters
input_dir_path, output_dir_path, xml_file_path = _check_input_params(
input_dir=input_dir,
out_dir=output_dir,
xml_file=xml_file,
)
# startup the SIPI container
_start_sipi_container_and_mount_volumes(
input_dir=input_dir_path,
output_dir=output_dir_path,
)
# get the files referenced in the XML file
all_files = _get_file_paths_from_xml(xml_file_path)
# find out if there was a previous processing attempt that should be continued
files_to_process, is_last_batch = _determine_next_batch(
all_files=all_files,
batch_size=batch_size,
)
# get shell script for processing video files
if any(path.suffix == ".mp4" for path in files_to_process):
_get_export_moving_image_frames_script()
# process the files in parallel
start_time = datetime.now()
print(f"{start_time}: Start local file processing...")
logger.info("Start local file processing...")
processed_files: list[tuple[Path, Optional[Path]]] = []
unprocessed_files = files_to_process
while unprocessed_files:
try:
result, unprocessed_files = _process_files_in_parallel(
files_to_process=unprocessed_files,
input_dir=input_dir_path,
output_dir=output_dir_path,
nthreads=nthreads,
)
processed_files.extend(result)
except BaseException as exc: # pylint: disable=broad-exception-caught
handle_interruption(
all_files=all_files,
processed_files=processed_files,
exception=exc,
)
end_time = datetime.now()
print(f"{end_time}: Processing files took: {end_time - start_time}")
logger.info(f"Processing files took: {end_time - start_time}")
# write results to files
_write_processed_and_unprocessed_files_to_txt_files(
all_files=all_files,
processed_files=processed_files,
)
_write_result_to_pkl_file(processed_files)
# check if all files were processed
success, exit_code = _determine_success_status_and_exit_code(
files_to_process=files_to_process,
processed_files=processed_files,
is_last_batch=is_last_batch,
)
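    # Presumed ending (a sketch inferred from the docstring above, not the
    # verified original): exit with code 2 between batches, otherwise return
    # the overall success flag.
    if exit_code == 2:
        sys.exit(exit_code)
    return success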