Commit

Debug and move function to file size
kshitijrajsharma committed Jan 25, 2024
1 parent 36f6365 commit dcb6e34
Showing 1 changed file with 79 additions and 133 deletions.
212 changes: 79 additions & 133 deletions tasks/task_runners.py
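
The diff below replaces the file-size bookkeeping that was previously repeated after every fetch call with a single write_file_size helper, and updates the log messages from "Galaxy" to "Raw Data API". A minimal, self-contained sketch of the extracted helper and how an export branch might call it, assuming download_dir is the run's download directory and each response item carries the download_url and zip_file_size_bytes keys shown in the diff:

import configparser
from os.path import join

download_dir = "/tmp/exports"  # assumption: the run's configured download directory

def write_file_size(response):
    # Nothing to record if the fetch returned no items.
    if not response:
        return
    for item in response:
        if not item:
            continue
        # Write <archive-name>_size.ini next to the download, holding the zip size in bytes.
        config = configparser.ConfigParser()
        config["FileInfo"] = {"FileSize": str(item["zip_file_size_bytes"])}
        size_path = join(download_dir, f"{item['download_url'].split('/')[-1]}_size.ini")
        with open(size_path, "w") as configfile:
            config.write(configfile)

# Illustrative call with a fabricated response item:
write_file_size(
    [{"download_url": "https://example.com/exports/demo_geojson.zip", "zip_file_size_bytes": 1048576}]
)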
@@ -208,6 +208,19 @@ def stop_task(name):
task.finished_at = timezone.now()
task.save()

def write_file_size(response):
    if response:
        LOG.debug(response)
        for item in response:
            if item:
                config = configparser.ConfigParser()
                config["FileInfo"] = {"FileSize": str(item["zip_file_size_bytes"])}
                size_path = join(
                    download_dir, f"{item['download_url'].split('/')[-1]}_size.ini"
                )
                with open(size_path, "w") as configfile:
                    config.write(configfile)

def finish_task(name, created_files=None, response_back=None, planet_file=False):
LOG.debug("Task Finish: {0} for run: {1}".format(name, run_uid))
task = ExportTask.objects.get(run__uid=run_uid, name=name)
@@ -437,18 +450,14 @@ def add_metadata(z, theme):

if geojson:
try:
LOG.debug("Galaxy fetch started geojson for run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch started geojson for run: {0}".format(run_uid)
)
response_back = geojson.fetch("geojson", is_hdx_export=True)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for geojson run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug(
"Raw Data API fetch ended for geojson run: {0}".format(run_uid)
)
finish_task("geojson", response_back=response_back)
all_zips += response_back
except Exception as ex:
@@ -457,7 +466,7 @@ def add_metadata(z, theme):

if csv:
try:
LOG.debug("Galaxy fetch started for csv run: {0}".format(run_uid))
LOG.debug("Raw Data API fetch started for csv run: {0}".format(run_uid))
response_back = csv.fetch("csv", is_hdx_export=True)
for r in response_back:
config = configparser.ConfigParser()
@@ -468,7 +477,7 @@ def add_metadata(z, theme):
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for csv run: {0}".format(run_uid))
LOG.debug("Raw Data API fetch ended for csv run: {0}".format(run_uid))
finish_task("csv", response_back=response_back)
all_zips += response_back

@@ -480,20 +489,16 @@ def add_metadata(z, theme):
try:
if settings.USE_RAW_DATA_API_FOR_HDX:
LOG.debug(
"Galaxy fetch started for geopackage run: {0}".format(run_uid)
"Raw Data API fetch started for geopackage run: {0}".format(
run_uid
)
)
response_back = geopackage.fetch("gpkg", is_hdx_export=True)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

write_file_size(response_back)
LOG.debug(
"Galaxy fetch ended for geopackage run: {0}".format(run_uid)
"Raw Data API fetch ended for geopackage run: {0}".format(
run_uid
)
)
finish_task("geopackage", response_back=response_back)
all_zips += response_back
@@ -532,19 +537,15 @@ def add_metadata(z, theme):
if shp:
try:
if settings.USE_RAW_DATA_API_FOR_HDX:
LOG.debug("Galaxy fetch started for shp run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch started for shp run: {0}".format(run_uid)
)

response_back = shp.fetch("shp", is_hdx_export=True)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for shp run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug(
"Raw Data API fetch ended for shp run: {0}".format(run_uid)
)
finish_task("shp", response_back=response_back)
all_zips += response_back
else:
@@ -582,18 +583,14 @@ def add_metadata(z, theme):
if kml:
try:
if settings.USE_RAW_DATA_API_FOR_HDX:
LOG.debug("Galaxy fetch started for kml run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch started for kml run: {0}".format(run_uid)
)
response_back = kml.fetch("kml", is_hdx_export=True)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for kml run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug(
"Raw Data API fetch ended for kml run: {0}".format(run_uid)
)
finish_task("kml", response_back=response_back)
all_zips += response_back

@@ -789,95 +786,68 @@ def add_metadata(z, theme):

if geojson:
try:
LOG.debug("Galaxy fetch started for geojson run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch started for geojson run: {0}".format(run_uid)
)
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
response_back = geojson.fetch(
"geojson", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)
write_file_size(response_back)

LOG.debug("Galaxy fetch ended for geojson run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch ended for geojson run: {0}".format(run_uid)
)
finish_task("geojson", response_back=response_back)
except Exception as ex:
stop_task("geojson")
raise ex

if fgb:
try:
LOG.debug("Galaxy fetch started for fgb run: {0}".format(run_uid))
LOG.debug("Raw Data API fetch started for fgb run: {0}".format(run_uid))
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
response_back = fgb.fetch(
"fgb", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for fgb run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug("Raw Data API fetch ended for fgb run: {0}".format(run_uid))
finish_task("fgb", response_back=response_back)
except Exception as ex:
stop_task("fgb")
raise ex

if csv:
try:
LOG.debug("Galaxy fetch started for csv run: {0}".format(run_uid))
LOG.debug("Raw Data API fetch started for csv run: {0}".format(run_uid))
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
response_back = csv.fetch(
"csv", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for csv run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug("Raw Data API fetch ended for csv run: {0}".format(run_uid))
finish_task("csv", response_back=response_back)
except Exception as ex:
stop_task("csv")
raise ex

if sql:
try:
LOG.debug("Galaxy fetch started for sql run: {0}".format(run_uid))
LOG.debug("Raw Data API fetch started for sql run: {0}".format(run_uid))
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
response_back = sql.fetch(
"sql", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for sql run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug("Raw Data API fetch ended for sql run: {0}".format(run_uid))
finish_task("sql", response_back=response_back)
except Exception as ex:
stop_task("sql")
@@ -886,69 +856,49 @@ def add_metadata(z, theme):
if geopackage:
try:
LOG.debug(
"Galaxy fetch started for geopackage run: {0}".format(run_uid)
"Raw Data API fetch started for geopackage run: {0}".format(run_uid)
)
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
response_back = geopackage.fetch(
"gpkg", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for geopackage run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug(
"Raw Data API fetch ended for geopackage run: {0}".format(run_uid)
)
finish_task("geopackage", response_back=response_back)
except Exception as ex:
stop_task("geopackage")
raise ex

if shp:
try:
LOG.debug("Galaxy fetch started for shp run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch started for shp run: {0}".format(run_uid)
)
response_back = shp.fetch(
"shp", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for shp run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug("Raw Data API fetch ended for shp run: {0}".format(run_uid))
finish_task("shp", response_back=response_back)
except Exception as ex:
stop_task("shp")
raise ex

if kml:
try:
LOG.debug("Galaxy fetch started for kml run: {0}".format(run_uid))
LOG.debug("Raw Data API fetch started for kml run: {0}".format(run_uid))
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
response_back = kml.fetch(
"kml", all_feature_filter_json=all_feature_filter_json
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for kml run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug("Raw Data API fetch ended for kml run: {0}".format(run_uid))
finish_task("kml", response_back=response_back)

except Exception as ex:
@@ -965,7 +915,9 @@ def add_metadata(z, theme):
access_token=settings.RAW_DATA_ACCESS_TOKEN,
)
start_task("mbtiles")
LOG.debug("Galaxy fetch started for mbtiles run: {0}".format(run_uid))
LOG.debug(
"Raw Data API fetch started for mbtiles run: {0}".format(run_uid)
)
all_feature_filter_json = join(
os.getcwd(), "tasks/tests/fixtures/all_features_filters.json"
)
@@ -975,16 +927,10 @@ def add_metadata(z, theme):
min_zoom=job.mbtiles_minzoom,
max_zoom=job.mbtiles_maxzoom,
)
for r in response_back:
config = configparser.ConfigParser()
config["FileInfo"] = {"FileSize": str(r["zip_file_size_bytes"])}
size_path = join(
download_dir, f"{r['download_url'].split('/')[-1]}_size.ini"
)
with open(size_path, "w") as configfile:
config.write(configfile)

LOG.debug("Galaxy fetch ended for mbtiles run: {0}".format(run_uid))
write_file_size(response_back)
LOG.debug(
"Raw Data API fetch ended for mbtiles run: {0}".format(run_uid)
)
finish_task("mbtiles", response_back=response_back)

except Exception as ex:
