Skip to content
Permalink
Browse files
fix: in google live science backend, save multiple logs per rule name…
… and overwrite existing logs (#1504)

* remove check for existing blob so logs overwrite

* add jobid to dest_path for logs

* add .txt to stderr, stdout, and output to view in browser

* testing adding .txt to log files

* add content_type arg to upload for txt extension

* change script url

* get rid of unneeded print
  • Loading branch information
cademirch committed Mar 23, 2022
1 parent 142a452 commit 9e92d63b9e68b29ccd680c34171994b0a2041efb
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 9 deletions.
@@ -708,13 +708,12 @@ def _generate_log_action(self, job):
"""generate an action to save the pipeline logs to storage."""
# script should be changed to this when added to version control!
# https://raw.githubusercontent.com/snakemake/snakemake/main/snakemake/executors/google_lifesciences_helper.py

# Save logs from /google/logs/output to source/logs in bucket
commands = [
"/bin/bash",
"-c",
"wget -O /gls.py https://raw.githubusercontent.com/snakemake/snakemake/main/snakemake/executors/google_lifesciences_helper.py && chmod +x /gls.py && source activate snakemake || true && python /gls.py save %s /google/logs %s/%s"
% (self.bucket.name, self.gs_logs, job.name),
"wget -O /gls.py https://raw.githubusercontent.com/snakemake/snakemake/main/snakemake/executors/google_lifesciences_helper.py && chmod +x /gls.py && source activate snakemake || true && python /gls.py save %s /google/logs %s/%s/jobid_%s"
% (self.bucket.name, self.gs_logs, job.name, job.jobid),
]

# Always run the action to generate log output
@@ -725,6 +724,7 @@ def _generate_log_action(self, job):
"labels": self._generate_pipeline_labels(job),
"alwaysRun": True,
}

return action

def _generate_job_action(self, job):
@@ -49,19 +49,15 @@ def save_files(bucket_name, source_path, destination_path):

# The relative path of the filename from the source path
relative_path = filename.replace(source_path, "", 1).strip("/")

# The path in storage includes relative path from destination_path
storage_path = os.path.join(destination_path, relative_path)
full_path = os.path.join(bucket_name, storage_path)
print(
"{filename} -> {full_path}".format(filename=filename, full_path=full_path)
)

# Get the blob
blob = bucket.blob(storage_path)
if not blob.exists():
print("Uploading %s to %s" % (filename, full_path))
blob.upload_from_filename(filename)
print("Uploading %s to %s" % (filename, full_path))
blob.upload_from_filename(filename, content_type=".txt")


def get_source_files(source_path):

0 comments on commit 9e92d63

Please sign in to comment.