Clean up before major refactoring (#102)
* unused import removed

* also excluding bad

* title

* switched to new situation

* removed dali from partition list

* added Lanqing as assignee
FaroutYLq committed Jun 6, 2023
1 parent ee94684 commit f70530b
Showing 6 changed files with 16 additions and 13 deletions.
4 changes: 4 additions & 0 deletions .github/dependabot.yml
@@ -9,6 +9,8 @@ updates:
     directory: "/"
     schedule:
       interval: "monthly"
+    assignees:
+      - FaroutYLq
   # Maintain the requirements requirements folder
   - package-ecosystem: "pip"
     directory: "/extra_requirements"
@@ -19,3 +21,5 @@ updates:
     # to pip against the `develop` branch
     target-branch: "master"
     open-pull-requests-limit: 15
+    assignees:
+      - FaroutYLq
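
The new assignees entries tell Dependabot to assign its dependency-update pull requests to FaroutYLq; this is the "added Lanqing as assignee" item from the commit message.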
2 changes: 1 addition & 1 deletion reprox/core.py
@@ -96,7 +96,7 @@ def get_context(package=config['context']['package'],
     return st


-def parse_args(description='nton reprocessing on dali',
+def parse_args(description='nton reprocessing on midway',
                include_find_args=False,
                include_processing_args=False,
                include_workflow_args=False,
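
The new default only changes the help text shown by the reprocessing CLI. As a rough sketch (not reprox's actual implementation; the --targets option below is hypothetical), a wrapper like this around argparse would expose it:

import argparse

def parse_args(description='nton reprocessing on midway', include_find_args=False):
    # Hypothetical sketch of an argparse wrapper; reprox's real parse_args
    # registers many more options behind the include_* switches.
    parser = argparse.ArgumentParser(description=description)
    if include_find_args:
        parser.add_argument('--targets', nargs='*', default=['event_info'],
                            help='Data types to look for and process')
    return parser.parse_args()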
2 changes: 1 addition & 1 deletion reprox/find_runs.py
@@ -74,7 +74,7 @@ def determine_data_to_reprocess(
     :param _max_workers: Max workers for finding the stored data
     :return:
     """
-    runs = st.select_runs(exclude_tags=('messy', 'abandoned'))
+    runs = st.select_runs(exclude_tags=('messy', 'bad', 'abandoned'))
     core.log.info(f"Found {len(runs)} runs in total")

     if exclude_from_invalid_cmt:
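
For context, exclude_tags is straxen's standard run-selection filter; a minimal usage sketch of the widened exclusion list, assuming a plain straxen context (the real one is built by reprox via cutax):

import straxen

# Illustrative only: reprox builds its context through core.get_context() / cutax.
st = straxen.contexts.xenonnt_online()
runs = st.select_runs(exclude_tags=('messy', 'bad', 'abandoned'))
print(f"Found {len(runs)} runs in total")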
1 change: 0 additions & 1 deletion reprox/process_job.py
@@ -1,6 +1,5 @@
 import os
-import utilix
 from utilix import batchq
 from reprox import core


18 changes: 9 additions & 9 deletions reprox/reprocessing.ini
@@ -1,11 +1,11 @@
 ## Config file with defaults for reprocessing data with nton

 [context]
-base_folder = /dali/lgrandi/xenonnt/data_management_reprocessing/
-destination_folder = /dali/lgrandi/xenonnt/processed/
+base_folder = /project/lgrandi/xenonnt/data_management_reprocessing/
+destination_folder = /project/lgrandi/xenonnt/processed/
 runs_to_do = to_do_runs.csv
-context = xenonnt_v7
-cmt_version = global_v7
+context = xenonnt_offline
+cmt_version = False
 package = cutax
 # set to 26800 for testing otherwise 17900
 minimum_run_number = 17900
@@ -19,7 +19,7 @@ status_fig = ./status.png

 [processing]
 # allowed_partitions should be seperated by ","
-allowed_partitions = dali,xenon1t
+allowed_partitions = xenon1t, broadwl

 # Ignore lines with these characters in the logs for checking the status of a job.
 # Again seperated by ","
@@ -30,7 +30,7 @@ max_jobs = 100
 logging_level = INFO
 # 0 means submit everything
 submit_only = 0
-ram = 24000
-cpus_per_job = 4
-job_timeout_hours = 4
-container_tag=development
+ram = 28000
+cpus_per_job = 2
+job_timeout_hours = 8
+container_tag=2023.05.2
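
A minimal sketch of how these settings could be read back with the standard-library configparser (reprox's actual loading code may differ); the file's own comments note that allowed_partitions is comma-separated:

import configparser

config = configparser.ConfigParser()
config.read('reprox/reprocessing.ini')

# allowed_partitions is a comma-separated string, per the comment in the file.
partitions = [p.strip() for p in config['processing']['allowed_partitions'].split(',')]
ram_mb = int(config['processing']['ram'])
hours = float(config['processing']['job_timeout_hours'])
print(partitions, ram_mb, hours)  # e.g. ['xenon1t', 'broadwl'] 28000 8.0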
2 changes: 1 addition & 1 deletion reprox/submit_jobs.py
@@ -221,7 +221,7 @@ def can_submit_more_jobs(nmax=core.config['processing']['max_jobs']):
     return n_jobs_running() < int(nmax)


-def cycle_queue(queues=('xenon1t', 'dali', 'broadwl')
+def cycle_queue(queues=('xenon1t', 'broadwl')
                 ):
     res = {}
     cmd = f'squeue -u {os.environ["USER"]}'
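
cycle_queue shells out to squeue for the current user; the following is a hypothetical illustration of counting jobs per partition from squeue's default output columns, not reprox's actual parsing:

import os
import subprocess
from collections import Counter

def jobs_per_partition():
    # Hypothetical sketch: default squeue columns are
    # JOBID PARTITION NAME USER ST TIME NODES NODELIST(REASON)
    cmd = f'squeue -u {os.environ["USER"]}'
    lines = subprocess.check_output(cmd.split(), text=True).splitlines()[1:]
    return Counter(line.split()[1] for line in lines if line.strip())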
