Move control of logging verbosity to CLI
chrisjsewell committed Oct 24, 2020
1 parent b853e8c commit 19da2ad
Showing 2 changed files with 68 additions and 51 deletions.
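In brief, the CLI now maps a --verbosity choice onto the export logger level and the tqdm progress reporter, replacing the library-side silent/verbose flags. A minimal sketch of that pattern, pulled together from the hunks below (the helper name configure_verbosity is illustrative and not part of the commit):

from functools import partial

from tqdm import tqdm

from aiida.common.progress_reporter import set_progress_reporter
from aiida.tools.importexport import EXPORT_LOGGER
from aiida.tools.importexport.common.config import BAR_FORMAT


def configure_verbosity(verbosity: str) -> None:
    """Illustrative helper: route a CLI verbosity choice to the export logger and progress bar."""
    if verbosity in ['DEBUG', 'INFO']:
        # Only register a tqdm progress bar for the chattier levels;
        # keep the finished bar on screen only at DEBUG.
        set_progress_reporter(partial(tqdm, bar_format=BAR_FORMAT, leave=(verbosity == 'DEBUG')))
    EXPORT_LOGGER.setLevel(verbosity)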
16 changes: 12 additions & 4 deletions aiida/cmdline/commands/cmd_export.py
@@ -67,7 +67,13 @@ def inspect(archive, version, data, meta_data):
@options.NODES()
@options.ARCHIVE_FORMAT()
@options.FORCE(help='overwrite output file if it already exists')
@options.VERBOSE(help='Do not remove progress bars after a process completes')
@click.option(
'-v',
'--verbosity',
default='INFO',
type=click.Choice(['DEBUG', 'INFO', 'WARNING', 'CRITICAL']),
help='Control the verbosity of console logging'
)
@options.graph_traversal_rules(GraphTraversalRules.EXPORT.value)
@click.option(
'--include-logs/--exclude-logs',
@@ -84,7 +90,7 @@ def inspect(archive, version, data, meta_data):
@decorators.with_dbenv()
def create(
output_file, codes, computers, groups, nodes, archive_format, force, input_calc_forward, input_work_forward,
create_backward, return_backward, call_calc_backward, call_work_backward, include_comments, include_logs, verbose
create_backward, return_backward, call_calc_backward, call_work_backward, include_comments, include_logs, verbosity
):
"""
Export subsets of the provenance graph to file for sharing.
@@ -97,7 +103,7 @@ def create(
"""
from tqdm import tqdm
from aiida.common.progress_reporter import set_progress_reporter
from aiida.tools.importexport import export, ExportFileFormat
from aiida.tools.importexport import export, ExportFileFormat, EXPORT_LOGGER
from aiida.tools.importexport.common.exceptions import ArchiveExportError
from aiida.tools.importexport.common.config import BAR_FORMAT

@@ -136,7 +142,9 @@ def create(
elif archive_format == 'tar.gz':
export_format = ExportFileFormat.TAR_GZIPPED

set_progress_reporter(partial(tqdm, bar_format=BAR_FORMAT, leave=verbose))
if verbosity in ['DEBUG', 'INFO']:
set_progress_reporter(partial(tqdm, bar_format=BAR_FORMAT, leave=(verbosity == 'DEBUG')))
EXPORT_LOGGER.setLevel(verbosity)

try:
export(entities, filename=output_file, file_format=export_format, **kwargs)
103 changes: 56 additions & 47 deletions aiida/tools/importexport/dbexport/__init__.py
@@ -397,7 +397,7 @@ def export(
filename: Optional[str] = None,
file_format: str = ExportFileFormat.ZIP,
overwrite: bool = False,
silent: bool = False,
silent: Optional[bool] = None,
use_compression: bool = True,
include_comments: bool = True,
include_logs: bool = True,
@@ -408,6 +408,21 @@ def export(
) -> ExportReport:
"""Export AiiDA data to an archive file.
Note, the logging level and progress reporter should be set externally, for example::
from functools import partial
from tqdm import tqdm
from aiida.common.progress_reporter import set_progress_reporter
from aiida.tools.importexport.common.config import BAR_FORMAT
EXPORT_LOGGER.setLevel('DEBUG')
set_progress_reporter(partial(tqdm, bar_format=BAR_FORMAT, leave=True))
export(...)
.. deprecated:: 1.5.0
Support for the parameter `silent` will be removed in `v2.0.0`.
Please set the logger level and progress bar implementation independently.
.. deprecated:: 1.2.1
Support for the parameters `what` and `outfile` will be removed in `v2.0.0`.
Please use `entities` and `filename` instead, respectively.
@@ -425,8 +440,6 @@ def export(
:py:class:`~aiida.tools.importexport.common.exceptions.ArchiveExportError`
if the output file already exists.
:param silent: suppress console prints and progress bar.
:param use_compression: Whether or not to compress the archive file
(only valid for the zip file format).
@@ -485,6 +498,11 @@
},
),
)
if silent is not None:
warnings.warn(
'silent keyword is deprecated and will be removed in AiiDA v2.0.0, set the logger level explicitly instead',
AiidaDeprecationWarning
) # pylint: disable=no-member

type_check(
entities,
@@ -506,55 +524,46 @@

writer = get_writer(file_format)(filename=filename, use_compression=use_compression, **(writer_init or {}))

if silent:
logging.disable(logging.CRITICAL)

try:
summary(
file_format=writer.file_format_verbose,
export_version=writer.export_version,
outfile=filename,
include_comments=include_comments,
include_logs=include_logs,
traversal_rules=full_traversal_rules
)

report_data: Dict[str, Any] = {'time_write_start': None, 'time_write_stop': None, 'writer_data': None}
summary(
file_format=writer.file_format_verbose,
export_version=writer.export_version,
outfile=filename,
include_comments=include_comments,
include_logs=include_logs,
traversal_rules=full_traversal_rules
)

report_data['time_collect_start'] = time.time()
export_data = _collect_archive_data(
entities=entities,
allowed_licenses=allowed_licenses,
forbidden_licenses=forbidden_licenses,
include_comments=include_comments,
include_logs=include_logs,
**traversal_rules
)
report_data['time_collect_stop'] = time.time()
report_data: Dict[str, Any] = {'time_write_start': None, 'time_write_stop': None, 'writer_data': None}

extract_time = report_data['time_collect_stop'] - report_data['time_collect_start']
EXPORT_LOGGER.debug(f'Data extracted in {extract_time:6.2g} s.')
report_data['time_collect_start'] = time.time()
export_data = _collect_archive_data(
entities=entities,
allowed_licenses=allowed_licenses,
forbidden_licenses=forbidden_licenses,
include_comments=include_comments,
include_logs=include_logs,
**traversal_rules
)
report_data['time_collect_stop'] = time.time()

if export_data is not None:
try:
report_data['time_write_start'] = time.time()
report_data['writer_data'] = writer.write(export_data=export_data) # type: ignore
report_data['time_write_stop'] = time.time()
except (exceptions.ArchiveExportError, LicensingException) as exc:
if os.path.exists(filename):
os.remove(filename)
raise exc
extract_time = report_data['time_collect_stop'] - report_data['time_collect_start']
EXPORT_LOGGER.debug(f'Data extracted in {extract_time:6.2g} s.')

write_time = report_data['time_write_stop'] - report_data['time_write_start']
EXPORT_LOGGER.debug(f'Data written in {write_time:6.2g} s.')
if export_data is not None:
try:
report_data['time_write_start'] = time.time()
report_data['writer_data'] = writer.write(export_data=export_data) # type: ignore
report_data['time_write_stop'] = time.time()
except (exceptions.ArchiveExportError, LicensingException) as exc:
if os.path.exists(filename):
os.remove(filename)
raise exc

else:
EXPORT_LOGGER.debug('No data to write.')
write_time = report_data['time_write_stop'] - report_data['time_write_start']
EXPORT_LOGGER.debug(f'Data written in {write_time:6.2g} s.')

finally:
# Reset logging level
if silent:
logging.disable(logging.NOTSET)
else:
EXPORT_LOGGER.debug('No data to write.')

return ExportReport(**report_data)

@@ -842,7 +851,7 @@ def _collect_entity_queries(
given_comment_entry_ids = set()
all_fields_info, _ = get_all_fields_info()

total = 1 + ((1 if include_logs else 0) + (1 if include_logs else 0) if node_ids_to_be_exported else 0)
total = 1 + (((1 if include_logs else 0) + (1 if include_comments else 0)) if node_ids_to_be_exported else 0)
with get_progress_reporter()(desc='Initializing export of all entities', total=total) as progress:

# Universal "entities" attributed to all types of nodes
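As a side note, the final hunk also corrects the progress-bar total, which previously counted include_logs twice and ignored include_comments. A tiny standalone check, with hypothetical flag values, makes the difference visible (both expressions are copied from the old and new lines above):

# Hypothetical inputs, only to illustrate the corrected expression.
node_ids_to_be_exported = {1, 2, 3}
include_logs = False
include_comments = True

old_total = 1 + ((1 if include_logs else 0) + (1 if include_logs else 0) if node_ids_to_be_exported else 0)
new_total = 1 + (((1 if include_logs else 0) + (1 if include_comments else 0)) if node_ids_to_be_exported else 0)

print(old_total, new_total)  # 1 2 -- the old expression never accounted for comments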
