Fix style for the latest release of Black #7438

Merged 4 commits on Aug 27, 2020
4 changes: 1 addition & 3 deletions airflow/datadog_checks/airflow/airflow.py
@@ -10,9 +10,7 @@

 class AirflowCheck(AgentCheck):
     def __init__(self, name, init_config, instances):
-        super(AirflowCheck, self).__init__(
-            name, init_config, instances,
-        )
+        super(AirflowCheck, self).__init__(name, init_config, instances)

         self._url = self.instance.get('url', '')
         self._tags = self.instance.get('tags', [])
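Most hunks in this PR repeat the pattern above: a trailing comma that older formatting left in place is removed so the call can collapse onto one line. The likely driver is the "magic trailing comma" behavior in the new Black release (presumably 20.8b0, given the merge date; the PR never names the version): an explicit trailing comma now consistently forces a bracket pair to stay exploded, so commas that were never meant as markers had to go. A minimal sketch of the two behaviors, with hypothetical names:

```python
def describe(name, version, channel):
    return f"{name} {version} ({channel})"


# No trailing comma: the call fits within the line-length limit,
# so Black collapses it onto a single line.
summary = describe("agent", "7.22.0", "stable")

# Magic trailing comma: Black treats the comma as a signal to keep
# the call exploded, one argument per line, even though it fits.
summary = describe(
    "agent",
    "7.22.0",
    "stable",
)
```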
2 changes: 1 addition & 1 deletion airflow/tests/compose/dags/tuto.py
@@ -38,7 +38,7 @@
 """

 t3 = BashOperator(
-    task_id="templated", bash_command=templated_command, params={"my_param": "Parameter I passed in"}, dag=dag,
+    task_id="templated", bash_command=templated_command, params={"my_param": "Parameter I passed in"}, dag=dag
 )

 t2.set_upstream(t1)
4 changes: 2 additions & 2 deletions apache/datadog_checks/apache/apache.py
@@ -128,8 +128,8 @@ def check(self, instance):

     def _submit_metadata(self, value):
         """Possible formats:
-        Apache | Apache/X | Apache/X.Y | Apache/X.Y.Z | Apache/X.Y.Z (<OS>) | Apache/X.Y.Z (<OS>) <not specified>
-        https://httpd.apache.org/docs/2.4/mod/core.html#servertokens
+        Apache | Apache/X | Apache/X.Y | Apache/X.Y.Z | Apache/X.Y.Z (<OS>) | Apache/X.Y.Z (<OS>) <not specified>
+        https://httpd.apache.org/docs/2.4/mod/core.html#servertokens
         """
         match = self.VERSION_REGEX.match(value)

4 changes: 2 additions & 2 deletions cacti/datadog_checks/cacti/cacti.py
@@ -177,8 +177,8 @@ def _read_rrd(self, rrd_path, hostname, device_name, tags):
         return metric_count

     def _fetch_rrd_meta(self, connection, rrd_path_root, whitelist, field_names, tags):
-        """ Fetch metadata about each RRD in this Cacti DB, returning a list of
-            tuples of (hostname, device_name, rrd_path).
+        """Fetch metadata about each RRD in this Cacti DB, returning a list of
+        tuples of (hostname, device_name, rrd_path).
         """

         def _in_whitelist(rrd):
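The docstring hunks (apache above, cacti here, and several below) reflect a second change in the new release: Black now reformats docstrings, removing the space after the opening quotes, re-indenting continuation lines to the level of the opening quotes, and stripping trailing whitespace. A few hunks in this PR differ only in whitespace, which is why their old and new lines render identically in this view. A before/after sketch under those assumptions, with hypothetical names:

```python
# Before: a space after the opening quotes and a continuation line
# aligned under the first word.
def fetch_rrd_meta(connection):
    """ Fetch metadata about each RRD, returning a list of
            (hostname, device_name, rrd_path) tuples.
    """


# After: no space after the quotes, and the body re-indented to the
# indentation of the opening quotes.
def fetch_rrd_meta(connection):
    """Fetch metadata about each RRD, returning a list of
    (hostname, device_name, rrd_path) tuples.
    """
```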
2 changes: 1 addition & 1 deletion cisco_aci/tests/common.py
@@ -641,7 +641,7 @@


 class FakeSessionWrapper(SessionWrapper):
-    """ This mock:
+    """This mock:
     1. Takes the requested path and replace all special characters to underscore
     2. Fetch the corresponding hash from common.FIXTURE_LIST_FILE_MAP
     3. Returns the corresponding file content
4 changes: 1 addition & 3 deletions clickhouse/tests/conftest.py
@@ -21,9 +21,7 @@ def dd_environment():
                 'clickhouse-0{}'.format(i + 1), 'Logging errors to /var/log/clickhouse-server/clickhouse-server.err.log'
             )
         )
-    with docker_run(
-        common.COMPOSE_FILE, conditions=conditions, sleep=10,
-    ):
+    with docker_run(common.COMPOSE_FILE, conditions=conditions, sleep=10):
         yield common.CONFIG

2 changes: 1 addition & 1 deletion consul/tests/test_unit.py
@@ -304,7 +304,7 @@ def test_cull_services_list():
     # Num. services < MAX_SERVICES should be no-op in absence of whitelist
     num_services = MAX_SERVICES - 1
     services = consul_mocks.mock_get_n_services_in_cluster(num_services)
-    assert len(consul_check._cull_services_list(services,)) == num_services
+    assert len(consul_check._cull_services_list(services)) == num_services

     # Num. services < MAX_SERVICES should spit out only the whitelist when one is defined
     consul_check.service_whitelist = ['service_1', 'service_2', 'service_3']
@@ -146,10 +146,10 @@ def _make_counter_path(self, machine_name, counter_name, instance_name, counters
         """
         path = ""
         if WinPDHCounter._use_en_counter_names:
-            '''
+            """
             In this case, we don't have any translations. Just attempt to make the
             counter path
-            '''
+            """
             try:
                 path = win32pdh.MakeCounterPath((machine_name, self._class_name, instance_name, None, 0, counter_name))
                 self.logger.debug("Successfully created English-only path")
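This hunk swaps a triple-single-quoted block (used here as an inline comment rather than a true docstring) for triple double quotes. Note that ordinary single-quoted strings throughout the repo are untouched, so the project presumably runs Black with string normalization skipped; only docstring-style triple-quoted literals get normalized. A sketch of the resulting convention, with a hypothetical function:

```python
def make_counter_path(machine, counter):
    """Docstring-style literals use triple double quotes."""
    # Ordinary string literals keep their single quotes, consistent
    # with a Black setup that skips string normalization.
    return '\\{0}\\{1}'.format(machine, counter)
```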
@@ -118,9 +118,7 @@ def _get_tag_query_tag(self, sampler, wmi_obj, tag_query):

         Returns: tag or TagQueryUniquenessFailure exception.
         """
-        self.log.debug(
-            u"`tag_queries` parameter found. wmi_object=%s - query=%s", wmi_obj, tag_query,
-        )
+        self.log.debug(u"`tag_queries` parameter found. wmi_object=%s - query=%s", wmi_obj, tag_query)

         # Extract query information
         target_class, target_property, filters = self._format_tag_query(sampler, wmi_obj, tag_query)
@@ -213,13 +211,11 @@ def _extract_metrics(self, wmi_sampler, tag_by, tag_queries, constant_tags):
                 extracted_metrics.append(WMIMetric(wmi_property, float(wmi_value), tags))
             except ValueError:
                 self.log.warning(
-                    u"When extracting metrics with WMI, found a non digit value for property '%s'.", wmi_property,
+                    u"When extracting metrics with WMI, found a non digit value for property '%s'.", wmi_property
                 )
                 continue
             except TypeError:
-                self.log.warning(
-                    u"When extracting metrics with WMI, found a missing property '%s'", wmi_property,
-                )
+                self.log.warning(u"When extracting metrics with WMI, found a missing property '%s'", wmi_property)
                 continue
         return extracted_metrics

@@ -154,19 +154,19 @@ def calculate_perf_average_timer(previous, current, property_name):
     PERF_AVERAGE_TIMER

     https://msdn.microsoft.com/en-us/library/ms804010.aspx
-    Description This counter type measures the time it takes, on average, to
+    Description This counter type measures the time it takes, on average, to
     complete a process or operation. Counters of this type display a ratio of
     the total elapsed time of the sample interval to the number of processes
     or operations completed during that time. This counter type measures time
     in ticks of the system clock. The F variable represents the number of
     ticks per second. The value of F is factored into the equation so that
     the result can be displayed in seconds.

-    Generic type Average
-    Formula ((N1 - N0) / F) / (D1 - D0), where the numerator (N) represents the number of ticks counted during the last sample interval,
+    Generic type Average
+    Formula ((N1 - N0) / F) / (D1 - D0), where the numerator (N) represents the number of ticks counted during the last sample interval,
     F represents the frequency of the ticks, and the denominator (D) represents the number of operations completed during the last sample interval.
-    Average ((Nx - N0) / F) / (Dx - D0)
-    Example PhysicalDisk\ Avg. Disk sec/Transfer
+    Average ((Nx - N0) / F) / (Dx - D0)
+    Example PhysicalDisk\ Avg. Disk sec/Transfer
     """
     n0 = previous[property_name]
     n1 = current[property_name]
@@ -336,7 +336,7 @@ def _get_property_calculator(self, counter_type):
             calculator = get_calculator(counter_type)
         except UndefinedCalculator:
             self.logger.warning(
-                u"Undefined WMI calculator for counter_type %s. Values are reported as RAW.", counter_type,
+                u"Undefined WMI calculator for counter_type %s. Values are reported as RAW.", counter_type
             )

         return calculator
@@ -565,7 +565,7 @@ def _parse_results(self, raw_results, includes_qualifiers):
                     )
                 else:
                     self.logger.debug(
-                        u"CounterType qualifier not found for %s.%s", self.class_name, wmi_property.Name,
+                        u"CounterType qualifier not found for %s.%s", self.class_name, wmi_property.Name
                     )

                 try:
2 changes: 1 addition & 1 deletion datadog_checks_base/setup.py
@@ -66,7 +66,7 @@ def get_requirements(fpath, exclude=None, only=None):
         exclude=['kubernetes', 'orjson', 'pysocks', 'requests-kerberos', 'requests_ntlm', 'win-inet-pton'],
     ),
     'http': get_requirements(
-        'requirements.in', only=['pysocks', 'requests-kerberos', 'requests_ntlm', 'win-inet-pton'],
+        'requirements.in', only=['pysocks', 'requests-kerberos', 'requests_ntlm', 'win-inet-pton']
     ),
     'json': get_requirements('requirements.in', only=['orjson']),
     'kube': get_requirements('requirements.in', only=['kubernetes']),
18 changes: 5 additions & 13 deletions datadog_checks_base/tests/test_openmetrics.py
@@ -544,7 +544,7 @@ def test_submit_summary(

     if sum_monotonic_gauge:
         aggregator.assert_metric(
-            'prometheus.custom.summary.sum.total', 120512.0, tags=[], count=1, metric_type=aggregator.MONOTONIC_COUNT,
+            'prometheus.custom.summary.sum.total', 120512.0, tags=[], count=1, metric_type=aggregator.MONOTONIC_COUNT
         )

     aggregator.assert_all_metrics_covered()
@@ -557,18 +557,10 @@ def assert_histogram_counts(aggregator, count_type, suffix=False):
     if suffix:
         metric_name += '.total'

-    aggregator.assert_metric(
-        metric_name, 4, tags=['upper_bound:none'], count=1, metric_type=count_type,
-    )
-    aggregator.assert_metric(
-        metric_name, 1, tags=['upper_bound:1.0'], count=1, metric_type=count_type,
-    )
-    aggregator.assert_metric(
-        metric_name, 2, tags=['upper_bound:31104000.0'], count=1, metric_type=count_type,
-    )
-    aggregator.assert_metric(
-        metric_name, 3, tags=['upper_bound:432400000.0'], count=1, metric_type=count_type,
-    )
+    aggregator.assert_metric(metric_name, 4, tags=['upper_bound:none'], count=1, metric_type=count_type)
+    aggregator.assert_metric(metric_name, 1, tags=['upper_bound:1.0'], count=1, metric_type=count_type)
+    aggregator.assert_metric(metric_name, 2, tags=['upper_bound:31104000.0'], count=1, metric_type=count_type)
+    aggregator.assert_metric(metric_name, 3, tags=['upper_bound:432400000.0'], count=1, metric_type=count_type)


 @pytest.mark.parametrize(
@@ -15,7 +15,7 @@


 @click.command(
-    context_settings=CONTEXT_SETTINGS, short_help="Generate a markdown file of integrations in an Agent release",
+    context_settings=CONTEXT_SETTINGS, short_help="Generate a markdown file of integrations in an Agent release"
 )
 @click.option('--since', help="Initial Agent version", default='6.3.0')
 @click.option('--to', help="Final Agent version")
@@ -92,9 +92,9 @@ def display_path_tree(path_tree):
 @click.pass_context
 def create(ctx, name, integration_type, location, non_interactive, quiet, dry_run):
     """
-    Create scaffolding for a new integration.
+    Create scaffolding for a new integration.

-    NAME: The display name of the integration that will appear in documentation.
+    NAME: The display name of the integration that will appear in documentation.
     """

     if name.islower():
@@ -70,17 +70,14 @@ def _change(self, commit):
         return {'SHA': commit.sha, 'Title': title, 'URL': url, 'Teams': ' & '.join(teams), 'Next tag': next_tag}


-@click.command(
-    context_settings=CONTEXT_SETTINGS, short_help="Writes the CSV report about a specific release",
-)
+@click.command(context_settings=CONTEXT_SETTINGS, short_help="Writes the CSV report about a specific release")
 @click.option('--from-ref', '-f', help="Reference to start stats on", required=True)
 @click.option('--to-ref', '-t', help="Reference to end stats at", required=True)
 @click.option('--release-version', '-r', help="Release version to analyze", required=True)
 @click.option('--output-folder', '-o', help="Path to output folder")
 @click.pass_context
 def csv_report(ctx, from_ref, to_ref, release_version, output_folder=None):
-    """Computes the release report and writes it to a specific directory
-    """
+    """Computes the release report and writes it to a specific directory"""
     if output_folder is None:
         output_folder = release_version

@@ -178,7 +178,7 @@ def pick_card_member(config: dict, author: str, team: str) -> Optional[str]:
 @click.option('--milestone', help='The PR milestone to filter by')
 @click.option('--dry-run', '-n', is_flag=True, help='Only show the changes')
 @click.option(
-    '--update-rc-builds-cards', is_flag=True, help='Update cards in RC builds column with `target_ref` version',
+    '--update-rc-builds-cards', is_flag=True, help='Update cards in RC builds column with `target_ref` version'
 )
 @click.pass_context
 def testable(
@@ -227,8 +227,7 @@ def testable(
     See trello subcommand for details on how to setup access:

     `ddev release trello -h`.
-
     """
     root = get_root()
     repo = basepath(root)
     if repo not in ('integrations-core', 'datadog-agent'):
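Two more docstring rules appear in the csv_report and testable hunks above: a docstring whose text fits on one line gets its closing quotes pulled onto that line, and blank lines before the closing quotes are dropped. A sketch with a hypothetical function:

```python
# Before: closing quotes on their own line, plus a stray blank line
# before them.
def csv_report(output_folder):
    """Computes the release report and writes it to a directory

    """


# After: a one-line docstring closes on the same line, and trailing
# blank lines inside the docstring are removed.
def csv_report(output_folder):
    """Computes the release report and writes it to a directory"""
```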
@@ -93,7 +93,9 @@ def recommended_monitors():
                 result = [i for i in decoded.get('tags') if i.startswith('integration:')]
                 if len(result) < 1:
                     file_failed = True
-                    display_queue.append((echo_failure, f"  {monitor_filename} must have an `integration` tag"),)
+                    display_queue.append(
+                        (echo_failure, f"  {monitor_filename} must have an `integration` tag"),
+                    )

                 display_name = manifest.get("display_name").lower()
                 monitor_name = decoded.get('name').lower()
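This hunk runs in the opposite direction from the collapses elsewhere in the PR, and it shows the magic trailing comma working as intended: the comma after the tuple argument is kept, so the new Black explodes the append call instead of leaving it on one line. In sketch form, with hypothetical values:

```python
display_queue = []

# The comma after the tuple is a magic trailing comma for the
# enclosing append(...) call, so Black keeps the call exploded
# even though it would fit on one line.
display_queue.append(
    ("failure", "manifest must have an `integration` tag"),
)
```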
@@ -40,8 +40,7 @@


 class ParamProperties:
-    """Class to represent a parameter declared using the '@param' annotation
-    """
+    """Class to represent a parameter declared using the '@param' annotation"""

     def __init__(self, var_name, type_name, required=True, default_value=None):
         self.var_name = var_name
@@ -128,8 +127,7 @@ def _validate_type(self, errors):

     @classmethod
     def parse_from_strings(cls, start, config_lines, indent, errors):
-        """Main method used to parse a block starting at line 'start' with a given indentation.
-        """
+        """Main method used to parse a block starting at line 'start' with a given indentation."""
         idx = start

         # Let's first check if the block is a simple comment. If so, let's return and go to the next block
@@ -68,16 +68,14 @@ def is_blank(line):


 def is_exactly_indented(line, indent):
-    """Returns true if the line has the expected indentation. Empty line has no indentation
-    """
+    """Returns true if the line has the expected indentation. Empty line has no indentation"""
     if is_blank(line):
         return False
     return get_indent(line) == indent


 def is_at_least_indented(line, indent):
-    """Returns true if the line has at least the expected indentation. Empty line has no indentation
-    """
+    """Returns true if the line has at least the expected indentation. Empty line has no indentation"""
     if is_blank(line):
         return False
     return get_indent(line) >= indent
8 changes: 4 additions & 4 deletions datadog_checks_dev/datadog_checks/dev/tooling/github.py
@@ -32,22 +32,22 @@ def get_auth_info(config=None):

 def get_commit(repo, commit_sha, config):
     response = requests.get(
-        f'https://api.github.com/repos/DataDog/{repo}/git/commits/{commit_sha}', auth=get_auth_info(config),
+        f'https://api.github.com/repos/DataDog/{repo}/git/commits/{commit_sha}', auth=get_auth_info(config)
     )

     response.raise_for_status()
     return response.json()


 def get_tag(repo, ref, config):
-    response = requests.get(f'https://api.github.com/repos/DataDog/{repo}/git/tags/{ref}', auth=get_auth_info(config),)
+    response = requests.get(f'https://api.github.com/repos/DataDog/{repo}/git/tags/{ref}', auth=get_auth_info(config))

     response.raise_for_status()
     return response.json()


 def get_tags(repo, config):
-    response = requests.get(f'https://api.github.com/repos/DataDog/{repo}/git/refs/tags', auth=get_auth_info(config),)
+    response = requests.get(f'https://api.github.com/repos/DataDog/{repo}/git/refs/tags', auth=get_auth_info(config))

     response.raise_for_status()
     return response.json()
@@ -110,7 +110,7 @@ def get_pr(pr_num, config=None, raw=False, org='DataDog'):

 def get_pr_from_hash(commit_hash, repo, config=None, raw=False):
     response = requests.get(
-        f'https://api.github.com/search/issues?q=sha:{commit_hash}+repo:DataDog/{repo}', auth=get_auth_info(config),
+        f'https://api.github.com/search/issues?q=sha:{commit_hash}+repo:DataDog/{repo}', auth=get_auth_info(config)
     )

     if raw:
4 changes: 2 additions & 2 deletions datadog_checks_downloader/datadog_checks/downloader/cli.py
@@ -17,9 +17,9 @@


 def __is_canonical(version):
-    '''
+    """
     https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
-    '''
+    """

     P = r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*))?$'
     return re.match(P, version) is not None
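Aside from the quote change, the pattern in this hunk is PEP 440's own regex for canonical version strings (linked in the docstring). A quick sketch of what it accepts; the sample versions are illustrative, not from the PR:

```python
import re

P = r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*))?$'

for version in ('1.0.0', '20.8b0', '1.0.0.post1', '1.0.0-beta', 'v1.0'):
    print(version, bool(re.match(P, version)))

# 1.0.0       -> True   (plain release segment)
# 20.8b0      -> True   (canonical pre-release: a, b, or rc plus a number)
# 1.0.0.post1 -> True   (canonical post-release)
# 1.0.0-beta  -> False  (hyphenated pre-release is not canonical)
# v1.0        -> False  (leading 'v' is not canonical)
```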
@@ -167,13 +167,13 @@ def __download_custom(self, target, extension):
         return target_abspaths

     def __download_in_toto_layout_pubkeys(self, target, target_relpath):
-        '''
+        """
         NOTE: We assume that all the public keys needed to verify any in-toto
         root layout, or sublayout, metadata file has been directly signed by
         the top-level TUF targets role using *OFFLINE* keys. This is a
         reasonable assumption, as TUF does not offer meaningful security
         guarantees if _ALL_ targets were signed using _online_ keys.
-        '''
+        """

         pubkey_abspaths = self.__download_custom(target, '.pub')
         if not len(pubkey_abspaths):
@@ -265,11 +265,11 @@ def __download_with_tuf_in_toto(self, target_relpath):
         return target_abspath

     def download(self, target_relpath):
-        '''
+        """
         Returns:
             If download over TUF and in-toto is successful, this function will
             return the complete filepath to the desired target.
-        '''
+        """
         return self.__download_with_tuf_in_toto(target_relpath)

     def __get_versions(self, standard_distribution_name):
@@ -304,11 +304,11 @@ def __get_versions(self, standard_distribution_name):
         return wheels

     def get_wheel_relpath(self, standard_distribution_name, version=None):
-        '''
+        """
         Returns:
             If download over TUF is successful, this function will return the
             latest known version of the Datadog integration.
-        '''
+        """
         wheels = self.__get_versions(standard_distribution_name)

         if not wheels: