This repository has been archived by the owner on Feb 23, 2022. It is now read-only.

Commit c1f6215

Merge pull request #266 from multinet-app/add-flake8-plugins

Add flake8 plugins

jjnesbitt committed Jan 30, 2020
2 parents 46b6105 + beb6bab
Showing 14 changed files with 191 additions and 130 deletions.
3 changes: 2 additions & 1 deletion .flake8
@@ -5,7 +5,8 @@ max-line-length = 88
 exclude =
     __pycache__,
     build,
-    scripts
+    scripts,
+    docs
 extend-ignore =
     # See https://github.com/PyCQA/pycodestyle/issues/373
     E203,
8 changes: 6 additions & 2 deletions Pipfile
@@ -20,14 +20,18 @@ flake8-black = "==0.1.0"
 flake8-bugbear = "==19.8.0"
 flake8-docstrings = "==1.3.1"
 flake8-quotes = "==2.1.0"
+flake8-builtins = "==1.4.2"
+flake8-comprehensions = "==3.2.2"
+flake8-eradicate = "==0.2.4"
+flake8-mutable = "==1.2.0"
+flake8-mypy = "==17.8.0"
+pep8-naming = "==0.9.1"
 sphinx = "==2.3.1"
 pytest = "==5.1.0"
 pytest-cov = "==2.7.1"
 pytest-xdist = "==1.29.0"
 recommonmark = "==0.5.0"
 pre-commit = "==1.18.2"
-flake8-mypy = "*"
-pep8-naming = "*"
 flasgger = "*"
 uwsgi = "*"
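For context, the newly added plugins each contribute a family of checks that the rest of this diff responds to: flake8-builtins flags names that shadow Python builtins, flake8-comprehensions flags redundant comprehension and dict()/set() call patterns, flake8-eradicate flags commented-out code, and flake8-mutable flags mutable default arguments. The hunk also pins flake8-mypy and pep8-naming, which previously floated at "*". A hypothetical sketch (not from this repo) of the kind of code the last two new plugins reject:

    def add_item(item, bucket=[]):    # flake8-mutable: mutable default argument
        # bucket = bucket + [item]    # flake8-eradicate: commented-out code
        bucket.append(item)
        return bucket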
243 changes: 147 additions & 96 deletions Pipfile.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion data/openflights/script/process.py
@@ -9,7 +9,7 @@ def main():

     with open(sys.argv[1]) as nodes:
         reader = csv.DictReader(nodes)
-        ids = set(n["_key"] for n in reader)
+        ids = {n["_key"] for n in reader}

     reader = csv.DictReader(sys.stdin)
     writer = csv.DictWriter(sys.stdout, reader.fieldnames)
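This rewrite is what flake8-comprehensions asks for: wrapping a generator expression in set() builds the same set less directly than a set comprehension (reported as C401 in the plugin versions pinned above, if memory of the codes serves). A small illustration with made-up rows:

    rows = [{"_key": "1"}, {"_key": "2"}, {"_key": "1"}]
    ids = set(r["_key"] for r in rows)   # flagged: unnecessary generator
    ids = {r["_key"] for r in rows}      # preferred; both yield {"1", "2"}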
2 changes: 1 addition & 1 deletion multinet/api.py
@@ -38,7 +38,7 @@ def get_workspace(workspace: str) -> Any:
 @bp.route("/workspaces/<workspace>/tables", methods=["GET"])
 @use_kwargs({"type": fields.Str()})
 @swag_from("swagger/workspace_tables.yaml")
-def get_workspace_tables(workspace: str, type: TableType = "all") -> Any:
+def get_workspace_tables(workspace: str, type: TableType = "all") -> Any:  # noqa: A002
     """Retrieve the tables of a single workspace."""
     tables = db.workspace_tables(workspace, type)
     return util.stream(tables)
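The # noqa: A002 is for flake8-builtins, which flags the argument `type` for shadowing the builtin. Renaming it here would also rename the query-string key that use_kwargs injects into the signature, so the warning is suppressed rather than fixed; the purely internal shadowing names in db.py, errors.py, and util.py below are renamed instead. A hypothetical sketch of that split:

    # Public endpoint: the parameter name is part of the HTTP interface.
    def get_tables(type: str = "all") -> list:  # noqa: A002
        ...

    # Internal helper: nothing depends on the name, so just rename it.
    def filter_tables(table_type: str = "all") -> list:
        ...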
10 changes: 5 additions & 5 deletions multinet/db.py
@@ -137,7 +137,7 @@ def get_workspaces(arango: ArangoClient) -> Generator[str, None, None]:

 @with_client
 def workspace_tables(
-    workspace: str, type: TableType, arango: ArangoClient
+    workspace: str, table_type: TableType, arango: ArangoClient
 ) -> Generator[str, None, None]:
     """Return a list of all table names in the workspace named `workspace`."""

@@ -163,14 +163,14 @@ def is_edge(x: Tuple[Any, bool]) -> bool:
     def is_node(x: Tuple[Any, bool]) -> bool:
         return not is_edge(x)

-    if type == "all":
+    if table_type == "all":
         desired_type = pass_all
-    elif type == "node":
+    elif table_type == "node":
         desired_type = is_node
-    elif type == "edge":
+    elif table_type == "edge":
         desired_type = is_edge
     else:
-        raise BadQueryArgument("type", type, ["all", "node", "edge"])
+        raise BadQueryArgument("type", table_type, ["all", "node", "edge"])

     return (table[0] for table in tables if desired_type(table))

8 changes: 4 additions & 4 deletions multinet/downloaders/d3_json.py
@@ -66,10 +66,10 @@ def download(workspace: str, graph: str) -> Any:
         links.append(edge)

     response = make_response(
-        dict(
-            nodes=list(generate_filtered_docs(nodes)),
-            links=list(generate_filtered_docs(links)),
-        )
+        {
+            "nodes": list(generate_filtered_docs(nodes)),
+            "links": list(generate_filtered_docs(links)),
+        }
     )
     response.headers["Content-Disposition"] = f"attachment; filename={graph}.json"
     response.headers["Content-type"] = "application/json"
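Swapping dict(...) calls for dict literals is another flake8-comprehensions rewrite (C408 in the versions I have seen); the literal avoids a name lookup and call and reads as plain data. The same change recurs in the return statements of csv.py, d3_json.py, nested_json.py, newick.py, and util.py below. A minimal before/after:

    dict(nodes=[1, 2], links=[3])      # flagged: unnecessary dict call
    {"nodes": [1, 2], "links": [3]}    # equivalent literal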
10 changes: 5 additions & 5 deletions multinet/errors.py
@@ -28,14 +28,14 @@ def flask_response(self) -> FlaskTuple:
 class NotFound(ServerError):
     """Base exception for 404 errors of various types."""

-    def __init__(self, type: str, item: str):
+    def __init__(self, item_type: str, item: str):
         """
         Initialize the instance with the type and identity of the missing item.

-        `type` - the kind of item that is not found
+        `item_type` - the kind of item that is not found
         `item` - the name of the not found item
         """
-        self.type = type
+        self.type = item_type
         self.item = item

     def flask_response(self) -> FlaskTuple:
@@ -98,9 +98,9 @@ def flask_response(self) -> FlaskTuple:
 class AlreadyExists(ServerError):
     """Exception for attempting to create a resource that already exists."""

-    def __init__(self, type: str, item: str):
+    def __init__(self, item_type: str, item: str):
         """Initialize the exception."""
-        self.type = type
+        self.type = item_type
         self.item = item

     def flask_response(self) -> FlaskTuple:
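Note that only the constructor parameter is renamed; the instance attribute is still assigned as self.type, so any caller reading err.type is unaffected. Roughly:

    err = NotFound("workspace", "foo")
    err.type   # still "workspace"; only the local argument name changed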
2 changes: 1 addition & 1 deletion multinet/uploaders/csv.py
@@ -100,4 +100,4 @@ def upload(workspace: str, table: str) -> Any:

     # Insert the data into the collection.
     results = coll.insert_many(rows)
-    return dict(count=len(results))
+    return {"count": len(results)}
4 changes: 2 additions & 2 deletions multinet/uploaders/d3_json.py
@@ -58,7 +58,7 @@ def validate_d3_json(data: dict) -> Sequence[ValidationFailure]:
         data_errors.append(InconsistentLinkKeys())

     # Check for duplicated nodes
-    ids = set(row["id"] for row in data["nodes"])
+    ids = {row["id"] for row in data["nodes"]}
     if len(data["nodes"]) != len(ids):
         data_errors.append(NodeDuplicates())

@@ -113,4 +113,4 @@ def upload(workspace: str, table: str) -> Any:
     nodes_coll.insert_many(nodes)
     links_coll.insert_many(links)

-    return dict(nodecount=len(nodes), edgecount=len(links))
+    return {"nodecount": len(nodes), "edgecount": len(links)}
13 changes: 7 additions & 6 deletions multinet/uploaders/nested_json.py
@@ -22,15 +22,14 @@ def analyze_nested_json(
     `data` - the text of a nested_json file
     `(nodes, edges)` - a node and edge table describing the tree.
     """
-    id = itertools.count(100)
+    ident = itertools.count(100)
    data = json.loads(raw_data)

     def keyed(rec: dict) -> dict:
         if "_key" in rec:
             return rec

-        # keyed = dict(rec)
-        rec["_key"] = str(next(id))
+        rec["_key"] = str(next(ident))

         return rec

@@ -115,6 +114,8 @@ def upload(workspace: str, table: str) -> Any:
     int_nodetable.insert_many(nodes[0])
     leaf_nodetable.insert_many(nodes[1])

-    return dict(
-        edgecount=len(edges), int_nodecount=len(nodes[0]), leaf_nodecount=len(nodes[1])
-    )
+    return {
+        "edgecount": len(edges),
+        "int_nodecount": len(nodes[0]),
+        "leaf_nodecount": len(nodes[1]),
+    }
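Two checks drive the first hunk here: the counter variable `id` shadowed a builtin (flake8-builtins), hence the rename to `ident`, and the commented-out `# keyed = dict(rec)` line was dead code (flake8-eradicate), hence its removal. The counter itself is unchanged; for reference:

    import itertools

    ident = itertools.count(100)   # yields 100, 101, 102, ...
    str(next(ident))               # "100", used as a synthetic _key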
2 changes: 1 addition & 1 deletion multinet/uploaders/newick.py
@@ -119,4 +119,4 @@ def read_tree(parent: Optional[str], node: newick.Node) -> None:

     read_tree(None, tree[0])

-    return dict(edgecount=edgecount, nodecount=nodecount)
+    return {"edgecount": edgecount, "nodecount": nodecount}
10 changes: 7 additions & 3 deletions multinet/util.py
@@ -54,7 +54,11 @@ def get_edge_table_properties(workspace: str, edge_table: str) -> EdgeTableProperties:
     else:
         tables_to_keys[table] = {key}

-    return dict(table_keys=tables_to_keys, from_tables=from_tables, to_tables=to_tables)
+    return {
+        "table_keys": tables_to_keys,
+        "from_tables": from_tables,
+        "to_tables": to_tables,
+    }


 def generate(iterator: Sequence[Any]) -> Generator[str, None, None]:
@@ -80,10 +84,10 @@ def require_db() -> None:
         raise DatabaseNotLive()


-def decode_data(input: bytes) -> str:
+def decode_data(data: bytes) -> str:
     """Decode the request data assuming utf8 encoding."""
     try:
-        body = input.decode("utf8")
+        body = data.decode("utf8")
     except UnicodeDecodeError as e:
         raise DecodeFailed(str(e))

4 changes: 2 additions & 2 deletions test/test_csv_uploader.py
@@ -32,7 +32,7 @@ def test_validate_csv():

     validation_resp = v_error.value.errors
     correct = [err.asdict() for err in [DuplicateKey(key="2"), DuplicateKey(key="5")]]
-    assert all([err in validation_resp for err in correct])
+    assert all(err in validation_resp for err in correct)

     # Test invalid syntax
     with open(invalid_headers_file_path) as test_file:
@@ -51,7 +51,7 @@ def test_validate_csv():
             InvalidRow(row=5, fields=["_from", "_to"]),
         ]
     ]
-    assert all([err in validation_resp for err in correct])
+    assert all(err in validation_resp for err in correct)

     # Test unicode decode errors
     test_data = b"\xff\xfe_\x00k\x00e\x00y\x00,\x00n\x00a\x00m\x00e\x00\n"
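Dropping the square brackets passes a generator expression to all() instead of first building a list (likely C407 in the pinned flake8-comprehensions, though the exact code is from memory). Beyond style, all() can then short-circuit on the first failing element:

    correct = [{"key": "2"}, {"key": "5"}]
    validation_resp = [{"key": "2"}]
    all(err in validation_resp for err in correct)   # False; stops at the first miss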
