chore: more sourcery refactorings (#647)
jnussbaum committed Nov 16, 2023
1 parent 8b445f2 commit 300bfa0
Showing 28 changed files with 288 additions and 338 deletions.
5 changes: 1 addition & 4 deletions .sourcery.yaml
@@ -13,10 +13,7 @@ ignore:
rule_settings:
enable:
- default
disable: [
inline-immediately-returned-variable,
max-min-default,
]
disable: []
rule_types:
- refactoring
- suggestion
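Emptying the disable list re-enables the two Sourcery rules that were previously switched off, inline-immediately-returned-variable and max-min-default; the hunks below are the rewrites those rules suggest. As a quick orientation, here is a minimal sketch of the first rule (the function and names are made up, not code from this repository):

def greeting_before(name: str) -> str:
    # a variable that is assigned and immediately returned ...
    message = f"Hello, {name}!"
    return message

def greeting_after(name: str) -> str:
    # ... gets inlined into the return statement
    return f"Hello, {name}!"

assert greeting_before("world") == greeting_after("world")
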
2 changes: 1 addition & 1 deletion src/dsp_tools/cli.py
@@ -322,7 +322,7 @@ def _log_cli_arguments(parsed_args: argparse.Namespace) -> None:

parameter_lines = []
parameters_to_log = {key: value for key, value in vars(parsed_args).items() if key != "action"}
longest_key_length = max(len(key) for key in parameters_to_log) if parameters_to_log else 0
longest_key_length = max((len(key) for key in parameters_to_log), default=0)
for key, value in parameters_to_log.items():
if key == "password":
parameter_lines.append(f"{key:<{longest_key_length}} = {'*' * len(value)}")
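The change above is the max-min-default rule: max() raises ValueError on an empty iterable unless a default= is given, so the explicit empty-check becomes unnecessary. A minimal sketch with a hypothetical empty mapping:

params: dict[str, str] = {}

# before: guard against the empty mapping by hand
longest_before = max(len(key) for key in params) if params else 0

# after: let max() fall back to its default argument when the iterable is empty
longest_after = max((len(key) for key in params), default=0)

assert longest_before == longest_after == 0
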
3 changes: 1 addition & 2 deletions src/dsp_tools/commands/excel2json/utils.py
@@ -122,9 +122,8 @@ def check_required_values(df: pd.DataFrame, required_values_columns: list[str])
"""
# It checks if any of the values in a specified column are empty. If they are, they are added to the dictionary
# with the column name as key and a boolean series as value that contains True for every pd.NA
res_dict = {col: df[col].isnull() for col in required_values_columns if df[col].isnull().any()}
# If all the columns are filled, then it returns an empty dictionary.
return res_dict
return {col: df[col].isnull() for col in required_values_columns if df[col].isnull().any()}


def turn_bool_array_into_index_numbers(series: pd.Series[bool], true_remains: bool = True) -> list[int]:
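The refactoring above returns the dict comprehension directly instead of binding it to res_dict first; the behaviour is unchanged, including the empty dict for fully filled columns. A minimal, self-contained sketch of the same pandas pattern (the DataFrame and column names are made up for illustration):

import pandas as pd

def columns_with_missing_values(df: pd.DataFrame, required: list[str]) -> dict[str, "pd.Series[bool]"]:
    # keep only the required columns that contain at least one empty cell;
    # the value is a boolean mask that is True wherever the cell is missing
    return {col: df[col].isnull() for col in required if df[col].isnull().any()}

df = pd.DataFrame({"name": ["a", None], "label": ["x", "y"]})
print(columns_with_missing_values(df, ["name", "label"]))  # only "name" is reported
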
10 changes: 3 additions & 7 deletions src/dsp_tools/commands/excel2xml/excel2xml_cli.py
@@ -327,8 +327,7 @@ def _get_prop_function(
}
if row.get("prop type") not in proptype_2_function:
raise BaseError(f"Invalid prop type for property {row.get('prop name')} in resource {resource_id}")
make_prop_function = proptype_2_function[row["prop type"]]
return make_prop_function
return proptype_2_function[row["prop type"]]


def _convert_row_to_property_elements(
@@ -437,13 +436,12 @@ def _convert_property_row_to_xml(
)

# create the property
prop = _create_property(
return _create_property(
make_prop_function=make_prop_function,
row=row,
property_elements=property_elements,
resource_id=resource_id,
)
return prop


def _create_property(
@@ -473,9 +471,7 @@ def _create_property(
if check_notna(row.get("prop list")):
kwargs_propfunc["list_name"] = str(row["prop list"])

prop = make_prop_function(**kwargs_propfunc)

return prop
return make_prop_function(**kwargs_propfunc)


def excel2xml(
15 changes: 5 additions & 10 deletions src/dsp_tools/commands/excel2xml/excel2xml_lib.py
@@ -273,7 +273,7 @@ def make_root(
schema_url = "https://raw.githubusercontent.com/dasch-swiss/dsp-tools/main/src/dsp_tools/resources/schema/data.xsd"
schema_location_key = str(etree.QName("http://www.w3.org/2001/XMLSchema-instance", "schemaLocation"))
schema_location_value = f"https://dasch.swiss/schema {schema_url}"
root = etree.Element(
return etree.Element(
"{%s}knora" % xml_namespace_map[None],
attrib={
schema_location_key: schema_location_value,
@@ -282,7 +282,6 @@
},
nsmap=xml_namespace_map,
)
return root


def append_permissions(root_element: etree._Element) -> etree._Element:
@@ -394,8 +393,7 @@ def make_resource(
) from None
kwargs["creation_date"] = creation_date

resource_ = etree.Element("{%s}resource" % xml_namespace_map[None], **kwargs) # type: ignore[arg-type]
return resource_
return etree.Element("{%s}resource" % xml_namespace_map[None], **kwargs) # type: ignore[arg-type]


def make_bitstream_prop(
@@ -1495,11 +1493,10 @@ def make_region(
) from None
kwargs["creation_date"] = creation_date

region_ = etree.Element(
return etree.Element(
"{%s}region" % xml_namespace_map[None],
**kwargs, # type: ignore[arg-type]
)
return region_


def make_annotation(
@@ -1553,11 +1550,10 @@ def make_annotation(
) from None
kwargs["creation_date"] = creation_date

annotation_ = etree.Element(
return etree.Element(
"{%s}annotation" % xml_namespace_map[None],
**kwargs, # type: ignore[arg-type]
)
return annotation_


def make_link(
@@ -1611,11 +1607,10 @@ def make_link(
) from None
kwargs["creation_date"] = creation_date

link_ = etree.Element(
return etree.Element(
"{%s}link" % xml_namespace_map[None],
**kwargs, # type: ignore[arg-type]
)
return link_


def create_json_excel_list_mapping(
4 changes: 1 addition & 3 deletions src/dsp_tools/commands/fast_xmlupload/upload_files.py
@@ -421,9 +421,7 @@ def upload_files(
end_time = datetime.now()
print(f"{datetime.now()}: Uploading files took {end_time - start_time}")
logger.info(f"Uploading files took {end_time - start_time}")
success = _check_if_all_files_were_uploaded(
return _check_if_all_files_were_uploaded(
result=result,
internal_filenames_of_processed_files=internal_filenames_of_processed_files,
)

return success
3 changes: 2 additions & 1 deletion src/dsp_tools/commands/project/create/project_create.py
@@ -367,7 +367,8 @@ def _get_projects_where_user_is_admin(
continue
in_project = in_project_list[0]

project_info[str(in_project.iri)] = bool(project_role == "admin")
is_admin = project_role == "admin"
project_info[str(in_project.iri)] = is_admin
if verbose:
print(f"\tAdded user '{username}' as {project_role} to project '{in_project.shortname}'.")
logger.info(f"Added user '{username}' as {project_role} to project '{in_project.shortname}'.")
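This hunk goes in the other direction: the comparison gets a name (is_admin) and the redundant bool() wrapper disappears, since a comparison already evaluates to a bool. A minimal sketch (the role value is made up):

role = "admin"

is_admin_redundant = bool(role == "admin")  # bool() adds nothing here
is_admin = role == "admin"                  # equivalent and clearer

assert is_admin_redundant is is_admin is True
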
26 changes: 10 additions & 16 deletions src/dsp_tools/commands/project/create/project_create_lists.py
@@ -109,26 +109,24 @@ def create_lists_on_server(
overall_success = False

current_project_lists: dict[str, Any] = {}
for new_list in lists_to_create:
# if list exists already, add it to "current_project_lists" (for later usage), then skip it
existing_list = [x for x in existing_lists if x.project == project_remote.iri and x.name == new_list["name"]]
if existing_list:
existing_list_name = existing_list[0].name
for new_lst in lists_to_create:
if existing_lst := [x for x in existing_lists if x.project == project_remote.iri and x.name == new_lst["name"]]:
existing_list_name = existing_lst[0].name
if not existing_list_name:
raise BaseError(f"Node {existing_list[0]} has no name.")
raise BaseError(f"Node {existing_lst[0]} has no name.")
current_project_lists[existing_list_name] = {
"id": existing_list[0].iri,
"nodes": new_list["nodes"],
"id": existing_lst[0].iri,
"nodes": new_lst["nodes"],
}
print(f"\tWARNING: List '{new_list['name']}' already exists on the DSP server. Skipping...")
print(f"\tWARNING: List '{new_lst['name']}' already exists on the DSP server. Skipping...")
overall_success = False
continue

created_list, success = _create_list_node(con=con, project=project_remote, node=new_list)
created_list, success = _create_list_node(con=con, project=project_remote, node=new_lst)
current_project_lists.update(created_list)
if not success:
overall_success = False
print(f"\tCreated list '{new_list['name']}'.")
print(f"\tCreated list '{new_lst['name']}'.")

return current_project_lists, overall_success

@@ -178,8 +176,6 @@ def create_lists(
or one of the nodes could not be created),
it is False.
"""
overall_success = True

project_definition = parse_json_input(project_file_as_path_or_parsed=project_file_as_path_or_parsed)
if not project_definition.get("project", {}).get("lists"):
return {}, True
@@ -207,7 +203,5 @@
con=con,
project_remote=project_remote,
)
if not success:
overall_success = False

return current_project_lists, overall_success
return current_project_lists, success
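The hunks above bind and test the filtered list in a single step with an assignment expression (existing_lst := [...]), and create_lists now returns the success flag directly instead of copying it into overall_success. A minimal sketch of the walrus-operator rewrite (the example data is made up, Python 3.8+):

fruits = ["apple", "apricot", "banana"]

def matches_before(prefix: str) -> list[str]:
    found = [x for x in fruits if x.startswith(prefix)]
    if found:
        return found
    return []

def matches_after(prefix: str) -> list[str]:
    # assignment expression: bind and test in the same if condition
    if found := [x for x in fruits if x.startswith(prefix)]:
        return found
    return []

assert matches_before("ap") == matches_after("ap") == ["apple", "apricot"]
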
24 changes: 12 additions & 12 deletions src/dsp_tools/commands/project/create/project_validate.py
@@ -296,18 +296,18 @@ def _check_cardinalities_of_circular_references(project_definition: dict[Any, An

if len(errors) == 0:
return True
else:
error_message = (
"ERROR: Your ontology contains properties derived from 'hasLinkTo' that allow circular references "
"between resources. This is not a problem in itself, but if you try to upload data that actually "
"contains circular references, these 'hasLinkTo' properties will be temporarily removed from the "
"affected resources. Therefore, it is necessary that all involved 'hasLinkTo' properties have a "
"cardinality of 0-1 or 0-n. \n"
"Please make sure that the following properties have a cardinality of 0-1 or 0-n:"
)
for error in errors:
error_message = f"{error_message}\n\t- Resource {error[0]}, property {error[1]}"
raise BaseError(error_message)

error_message = (
"ERROR: Your ontology contains properties derived from 'hasLinkTo' that allow circular references "
"between resources. This is not a problem in itself, but if you try to upload data that actually "
"contains circular references, these 'hasLinkTo' properties will be temporarily removed from the "
"affected resources. Therefore, it is necessary that all involved 'hasLinkTo' properties have a "
"cardinality of 0-1 or 0-n. \n"
"Please make sure that the following properties have a cardinality of 0-1 or 0-n:"
)
for error in errors:
error_message = f"{error_message}\n\t- Resource {error[0]}, property {error[1]}"
raise BaseError(error_message)


def _collect_link_properties(project_definition: dict[Any, Any]) -> dict[str, list[str]]:
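In _check_cardinalities_of_circular_references the early return True makes the else branch unnecessary, so the error-message construction is dedented; behaviour is identical. A minimal sketch of this no-else-after-return pattern (exception type and message are illustrative only):

def check_before(errors: list[str]) -> bool:
    if len(errors) == 0:
        return True
    else:
        raise ValueError("found errors: " + ", ".join(errors))

def check_after(errors: list[str]) -> bool:
    if len(errors) == 0:
        return True
    # the else is redundant: this point is only reached when errors exist
    raise ValueError("found errors: " + ", ".join(errors))

assert check_before([]) is check_after([]) is True
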
3 changes: 1 addition & 2 deletions src/dsp_tools/commands/project/models/group.py
@@ -244,10 +244,9 @@ def getAllGroupsForProject(con: Connection, proj_iri: str) -> Optional[list[Grou
return [g for g in Group.getAllGroups(con) if g.project == proj_iri]

def createDefinitionFileObj(self) -> dict[str, Any]:
group = {
return {
"name": self.name,
"descriptions": self.descriptions.createDefinitionFileObj(),
"selfjoin": self.selfjoin,
"status": self.status,
}
return group
6 changes: 2 additions & 4 deletions src/dsp_tools/commands/rosetta.py
@@ -68,15 +68,14 @@ def _create_json(rosetta_folder: Path) -> bool:
True if the project could be created without problems, False if something went wrong during the creation process
"""
print("Execute 'dsp-tools create rosetta.json'...")
success = create_project(
return create_project(
project_file_as_path_or_parsed=rosetta_folder / "rosetta.json",
server="http://0.0.0.0:3333",
user_mail="root@example.com",
password="test",
verbose=False,
dump=False,
)
return success


def _upload_xml(rosetta_folder: Path) -> bool:
@@ -90,7 +89,7 @@ def _upload_xml(rosetta_folder: Path) -> bool:
True if all data could be uploaded without problems, False if something went wrong during the upload process
"""
print("Execute 'dsp-tools xmlupload rosetta.xml'...")
success = xmlupload(
return xmlupload(
input_file=rosetta_folder / "rosetta.xml",
server="http://0.0.0.0:3333",
user="root@example.com",
Expand All @@ -99,7 +98,6 @@ def _upload_xml(rosetta_folder: Path) -> bool:
sipi="http://0.0.0.0:1024",
config=UploadConfig(),
)
return success


def upload_rosetta() -> bool:
3 changes: 1 addition & 2 deletions src/dsp_tools/commands/xmlupload/resource_multimedia.py
Expand Up @@ -39,11 +39,10 @@ def _upload_bitstream(
filepath=str(Path(imgdir) / Path(pth)),
)
internal_file_name_bitstream = img["uploadedFiles"][0]["internalFilename"] # type: ignore[index]
resource_bitstream = resource.get_bitstream_information(
return resource.get_bitstream_information(
internal_file_name_bitstream=internal_file_name_bitstream,
permissions_lookup=permissions_lookup,
)
return resource_bitstream


def handle_bitstream(
@@ -67,8 +67,7 @@ def _create_text_link_objects(subject_id: str, text_prop: etree._Element) -> lis
# if the same ID is in several separate <text> values of one <text-prop>, they are considered separate links
xml_props = []
for text in text_prop.getchildren():
links = _extract_ids_from_one_text_value(text)
if links:
if links := _extract_ids_from_one_text_value(text):
xml_link = XMLLink(subject_id, links)
xml_props.append(xml_link)
# this UUID is so that the links that were stashed can be identified in the XML data file
@@ -173,8 +172,7 @@ def _find_cheapest_outgoing_links(
node_value = node_cost / node_gain
costs.append(Cost(source, target, node_value))
cheapest_cost = sorted(costs, key=lambda x: x.node_value)[0]
cheapest_links = [x for x in edges if x.source == cheapest_cost.source and x.target == cheapest_cost.target]
return cheapest_links
return [x for x in edges if x.source == cheapest_cost.source and x.target == cheapest_cost.target]


def _remove_edges_to_stash(
@@ -98,7 +98,7 @@ def stash_circular_references(
stashed_link_values: list[LinkValueStashItem] = []

for res in resources:
if not res.id in stash_lookup:
if res.id not in stash_lookup:
continue
for link_prop in res.get_props_with_links():
assert link_prop.valtype in ["text", "resptr"]
@@ -115,9 +115,7 @@

standoff_stash = StandoffStash.make(stashed_standoff_values)
link_value_stash = LinkValueStash.make(stashed_link_values)
stash = Stash.make(standoff_stash, link_value_stash)

return stash
return Stash.make(standoff_stash, link_value_stash)


def identify_circular_references(root: etree._Element) -> tuple[dict[str, list[str]], list[str]]:
@@ -112,5 +112,4 @@ def _create_resptr_prop_json_object_to_update(
f"{stash.prop_name}Value": linkVal,
"@context": context,
}
jsondata = json.dumps(jsonobj)
return jsondata
return json.dumps(jsonobj)
2 changes: 1 addition & 1 deletion src/dsp_tools/import_scripts
12 changes: 4 additions & 8 deletions src/dsp_tools/utils/connection_live.py
@@ -190,8 +190,7 @@ def post(
response=response,
)
check_for_api_error(response)
json_response = cast(dict[str, Any], response.json())
return json_response
return cast(dict[str, Any], response.json())

def get(
self,
@@ -231,8 +230,7 @@ def get(
response=response,
)
check_for_api_error(response)
json_response = cast(dict[str, Any], response.json())
return json_response
return cast(dict[str, Any], response.json())

def put(
self,
@@ -282,8 +280,7 @@ def put(
response=response,
)
check_for_api_error(response)
json_response = cast(dict[str, Any], response.json())
return json_response
return cast(dict[str, Any], response.json())

def delete(
self,
@@ -322,5 +319,4 @@ def delete(
response=response,
)
check_for_api_error(response)
json_response = cast(dict[str, Any], response.json())
return json_response
return cast(dict[str, Any], response.json())
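
All four HTTP helpers now return cast(dict[str, Any], response.json()) directly. Note that typing.cast only informs the type checker; it performs no runtime conversion or validation. A minimal sketch of the same pattern without the network dependency (function name and payload are made up):

import json
from typing import Any, cast

def parse_payload(raw: str) -> dict[str, Any]:
    # cast() narrows the static type to dict[str, Any]; at runtime it returns its argument unchanged
    return cast(dict[str, Any], json.loads(raw))

print(parse_payload('{"status": "ok"}'))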
