Clean up print statements (#1442)
* Removed print statements from test_data, test_enhancements and test_generators

* Replace some print statements with TODO comments, since that looks like it was the intent

* More print -> TODO:

* Removed print statements (as long as there's an assert) in test_issues

* Most print statements removed from test_rewrite_rules, test_scripts, test_utils and test_validation. Some methods there still need assertions before their remaining print statements can be removed.
kevinschaper committed May 9, 2023
1 parent 9e6114b commit f06d45b
Showing 35 changed files with 16 additions and 160 deletions.
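The pattern is the same throughout the diff below: a debug print that followed a query or round-trip is either dropped where an assertion already covers the result, turned into an assertion on the values it used to display, or replaced by a TODO comment when the expected values are not yet pinned down. A minimal sketch of that print-to-assertion conversion, using a hypothetical Person stand-in rather than the generated kitchen-sink classes:

import unittest


class Person:
    # Hypothetical stand-in for the generated model class used in these tests.
    def __init__(self, id, name=None):
        self.id = id
        self.name = name


class PrintCleanupPattern(unittest.TestCase):
    def test_query_results(self):
        # Stand-in for a result set the original tests printed for visual inspection.
        all_objs = [Person("P:1", "Alice"), Person("P:2", "Bob")]

        # Before: debug output only.
        #   for p in all_objs:
        #       print(p)

        # After: assert on the properties the print was meant to eyeball.
        self.assertEqual(2, len(all_objs))
        self.assertCountEqual(["P:1", "P:2"], [p.id for p in all_objs])

        # Where the expected value is not yet clear, a marker replaces the print:
        # TODO: assert on has_familial_relationships once expected data is settled


if __name__ == "__main__":
    unittest.main()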
tests/test_data/test_sqlite.py (0 additions, 9 deletions)
@@ -69,14 +69,7 @@ def test_sqlite_store(self):
q = session.query(endpoint.module.Person)
all_objs = q.all()
self.assertEqual(2, len(all_objs))
for p in all_objs:
print(p)
for rel in p.has_familial_relationships:
print(rel)
print(rel.type)
q = session.query(endpoint.module.FamilialRelationship)
for r in q.all():
print(r)
session.close()
# step 4: test loading from SQLStore
# 4a: first test load_all, diff to original data should be empty
@@ -120,15 +113,13 @@ def test_mixin(self):
b.add_class("my_abstract", slots=["my_abstract_slot"], abstract=True)
b.add_class("my_class1", is_a="my_abstract", mixins=["my_mixin"])
b.add_class("my_class2", slots=["ref_to_c1"])
# print(yaml_dumper.dumps(b.schema))
endpoint = SQLStore(b.schema, database_path=TMP_DB)
endpoint.db_exists(force=True)
mod = endpoint.compile_native()
i1 = mod.MyClass1(my_mixin_slot="v1", my_abstract_slot="v2")
i2 = mod.MyClass2(ref_to_c1=i1)
endpoint.dump(i2)
i2_recap = endpoint.load(target_class=mod.MyClass2)
# print(yaml_dumper.dumps(i2_recap))
diff = compare_objs(i2, i2_recap)
self.assertEqual(diff, "")

tests/test_data/test_sqlite_pydantic.py (0 additions, 7 deletions)
@@ -71,14 +71,7 @@ def test_sqlite_store(self):
q = session.query(endpoint.module.Person)
all_objs = q.all()
self.assertEqual(2, len(all_objs))
for p in all_objs:
print(p)
for rel in p.has_familial_relationships:
print(rel)
print(rel.type)
q = session.query(endpoint.module.FamilialRelationship)
for r in q.all():
print(r)
# step 4: test loading from SQLStore
# 4a: first test load_all, diff to original data should be empty
[returned_container] = endpoint.load_all(target_class=Container)
tests/test_enhancements/test_enumeration.py (0 additions, 1 deletion)
@@ -213,7 +213,6 @@ def test_notebook_model_2(self):
"Something",
[module.UnusualEnumPatterns.M, module.UnusualEnumPatterns["% ! -- whoo"]],
)
print(str(t))

def test_notebook_model_3(self):
file = "notebook_model_3"
tests/test_generators/test_contextgen.py (0 additions, 1 deletion)
@@ -26,7 +26,6 @@ def test_context(self):
json_dumper.dump(element=inst, to_file=DATA_JSON)
with open(CONTEXT_OUT, "w") as stream:
stream.write(ContextGenerator(SCHEMA).serialize())
print(rdf_dumper.dumps(inst, CONTEXT_OUT))


if __name__ == "__main__":
tests/test_generators/test_owlgen.py (0 additions, 5 deletions)
@@ -46,7 +46,6 @@ def test_owlgen(self):
self.assertGreater(len(owl_classes), 10)
for c in owl_classes:
types = list(g.objects(c, RDF.type))
# print(f'Class={c} {types}')
self.assertCountEqual(types, [OWL.Class])
assert KS.MedicalEvent in owl_classes
# test that enums are treated as classes
@@ -55,13 +54,11 @@
assert len(owl_object_properties) > 10
for p in owl_object_properties:
types = list(g.objects(p, RDF.type))
# print(f'Class={c} {types}')
self.assertCountEqual(types, [OWL.ObjectProperty])
owl_datatype_properties = list(g.subjects(RDF.type, OWL.DatatypeProperty))
assert len(owl_datatype_properties) > 10
for p in owl_datatype_properties:
types = list(g.objects(p, RDF.type))
# print(f'Class={c} {types}')
self.assertCountEqual(types, [OWL.DatatypeProperty])
# check that definitions are present, and use the default profile
self.assertIn(
@@ -123,8 +120,6 @@ def test_definition_uris(self):
URIRef('http://www.w3.org/2004/02/skos/core#exactMatch'),
URIRef('http://schema.org/Person'))
]
#for t in triples:
# print(t)
for t in expected:
self.assertIn(t, triples)

tests/test_generators/test_pydanticgen.py (0 additions, 4 deletions)
@@ -49,24 +49,20 @@ def test_dynamic():
# NOTE: generated pydantic doesn't yet do validation
e1 = EmploymentEvent(is_current=True)
p1 = Person(id="x", has_employment_history=[e1])
print(p1)
assert p1.id == "x"
assert p1.name is None
json = {"id": "P1", "has_employment_history": [{"is_current": True}]}
p2 = Person(**json)
print(p2)
p2 = Person(**dataset_dict["persons"][0])
print(dataset_dict)
ds1 = Dataset(**dataset_dict)
print(ds1)
assert len(ds1.persons) == 2

test_dynamic()

def test_compile_pydantic(self):
"""Generate and compile pydantic classes"""
gen = PydanticGenerator(SCHEMA, package=PACKAGE)
print(gen.allow_extra)
code = gen.serialize()
mod = compile_python(code, PACKAGE)
p = mod.Person(id="P:1")
tests/test_generators/test_pythongen.py (0 additions, 2 deletions)
@@ -57,7 +57,6 @@ def test_pythongen(self):
# however, inline in a non-list context does not
p2dict = {"id": "P:2", "has_birth_event": {"started_at_time": "1981-01-01"}}
p2 = json_loader.loads(p2dict, kitchen_module.Person)
print(p2)
self.assertEqual(
"Person(id='P:1', name=None, has_employment_history=[EmploymentEvent(started_at_time=None, ended_at_time=None, is_current=None, metadata=None, employed_at='ROR:1', type=None)], has_familial_relationships=[], has_medical_history=[], age_in_years=None, addresses=[], has_birth_event=None, species_name=None, stomach_count=None, is_living=None, aliases=[])",
str(p),
@@ -71,7 +70,6 @@

diagnosis = kitchen_module.DiagnosisConcept(id="CODE:D0001", name="headache")
event = kitchen_module.MedicalEvent(in_location="GEO:1234", diagnosis=diagnosis)
print(str(event))
self.assertEqual(
"MedicalEvent(started_at_time=None, ended_at_time=None, is_current=None, metadata=None, in_location='GEO:1234', diagnosis=DiagnosisConcept(id='CODE:D0001', name='headache', in_code_system=None), procedure=None)",
str(event),
tests/test_generators/test_rdfgen.py (0 additions, 1 deletion)
@@ -42,7 +42,6 @@ def test_rdf_type_in_jsonld(self):
graph = Graph()
graph.parse(data=JSONLD, format="json-ld", prefix=True)
ttl_str = graph.serialize(format='turtle').decode()
print(ttl_str)
graph.parse(data=ttl_str, format="turtle")


tests/test_generators/test_shexgen.py (0 additions, 2 deletions)
@@ -45,10 +45,8 @@ def test_shex(self):
)
return
raise e
# print(g)
nodes = set()
for s, p, o in g.triples((None, None, None)):
# print(f'{s} {p} {o}')
nodes.add(s)
for node in nodes:
r = evaluate(g, shexstr, focus=node)
tests/test_generators/test_sqlalchemygen.py (0 additions, 11 deletions)
@@ -139,7 +139,6 @@ def test_sqla_declarative_on_metamodel(self):
"""
sv = package_schemaview("linkml_runtime.linkml_model.meta")
gen = SQLAlchemyGenerator(sv.schema)
print(f"SQLAGEN: ****")
code = gen.generate_sqla(template=TemplateEnum.DECLARATIVE)
assert "class ClassDefinition(" in code
assert "class Annotation(" in code
@@ -211,8 +210,6 @@ def test_sqla_imperative_dataclasses_exec(self):
session.add(p1)
q = session.query(mod.Person).where(mod.Person.name == p1.name)
persons = q.all()
# for person in persons:
# print(f'Person={person}')
assert len(persons) == 1
assert p1 in persons
p1 = persons[0]
@@ -235,8 +232,6 @@ def test_sqla_imperative_dataclasses_exec(self):
session.commit()
q = session.query(mod.Person).where(mod.Person.id == p2.id)
persons = q.all()
# for person in persons:
# print(f'Person={person}')
assert len(persons) == 1
p2_recap = persons[0]
p2mh = p2_recap.has_medical_history
@@ -285,8 +280,6 @@ def test_sqla_imperative_pydantic_exec(self):
q = session.query(mod.Person).where(mod.Person.name == p1.name)
# q = session.query(mod.Person)
persons = q.all()
for person in persons:
print(f"Person={person}")
self.assertEqual(1, len(persons))
assert p1 in persons
p1 = persons[0]
@@ -302,8 +295,6 @@ def test_sqla_imperative_pydantic_exec(self):
q = session.query(mod.Person).where(mod.Person.id == p2.id)
persons = q.all()
assert len(persons) == 1
# for person in persons:
# print(f'Person={person.has_medical_history}')
p2_recap = persons[0]
p2mh = p2_recap.has_medical_history
assert p2mh[0].duration == e1.duration
@@ -361,7 +352,6 @@ def test_sqla_declarative_exec(self):
session.add(mod.Person(id="P2", aliases=["Fred"], has_news_events=[news_event]))
# session.add(mod.Person(id='P3', has_familial_relationships=[{"related_to": "P4"}]))
session.commit()
# print(f'QUERYING:: {mod.NewsEvent} // {type(mod.NewsEvent)}')
q = session.query(mod.NewsEvent)
all_news = q.all()
# ensure news object is shared between persons
@@ -371,7 +361,6 @@ def test_sqla_declarative_exec(self):
for person in persons:
assert isinstance(person, mod.NamedThing)
logging.info(f"Person={person}")
# print(f' Person.address={person.current_address}')
for a in person.aliases:
logging.info(f" ALIAS={a}")
for e in person.has_medical_history:
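Where the declarative test above keeps its diagnostics, it routes them through logging.info rather than print, so the output stays silent under a normal test run but can be switched on when debugging. A small sketch of that pattern; the describe_persons helper and the throwaway objects are illustrative, not part of the test suite:

import logging

logger = logging.getLogger(__name__)


def describe_persons(persons):
    # Log each person instead of printing; enable the output with
    # logging.basicConfig(level=logging.INFO) or pytest's log_cli option.
    for person in persons:
        logger.info("Person=%s", person)
        for alias in getattr(person, "aliases", []):
            logger.info("  ALIAS=%s", alias)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    describe_persons([type("Person", (), {"aliases": ["Fred"]})()])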
tests/test_generators/test_sqlddlgen.py (0 additions, 9 deletions)
@@ -90,9 +90,6 @@ def test_sqlddl(self):
gen = SQLDDLGenerator(SCHEMA, mergeimports=True, rename_foreign_keys=True)
ddl = gen.serialize()
with open(SQLDDLLOG, "w") as log:
# with open(DDL_PATH, 'w') as stream:
# stream.write(ddl)
# print(ddl)
try:
os.remove(DB)
except OSError:
@@ -116,11 +113,9 @@ def test_sqlddl(self):
log.write(f"{cur.fetchall()}\n")
con.commit()
con.close()
# print(gen.to_sqla_python())
# output = StringIO()
# with redirect_stdout(output):
# gen.write_sqla_python_imperative('output.kitchen_sink')
# print(output.getvalue())
# with open(SQLA_CODE, 'w') as stream:
# stream.write(output.getvalue())
kitchen_module = create_and_compile_sqla_bindings(gen, SQLA_CODE)
@@ -134,8 +129,6 @@ def test_sqlddl(self):
kitchen_module.Person.name == NAME
)
log.write(f"Q={q}\n")
# for row in q.all():
# print(f'Row={row}')
agent = kitchen_module.Agent(id="Agent03")
log.write(f"Agent={agent}\n")
activity = kitchen_module.Activity(id="Act01", was_associated_with=agent)
@@ -181,8 +174,6 @@ def test_sqlddl(self):
log.write(f" Address={a}\n")
# if a.city == CITY:
# is_found_address = True
# for alias in p.aliases:
# print(f' AKA={a}')
# assert is_found_address
session.commit()

tests/test_generators/test_sqltablegen.py (1 addition, 4 deletions)
@@ -41,7 +41,6 @@ def test_inject_primary_key(self):
b.add_defaults()
gen = SQLTableGenerator(b.schema)
ddl = gen.generate_ddl()
# print(ddl)
assert "PRIMARY KEY (id)" in ddl
assert "full_name TEXT" in ddl
assert 'CREATE TABLE "dummy class"' in ddl
@@ -56,7 +55,6 @@ def test_no_injection(self):
b.add_defaults()
gen = SQLTableGenerator(b.schema, use_foreign_keys=False)
ddl = gen.generate_ddl()
# print(ddl)
assert "PRIMARY KEY (id)" not in ddl
assert "full_name TEXT" in ddl
assert 'CREATE TABLE "dummy class"' in ddl
@@ -80,7 +78,7 @@ def test_dialect(self):
for dialect in ["postgresql", "sqlite", "mysql"]:
gen = SQLTableGenerator(b.schema, dialect=dialect)
ddl = gen.generate_ddl()
# print(f"DIALECT: {dialect}\n SQL:\n{ddl}")

if dialect == "postgresql":
assert "id SERIAL" in ddl
assert "COMMENT ON TABLE" in ddl
@@ -218,7 +216,6 @@ def test_sqlddl_basic(self):
with open(SQLDDLLOG, "w") as log:
# with open(DDL_PATH, 'w') as stream:
# stream.write(ddl)
# print(ddl)
try:
os.remove(DB)
except OSError:
tests/test_generators/test_sqltransform.py (0 additions, 14 deletions)
@@ -44,7 +44,6 @@ def test_inject_primary_key(self):
b.add_class(DUMMY_CLASS, slots)
results = self._translate(b)
rel_schema = self._translate(b).schema
# print(yaml_dumper.dumps(rel_schema))
self.assertCountEqual(
slots + ["id"], list(rel_schema.classes[DUMMY_CLASS].attributes.keys())
)
@@ -111,7 +110,6 @@ def test_no_inject_primary_key(self):
b.add_class(DUMMY_CLASS, slots).set_slot("name", identifier=True)
results = self._translate(b)
rel_schema = self._translate(b).schema
# print(yaml_dumper.dumps(rel_schema))
self.assertCountEqual(
slots, list(rel_schema.classes[DUMMY_CLASS].attributes.keys())
)
@@ -132,7 +130,6 @@ def test_multivalued_literal(self):
)
results = self._translate(b)
rel_schema = self._translate(b).schema
# print(yaml_dumper.dumps(rel_schema))
rsv = SchemaView(rel_schema)
c = rsv.get_class("c")
assert c
@@ -157,7 +154,6 @@ def test_inject_foreign_key(self):
results = self._translate(b)
rel_schema = self._translate(b).schema
rsv = SchemaView(rel_schema)
# print(yaml_dumper.dumps(rel_schema))
c = rsv.get_class("c")
d = rsv.get_class("d")
self.assertCountEqual(
@@ -186,7 +182,6 @@ def test_inject_backref_foreign_key(self):
results = self._translate(b)
rel_schema = self._translate(b).schema
rsv = SchemaView(rel_schema)
# print(yaml_dumper.dumps(rel_schema))
c = rsv.get_class("c")
d = rsv.get_class("d")
self.assertCountEqual(
@@ -223,7 +218,6 @@ def test_inject_many_to_many(self):
results = self._translate(b)
rel_schema = self._translate(b).schema
rsv = SchemaView(rel_schema)
# print(yaml_dumper.dumps(rel_schema))
c = rsv.get_class("c")
d = rsv.get_class("d")
c_has_d = rsv.get_class("c_has_d")
@@ -252,11 +246,9 @@ def test_inject_many_to_many_with_inheritance(self):
"c1", is_a="c", slot_usage={"has_ds": SlotDefinition("has_ds", range="d1")}
)
b.add_class("d1", is_a="d")
# print(yaml_dumper.dumps(b.schema))
results = self._translate(b)
rel_schema = self._translate(b).schema
rsv = SchemaView(rel_schema)
# print(yaml_dumper.dumps(rel_schema))
c = rsv.get_class("c")
d = rsv.get_class("d")
c1 = rsv.get_class("c1")
@@ -294,7 +286,6 @@ def test_no_foreign_keys(self):
result = sqltr.transform()
rel_schema = result.schema
rsv = SchemaView(rel_schema)
print(yaml_dumper.dumps(rel_schema))
assert "c_has_d" not in rsv.all_classes()
c1 = rsv.get_class("c1")
self.assertCountEqual(c1.attributes.keys(), ["name", "description", "has_ds"])
@@ -318,7 +309,6 @@ def test_aliases(self):
results = self._translate(b)
rel_schema = self._translate(b).schema
rsv = SchemaView(rel_schema)
# print(yaml_dumper.dumps(rel_schema))
c = rsv.get_class("c")
d = rsv.get_class("d")
c_has_d = rsv.get_class("c_has_d")
@@ -339,7 +329,6 @@ def test_sqlt_on_metamodel(self):
sqltr = RelationalModelTransformer(sv)
result = sqltr.transform()
rschema = result.schema
# print(rschema.imports)
with open(META_OUT_PATH, "w") as stream:
stream.write(yaml_dumper.dumps(rschema))
# test Annotation is handled correctly. This has a key annotation_tag with alias 'tag'
@@ -393,7 +382,6 @@ def test_sqlt_complete_example(self):
"MedicalEvent",
]:
c = sv.get_class(relationship_class)
# print(f'RC: {relationship_class} // {c}')
assert any(
a
for a in c.attributes.values()
@@ -409,8 +397,6 @@

for cn in ["Person", "Organization"]:
c = sv.get_class(f"{cn}_has_news_event")
# print(list(c.attributes.keys()))
# print(list(c.attributes.values()))
a1 = c.attributes["has_news_event_id"]
self.assertEqual(a1.range, "NewsEvent")
a2 = c.attributes[f"{cn}_id"]
