Commit

Merge pull request #89 from monarch-initiative/refactor

Refactor

kingmanzhang committed Jan 29, 2019
2 parents eabef03 + d250e5d commit dfe74ba
Showing 48 changed files with 1,439 additions and 1,442 deletions.
4 changes: 2 additions & 2 deletions CHANGELOG.md
@@ -118,7 +118,7 @@ Additional changes for this version

* Prevent app from crashing when user's local HPO is outdated

* Added classes to model patient phenopacket (phenotype only)
* Added classes to appTempData patient phenopacket (phenotype only)
* Refactored to use phenol 1.0.0

## v1.1.5
@@ -141,5 +141,5 @@ Additional changes for this version

## v1.1.7

* Add algorithms to model patients
* Add algorithms to appTempData patients

2 changes: 1 addition & 1 deletion docs/source/Tutorial.rst
@@ -24,7 +24,7 @@ Follow the steps to start the curation process.

- Import LOINC codes. If you have configured the app properly, you should be able to see there are contents in the LOINC table. You can always click **"Initialize Loinc Table"** on the left upper corner to import Loinc codes from the Loinc Core Table file. Try using the "Search" function to select some Loinc codes, e.g. try searching for "10449-7" and then "glucose" (You will get one result for "10449-7" and many results for "glucose").

- Import HPO. Click "Initialize HPO model" on the left upper corner to import all HPO terms to the app. If you configured the app properly, this will automatically run when you start the app.
- Import HPO. Click "Initialize HPO appTempData" on the left upper corner to import all HPO terms to the app. If you configured the app properly, this will automatically run when you start the app.

- After completing the above steps, you should be able to start annotating LOINC codes!
- Go to the Loinc Table in the bottom half of the tab, and choose the LOINC code that you want to annotate. When you click the **"Auto Query"** button or double click on the LOINC code, the app will automatically find candidate HPO terms for you, listed from the most likely term to the least likely.
18 changes: 2 additions & 16 deletions loinc2hpo-core/pom.xml
@@ -23,21 +23,7 @@


<dependencies>
<!-- <dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
<dependency>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>
<version>3.3</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency> -->



</dependencies>
@@ -131,7 +117,7 @@
<!-- set useUniqueVersions=false in order that the classpath has the
SNAPSHOT instead of the build number prefixed to the dependency -->
<!--<useUniqueVersions>false</useUniqueVersions>-->
<mainClass>org.monarchinitiative.loinc2hpo.Loinc2Hpo</mainClass>
<!--<mainClass>org.monarchinitiative.loinc2hpo.Loinc2Hpo</mainClass>-->
</manifest>
</archive>
</configuration>
@@ -20,6 +20,9 @@ public class Constants {
//folder for LOINC categories
public static final String LOINCCategory = "LOINC CATEGORY";

//folder for Data folder
public static final String DATAFOLDER = "Data";


public static final String LOINCSYSTEM = "http://loinc.org";
//public static final String HAPIFHIRTESTSERVER = "http://fhirtest.uhn.ca/baseDstu3";
@@ -10,20 +10,16 @@
import org.monarchinitiative.loinc2hpo.loinc.LoincId;
import org.monarchinitiative.loinc2hpo.loinc.LoincPanel;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.formats.hpo.HpoOntology;
import org.monarchinitiative.phenol.io.obo.hpo.HpOboParser;
import org.monarchinitiative.phenol.ontology.data.Ontology;
import org.monarchinitiative.phenol.ontology.data.Term;
import org.monarchinitiative.phenol.ontology.data.TermId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
* This class manages all resources that are required for this app. Whenever a new resource is required, just call a getter to retrieve it.
@@ -39,7 +35,7 @@ public class ResourceCollection {
private String loincPanelPath;
//private String loincPanelAnnotationPath;
private HpoOntologyParser hpoOntologyParser;
private HpoOntology hpo;
private Ontology hpo;
private Map<TermId, Term> termidTermMap;
private Map<String, Term> termnameTermMap;
private ImmutableMap<LoincId, LoincEntry> loincEntryMap;
@@ -152,7 +148,7 @@ private void addLoincPanelAnnotation(String loincPanelAnnotationPath, Map<LoincI

}

public HpoOntology getHPO() throws PhenolException, FileNotFoundException {
public Ontology getHPO() throws PhenolException, FileNotFoundException {
if (this.hpo != null) {
return this.hpo;
}
@@ -164,4 +160,23 @@ public HpoOntology getHPO() throws PhenolException, FileNotFoundException {
return this.hpo;
}

public String getLoincEntryPath() {
return loincEntryPath;
}

public String getHpoOboPath() {
return hpoOboPath;
}

public String getHpoOwlPath() {
return hpoOwlPath;
}

public String getAnnotationMapPath() {
return annotationMapPath;
}

public String getLoincPanelPath() {
return loincPanelPath;
}
}
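The Javadoc above states the design: each resource is built lazily the first time its getter is called and cached for later calls. The middle of getHPO() is collapsed in this diff view, so the following is only a sketch of how such a lazily cached getter plausibly reads, with the parser wiring and import paths assumed rather than taken from the PR:

import java.io.FileNotFoundException;

import org.monarchinitiative.loinc2hpo.io.HpoOntologyParser;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.ontology.data.Ontology;

// Sketch only: the real body of getHPO() is collapsed above, so the parser calls
// and the package of HpoOntologyParser are assumptions.
class ResourceCollectionSketch {
    private String hpoOboPath;                    // set elsewhere, e.g. from app settings (assumption)
    private HpoOntologyParser hpoOntologyParser;  // created on first use
    private Ontology hpo;                         // cached ontology

    public Ontology getHPO() throws PhenolException, FileNotFoundException {
        if (this.hpo != null) {                   // already loaded: reuse the cached instance
            return this.hpo;
        }
        this.hpoOntologyParser = new HpoOntologyParser(this.hpoOboPath);
        this.hpoOntologyParser.parseOntology();   // loads hp.obo via phenol's OntologyLoader
        this.hpo = this.hpoOntologyParser.getOntology();
        return this.hpo;
    }
}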
@@ -1,18 +1,11 @@
package org.monarchinitiative.loinc2hpo.io;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.formats.hpo.HpoOntology;
import org.monarchinitiative.phenol.io.owl.OwlOntologyLoader;
import org.monarchinitiative.phenol.ontology.data.*;

import java.io.File;

/**
* This class parses the Human Phenotype Ontology in the owl format.
*/

@Deprecated
public class HpOwlParser {

private final File owlFile;
@@ -28,28 +21,28 @@ public HpOwlParser(File owlFile) {
this(owlFile,false);
}

public HpoOntology parse() throws PhenolException {
Ontology ontology;

//final OwlImmutableOntologyLoader loader = new OwlImmutableOntologyLoader(owlFile);
OwlOntologyLoader loader = new OwlOntologyLoader(owlFile);
ontology = loader.load();
if (debug) {
System.err.println(String.format("Parsed a total of %d HP terms",ontology.countAllTerms()));
}

// hpo root termid
//TermId hpoRoot = new TermId(new TermPrefix("HP"), "0000001");
TermId hpoRoot = TermId.of("HP", "0000001");

return new HpoOntology(
(ImmutableSortedMap<String, String>) ontology.getMetaInfo(),
ontology.getGraph(),
hpoRoot,
ontology.getNonObsoleteTermIds(),
ontology.getObsoleteTermIds(),
(ImmutableMap<TermId, Term>) ontology.getTermMap(),
(ImmutableMap<Integer, Relationship>) ontology.getRelationMap());
}
// public HpoOntology parse() throws PhenolException {
// Ontology ontology;
//
// //final OwlImmutableOntologyLoader loader = new OwlImmutableOntologyLoader(owlFile);
// OwlOntologyLoader loader = new OwlOntologyLoader(owlFile);
// ontology = loader.load();
// if (debug) {
// System.err.println(String.format("Parsed a total of %d HP terms",ontology.countAllTerms()));
// }
//
// // hpo root termid
// //TermId hpoRoot = new TermId(new TermPrefix("HP"), "0000001");
// TermId hpoRoot = TermId.of("HP", "0000001");
//
// return new HpoOntology(
// (ImmutableSortedMap<String, String>) ontology.getMetaInfo(),
// ontology.getGraph(),
// hpoRoot,
// ontology.getNonObsoleteTermIds(),
// ontology.getObsoleteTermIds(),
// (ImmutableMap<TermId, Term>) ontology.getTermMap(),
// (ImmutableMap<Integer, Relationship>) ontology.getRelationMap());
// }

}
@@ -4,8 +4,7 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.formats.hpo.HpoOntology;
import org.monarchinitiative.phenol.io.obo.hpo.HpOboParser;
import org.monarchinitiative.phenol.io.OntologyLoader;
import org.monarchinitiative.phenol.ontology.data.*;

import java.io.File;
@@ -23,7 +22,7 @@ public class HpoOntologyParser {
private String hpoOboPath =null;
private String hpoOwlPath = null;
private boolean isObo = false;
private HpoOntology hpoOntology;
private Ontology hpoOntology;


/** Map of all of the Phenotypic abnormality terms (i.e., not the inheritance terms). */
@@ -45,14 +44,8 @@ public HpoOntologyParser(String path){
* @throws PhenolException, OWLOntologyCreationException
*/
public void parseOntology() throws PhenolException, FileNotFoundException {
if (isObo) {
HpOboParser hpoOboParser = new HpOboParser(new File(hpoOboPath));
logger.debug("ontology path: " + hpoOboPath);
this.hpoOntology = hpoOboParser.parse();
} else {
HpOwlParser hpoOwlParser = new HpOwlParser(new File(hpoOwlPath));
this.hpoOntology = hpoOwlParser.parse();
}
logger.debug("ontology path: " + hpoOboPath);
this.hpoOntology = OntologyLoader.loadOntology(new File(hpoOboPath));
}

private void initTermMaps() {
@@ -64,7 +57,7 @@ private void initTermMaps() {
}
}

public HpoOntology getOntology() {
public Ontology getOntology() {

return this.hpoOntology;

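The change above drops the format-specific HpOboParser/HpOwlParser pair in favour of phenol's single OntologyLoader entry point and the generic Ontology type; the test classes further down repeat the same substitution. A minimal sketch of the new loading call, assuming a locally downloaded hp.obo at an illustrative path:

import java.io.File;
import java.util.Map;

import org.monarchinitiative.phenol.io.OntologyLoader;
import org.monarchinitiative.phenol.ontology.data.Ontology;
import org.monarchinitiative.phenol.ontology.data.Term;
import org.monarchinitiative.phenol.ontology.data.TermId;

public class LoadHpoSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative local path to hp.obo; not part of this PR.
        File hpoObo = new File("data/hp.obo");

        // One call replaces the old HpOboParser/HpOwlParser + HpoOntology combination.
        Ontology hpo = OntologyLoader.loadOntology(hpoObo);

        // Methods used elsewhere in this diff are available on the generic Ontology type.
        System.out.println("Parsed a total of " + hpo.countAllTerms() + " HP terms");
        Map<TermId, Term> termMap = hpo.getTermMap();
        System.out.println("Term map size: " + termMap.size());
    }
}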
@@ -3,9 +3,6 @@
import org.monarchinitiative.loinc2hpo.loinc.LOINC2HpoAnnotationImpl;
import org.monarchinitiative.loinc2hpo.loinc.LoincId;
import org.monarchinitiative.phenol.ontology.data.TermId;
//import org.monarchinitiative.phenol.ontology.data.ImmutableTermId;
import org.monarchinitiative.phenol.ontology.data.TermPrefix;
//import org.monarchinitiative.phenol.ontology.data.ImmutableTermPrefix;

import java.io.IOException;
import java.util.Map;
@@ -17,8 +14,8 @@ public interface LoincAnnotationSerializer {
Map<LoincId, LOINC2HpoAnnotationImpl> parse(String filepath) throws Exception;

default TermId convertToTermID(String record) {
TermPrefix prefix = new TermPrefix("HP");
if (!record.startsWith(prefix.getValue()) || record.length() <= 3) {
String prefix = "HP";
if (!record.startsWith(prefix) || record.length() <= 3) {
return null;
}
String id = record.substring(3);
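TermPrefix no longer exists in phenol, so the hunk above switches to a plain "HP" string; identifiers are then built with TermId.of, as seen in the HpOwlParser diff, and the same change recurs in the serialization utility below. A sketch of how the complete helper plausibly reads after the change — only the lines visible in the hunk are from the PR, the rest is an assumption:

import org.monarchinitiative.phenol.ontology.data.TermId;

class TermIdConversionSketch {
    // Mirrors the interface's default convertToTermID; the return statement below the
    // visible hunk is a reasonable completion, not taken from the PR.
    static TermId convertToTermID(String record) {
        String prefix = "HP";
        if (!record.startsWith(prefix) || record.length() <= 3) {
            return null;                        // not an HPO curie such as "HP:0001250"
        }
        String id = record.substring(3);        // drop the leading "HP:"
        return TermId.of(prefix, id);
    }
}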
@@ -45,7 +45,7 @@ public LoincAnnotationSerializerToTSVSingleFile(Map<TermId, Term> hpoTermMap) {
@Override
public void serialize(Map<LoincId, LOINC2HpoAnnotationImpl> annotationmap, String filepath) throws IOException {


logger.info("file path: " + filepath);

BufferedWriter writer = new BufferedWriter(new FileWriter(filepath));
writer.write(header);
@@ -68,7 +68,8 @@ public Map<LoincId, LOINC2HpoAnnotationImpl> parse(String filepath) throws FileN
Map<LoincId, LOINC2HpoAnnotationImpl> deserializedMap = new LinkedHashMap<>();
Map<LoincId, LOINC2HpoAnnotationImpl.Builder> builders = new LinkedHashMap<>();
BufferedReader reader = new BufferedReader(new FileReader(filepath));
reader.lines().forEach(serialized -> {

reader.lines().filter(l -> !l.isEmpty()).forEach(serialized -> {
String[] elements = serialized.split("\\t");
if (elements.length == 13 && !serialized.startsWith("loincId")) {
try {
@@ -4,7 +4,6 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.monarchinitiative.phenol.ontology.data.TermId;
import org.monarchinitiative.phenol.ontology.data.TermPrefix;


import java.io.*;
@@ -69,8 +68,8 @@ public static <K extends Serializable, V extends Serializable> Map<K, V> deseria


public static TermId convertToTermID(String record) {
TermPrefix prefix = new TermPrefix("HP");
if (!record.startsWith(prefix.getValue()) || record.length() <= 3) {
String prefix = "HP";
if (!record.startsWith(prefix) || record.length() <= 3) {
logger.error("Non HPO termId is detected from TSV: " + record);
return null;
}
@@ -10,14 +10,12 @@
import org.monarchinitiative.loinc2hpo.codesystems.Loinc2HPOCodedValue;
import org.monarchinitiative.loinc2hpo.loinc.*;
import org.monarchinitiative.loinc2hpo.testresult.LabTestOutcome;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.formats.hpo.HpoOntology;
import org.monarchinitiative.phenol.io.obo.hpo.HpOboParser;
import org.monarchinitiative.phenol.io.OntologyLoader;
import org.monarchinitiative.phenol.ontology.data.Ontology;
import org.monarchinitiative.phenol.ontology.data.Term;
import org.monarchinitiative.phenol.ontology.data.TermId;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;

@@ -34,13 +32,7 @@ public static void setup() throws Exception{
observation = FhirResourceRetriever.parseJsonFile2Observation(path);

String hpo_obo = FhirObservationAnalyzerTest.class.getClassLoader().getResource("obo/hp.obo").getPath();
HpOboParser hpoOboParser = new HpOboParser(new File(hpo_obo));
HpoOntology hpo = null;
try {
hpo = hpoOboParser.parse();
} catch (PhenolException e) {
e.printStackTrace();
}
Ontology hpo = OntologyLoader.loadOntology(new File(hpo_obo));
ImmutableMap.Builder<String,Term> termmap = new ImmutableMap.Builder<>();
if (hpo !=null) {
List<Term> res = hpo.getTermMap().values().stream().distinct()
@@ -9,9 +9,8 @@
import org.monarchinitiative.loinc2hpo.exception.MalformedLoincCodeException;
import org.monarchinitiative.loinc2hpo.exception.UnrecognizedCodeException;
import org.monarchinitiative.loinc2hpo.loinc.*;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.formats.hpo.HpoOntology;
import org.monarchinitiative.phenol.io.obo.hpo.HpOboParser;
import org.monarchinitiative.phenol.io.OntologyLoader;
import org.monarchinitiative.phenol.ontology.data.Ontology;
import org.monarchinitiative.phenol.ontology.data.Term;
import org.monarchinitiative.phenol.ontology.data.TermId;

@@ -50,13 +49,8 @@ public static void setup() throws MalformedLoincCodeException, DataFormatExceptio


String hpo_obo = FhirObservationAnalyzerTest.class.getClassLoader().getResource("obo/hp.obo").getPath();
HpOboParser hpoOboParser = new HpOboParser(new File(hpo_obo));
HpoOntology hpo = null;
try {
hpo = hpoOboParser.parse();
} catch (PhenolException e) {
e.printStackTrace();
}
Ontology hpo = OntologyLoader.loadOntology(new File(hpo_obo));

ImmutableMap.Builder<String,Term> termmap = new ImmutableMap.Builder<>();
ImmutableMap.Builder<TermId,Term> termmap2 = new ImmutableMap.Builder<>();
if (hpo !=null) {
@@ -9,9 +9,8 @@
import org.monarchinitiative.loinc2hpo.exception.AmbiguousResultsFoundException;
import org.monarchinitiative.loinc2hpo.exception.MalformedLoincCodeException;
import org.monarchinitiative.loinc2hpo.loinc.*;
import org.monarchinitiative.phenol.base.PhenolException;
import org.monarchinitiative.phenol.formats.hpo.HpoOntology;
import org.monarchinitiative.phenol.io.obo.hpo.HpOboParser;
import org.monarchinitiative.phenol.io.OntologyLoader;
import org.monarchinitiative.phenol.ontology.data.Ontology;
import org.monarchinitiative.phenol.ontology.data.Term;
import org.monarchinitiative.phenol.ontology.data.TermId;

@@ -42,13 +41,7 @@ public static void setup() throws MalformedLoincCodeException, IOException, Data
observations[1] = observation2;

String hpo_obo = FhirObservationAnalyzerTest.class.getClassLoader().getResource("obo/hp.obo").getPath();
HpOboParser hpoOboParser = new HpOboParser(new File(hpo_obo));
HpoOntology hpo = null;
try {
hpo = hpoOboParser.parse();
} catch (PhenolException e) {
e.printStackTrace();
}
Ontology hpo = OntologyLoader.loadOntology(new File(hpo_obo));
ImmutableMap.Builder<String,Term> termmap = new ImmutableMap.Builder<>();
ImmutableMap.Builder<TermId,Term> termmap2 = new ImmutableMap.Builder<>();
if (hpo !=null) {