Added schema generator for KRS.
Thomas Børlum committed Nov 22, 2011
1 parent 201acbc commit b163591
Showing 7 changed files with 332 additions and 33 deletions.
@@ -27,14 +27,17 @@
package com.trifork.stamdata.importer.jobs.sor;

import java.io.File;
import java.io.IOException;

import javax.xml.parsers.*;

import com.trifork.stamdata.importer.parsers.exceptions.ParserException;
import com.trifork.stamdata.importer.persistence.Persister;
import org.slf4j.*;

import com.trifork.stamdata.importer.config.KeyValueStore;
import com.trifork.stamdata.importer.jobs.FileParser;
import org.xml.sax.SAXException;


/**
@@ -79,38 +82,35 @@ public void parse(File[] files, Persister persister, KeyValueStore keyValueStore
{
for (File file : files)
{
+ MDC.put("filename", file.getName());
+
SORDataSets dataSets = parse(file);
persister.persistCompleteDataset(dataSets.getPraksisDS());
persister.persistCompleteDataset(dataSets.getYderDS());
persister.persistCompleteDataset(dataSets.getSygehusDS());
persister.persistCompleteDataset(dataSets.getSygehusAfdelingDS());
persister.persistCompleteDataset(dataSets.getApotekDS());
+
+ MDC.remove("filename");
}
}

- public static SORDataSets parse(File file) throws Exception
- {
+ public static SORDataSets parse(File file) throws SAXException, ParserConfigurationException, IOException
+ {
SORDataSets dataSets = new SORDataSets();
SOREventHandler handler = new SOREventHandler(dataSets);
SAXParserFactory factory = SAXParserFactory.newInstance();

- try
- {
- SAXParser parser = factory.newSAXParser();
-
- if (file.getName().toLowerCase().endsWith("xml"))
- {
- parser.parse(file, handler);
- }
- else
- {
- logger.warn("Can only parse files with extension 'xml'! The file is ignored. file={}", file.getAbsolutePath());
- }
- }
- catch (Exception e)
- {
- throw new Exception("Error parsing data from file: " + file.getAbsolutePath(), e);
- }
+ SAXParser parser = factory.newSAXParser();
+
+ if (file.getName().toLowerCase().endsWith("xml"))
+ {
+ parser.parse(file, handler);
+ }
+ else
+ {
+ logger.warn("Can only parse files with extension 'xml'! The file is ignored. file={}", file.getAbsolutePath());
+ }

return dataSets;
}
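
With the try/catch wrapper gone, parse(File) now propagates SAXException, ParserConfigurationException and IOException directly to its callers. Below is a minimal caller-side sketch, not part of this commit, of how those checked exceptions could be translated into the project's ParserException; it assumes ParserException exposes a (String, Throwable) constructor and that the helper lives in the same class, so all needed imports are already present.

// Hypothetical helper, not part of the commit. Error messages are illustrative.
private static SORDataSets parseOrFail(File file) throws ParserException
{
    try
    {
        return parse(file);
    }
    catch (SAXException e)
    {
        throw new ParserException("Error parsing data from file: " + file.getAbsolutePath(), e);
    }
    catch (ParserConfigurationException e)
    {
        throw new ParserException("Could not create a SAX parser.", e);
    }
    catch (IOException e)
    {
        throw new ParserException("Error reading file: " + file.getAbsolutePath(), e);
    }
}
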
@@ -95,13 +95,10 @@ public void add(T entity)

if (previousValue != null)
{
- // DO NOT CHANGE THIS TO 'WARN' IT IS AN ERROR.
- //
- // We might not be able to fix this for the given register but it should
- // be handled in the future. At least it should be a conscious choice
- // that we ignore double keys in a register.
-
- logger.error("Two entries in a single import contains the same id. type={}, id={}", type.getSimpleName(), id);
+ // FIXME: This is actually an error, but it has always been this way.
+ // Double keys should not happen.
+
+ logger.warn("Two entries in a single import contains the same id. type={}, id={}", type.getSimpleName(), id);
}
}
}
@@ -0,0 +1,234 @@
/**
* The contents of this file are subject to the Mozilla Public
* License Version 1.1 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* Contributor(s): Contributors are attributed in the source code
* where applicable.
*
* The Original Code is "Stamdata".
*
* The Initial Developer of the Original Code is Trifork Public A/S.
*
* Portions created for the Original Code are Copyright 2011,
* Lægemiddelstyrelsen. All Rights Reserved.
*
* Portions created for the FMKi Project are Copyright 2011,
* National Board of e-Health (NSI). All Rights Reserved.
*/
package dk.nsi.stamdata.replication.tools;

import com.google.inject.Guice;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.trifork.stamdata.Namespace;
import com.trifork.stamdata.Nullable;
import com.trifork.stamdata.persistence.RecordSpecification;
import com.trifork.stamdata.specs.SikredeRecordSpecs;
import com.trifork.stamdata.specs.YderregisterRecordSpecs;
import dk.nsi.stamdata.views.View;
import dk.nsi.stamdata.views.ViewModule;
import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.XMLWriter;

import javax.xml.bind.annotation.XmlSchema;
import javax.xml.bind.annotation.XmlTransient;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.Field;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Date;
import java.util.Map;

import static com.trifork.stamdata.Preconditions.checkArgument;

public class SchemaGenerator
{
private static final Key<Map<String, Class<? extends View>>> view = Key.get(new TypeLiteral<Map<String, Class<? extends View>>>() {});

public static void main(String[] args) throws IOException
{
File outputDir = new File(args[0]);

Collection<Class<? extends View>> views = Guice.createInjector(new ViewModule()).getInstance(view).values();

for (Class<? extends View> view : views)
{
String packageName = view.getPackage().getName();
String subDirName = packageName.substring(packageName.lastIndexOf(".") + 1);
String viewName = view.getSimpleName().toLowerCase();

Writer writer = createWriterFor(outputDir, subDirName, viewName);

generate(view, writer);

writer.flush();
writer.close();
}

// HACK: Make generic for all record specs.

generateRecordXsd(outputDir, "sikrede", "sikrede", SikredeRecordSpecs.ENTRY_RECORD_SPEC);
generateRecordXsd(outputDir, "yderregisteret", "yder", YderregisterRecordSpecs.YDER_RECORD_TYPE);
generateRecordXsd(outputDir, "yderregisteret", "person", YderregisterRecordSpecs.PERSON_RECORD_TYPE);
}

private static void generateRecordXsd(File outputDir, String register, String entityName, RecordSpecification spec) throws IOException
{
Writer writer = createWriterFor(outputDir, register, entityName);

generate(spec, writer, register);

writer.flush();
writer.close();
}

private static FileWriter createWriterFor(File outputDir, String register, String entityName) throws IOException
{
File registerDir = new File(outputDir, register);
registerDir.mkdirs();

File schemaFile = new File(registerDir, entityName + ".xsd");
return new FileWriter(schemaFile);
}

public static void generate(Class<? extends View> entity, Writer writer) throws IOException
{
Document doc = DocumentFactory.getInstance().createDocument();

String targetNamespace = entity.getPackage().getAnnotation(XmlSchema.class).namespace();
String entityName = entity.getSimpleName().toLowerCase();

Element all = generate(doc, targetNamespace, entityName);

for (Field method : entity.getDeclaredFields())
{
if (method.isAnnotationPresent(XmlTransient.class)) continue;

String name = method.getName();
String type = convert2SchemaType(method);
addElement(all, name, type, null);
}

XMLWriter xmlWriter = new XMLWriter(writer, OutputFormat.createPrettyPrint());
xmlWriter.write(doc);
}

private static Element generate(Document doc, String namespace, String entityName)
{
Element root = doc.addElement("xs:schema");

root.addNamespace("xs", "http://www.w3.org/2001/XMLSchema");
root.addNamespace("tns", namespace);
root.addAttribute("targetNamespace", namespace);

Element element = root.addElement("xs:element");
element.addAttribute("name", entityName);

Element complexType = element.addElement("xs:complexType");

// We use "all" and not "sequence" because we cannot tell from the
// class the order of the elements.
return complexType.addElement("xs:all");
}

private static void addElement(Element parent, String name, String type, @Nullable Integer length)
{
Element field = parent.addElement("xs:element");
field.addAttribute("name", name);

if (length != null && type.equals("xs:string"))
{
field.addElement("xs:simpleType").addElement("xs:restriction").addElement("xs:maxLength").addAttribute("value", length.toString());
}
else
{
field.addAttribute("type", type);
}
}

private static String convert2SchemaType(Field field)
{
if (String.class.isAssignableFrom(field.getType()))
{
return "xs:string";
}
else if (Long.class.isAssignableFrom(field.getType()) || BigInteger.class.isAssignableFrom(field.getType()) || long.class.isAssignableFrom(field.getType()) || int.class.isAssignableFrom(field.getType()))
{
return "xs:integer";
}
else if (Date.class.isAssignableFrom(field.getType()))
{
return "xs:dateTime";
}
else if (Float.class.isAssignableFrom(field.getType()))
{
return "xs:float";
}
else if (Double.class.isAssignableFrom(field.getType()))
{
return "xs:double";
}
else if (Boolean.class.isAssignableFrom(field.getType()) || boolean.class.isAssignableFrom(field.getType()))
{
return "xs:boolean";
}
else
{
checkArgument(false, "Return type '%s' on field %s is not supported.", field.getType(), field.toString());
return null;
}
}

public static void generate(RecordSpecification specification, Writer writer, String register) throws IOException
{
Document doc = DocumentFactory.getInstance().createDocument();

// FIXME: The register should be added to the record spec.

String namespace = Namespace.STAMDATA_3_0 + "/" + register;
String entityName = specification.getTable().toLowerCase();

Element all = generate(doc, namespace, entityName);

for (RecordSpecification.FieldSpecification field : specification.getFieldSpecs())
{
addElement(all, field.name, convert2XsdType(field.type), field.length);
}

all.addElement("xs:element").addAttribute("name", "validFrom").addAttribute("type", "xs:dateTime");
all.addElement("xs:element").addAttribute("name", "validTo").addAttribute("type", "xs:dateTime");

XMLWriter xmlWriter = new XMLWriter(writer, OutputFormat.createPrettyPrint());
xmlWriter.write(doc);
}

private static String convert2XsdType(RecordSpecification.RecordFieldType fieldType)
{
if (fieldType == RecordSpecification.RecordFieldType.ALPHANUMERICAL)
{
return "xs:string";
}
else if (fieldType == RecordSpecification.RecordFieldType.NUMERICAL)
{
return "xs:integer";
}
else
{
throw new AssertionError();
}
}
}
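
For reference, a small sketch, not part of the commit, of how the generator might be driven from a build step. SchemaGenerator.main takes the output directory as its single argument; each view is written as <viewname>.xsd into a sub-directory named after the last segment of its package, and the record specs end up as sikrede/sikrede.xsd, yderregisteret/yder.xsd and yderregisteret/person.xsd. The path below is only an example.

import java.io.IOException;

import dk.nsi.stamdata.replication.tools.SchemaGenerator;

// Hypothetical driver class; "target/schemas" is an arbitrary example path.
// SchemaGenerator creates the per-register sub-directories itself.
public class GenerateAllSchemas
{
    public static void main(String[] args) throws IOException
    {
        SchemaGenerator.main(new String[] { "target/schemas" });
    }
}
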
@@ -43,8 +43,8 @@
@XmlType(namespace=STAMDATA_3_0 + "/common")
@XmlAccessorType(XmlAccessType.FIELD)
@XmlAccessorOrder(XmlAccessOrder.UNDEFINED)
- public abstract class View {
-
+ public abstract class View
+ {
public abstract String getId();

public abstract BigInteger getRecordID();
@@ -56,8 +56,8 @@ public abstract class View {
*
* @see HistoryOffset
*/
- public String getOffset() {
-
+ public String getOffset()
+ {
return new HistoryOffset(getRecordID().toString(), getUpdated()).toString();
}
}
@@ -62,11 +62,9 @@ public class ATC extends View
@XmlTransient
private Date modifiedDate;

- @XmlTransient
@Temporal(TIMESTAMP)
protected Date validFrom;

- @XmlTransient
@Temporal(TIMESTAMP)
protected Date validTo;

@@ -54,7 +54,7 @@ public class Administrationsvej extends View
private BigInteger recordID;

@Column(name = "AdministrationsvejKode")
- private String id;
+ protected String id;

@Column(name = "AdministrationsvejTekst")
protected String tekst;
