Incremental publishing #4050

Draft
wants to merge 6 commits into base: develop
@@ -112,6 +112,7 @@ public AbstractPipelineOutput execute(final Map<String, String> input) throws DI
original.forEach(fi -> job.remove(fi));
// rewrite results
final Collection<FileInfo> rewritten = rewrite(original);
// FIXME this only rewrites FileInfos, we need to also rewrite dependency graph
// move temp files and update links
final Job tempJob = new Job(job, emptyMap(), rewritten);
filter.setJob(tempJob);
@@ -176,7 +177,8 @@ private Collection<FileInfo> rewrite(final Collection<FileInfo> fis) throws DITA
try {
final DOMSource source = new DOMSource(serialize(fis));
final Map<URI, FileInfo> files = new HashMap<>();
final Destination result = new SAXDestination(new Job.JobHandler(new HashMap<>(), files));
final UriGraph dependencyGraph = new UriGraph(files.size());
final Destination result = new SAXDestination(new Job.JobHandler(new HashMap<>(), files, dependencyGraph));
rewriteTransformer.setSource(source);
rewriteTransformer.setDestination(result);
rewriteTransformer.transform();
24 changes: 24 additions & 0 deletions src/main/java/org/dita/dost/module/GenMapAndTopicListModule.java
@@ -153,6 +153,8 @@ public final class GenMapAndTopicListModule extends SourceReaderModule {
/** Formats for source topics */
// XXX This is a hack to retain format. A better solution would be to keep the format with the source URI
private final Map<URI, String> sourceFormat = new HashMap<>();
/** Dependency graph for source resources. */
private final UriGraph dependencyGraph = new UriGraph(16);

/**
* Create a new instance and do the initialization.
@@ -195,6 +197,8 @@ public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws
try {
parseInputParameters(input);

initCache(input);

initFilters();
initXmlReader();

@@ -214,6 +218,10 @@ public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws
return null;
}

private void initCache(final AbstractPipelineInput input) {
String cacheDir = input.getAttribute("cache.dir");
}

private void readResourceFiles() throws DITAOTException {
if (!resources.isEmpty()) {
for (URI resource : resources) {
@@ -407,6 +415,7 @@ private void processFile(final Reference ref) throws DITAOTException {

if (listFilter.isValidInput()) {
processParseResult(currentFile);
readDependencies(currentFile);
categorizeCurrentFile(ref);
} else if (!currentFile.equals(rootFile)) {
logger.error(MessageUtils.getMessage("DOTJ021E", params).toString());
@@ -471,6 +480,12 @@ private void processFile(final Reference ref) throws DITAOTException {

}

private void readDependencies(URI currentFile) {
for (URI hrefTarget : listFilter.getHrefTargets()) {
dependencyGraph.add(currentFile, hrefTarget);
}
}

/**
* Process results from parsing a single topic or map
*
@@ -873,6 +888,15 @@ private void outputResult() throws DITAOTException {
.isInput(true)
.build());

for (Map.Entry<URI, URI> entry : dependencyGraph.getAll()) {
// URI from = baseInputDir.relativize(entry.getKey());
// URI to = baseInputDir.relativize(entry.getValue());
URI from = entry.getKey();
URI to = entry.getValue();
job.addDependency(from, to);

}

try {
logger.info("Serializing job specification");
job.write();
@@ -132,6 +132,8 @@ public abstract class AbstractReaderModule extends AbstractPipelineModuleImpl {
TopicFragmentFilter topicFragmentFilter;
/** Files found during additional resource crawl. **/
final Set<URI> additionalResourcesSet = ConcurrentHashMap.newKeySet();
/** Dependency graph for source resources. */
final UriGraph dependencyGraph = new UriGraph(16);

public abstract void readStartFile() throws DITAOTException;

@@ -378,6 +380,7 @@ void readFile(final Reference ref, final URI parseFile) throws DITAOTException {

if (listFilter.isValidInput()) {
processParseResult(currentFile);
readDependencies(currentFile);
categorizeCurrentFile(ref);
} else if (!currentFile.equals(rootFile)) {
logger.error(MessageUtils.getMessage("DOTJ021E", params).toString());
@@ -755,6 +758,14 @@ void outputResult() throws DITAOTException {
.isInput(true)
.build());

for (Map.Entry<URI, URI> entry : dependencyGraph.getAll()) {
// URI from = baseInputDir.relativize(entry.getKey());
// URI to = baseInputDir.relativize(entry.getValue());
URI from = entry.getKey();
URI to = entry.getValue();
job.addDependency(from, to);
}

try {
logger.info("Serializing job specification");
job.write();
@@ -927,4 +938,13 @@ void init() throws SAXException {
initFilters();
}

void initCache(final AbstractPipelineInput input) {
String cacheDir = input.getAttribute("cache.dir");
}

private void readDependencies(URI currentFile) {
for (URI hrefTarget : listFilter.getHrefTargets()) {
dependencyGraph.add(currentFile, hrefTarget);
}
}
}
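
Note: UriGraph itself is not part of this diff, only its call sites are. Judging from how it is used above (new UriGraph(int), add(URI, URI), and getAll() returning from/to pairs), it presumably maps URIs to vertex indexes in the adjacency-matrix Graph added below in src/main/java/org/dita/dost/util/Graph.java. A minimal sketch under that assumption; the real class may differ:

package org.dita.dost.util;

import java.net.URI;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Hypothetical sketch of UriGraph, which this PR uses but does not include:
 * a URI-keyed view over the adjacency-matrix Graph.
 */
public class UriGraph {
    private final Graph graph;
    private final Map<URI, Integer> indexes = new HashMap<>();
    private final List<URI> uris = new ArrayList<>();

    public UriGraph(final int initialSize) {
        graph = new Graph(initialSize);
    }

    /** Record a directed dependency edge, registering unseen URIs as new vertexes. */
    public synchronized void add(final URI from, final URI to) {
        graph.addEdge(index(from), index(to));
    }

    /** Return every recorded edge as a (from, to) pair, as callers of getAll() expect. */
    public synchronized List<Map.Entry<URI, URI>> getAll() {
        final List<Map.Entry<URI, URI>> res = new ArrayList<>();
        final boolean[][] data = graph.getData();
        for (int i = 0; i < uris.size(); i++) {
            for (int j = 0; j < uris.size(); j++) {
                if (data[i][j]) {
                    res.add(new AbstractMap.SimpleImmutableEntry<>(uris.get(i), uris.get(j)));
                }
            }
        }
        return res;
    }

    private int index(final URI uri) {
        return indexes.computeIfAbsent(uri, u -> {
            uris.add(u);
            return uris.size() - 1;
        });
    }
}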
@@ -46,6 +46,8 @@ public AbstractPipelineOutput execute(final AbstractPipelineInput input) throws
parseInputParameters(input);
init();

initCache(input);

readResourceFiles();
readStartFile();
processWaitList();
77 changes: 77 additions & 0 deletions src/main/java/org/dita/dost/util/Graph.java
@@ -0,0 +1,77 @@
/*
* This file is part of the DITA Open Toolkit project.
*
* Copyright 2022 Jarno Elovirta
*
* See the accompanying LICENSE file for applicable license.
*/

package org.dita.dost.util;

/**
 * Implementation of a directed graph using an adjacency matrix.
 *
 * @see <a href="https://en.wikipedia.org/wiki/Adjacency_matrix#Directed_graphs">Adjacency matrix</a>
 */
public class Graph {
private int size;
private boolean[][] adjacentMatrix;

public Graph(int size) {
if (size < 0) {
throw new IllegalArgumentException();
}
this.size = size;
adjacentMatrix = new boolean[size][size];
}

public int getSize() {
return size;
}

public boolean[][] getData() {
boolean[][] res = new boolean[size][size];
for (int i = 0; i < size; i++) {
System.arraycopy(adjacentMatrix[i], 0, res[i], 0, size);
}
return res;
}

public void addEdge(int source, int destination) {
if (source < 0 || destination < 0) {
throw new IllegalArgumentException();
}
if (source >= size || destination >= size) {
synchronized (this) {
int newSize = Math.max(source, destination) + 1;
boolean[][] newAdjacentMatrix = new boolean[newSize][newSize];
for (int i = 0; i < size; i++) {
System.arraycopy(adjacentMatrix[i], 0, newAdjacentMatrix[i], 0, size);
}
size = newSize;
adjacentMatrix = newAdjacentMatrix;
}
}
adjacentMatrix[source][destination] = true;
}

public void removeEdge(int source, int destination) {
if (source < 0 || destination < 0) {
throw new IllegalArgumentException();
}
if (source < size && destination < size) {
adjacentMatrix[source][destination] = false;
}
}

public boolean isEdge(int source, int destination) {
if (source < 0 || destination < 0) {
throw new IllegalArgumentException();
}
if (destination < size && source < size) {
return adjacentMatrix[source][destination];
}
return false;
}

}
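
As a quick illustration of the class above: addEdge grows the matrix on demand when it sees an index beyond the current size, so callers can start with a small capacity. The vertex indexes below are arbitrary examples, not values used anywhere in this PR.

import org.dita.dost.util.Graph;

public class GraphExample {
    public static void main(String[] args) {
        final Graph graph = new Graph(2);        // room for 2 vertexes to start with
        graph.addEdge(0, 1);                     // directed edge 0 -> 1
        graph.addEdge(3, 0);                     // index 3 is out of range, so the matrix grows to 4x4
        System.out.println(graph.getSize());     // 4
        System.out.println(graph.isEdge(0, 1));  // true
        System.out.println(graph.isEdge(1, 0));  // false: edges are directed
        graph.removeEdge(0, 1);
        System.out.println(graph.isEdge(0, 1));  // false again
    }
}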
39 changes: 31 additions & 8 deletions src/main/java/org/dita/dost/util/Job.java
@@ -12,25 +12,19 @@
import org.dita.dost.store.Store;
import org.w3c.dom.Document;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.transform.Result;
import javax.xml.transform.dom.DOMResult;
import java.io.*;
import java.lang.reflect.Field;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
@@ -127,6 +121,7 @@ public final class Job {
private final Map<URI, FileInfo> files = new ConcurrentHashMap<>();
private long lastModified;
private final Store store;
private final UriGraph dependencyGraph;

/**
* Create new job configuration instance. Initialise by reading temporary configuration files.
@@ -146,6 +141,7 @@ public Job(final File tempDir, final Store store) throws IOException {
tempDirURI = tmpDirUri.toString().endsWith("/") ? tmpDirUri : URI.create(tmpDirUri + "/");
jobFile = new File(tempDir, JOB_FILE);
prop = new HashMap<>();
dependencyGraph = new UriGraph(16);
read();
for (Map.Entry<String, String> e : configuration.entrySet()) {
if (!prop.containsKey(e.getKey())) {
@@ -161,6 +157,7 @@ public Job(final Job job, final Map<String, Object> prop, final Collection<FileI
this.jobFile = new File(tempDir, JOB_FILE);
this.prop = prop;
this.files.putAll(files.stream().collect(Collectors.toMap(fi -> fi.uri, Function.identity())));
this.dependencyGraph = new UriGraph(files.size());
}

public Store getStore() {
@@ -186,7 +183,7 @@ private void read() throws IOException {
lastModified = getStore().getLastModified(jobFile.toURI());
if (getStore().exists(jobFile.toURI())) {
try (final InputStream in = new FileInputStream(jobFile)) {
getStore().transform(jobFile.toURI(), new JobHandler(prop, files));
getStore().transform(jobFile.toURI(), new JobHandler(prop, files, dependencyGraph));
} catch (final DITAOTException e) {
throw new IOException("Failed to read job file: " + e.getMessage());
}
@@ -202,15 +199,17 @@ public final static class JobHandler extends DefaultHandler {

private final Map<String, Object> prop;
private final Map<URI, FileInfo> files;
private final UriGraph dependencyGraph;
private StringBuilder buf;
private String name;
private String key;
private Set<String> set;
private Map<String, String> map;

public JobHandler(final Map<String, Object> prop, final Map<URI, FileInfo> files) {
public JobHandler(final Map<String, Object> prop, final Map<URI, FileInfo> files, UriGraph dependencyGraph) {
this.prop = prop;
this.files = files;
this.dependencyGraph = dependencyGraph;
}

@Override
@@ -270,6 +269,11 @@ public void startElement(final String ns, final String localName, final String q
}
files.put(i.uri, i);
break;
case "dependency":
final URI from = toURI(atts.getValue("from"));
final URI to = toURI(atts.getValue("to"));
dependencyGraph.add(from, to);
break;
}
}

@@ -411,6 +415,22 @@ public void serialize(XMLStreamWriter out, Map<String, Object> props, Collection
out.writeEndElement(); //file
}
out.writeEndElement(); //files

out.writeStartElement("cache");
out.writeStartElement("dependencies");
for (Map.Entry<URI, URI> entry : dependencyGraph.getAll()) {
out.writeStartElement("dependency");
// FileInfo from = getFileInfo(entry.getKey());
// FileInfo to = getFileInfo(entry.getValue());
// out.writeAttribute("from", from.uri.toString());
// out.writeAttribute("to", to.uri.toString());
out.writeAttribute("from", entry.getKey().toString());
out.writeAttribute("to", entry.getValue().toString());
out.writeEndElement(); //dependency
}
out.writeEndElement(); //dependencies
out.writeEndElement(); //cache

out.writeEndElement(); //job
out.writeEndDocument();
}
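
With the element and attribute names used above, the serialized job configuration would gain a block roughly like the following; the URIs are purely illustrative:

<cache>
  <dependencies>
    <dependency from="root.ditamap" to="topics/install.dita"/>
    <dependency from="topics/install.dita" to="topics/requirements.dita"/>
  </dependencies>
</cache>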
Expand Down Expand Up @@ -1041,4 +1061,7 @@ public TempFileNameScheme getTempFileNameScheme() {
return tempFileNameScheme;
}

public void addDependency(URI currentFile, URI hrefTarget) {
dependencyGraph.add(currentFile, hrefTarget);
}
}