
move project compilation to server

Daniel Norberg committed Jan 16, 2017
1 parent 15f3f93 commit 78e4c40f02e9a259d66928bea2a30188147f6ea6
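
This commit moves project "compilation" from the CLI to the server. Before, `digdag archive` and `digdag push` rendered override parameters (-p/--param, -P/--params-file) into an ArchiveMetadata document and packed it into the tarball as .digdag.dig, which the server then merely parsed. Now the CLI archives the raw project files only, and the server compiles ArchiveMetadata itself by loading the extracted directory through ProjectArchiveLoader. A user-visible consequence is that push no longer accepts -p/--param or -P/--params-file, so the acceptance tests bake test values directly into the workflow files instead.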
@@ -86,14 +86,11 @@ private void archive(Injector injector)
-        ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-        ConfigLoaderManager loader = injector.getInstance(ConfigLoaderManager.class);
-        // read parameters
-        Config overrideParams = loadParams(cf, loader, loadSystemProperties(), paramsFile, params);
         // load project
         Path projectPath = (projectDirName == null) ?
                 Paths.get("").toAbsolutePath() :
                 Paths.get(projectDirName).normalize().toAbsolutePath();
-        injector.getInstance(Archiver.class).createArchive(projectPath, Paths.get(output), overrideParams);
+        injector.getInstance(Archiver.class).createArchive(projectPath, Paths.get(output));
         out.println("Created " + output + ".");
         out.println("Use `" + programName + " upload <path.tar.gz> <project> <revision>` to upload it a server.");
@@ -5,11 +5,10 @@
 import io.digdag.cli.StdOut;
-import io.digdag.cli.YamlMapper;
 import io.digdag.client.config.Config;
-import io.digdag.core.archive.ArchiveMetadata;
+import io.digdag.client.config.ConfigFactory;
 import io.digdag.core.archive.ProjectArchive;
 import io.digdag.core.archive.ProjectArchiveLoader;
 import io.digdag.core.archive.WorkflowResourceMatcher;
-import io.digdag.core.repository.WorkflowDefinition;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
 import org.apache.commons.compress.archivers.tar.TarConstants;
@@ -19,34 +18,35 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.attribute.PosixFilePermission;
-import java.util.Date;
 import java.util.Set;
 import static com.google.common.base.Preconditions.checkArgument;
+import static java.util.Locale.ENGLISH;
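
Presumably the import churn tracks the removed responsibilities: StandardCharsets and Date were only used to serialize the metadata entry (yamlMapper.toYaml(meta).getBytes(...), metaEntry.setModTime(new Date())), while the new static ENGLISH import serves the String.format call in the rewritten symlink error further down.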
 class Archiver
 {
     private final PrintStream out;
+    private final ConfigFactory cf;
     private final ProjectArchiveLoader projectLoader;
-    private final YamlMapper yamlMapper;
     @Inject
-    Archiver(@StdOut PrintStream out, ProjectArchiveLoader projectLoader, YamlMapper yamlMapper)
+    Archiver(@StdOut PrintStream out, ProjectArchiveLoader projectLoader, ConfigFactory cf)
     {
         this.out = out;
         this.projectLoader = projectLoader;
-        this.yamlMapper = yamlMapper;
+        this.cf = cf;
     }
-    void createArchive(Path projectPath, Path output, Config overrideParams)
+    void createArchive(Path projectPath, Path output)
             throws IOException
     {
         out.println("Creating " + output + "...");
-        ProjectArchive project = projectLoader.load(projectPath, WorkflowResourceMatcher.defaultMatcher(), overrideParams);
-        ArchiveMetadata meta = project.getArchiveMetadata();
+        ProjectArchive project = projectLoader.load(projectPath, WorkflowResourceMatcher.defaultMatcher(), cf.create());
         try (TarArchiveOutputStream tar = new TarArchiveOutputStream(new GzipCompressorOutputStream(Files.newOutputStream(output)))) {
             // default mode for file names longer than 100 bytes is throwing an exception (LONGFILE_ERROR)
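
The surviving archive-writing code follows the usual commons-compress pattern. A minimal, self-contained sketch of that pattern (entry name and content are hypothetical; LONGFILE_POSIX is one way to lift the 100-byte entry-name limit the comment above refers to):

```java
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;

public class TarSketch
{
    public static void main(String[] args) throws Exception
    {
        try (OutputStream out = Files.newOutputStream(Paths.get("example.tar.gz"));
                TarArchiveOutputStream tar = new TarArchiveOutputStream(new GzipCompressorOutputStream(out))) {
            // the default LONGFILE_ERROR mode throws on entry names longer than 100 bytes
            tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);

            byte[] body = "timezone: UTC\n".getBytes(StandardCharsets.UTF_8);
            TarArchiveEntry e = new TarArchiveEntry("example.dig");
            e.setSize(body.length);   // size must be set before the entry is written
            tar.putArchiveEntry(e);
            tar.write(body);
            tar.closeArchiveEntry();
        }
    }
}
```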
@@ -56,7 +56,7 @@ void createArchive(Path projectPath, Path output, Config overrideParams)
             if (!Files.isDirectory(absPath)) {
                 out.println(" Archiving " + resourceName);
-                TarArchiveEntry e = buildTarArchiveEntry(project, absPath, resourceName);
+                TarArchiveEntry e = buildTarArchiveEntry(projectPath, absPath, resourceName);
                 tar.putArchiveEntry(e);
                 if (e.isSymbolicLink()) {
                     out.println(" symlink -> " + e.getLinkName());
@@ -69,39 +69,23 @@ void createArchive(Path projectPath, Path output, Config overrideParams)
                     tar.closeArchiveEntry();
                 }
             });
-            // create .digdag.dig
-            // TODO set default time zone if not set?
-            byte[] metaBody = yamlMapper.toYaml(meta).getBytes(StandardCharsets.UTF_8);
-            TarArchiveEntry metaEntry = new TarArchiveEntry(ArchiveMetadata.FILE_NAME);
-            metaEntry.setSize(metaBody.length);
-            metaEntry.setModTime(new Date());
-            tar.putArchiveEntry(metaEntry);
-            tar.write(metaBody);
-            tar.closeArchiveEntry();
         }
-        out.println("Workflows:");
-        for (WorkflowDefinition workflow : meta.getWorkflowList().get()) {
-            out.println(" " + workflow.getName());
-        }
-        out.println("");
     }
-    private TarArchiveEntry buildTarArchiveEntry(ProjectArchive project, Path absPath, String name)
+    private TarArchiveEntry buildTarArchiveEntry(Path projectPath, Path absPath, String name)
             throws IOException
     {
         TarArchiveEntry e;
         if (Files.isSymbolicLink(absPath)) {
             e = new TarArchiveEntry(name, TarConstants.LF_SYMLINK);
             Path rawDest = Files.readSymbolicLink(absPath);
             Path normalizedAbsDest = absPath.getParent().resolve(rawDest).normalize();
-            try {
-                project.pathToResourceName(normalizedAbsDest);
-            }
-            catch (IllegalArgumentException ex) {
-                throw new IllegalArgumentException("Invalid symbolic link: " + ex.getMessage());
+            if (!normalizedAbsDest.startsWith(projectPath)) {
+                throw new IllegalArgumentException(String.format(ENGLISH,
+                        "Invalid symbolic link: Given path '%s' is outside of project directory '%s'", normalizedAbsDest, projectPath));
             }
             // absolute path will be invalid on a server. convert it to a relative path
             Path normalizedRelativeDest = absPath.getParent().relativize(normalizedAbsDest);
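
The rewritten check above validates a symlink by path containment instead of probing pathToResourceName for an exception. A standalone sketch of the same resolve/normalize/startsWith/relativize sequence (all paths hypothetical):

```java
import java.nio.file.Path;
import java.nio.file.Paths;

public class SymlinkCheckSketch
{
    public static void main(String[] args)
    {
        Path projectPath = Paths.get("/work/myproj").toAbsolutePath().normalize();
        Path link = projectPath.resolve("tasks/data");   // the symlink itself
        Path rawDest = Paths.get("../shared/data");      // its raw target

        // resolve the target against the link's directory and clean up ".." segments
        Path normalizedAbsDest = link.getParent().resolve(rawDest).normalize();
        if (!normalizedAbsDest.startsWith(projectPath)) {
            throw new IllegalArgumentException(
                    "Invalid symbolic link: " + normalizedAbsDest + " is outside of " + projectPath);
        }

        // absolute targets would break on the server; store a relative one
        Path normalizedRelativeDest = link.getParent().relativize(normalizedAbsDest);
        System.out.println(normalizedRelativeDest);      // prints "../shared/data"
    }
}
```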
@@ -36,12 +36,6 @@
@Parameter(names = {"--project"})
String projectDirName = null;
@DynamicParameter(names = {"-p", "--param"})
Map<String, String> params = new HashMap<>();
@Parameter(names = {"-P", "--params-file"})
String paramsFile = null;
@Parameter(names = {"-r", "--revision"})
String revision = null;
@@ -96,12 +90,6 @@ private void push(String projName)
                 .initialize()
                 .getInjector();
-        ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-        ConfigLoaderManager loader = injector.getInstance(ConfigLoaderManager.class);
-        // read parameters
-        Config overrideParams = loadParams(cf, loader, loadSystemProperties(), paramsFile, params);
         // schedule_from will be server's current time if not set
         Optional<Instant> scheduleFrom;
         if (scheduleFromString == null) {
@@ -115,7 +103,7 @@ private void push(String projName)
         Path projectPath = (projectDirName == null) ?
                 Paths.get("").toAbsolutePath() :
                 Paths.get(projectDirName).normalize().toAbsolutePath();
-        injector.getInstance(Archiver.class).createArchive(projectPath, archivePath, overrideParams);
+        injector.getInstance(Archiver.class).createArchive(projectPath, archivePath);
         DigdagClient client = buildClient();
         if ("".equals(revision)) {
@@ -60,7 +60,7 @@ public void listFiles(PathConsumer consumer)
     }
     // reused by ProjectArchiveLoader.load
-    static void listFiles(Path projectPath, PathConsumer consumer)
+    public static void listFiles(Path projectPath, PathConsumer consumer)
             throws IOException
     {
         listFilesRecursively(projectPath, projectPath, consumer, new HashSet<>());
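
listFiles was already shared with ProjectArchiveLoader (same package), so the widening to public presumably exists so that callers outside io.digdag.core.archive can reuse the same project-directory traversal now that project loading happens on the server side as well.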
@@ -58,6 +58,9 @@
 import io.digdag.core.TempFileManager.TempDir;
 import io.digdag.core.TempFileManager.TempFile;
 import io.digdag.core.archive.ArchiveMetadata;
+import io.digdag.core.archive.ProjectArchive;
+import io.digdag.core.archive.ProjectArchiveLoader;
+import io.digdag.core.archive.WorkflowResourceMatcher;
 import io.digdag.core.config.YamlConfigLoader;
 import io.digdag.core.repository.ArchiveType;
 import io.digdag.core.repository.Project;
@@ -160,6 +163,7 @@
     private final TempFileManager tempFiles;
     private final SessionStoreManager ssm;
     private final SecretControlStoreManager scsp;
+    private final ProjectArchiveLoader projectArchiveLoader;
     @Inject
     public ProjectResource(
@@ -172,7 +176,8 @@ public ProjectResource(
             SchedulerManager srm,
             TempFileManager tempFiles,
             SessionStoreManager ssm,
-            SecretControlStoreManager scsp)
+            SecretControlStoreManager scsp,
+            ProjectArchiveLoader projectArchiveLoader)
     {
         this.cf = cf;
         this.rawLoader = rawLoader;
@@ -184,6 +189,7 @@ public ProjectResource(
         this.tempFiles = tempFiles;
         this.ssm = ssm;
         this.scsp = scsp;
+        this.projectArchiveLoader = projectArchiveLoader;
     }
     private static StoredProject ensureNotDeletedProject(StoredProject proj)
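
With the new field, constructor parameter, and assignment in place, Guice injects a ProjectArchiveLoader into ProjectResource. No module changes appear in this diff, so the binding presumably already exists on the server's injector (the CLI resolves the same type through its own injector in Archiver).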
@@ -589,14 +595,13 @@ private ArchiveMetadata readArchiveMetadata(InputStream in, String projectName)
                         totalSize, ARCHIVE_TOTAL_SIZE_LIMIT));
             }
-            Config renderedConfig = rawLoader.loadFile(
-                    dir.child(ArchiveMetadata.FILE_NAME).toFile()).toConfig(cf);
-            return renderedConfig.convert(ArchiveMetadata.class);
+            ProjectArchive archive = projectArchiveLoader.load(dir.get(), WorkflowResourceMatcher.defaultMatcher(), cf.create());
+            return archive.getArchiveMetadata();
         }
     }
-    // TODO here doesn't have to extract files exception ArchiveMetadata.FILE_NAME
-    // rawLoader.loadFile doesn't have to render the file because it's already rendered.
+    // TODO: only write .dig files
     private long extractConfigFiles(java.nio.file.Path dir, TarArchiveInputStream archive)
             throws IOException
     {
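
Net server-side effect: instead of parsing a pre-rendered .digdag.dig with rawLoader, ProjectResource now compiles metadata from the extracted files, mirroring what the CLI's Archiver used to do locally. A hedged sketch of that call path, pulled out into a hypothetical helper (projectArchiveLoader and cf are the injected fields above):

```java
// Hypothetical helper isolating the new logic; not part of the actual diff.
private ArchiveMetadata compileMetadata(java.nio.file.Path extractedDir)
        throws IOException
{
    // Compile ArchiveMetadata from the raw project files, passing an empty
    // Config because client-side override parameters no longer exist.
    ProjectArchive archive = projectArchiveLoader.load(
            extractedDir,
            WorkflowResourceMatcher.defaultMatcher(),
            cf.create());
    return archive.getArchiveMetadata();
}
```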
@@ -1,7 +1,7 @@
 package acceptance;
 import com.google.common.base.Optional;
-import com.google.common.collect.ImmutableMap;
+import com.google.common.io.Resources;
 import io.digdag.cli.TimeUtil;
 import io.digdag.client.DigdagClient;
 import io.digdag.client.api.Id;
@@ -31,6 +31,8 @@
 import java.util.List;
 import java.util.stream.Collectors;
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static java.util.Arrays.asList;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
@@ -382,9 +384,11 @@ public void testSkipOnOvertime()
     {
         File outfile = folder.newFile();
         Files.createDirectories(projectDir);
-        addWorkflow(projectDir, "acceptance/schedule/skip_on_overtime.dig", "schedule.dig");
-        pushProject(server.endpoint(), projectDir, "schedule", ImmutableMap.of(
-                "outfile", outfile.toPath().toAbsolutePath().toString()));
+        Files.write(projectDir.resolve("schedule.dig"), asList(Resources.toString(
+                Resources.getResource("acceptance/schedule/skip_on_overtime.dig"), UTF_8)
+                .replace("${outfile}", outfile.toString())));
+        pushProject(server.endpoint(), projectDir, "schedule");
         TestUtils.expect(Duration.ofMinutes(1),
                 () -> Files.readAllLines(outfile.toPath()).size() >= 6);
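
Since push can no longer carry -p parameters, the test substitutes ${outfile} in the workflow text itself before writing schedule.dig into the project directory; the server then sees a literal value rather than a push-time parameter.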
@@ -2,7 +2,6 @@
 import com.amazonaws.AmazonServiceException;
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.AmazonS3Client;
 import com.amazonaws.services.s3.model.DeleteObjectsRequest;
 import com.amazonaws.services.s3.model.ListObjectsRequest;
 import com.amazonaws.services.s3.model.ObjectListing;
@@ -408,11 +407,6 @@ public static Id pushProject(String endpoint, Path project)
     }
     public static Id pushProject(String endpoint, Path project, String projectName)
     {
-        return pushProject(endpoint, project, projectName, ImmutableMap.of());
-    }
-    public static Id pushProject(String endpoint, Path project, String projectName, Map<String, String> params)
-    {
         List<String> command = new ArrayList<>();
         command.addAll(asList(
@@ -421,7 +415,6 @@ public static Id pushProject(String endpoint, Path project, String projectName,
                 projectName,
                 "-c", "/dev/null",
                 "-e", endpoint));
-        params.forEach((k, v) -> command.addAll(asList("-p", k + "=" + v)));
         CommandStatus pushStatus = main(command);
         assertThat(pushStatus.errUtf8(), pushStatus.code(), is(0));
         Matcher matcher = PROJECT_ID_PATTERN.matcher(pushStatus.outUtf8());
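
pushProject loses its params-accepting overload for the same reason: there is no -p flag to forward anymore. The remaining three-argument overload absorbs the implementation that previously lived in the four-argument one.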
