Skip to content

Commit

Permalink
Annoying merge follow-up (#1484)
Browse files — browse the repository at this point in the history
* Annoying merge follow-up
* Add check for duplicate files
  • Loading branch information
denis-yuen committed Jun 12, 2018
1 parent bd1c4b8 commit 20d07e3
Show file tree
Hide file tree
Showing 17 changed files with 214 additions and 50 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
Expand Up @@ -85,7 +85,8 @@ before_install:
#- npm install -g swagger2openapi@2.11.16
# decrypt migration before initial build
- scripts/decrypt.sh
- scripts/check-swagger.sh
# turn this back on with updates to swagger (particularly swagger-maven-plugin), current implementation is too non-deterministic
#- scripts/check-swagger.sh

install:
- docker version
Expand Down
Expand Up @@ -16,6 +16,8 @@

package io.dockstore.client.cli;

import java.util.List;

import io.dockstore.client.cli.nested.ToolClient;
import io.dockstore.common.CommonTestUtilities;
import io.dockstore.common.ConfidentialTest;
Expand All @@ -24,6 +26,7 @@
import io.dockstore.common.SourceControl;
import io.dockstore.common.ToolTest;
import io.dropwizard.testing.ResourceHelpers;
import org.apache.commons.dbutils.handlers.ColumnListHandler;
import org.apache.commons.dbutils.handlers.ScalarHandler;
import org.junit.Assert;
import org.junit.Before;
Expand Down Expand Up @@ -1141,8 +1144,21 @@ public void testTestJson() {
"--script" });
final long count5 = testingPostgres
.runSelectStatement("select count(*) from sourcefile where type='CWL_TEST_JSON'", new ScalarHandler<>());
Assert.assertEquals("there should be three sourcefiles that are test parameter files, there are " + count5, 2, count5);
Assert.assertTrue("there should be two sourcefiles that are test parameter files, there are " + count5, count5 == 2);

// refreshing again with the default paths set should not create extra redundant test parameter files
Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "tool", "update_tool", "--entry",
"quay.io/dockstoretestuser/test_input_json", "--test-cwl-path", "test.cwl.json" });
Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "tool", "update_tool", "--entry",
"quay.io/dockstoretestuser/test_input_json", "--test-wdl-path", "test.wdl.json" });
Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "tool", "refresh", "--entry",
"quay.io/dockstoretestuser/test_input_json" });
final List<Long> testJsonCounts = testingPostgres
.runSelectStatement("select count(*) from sourcefile s, version_sourcefile vs where (s.type = 'CWL_TEST_JSON' or s.type = 'WDL_TEST_JSON') and s.id = vs.sourcefileid group by vs.versionid", new ColumnListHandler<>());
Assert.assertTrue("there should be at least three sets of test json sourcefiles " + testJsonCounts.size(), testJsonCounts.size() >= 3);
for(Long testJsonCount : testJsonCounts) {
Assert.assertTrue("there should be at most two test json for each version", testJsonCount <= 2);
}
}

/**
Expand Down
Expand Up @@ -29,9 +29,7 @@
import io.swagger.client.ApiClient;
import io.swagger.client.ApiException;
import io.swagger.client.api.ContainersApi;
import io.swagger.client.api.UsersApi;
import io.swagger.client.model.DockstoreTool;
import io.swagger.client.model.PublishRequest;
import io.swagger.client.model.SourceFile;
import io.swagger.client.model.Tag;
import org.apache.commons.dbutils.handlers.ScalarHandler;
Expand Down
Expand Up @@ -830,6 +830,7 @@ public void testGitlab() {
*/
@Test
@Category(SlowTest.class)
@Ignore("Broken on hotfix due to 'API V3 is no longer supported. Use API V4 instead'")
public void testManualPublishGitlab() {
// Setup DB
final CommonTestUtilities.TestingPostgres testingPostgres = getTestingPostgres();
Expand Down
Expand Up @@ -198,9 +198,10 @@ public void testRefreshWorkflowsByOrg() throws IOException {
assertThat(currentWorkflows.size() - previousWorkflows.size()).isGreaterThanOrEqualTo(newDockstore_TestUser2Workflows.size());
previousWorkflows = currentWorkflows;

testRefreshWorkflowsByOrg5();
currentWorkflows = getWorkflows();
assertThat(currentWorkflows.size() - previousWorkflows.size()).isGreaterThanOrEqualTo(newDockstoreDotTestDotUser2Workflows.size());
//TODO: "Broken on hotfix due to 'API V3 is no longer supported. Use API V4 instead'"
// testRefreshWorkflowsByOrg5();
// currentWorkflows = getWorkflows();
// assertThat(currentWorkflows.size() - previousWorkflows.size()).isGreaterThanOrEqualTo(newDockstoreDotTestDotUser2Workflows.size());
}

/**
Expand Down
Expand Up @@ -50,6 +50,8 @@
import io.swagger.client.model.Tool;
import io.swagger.client.model.User;
import io.swagger.client.model.ToolDescriptor;
import io.swagger.client.model.ToolFile;
import io.swagger.client.model.ToolTests;
import io.swagger.client.model.Workflow;
import io.swagger.client.model.WorkflowVersion;
import org.apache.commons.dbutils.handlers.ScalarHandler;
Expand All @@ -70,6 +72,7 @@
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
* Extra confidential integration tests, focus on testing workflow interactions
Expand Down Expand Up @@ -179,6 +182,16 @@ public void testTargettedRefresh() throws ApiException, URISyntaxException, IOEx
checkForRelativeFile(ga4Ghv2Api, "#workflow/" + DOCKSTORE_TEST_USER2_DOCKSTORE_WORKFLOW, "master", "grep.cwl");


workflowApi.publish(workflowByPathBitbucket.getId(), new PublishRequest(){
public Boolean isPublish() { return true;}
});
// check on URLs for workflows via ga4gh calls
toolDescriptor = ga4Ghv2Api
.toolsIdVersionsVersionIdTypeDescriptorGet("CWL", "#workflow/" + DOCKSTORE_TEST_USER2_DOCKSTORE_WORKFLOW, "master");
content = IOUtils.toString(new URI(toolDescriptor.getUrl()), StandardCharsets.UTF_8);
Assert.assertTrue("could not find content from generated URL", !content.isEmpty());
checkForRelativeFile(ga4Ghv2Api, "#workflow/" + DOCKSTORE_TEST_USER2_DOCKSTORE_WORKFLOW, "master", "grep.cwl");

// check on commit ids for github
boolean allHaveCommitIds = refreshGithub.getWorkflowVersions().stream().noneMatch(version -> version.getCommitID().isEmpty());
assertTrue("not all workflows seem to have commit ids", allHaveCommitIds);
Expand Down Expand Up @@ -482,6 +495,53 @@ public void testManualRegisterThenPublish() throws ApiException {
assertEquals("There should be 5 valid version tags, there are " + count4, 6, count4);
}

/**
 * This tests that a nested WDL workflow (three levels of imports) is properly parsed:
 * after refresh, each version should contain its primary descriptor plus the imported
 * descriptors that are reachable from it.
 * @throws ApiException if any webservice call fails
 */
@Test
public void testNestedWdlWorkflow() throws ApiException {
    final ApiClient webClient = getWebClient();
    WorkflowsApi workflowApi = new WorkflowsApi(webClient);

    // Set up postgres
    final CommonTestUtilities.TestingPostgres testingPostgres = getTestingPostgres();

    // Manually register workflow github
    Workflow githubWorkflow = workflowApi
        .manualRegister("github", "DockstoreTestUser2/nested-wdl", "/Dockstore.wdl", "altname", "wdl", "/test.json");

    // Manual registration alone must not promote the workflow to FULL mode
    final long count = testingPostgres
        .runSelectStatement("select count(*) from workflow where mode = '" + Workflow.ModeEnum.FULL + "'", new ScalarHandler<>());
    assertEquals("No workflows are in full mode", 0, count);

    // Refresh the workflow so its versions and sourcefiles get populated
    workflowApi.refresh(githubWorkflow.getId());

    // Confirm that correct number of sourcefiles are found per version
    githubWorkflow = workflowApi.getWorkflow(githubWorkflow.getId());
    List<WorkflowVersion> versions = githubWorkflow.getWorkflowVersions();
    assertEquals("There should be two versions", 2, versions.size());

    // "infinite-loop" branch: two descriptors import each other, so the cycle must be
    // detected and only the two files stored
    WorkflowVersion loopVersion = versions.stream()
        .filter(version -> Objects.equals(version.getReference(), "infinite-loop")).findFirst()
        .orElseThrow(() -> new AssertionError("Could not find version infinite-loop"));
    assertEquals("There should be two sourcefiles", 2, loopVersion.getSourceFiles().size());

    // "master" branch: three levels of nesting means three descriptor files
    WorkflowVersion masterVersion = versions.stream()
        .filter(version -> Objects.equals(version.getReference(), "master")).findFirst()
        .orElseThrow(() -> new AssertionError("Could not find version master"));
    assertEquals("There should be three sourcefiles", 3, masterVersion.getSourceFiles().size());
}


/**
* Tests manual registration of a tool and check that descriptors are downloaded properly.
Expand Down Expand Up @@ -681,5 +741,19 @@ public void testRelativeSecondaryFileOperations() throws ApiException, URISyntax
checkForRelativeFile(ga4Ghv2Api, "#workflow/" + DOCKSTORE_TEST_USER2_RELATIVE_IMPORTS_WORKFLOW, "master", "adtex.cwl");
// ignore extra separators
checkForRelativeFile(ga4Ghv2Api, "#workflow/" + DOCKSTORE_TEST_USER2_RELATIVE_IMPORTS_WORKFLOW, "master", "/adtex.cwl");
// test json should use relative path with ".."
checkForRelativeFile(ga4Ghv2Api, "#workflow/" + DOCKSTORE_TEST_USER2_RELATIVE_IMPORTS_WORKFLOW, "master", "../test.json");
List<ToolFile> toolFiles = ga4Ghv2Api
.toolsIdVersionsVersionIdTypeFilesGet("CWL", "#workflow/" + DOCKSTORE_TEST_USER2_RELATIVE_IMPORTS_WORKFLOW, "master");
assertTrue("should have at least 5 files", toolFiles.size() >= 5);
assertTrue("all files should have relative paths", toolFiles.stream().filter(toolFile -> !toolFile.getPath().startsWith("/")).count() >= 5);
// check on urls created for test files
List<ToolTests> toolTests = ga4Ghv2Api
.toolsIdVersionsVersionIdTypeTestsGet("CWL", "#workflow/" + DOCKSTORE_TEST_USER2_RELATIVE_IMPORTS_WORKFLOW, "master");
assertTrue("could not find tool tests", toolTests.size() > 0);
for(ToolTests test: toolTests) {
content = IOUtils.toString(new URI(test.getUrl()), StandardCharsets.UTF_8);
Assert.assertTrue("could not find content from generated test JSON URL", !content.isEmpty());
}
}
}
@@ -1,14 +1,14 @@
[
{
"path": "\/Dockerfile",
"path": "Dockerfile",
"file_type": "CONTAINERFILE"
},
{
"path": "\/Dockstore.cwl",
"path": "Dockstore.cwl",
"file_type": "PRIMARY_DESCRIPTOR"
},{"file_type":"TEST_FILE","path":"/nested/test.cwl.json"},
},{"file_type":"TEST_FILE","path":"nested/test.cwl.json"},
{
"path": "\/test.cwl.json",
"path": "test.cwl.json",
"file_type": "TEST_FILE"
}

Expand Down
@@ -1,14 +1,14 @@
[
{
"path": "\/Dockerfile",
"path": "Dockerfile",
"file_type": "CONTAINERFILE"
},
{
"path": "\/Dockstore.wdl",
"path": "Dockstore.wdl",
"file_type": "PRIMARY_DESCRIPTOR"
},{"file_type":"TEST_FILE","path":"/nested/test.wdl.json"},
},{"file_type":"TEST_FILE","path":"nested/test.wdl.json"},
{
"path": "\/test.wdl.json",
"path": "test.wdl.json",
"file_type": "TEST_FILE"
}
]
Expand Up @@ -30,6 +30,7 @@
import javax.persistence.Table;

import com.google.common.base.MoreObjects;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.ComparisonChain;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
Expand Down Expand Up @@ -116,6 +117,16 @@ public void setPath(String path) {
this.path = path;
}

@JsonIgnore
public Timestamp getDbCreateDate() {
return dbCreateDate;
}

@JsonIgnore
public Timestamp getDbUpdateDate() {
return dbUpdateDate;
}

@Override
public int hashCode() {
return Objects.hash(id, type, content, path);
Expand Down
Expand Up @@ -286,10 +286,13 @@ private void updateTags(List<Tag> newTags, Tool tool, SourceCodeRepoInterface so
oldTag.setCwlPath(tool.getDefaultCwlPath());
oldTag.setWdlPath(tool.getDefaultWdlPath());
oldTag.setDockerfilePath(tool.getDefaultDockerfilePath());
if (tool.getDefaultTestCwlParameterFile() != null) {
//TODO: keep an eye on this, this used to always create new test params no matter what
if (tool.getDefaultTestCwlParameterFile() != null && oldTag.getSourceFiles().stream()
.noneMatch(file -> file.getPath().equals(tool.getDefaultTestCwlParameterFile()))) {
oldTag.getSourceFiles().add(createSourceFile(tool.getDefaultTestCwlParameterFile(), SourceFile.FileType.CWL_TEST_JSON));
}
if (tool.getDefaultTestWdlParameterFile() != null) {
if (tool.getDefaultTestWdlParameterFile() != null && oldTag.getSourceFiles().stream()
.noneMatch(file -> file.getPath().equals(tool.getDefaultTestWdlParameterFile()))) {
oldTag.getSourceFiles().add(createSourceFile(tool.getDefaultTestWdlParameterFile(), SourceFile.FileType.WDL_TEST_JSON));
}
}
Expand Down
@@ -1,12 +1,12 @@
package io.dockstore.webservice.helpers;

import java.util.ArrayList;
import java.util.List;

import io.dockstore.common.Registry;
import io.dockstore.webservice.core.Tag;
import io.dockstore.webservice.core.Tool;

import java.util.ArrayList;
import java.util.List;

/**
* A no-op interface intended as a place-holder for where we will implement seven bridges functionality when they get around to exposing and
* implementing their API.
Expand Down
Expand Up @@ -38,6 +38,7 @@
import io.dockstore.webservice.core.WorkflowVersion;
import io.dockstore.webservice.languages.LanguageHandlerFactory;
import io.dockstore.webservice.languages.LanguageHandlerInterface;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -288,7 +289,7 @@ Entry updateEntryMetadata(Entry entry, LanguageType type) {
* @param entry
* @return
*/
String getBranchNameFromDefaultVersion(Entry entry) {
public String getBranchNameFromDefaultVersion(Entry entry) {
String defaultVersion = entry.getDefaultVersion();
if (entry instanceof Tool) {
for (Tag tag : ((Tool)entry).getVersions()) {
Expand Down Expand Up @@ -352,12 +353,13 @@ WorkflowVersion initializeWorkflowVersion(String branch, Optional<Workflow> exis
* @param version
* @return workflow version
*/
WorkflowVersion combineVersionAndSourcefile(String repositoryId, SourceFile sourceFile, Workflow workflow, SourceFile.FileType identifiedType,
WorkflowVersion version, Map<String, WorkflowVersion> existingDefaults) {
WorkflowVersion combineVersionAndSourcefile(String repositoryId, SourceFile sourceFile, Workflow workflow,
SourceFile.FileType identifiedType, WorkflowVersion version, Map<String, WorkflowVersion> existingDefaults) {
Set<SourceFile> sourceFileSet = new HashSet<>();

if (sourceFile != null && sourceFile.getContent() != null) {
final Map<String, SourceFile> stringSourceFileMap = this.resolveImports(repositoryId, sourceFile.getContent(), identifiedType, version);
final Map<String, SourceFile> stringSourceFileMap = this
.resolveImports(repositoryId, sourceFile.getContent(), identifiedType, version);
sourceFileSet.addAll(stringSourceFileMap.values());
}

Expand All @@ -367,15 +369,21 @@ WorkflowVersion combineVersionAndSourcefile(String repositoryId, SourceFile sour
SourceFile.FileType workflowDescriptorType = workflow.getTestParameterType();

List<SourceFile> testParameterFiles = existingVersion.getSourceFiles().stream()
.filter((SourceFile u) -> u.getType() == workflowDescriptorType).collect(Collectors.toList());
testParameterFiles.forEach(file -> this.readFile(repositoryId, existingVersion, sourceFileSet, workflowDescriptorType, file.getPath()));
.filter((SourceFile u) -> u.getType() == workflowDescriptorType).collect(Collectors.toList());
testParameterFiles
.forEach(file -> this.readFile(repositoryId, existingVersion, sourceFileSet, workflowDescriptorType, file.getPath()));
}

// If source file is found and valid then add it
if (sourceFile != null && sourceFile.getContent() != null) {
version.getSourceFiles().add(sourceFile);
}

// look for a mutated version and delete it first (can happen due to leading slash)
Set<SourceFile> collect = sourceFileSet.stream().filter(
file -> file.getPath().equals(sourceFile.getPath()) || file.getPath()
.equals(StringUtils.stripStart(sourceFile.getPath(), "/"))).collect(Collectors.toSet());
sourceFileSet.removeAll(collect);
// add extra source files here (dependencies from "main" descriptor)
if (sourceFileSet.size() > 0) {
version.getSourceFiles().addAll(sourceFileSet);
Expand Down

0 comments on commit 20d07e3

Please sign in to comment.