109 changes: 102 additions & 7 deletions .circleci/config.yml
@@ -80,6 +80,9 @@ commands:
- gradle/collect_test_results:
reports_path: pmd-cataloger/build/reports/
test_results_path: pmd-cataloger/build/test-results/
- gradle/collect_test_results:
reports_path: sfge/build/reports/
test_results_path: sfge/build/test-results/
- store_test_results:
path: test-results
- store_artifacts: # upload nyc test coverage as artifact.
@@ -218,12 +221,21 @@ jobs:

# Purpose: Runs the unit tests in a Windows environment.
windows-unit-tests:
# `parallelism` sets how many executors run simultaneously, letting us split
# long-running tasks across them.
parallelism: 4 # larger values didn't seem to affect performance greatly
executor:
name: win/default # executor type
size: "medium"
size: "large"
shell: bash.exe
parameters:
node-version: *node_version_param
# Specify a subset of unit tests to be run, instead of the whole suite.
# This lets us work around the suboptimal performance of the Windows executor by spreading
# the suite across multiple parallel executors, each running a different subset of the tests.
test-type:
type: string
default: all
working_directory: C:\repo
steps:
- attach_workspace:
@@ -259,12 +271,90 @@ jobs:
- run: mkdir test-results

# Unit tests
- run:
name: test
# Necessary to explicitly use bash, otherwise gradlew's status code won't be received and the job will hang.
shell: bash.exe
command: yarn test --reporter mocha-junit-reporter --reporter-option mochaFile=test-results/mocha/test-results.xml
when: always
- when:
condition:
equal: [ all, << parameters.test-type >> ]
steps:
- run:
name: test
# Necessary to explicitly use bash, otherwise gradlew's status code won't be received and the job will hang.
shell: bash.exe
command: yarn test --reporter mocha-junit-reporter --reporter-option mochaFile=test-results/mocha/test-results.xml
when: always

- when:
condition:
equal: [ sfge, << parameters.test-type >> ]
steps:
- run:
name: test-sfge
# Necessary to explicitly use bash, otherwise gradlew's status code won't be received and the job will hang.
shell: bash.exe
# Identify all the test files and allocate them across the parallel executors using timing data.
# Then turn the resulting list of tests into arguments that Gradle can accept, and run the tests.
# (A worked example of the constructed command appears after this file's diff.)
command: |
TESTGLOB=$(circleci tests glob "sfge/src/test/**/*Test.java" | circleci tests split --split-by=timings)
echo $TESTGLOB
TESTARRAY=($TESTGLOB)
TESTARG=""
for element in "${TESTARRAY[@]}"
do
TESTARG="$TESTARG --tests `basename $element .java`"
done
echo $TESTARG
yarn test-sfge $TESTARG
when: always

- when:
condition:
equal: [ cli-messaging, << parameters.test-type >> ]
steps:
- run:
name: test-cli-messaging
# Necessary to explicitly use bash, otherwise gradlew's status code won't be received and the job will hang.
shell: bash.exe
# This unit test suite is fast, so we have the first parallel executor run the tests, and all others exit early.
command: |
if [[ $CIRCLE_NODE_INDEX -gt 0 ]]
then
exit 0
fi
yarn test-cli-messaging
when: always

- when:
condition:
equal: [ pmd-cataloger, << parameters.test-type >> ]
steps:
- run:
name: test-pmd-cataloger
# Necessary to explicitly use bash, otherwise gradlew's status code won't be received and the job will hang.
shell: bash.exe
# This unit test suite is fast, so we have the first parallel executor run the tests, and all others exit early.
command: |
if [[ $CIRCLE_NODE_INDEX -gt 0 ]]
then
exit 0
fi
yarn test-pmd-cataloger
when: always

- when:
condition:
equal: [ ts, << parameters.test-type >> ]
steps:
- run:
name: test-ts
# Explicitly use bash to keep the shell script below simple.
shell: bash.exe
# This unit test suite is relatively fast, so we have the first parallel executor run the tests, and all others exit early.
command: |
if [[ $CIRCLE_NODE_INDEX -gt 0 ]]
then
exit 0
fi
yarn test-ts --reporter mocha-junit-reporter --reporter-option mochaFile=test-results/mocha/test-results.xml
when: always

# Linting
- run:
@@ -510,6 +600,11 @@ workflows:
<<: *testing_filters
requires:
- setup
matrix:
parameters:
# Each parameter value is appended to the name of the job it creates,
# so we'll get "windows-unit-tests-pmd-cataloger", "windows-unit-tests-ts", etc.
test-type: [pmd-cataloger, cli-messaging, ts, sfge]
- linux-tarball-test:
filters:
<<: *testing_filters
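As referenced in the sfge splitting step above, here is a minimal sketch of the command that gets built on one executor. The shard contents are hypothetical (the real list comes from `circleci tests split`); MethodUtilTest is a test class touched by this PR, while TypeableUtilTest is an invented placeholder.

    # Hypothetical shard handed to this executor by `circleci tests split`:
    TESTGLOB="sfge/src/test/java/com/salesforce/graph/ops/MethodUtilTest.java sfge/src/test/java/com/salesforce/graph/ops/TypeableUtilTest.java"
    TESTARRAY=($TESTGLOB)
    TESTARG=""
    for element in "${TESTARRAY[@]}"
    do
      # `basename <file> .java` strips the directory and extension, leaving the simple class name for Gradle's --tests filter.
      TESTARG="$TESTARG --tests `basename $element .java`"
    done
    echo $TESTARG
    # Prints: --tests MethodUtilTest --tests TypeableUtilTest
    # The job then effectively runs: yarn test-sfge --tests MethodUtilTest --tests TypeableUtilTest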
4 changes: 4 additions & 0 deletions package.json
@@ -133,6 +133,10 @@
"postpack": "rm -f oclif.manifest.json",
"lint": "eslint ./src --ext .ts",
"test": "./gradlew test jacocoTestCoverageVerification && nyc mocha --timeout 10000 --retries 5 \"./test/**/*.test.ts\"",
"test-cli-messaging": "./gradlew cli-messaging:test",
"test-pmd-cataloger": "./gradlew pmd-cataloger:test",
"test-sfge": "./gradlew sfge:test",
"test-ts": "nyc mocha --timeout 10000 --retries 5 \"./test/**/*.test.ts\"",
"coverage": "nyc report --reporter text",
"version": "oclif-dev readme && git add README.md"
}
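The new scripts can also be run locally, not just from CI. A quick sketch, assuming a normal checkout with `yarn install` already done (arguments after the script name are passed through to the underlying command):

    yarn test-cli-messaging                  # only the cli-messaging Gradle module's tests
    yarn test-pmd-cataloger                  # only the pmd-cataloger Gradle module's tests
    yarn test-sfge --tests MethodUtilTest    # extra args flow through to `./gradlew sfge:test`
    yarn test-ts                             # only the TypeScript mocha suite, with nyc coverage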
39 changes: 28 additions & 11 deletions sfge/src/test/java/com/salesforce/graph/ops/MethodUtilTest.java
@@ -175,12 +175,21 @@ public void getTargetMethods_targetMultipleMethods() {
List<MethodVertex> methodVertices = MethodUtil.getTargetedMethods(g, targets);

MatcherAssert.assertThat(methodVertices, hasSize(equalTo(2)));
MethodVertex firstVertex = methodVertices.get(0);
assertEquals(METHOD_WITHOUT_OVERLOADS_1, firstVertex.getName());

MethodVertex secondVertex = methodVertices.get(1);
assertEquals(METHOD_WITHOUT_OVERLOADS_2, secondVertex.getName());

boolean method1Found = false;
boolean method2Found = false;
for (MethodVertex methodVertex : methodVertices) {
String name = methodVertex.getName();
if (METHOD_WITHOUT_OVERLOADS_1.equals(name)) {
method1Found = true;
} else if (METHOD_WITHOUT_OVERLOADS_2.equals(name)) {
method2Found = true;
} else {
fail("Unexpected method name " + name);
}
}
assertTrue(method1Found);
assertTrue(method2Found);
String messages = CliMessager.getInstance().getAllMessages();
assertEquals("[]", messages);
}
@@ -227,13 +236,21 @@ public void getTargetMethods_targetNameDupedMethods() {
List<MethodVertex> methodVertices = MethodUtil.getTargetedMethods(g, targets);

MatcherAssert.assertThat(methodVertices, hasSize(equalTo(2)));
MethodVertex firstVertex = methodVertices.get(0);
assertEquals(METHOD_WITH_EXTERNAL_NAME_DUPLICATION, firstVertex.getName());
assertEquals(18, firstVertex.getBeginLine());

MethodVertex secondVertex = methodVertices.get(1);
assertEquals(METHOD_WITH_EXTERNAL_NAME_DUPLICATION, secondVertex.getName());
assertEquals(22, secondVertex.getBeginLine());
boolean line18Found = false;
boolean line22Found = false;
for (MethodVertex methodVertex : methodVertices) {
assertEquals(METHOD_WITH_EXTERNAL_NAME_DUPLICATION, methodVertex.getName());
if (methodVertex.getBeginLine() == 18) {
line18Found = true;
} else if (methodVertex.getBeginLine() == 22) {
line22Found = true;
} else {
fail("Unexpected line number " + methodVertex.getBeginLine());
}
}
assertTrue(line18Found);
assertTrue(line22Found);

String messages = CliMessager.getInstance().getAllMessages();
MatcherAssert.assertThat(