Skip to content

Commit

Permalink
Add more endpoint tests (#201)
Browse files Browse the repository at this point in the history
  • Loading branch information
ypc-faros committed Jul 13, 2022
1 parent e243df5 commit abed191
Show file tree
Hide file tree
Showing 43 changed files with 545 additions and 45 deletions.
4 changes: 2 additions & 2 deletions init/src/hasura/init.ts
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ export class HasuraInit {
// The query collection from resources doesn't exist in the metadata.
// Safely create a new query collection.
this.logger.info(
"Creating query collection '%s'. %d queries added",
'Creating query collection \'%s\'. %d queries added',
queryCollectionFromResources.name,
queryCollectionFromResources.definition.queries.length
);
Expand Down Expand Up @@ -350,7 +350,7 @@ export class HasuraInit {

if (toAdd.length > 0) {
this.logger.info(
"Updating query collection '%s'. %d queries added.",
'Updating query collection \'%s\'. %d queries added.',
queryCollectionFromResources.name,
toAdd.length
);
Expand Down
112 changes: 112 additions & 0 deletions init/test/integration-tests/hasura_endpoint.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import fs from 'fs-extra';
import {isEqual} from 'lodash';
import path from 'path';

import {HasuraClient} from './hasura-client';
import {TestDefinition} from './types';

// Admin secret used to authenticate against the local Hasura instance.
let hasuraAdminSecret: string;
// Shared client for all endpoint tests in this file.
let hasuraClient: HasuraClient;

beforeAll(async () => {
  // process.env values are `string | undefined` under strictNullChecks;
  // fail fast with a clear message instead of passing `undefined` through
  // to the client and getting an opaque auth failure later.
  const secret = process.env.HASURA_GRAPHQL_ADMIN_SECRET;
  if (!secret) {
    throw new Error(
      'HASURA_GRAPHQL_ADMIN_SECRET must be set to run Hasura endpoint tests'
    );
  }
  hasuraAdminSecret = secret;

  hasuraClient = new HasuraClient('http://localhost:8080', hasuraAdminSecret);
  // Block (up to the 60s hook timeout below) until Hasura answers health checks.
  await hasuraClient.waitUntilHealthy();
}, 60 * 1000);

describe('hasura endpoint tests', () => {
  const RESOURCES_DIR = path.join(__dirname, '..', 'resources');

  // Each entry names a test-definition file under
  // resources/hasura/test_definitions. The definition specifies the Hasura
  // endpoint to hit, its input payload, the verification GraphQL query,
  // and the expected query result.
  const TEST_DEFINITION_FILES = [
    'cicd_organization.json',
    'cicd_organization_from_run.json',
    'cicd_artifact.json',
    'cicd_artifact_commit_association.json',
    'cicd_artifact_deployment.json',
    'cicd_artifact_with_build.json',
    'cicd_build.json',
    'cicd_build_with_start_end.json',
    'cicd_pipeline.json',
    'cicd_repository.json',
    'compute_application.json',
  ];

  // Generate one test per definition file instead of repeating the same
  // load-then-check boilerplate eleven times. Test names are identical to
  // the previous hand-written ones: "check <model> Hasura endpoint".
  for (const fileName of TEST_DEFINITION_FILES) {
    const model = path.basename(fileName, '.json');
    test(`check ${model} Hasura endpoint`, async () => {
      // Plain sequential awaits; avoid mixing `await` with `.then(...)`.
      const testDefinition = await loadTestDefinition(fileName);
      await checkHasuraEndpoint(testDefinition);
    });
  }

  /**
   * Reads and parses a test definition from
   * resources/hasura/test_definitions.
   *
   * NOTE(review): JSON.parse output is unvalidated — assumes the file
   * matches the TestDefinition shape; verify fixtures when adding new ones.
   */
  async function loadTestDefinition(
    testDefinitionFileName: string
  ): Promise<TestDefinition> {
    const directory = path.join(RESOURCES_DIR, 'hasura', 'test_definitions');

    return JSON.parse(
      await fs.readFile(path.join(directory, testDefinitionFileName), 'utf8')
    );
  }

  /**
   * Sends the definition's input payload to its Hasura endpoint, then runs
   * the verification query and deep-compares the result against the
   * expected output fixture.
   */
  async function checkHasuraEndpoint(test: TestDefinition) {
    const directory = path.join(RESOURCES_DIR, 'hasura', 'test_data');

    const input = await fs.readFile(path.join(directory, test.input), 'utf8');
    const expectedOutput = await fs.readFile(
      path.join(directory, test.output),
      'utf8'
    );
    const query = await fs.readFile(path.join(directory, test.query), 'utf8');

    await hasuraClient.hitEndpoint(test.endpoint, input);
    const output = await hasuraClient.makeQuery(query);

    // lodash isEqual: order-insensitive deep equality on parsed JSON.
    expect(isEqual(output, JSON.parse(expectedOutput))).toBe(true);
  }
});
52 changes: 9 additions & 43 deletions init/test/integration-tests/integration.test.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
import {execSync} from 'child_process';
import fs from 'fs-extra';
import {isEqual} from 'lodash';
import os from 'os';
import path from 'path';

import {AirbyteClient, ConnectionConfiguration} from './airbyte-client';
import {HasuraClient} from './hasura-client';
import {TestDefinition} from './types';

let destinationId: string;
let hasuraAdminSecret: string;
Expand All @@ -27,11 +25,15 @@ beforeAll(async () => {
describe('integration tests', () => {
const RESOURCES_DIR = path.join(__dirname, '..', 'resources');

test('check connection to the Faros destination', async () => {
expect(await airbyteClient.checkDestinationConnection(destinationId)).toBe(
true
);
}, 60 * 1000);
test(
'check connection to the Faros destination',
async () => {
expect(
await airbyteClient.checkDestinationConnection(destinationId)
).toBe(true);
},
60 * 1000
);

test(
'verify writes in Hasura',
Expand Down Expand Up @@ -101,16 +103,6 @@ describe('integration tests', () => {
60 * 1000
);

test(
'check cicd_organization Hasura endpoint',
async () => {
await loadTestDefinition('cicd_organization.json').then((test) =>
checkHasuraEndpoint(test)
);
},
5 * 1000
);

function writeRecords(tmpDir: string) {
execSync(`docker pull farosai/airbyte-faros-destination \
&& cat ${tmpDir}/streams.in \
Expand Down Expand Up @@ -154,30 +146,4 @@ describe('integration tests', () => {
--origin ${origin} \
--community_edition`);
}

async function loadTestDefinition(
testDefinitionFileName: string
): Promise<TestDefinition> {
const directory = path.join(RESOURCES_DIR, 'hasura', 'test_definitions');

return JSON.parse(
await fs.readFile(path.join(directory, testDefinitionFileName), 'utf8')
);
}

async function checkHasuraEndpoint(test: TestDefinition) {
const directory = path.join(RESOURCES_DIR, 'hasura', 'test_data');

const input = await fs.readFile(path.join(directory, test.input), 'utf8');
const expectedOutput = await fs.readFile(
path.join(directory, test.output),
'utf8'
);
const query = await fs.readFile(path.join(directory, test.query), 'utf8');

await hasuraClient.hitEndpoint(test.endpoint, input);
const output = await hasuraClient.makeQuery(query);

expect(isEqual(output, JSON.parse(expectedOutput))).toBe(true);
}
});
13 changes: 13 additions & 0 deletions init/test/resources/hasura/test_data/cicd_artifact.gql
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Verification query for the cicd_artifact endpoint test: fetches every
# cicd_Artifact row tagged with the test-specific origin, selecting all
# scalar fields for comparison against the expected-output fixture.
query MyQuery {
  cicd_Artifact(where: {origin: {_eq: "cicd_artifact_test"}}) {
    build
    id
    name
    origin
    repository
    tags
    type
    uid
    url
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Verification query for the cicd_artifact_commit_association endpoint test:
# fetches associations written with the test-specific origin so the test can
# compare them against the expected-output fixture.
query MyQuery {
  cicd_ArtifactCommitAssociation(where: {origin: {_eq: "cicd_artifact_commit_association_test"}}) {
    artifact
    commit
    id
    origin
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"data_artifact_repository": "artifact_repository1",
"data_commit_organization": "commit_organization1",
"data_artifact_id": "artifact_id1",
"data_commit_repository": "commit_repository1",
"data_artifact_organization": "artifact_organization1",
"data_commit_sha": "commit_sha1",
"data_commit_source": "commit_source1",
"data_artifact_source": "artifact_source1",
"data_origin": "cicd_artifact_commit_association_test"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"data": {
"cicd_ArtifactCommitAssociation": [
{
"artifact": "artifact_source1|artifact_organization1|artifact_repository1|artifact_id1",
"commit": "commit_source1|commit_organization1|commit_repository1|commit_sha1",
"id": "artifact_source1|artifact_organization1|artifact_repository1|artifact_id1|commit_source1|commit_organization1|commit_repository1|commit_sha1",
"origin": "cicd_artifact_commit_association_test"
}
]
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Verification query for the cicd_artifact_deployment endpoint test: fetches
# deployment links written with the test-specific origin for comparison
# against the expected-output fixture.
query MyQuery {
  cicd_ArtifactDeployment(where: {origin: {_eq: "cicd_artifact_deployment_test"}}) {
    artifact
    deployment
    id
    origin
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"data_artifact_id": "artifact_id1",
"data_artifact_organization": "artifact_organization1",
"data_artifact_repository": "artifact_repository1",
"data_artifact_source": "artifact_source1",
"data_deploy_id": "deploy_id1",
"data_deploy_source": "deploy_source1",
"data_origin": "cicd_artifact_deployment_test"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"data": {
"cicd_ArtifactDeployment": [
{
"artifact": "artifact_source1|artifact_organization1|artifact_repository1|artifact_id1",
"deployment": "deploy_source1|deploy_id1",
"id": "artifact_source1|artifact_organization1|artifact_repository1|artifact_id1|deploy_source1|deploy_id1",
"origin": "cicd_artifact_deployment_test"
}
]
}
}
8 changes: 8 additions & 0 deletions init/test/resources/hasura/test_data/cicd_artifact_in.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"data_origin": "cicd_artifact_test",
"data_artifact_repository": "repository1",
"data_artifact_id": "artifact1",
"data_artifact_organization": "organization1",
"data_artifact_source": "source1"
}

17 changes: 17 additions & 0 deletions init/test/resources/hasura/test_data/cicd_artifact_out.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
{
"data": {
"cicd_Artifact": [
{
"build": null,
"id": "source1|organization1|repository1|artifact1",
"name": null,
"origin": "cicd_artifact_test",
"repository": "source1|organization1|repository1",
"tags": null,
"type": null,
"uid": "artifact1",
"url": null
}
]
}
}
13 changes: 13 additions & 0 deletions init/test/resources/hasura/test_data/cicd_artifact_with_build.gql
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Verification query for the cicd_artifact_with_build endpoint test: same
# field set as the plain artifact query, but filtered to the origin used by
# the with-build variant so the `build` reference can be asserted non-null.
query MyQuery {
  cicd_Artifact(where: {origin: {_eq: "cicd_artifact_with_build_test"}}) {
    build
    id
    name
    origin
    repository
    tags
    type
    uid
    url
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
{
"data_artifact_repository": "artifact_repository1",
"data_run_source": "run_source1",
"data_run_pipeline": "run_pipeline1",
"data_artifact_id": "artifact_id2",
"data_run_organization": "run_organization1",
"data_artifact_organization": "artifact_organization1",
"data_run_id": "run_id1",
"data_artifact_source": "artifact_source1",
"data_origin": "cicd_artifact_with_build_test"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
{
"data": {
"cicd_Artifact": [
{
"build": "run_source1|run_organization1|run_pipeline1|run_id1",
"id": "artifact_source1|artifact_organization1|artifact_repository1|artifact_id2",
"name": null,
"origin": "cicd_artifact_with_build_test",
"repository": "artifact_source1|artifact_organization1|artifact_repository1",
"tags": null,
"type": null,
"uid": "artifact_id2",
"url": null
}
]
}
}
17 changes: 17 additions & 0 deletions init/test/resources/hasura/test_data/cicd_build.gql
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Verification query for the cicd_build endpoint test: fetches builds written
# with the test-specific origin, selecting all scalar fields (including the
# flattened status category/detail columns) for fixture comparison.
query MyQuery {
  cicd_Build(where: {origin: {_eq: "cicd_build_test"}}) {
    createdAt
    endedAt
    id
    name
    number
    origin
    pipeline
    startedAt
    status
    statusCategory
    statusDetail
    uid
    url
  }
}
8 changes: 8 additions & 0 deletions init/test/resources/hasura/test_data/cicd_build_in.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"data_run_source": "run_source1",
"data_run_pipeline": "run_pipeline1",
"data_run_organization": "run_organization1",
"run_status": {"category": "A", "detail": "B"},
"data_run_id": "run_id2",
"data_origin": "cicd_build_test"
}

0 comments on commit abed191

Please sign in to comment.