test(misc): misc test updates #6890

Merged: 3 commits, Dec 29, 2022
3 changes: 1 addition & 2 deletions .github/workflows/docker-unified.yml
@@ -312,8 +312,7 @@ jobs:
fetch-depth: 0
- name: Pre-build artifacts for docker image
run: |
-          export USE_SYSTEM_NODE="true"
-          ./gradlew :datahub-frontend:dist -PuseSystemNode=${USE_SYSTEM_NODE} -x test -x yarnTest -x yarnLint --parallel
+          ./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel
mv ./datahub-frontend/build/distributions/datahub-frontend-*.zip datahub-frontend.zip
- name: Build and push
uses: ./.github/actions/docker-custom-build-and-push
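With this change the workflow no longer exports USE_SYSTEM_NODE or passes -PuseSystemNode, so the frontend dist step presumably builds with the Gradle-managed Node.js rather than whatever Node the CI runner happens to provide.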
2 changes: 1 addition & 1 deletion datahub-web-react/build.gradle
@@ -77,7 +77,7 @@ task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
}

task cleanExtraDirs {
-    delete 'node_modules'
+    delete 'node_modules/.yarn-integrity'
delete 'dist'
delete 'tmp'
delete 'just'
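Deleting only node_modules/.yarn-integrity, rather than the entire node_modules tree, invalidates yarn's up-to-date marker so the next yarn install re-verifies the workspace without re-downloading every package; that appears to be the point of this change.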
@@ -83,6 +83,7 @@
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
+import javax.persistence.EntityNotFoundException;

import io.ebean.PagedList;
import lombok.Value;
@@ -1588,7 +1589,12 @@ public RollbackRunResult deleteUrn(Urn urn) {
final AspectSpec keySpec = spec.getKeyAspectSpec();
String keyAspectName = getKeyAspectName(urn);

-    EntityAspect latestKey = _aspectDao.getLatestAspect(urn.toString(), keyAspectName);
+    EntityAspect latestKey = null;
+    try {
+      latestKey = _aspectDao.getLatestAspect(urn.toString(), keyAspectName);
+    } catch (EntityNotFoundException e) {
+      log.warn("Entity to delete does not exist. {}", urn.toString());
+    }
if (latestKey == null || latestKey.getSystemMetadata() == null) {
return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion);
}
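The change above turns a hard failure into a warning: deleting a URN whose key aspect row is missing now logs and falls through to the existing early return, instead of surfacing Ebean's EntityNotFoundException to the caller. A minimal Python sketch of the same guard pattern, assuming a hypothetical dao whose get_latest_aspect raises when no row exists (the names here are illustrative stand-ins, not DataHub APIs):

```python
import logging

logger = logging.getLogger(__name__)


class EntityNotFound(Exception):
    """Stand-in for javax.persistence.EntityNotFoundException."""


def delete_urn(dao, urn: str, key_aspect_name: str) -> list:
    removed_aspects: list = []
    # Guarded read: a missing entity row becomes a logged warning plus an
    # early return, rather than an exception that aborts the rollback.
    latest_key = None
    try:
        latest_key = dao.get_latest_aspect(urn, key_aspect_name)
    except EntityNotFound:
        logger.warning("Entity to delete does not exist. %s", urn)
    if latest_key is None or latest_key.system_metadata is None:
        return removed_aspects
    # ... proceed with the actual aspect deletion here ...
    return removed_aspects
```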
3 changes: 2 additions & 1 deletion smoke-test/requirements.txt
@@ -4,4 +4,5 @@ psutil
tenacity
-e ../metadata-ingestion[datahub-rest,datahub-kafka,mysql]
slack-sdk==3.18.1
-aiohttp
\ No newline at end of file
+aiohttp
+joblib
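joblib is added for the Parallel/delayed helpers that the updated smoke-test/tests/utils.py below now uses.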
1 change: 0 additions & 1 deletion smoke-test/run-quickstart.sh
@@ -15,5 +15,4 @@ echo "test_user:test_pass" >> ~/.datahub/plugins/frontend/auth/user.props
echo "DATAHUB_VERSION = $DATAHUB_VERSION"
DATAHUB_TELEMETRY_ENABLED=false \
DOCKER_COMPOSE_BASE="file://$( dirname "$DIR" )" \
-
datahub docker quickstart --standalone_consumers --dump-logs-on-failure
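The deleted blank line sat between a trailing backslash and the final command, which ended the shell line continuation early; with it removed, the DATAHUB_TELEMETRY_ENABLED and DOCKER_COMPOSE_BASE assignments once again apply to the datahub docker quickstart invocation.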
46 changes: 27 additions & 19 deletions smoke-test/tests/utils.py
@@ -3,6 +3,7 @@
from datetime import datetime, timedelta
from typing import Any, Dict, List, Tuple
from time import sleep
+from joblib import Parallel, delayed

import requests_wrapper as requests

@@ -112,7 +113,7 @@ def ingest_file_via_rest(filename: str) -> Pipeline:
return pipeline


-def delete_urns_from_file(filename: str) -> None:
+def delete_urns_from_file(filename: str, shared_data: bool = False) -> None:
if not cli_utils.get_boolean_env_variable("CLEANUP_DATA", True):
print("Not cleaning data to save time")
return
@@ -124,26 +125,33 @@ def delete_urns_from_file(filename: str) -> None:
}
)

+    def delete(entry):
+        is_mcp = "entityUrn" in entry
+        urn = None
+        # Kill Snapshot
+        if is_mcp:
+            urn = entry["entityUrn"]
+        else:
+            snapshot_union = entry["proposedSnapshot"]
+            snapshot = list(snapshot_union.values())[0]
+            urn = snapshot["urn"]
+        payload_obj = {"urn": urn}
+
+        cli_utils.post_delete_endpoint_with_session_and_url(
+            session,
+            get_gms_url() + "/entities?action=delete",
+            payload_obj,
+        )

with open(filename) as f:
d = json.load(f)
-        for entry in d:
-            is_mcp = "entityUrn" in entry
-            urn = None
-            # Kill Snapshot
-            if is_mcp:
-                urn = entry["entityUrn"]
-            else:
-                snapshot_union = entry["proposedSnapshot"]
-                snapshot = list(snapshot_union.values())[0]
-                urn = snapshot["urn"]
-            payload_obj = {"urn": urn}
-
-            cli_utils.post_delete_endpoint_with_session_and_url(
-                session,
-                get_gms_url() + "/entities?action=delete",
-                payload_obj,
-            )
-    sleep(requests.ELASTICSEARCH_REFRESH_INTERVAL_SECONDS)
+        Parallel(n_jobs=10)(delayed(delete)(entry) for entry in d)

+    # Deletes require 60 seconds when run between tests operating on common data, otherwise standard sync wait
Inline review comment from a Collaborator on the added 60-second wait: :(

+    if shared_data:
+        sleep(60)
+    else:
+        sleep(requests.ELASTICSEARCH_REFRESH_INTERVAL_SECONDS)


# Fixed now value
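The rewritten helper fans the per-URN deletes out across ten joblib workers instead of issuing them one at a time, then waits for Elasticsearch to settle: 60 seconds when tests share fixture data, otherwise the standard refresh interval. A self-contained sketch of the joblib pattern, with a stub delete_one standing in for the real POST to the GMS /entities?action=delete endpoint (the constant value below is an assumption, not the test suite's actual setting):

```python
from time import sleep

from joblib import Parallel, delayed

ELASTICSEARCH_REFRESH_INTERVAL_SECONDS = 5  # stand-in for the test constant


def delete_one(urn: str) -> None:
    # In the PR this is an HTTP POST of {"urn": urn} to GMS; print stands in.
    print(f"deleting {urn}")


def delete_urns(urns: list, shared_data: bool = False) -> None:
    # Up to ten deletes in flight at once, mirroring n_jobs=10 in the PR.
    Parallel(n_jobs=10)(delayed(delete_one)(urn) for urn in urns)
    # Deletes against shared fixture data need a longer settle window.
    sleep(60 if shared_data else ELASTICSEARCH_REFRESH_INTERVAL_SECONDS)


if __name__ == "__main__":
    delete_urns([f"urn:li:corpuser:user{i}" for i in range(25)])
```

Parallelizing the deletes trades a longer fixed wait on shared data for much faster cleanup overall, since the HTTP round-trips dominate the sequential version.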