Skip to content
Merged
7 changes: 7 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,11 @@ test-prod: build
docker run -it -v ${PWD}:/usr/src -w /usr/src \
-e LABELBOX_TEST_ENVIRON="prod" \
-e LABELBOX_TEST_API_KEY_PROD=${LABELBOX_TEST_API_KEY_PROD} \
local/labelbox-python:test pytest $(PATH_TO_TEST)

test-onprem: build
docker run -it -v ${PWD}:/usr/src -w /usr/src \
-e LABELBOX_TEST_ENVIRON="onprem" \
-e LABELBOX_TEST_API_KEY_ONPREM=${LABELBOX_TEST_API_KEY_ONPREM} \
-e LABELBOX_TEST_ONPREM_HOSTNAME=${LABELBOX_TEST_ONPREM_HOSTNAME} \
local/labelbox-python:test pytest $(PATH_TO_TEST)
2 changes: 1 addition & 1 deletion pytest.ini
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
[pytest]
addopts = -s -vv --reruns 5 --reruns-delay 10
addopts = -s -vv -x --reruns 5 --reruns-delay 10
markers =
slow: marks tests as slow (deselect with '-m "not slow"')
15 changes: 9 additions & 6 deletions tests/integration/annotation_import/test_label_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,18 @@ def test_create_from_url(client, project, annotation_import_test_helpers):
annotation_import_test_helpers.check_running_state(label_import, name, url)


def test_create_from_objects(client, project, object_predictions,
def test_create_from_objects(client, configured_project, object_predictions,
annotation_import_test_helpers):
"""this test should check running state only to validate running, not completed"""
name = str(uuid.uuid4())

label_import = LabelImport.create_from_objects(client=client,
project_id=project.uid,
name=name,
labels=object_predictions)
assert label_import.parent_id == project.uid
label_import = LabelImport.create_from_objects(
client=client,
project_id=configured_project.uid,
name=name,
labels=object_predictions)

assert label_import.parent_id == configured_project.uid
annotation_import_test_helpers.check_running_state(label_import, name)
annotation_import_test_helpers.assert_file_content(
label_import.input_file_url, object_predictions)
Expand Down
4 changes: 4 additions & 0 deletions tests/integration/annotation_import/test_model_run.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import time
import os
import pytest


def test_model_run(client, configured_project_with_label, rand_gen):
Expand Down Expand Up @@ -84,6 +86,8 @@ def test_model_run_export_labels(model_run_with_model_run_data_rows):
assert len(labels) == 3


@pytest.mark.skipif(condition=os.environ.get('LABELBOX_TEST_ENVIRON') == "onprem",
reason="does not work for onprem")
def test_model_run_status(model_run_with_model_run_data_rows):

def get_model_run_status():
Expand Down
9 changes: 9 additions & 0 deletions tests/integration/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ class Environ(Enum):
LOCAL = 'local'
PROD = 'prod'
STAGING = 'staging'
ONPREM = 'onprem'


@pytest.fixture(scope="session")
Expand All @@ -46,6 +47,12 @@ def graphql_url(environ: str) -> str:
return 'https://api.labelbox.com/graphql'
elif environ == Environ.STAGING:
return 'https://staging-api.labelbox.com/graphql'
elif environ == Environ.ONPREM:
hostname = os.environ.get('LABELBOX_TEST_ONPREM_HOSTNAME', None)
if hostname is None:
raise Exception("Missing LABELBOX_TEST_ONPREM_HOSTNAME")
instance_id = hostname.split("-")[1].split(".")[0]
return f"https://app.replicated-{instance_id}.labelbox.dev/api/_gql"
return 'http://host.docker.internal:8080/graphql'


Expand All @@ -54,6 +61,8 @@ def testing_api_key(environ: str) -> str:
return os.environ["LABELBOX_TEST_API_KEY_PROD"]
elif environ == Environ.STAGING:
return os.environ["LABELBOX_TEST_API_KEY_STAGING"]
elif environ == Environ.ONPREM:
return os.environ["LABELBOX_TEST_API_KEY_ONPREM"]
return os.environ["LABELBOX_TEST_API_KEY_LOCAL"]


Expand Down
3 changes: 3 additions & 0 deletions tests/integration/test_label.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import pytest
import requests
import os

from labelbox import Label

Expand Down Expand Up @@ -35,6 +36,8 @@ def test_label_export(configured_project_with_label):
# The new exporter doesn't work with the create_label mutation


@pytest.mark.skipif(condition=os.environ.get('LABELBOX_TEST_ENVIRON') == "onprem",
reason="does not work for onprem")
def test_label_update(configured_project_with_label):
_, _, _, label = configured_project_with_label
label.update(label="something else")
Expand Down
6 changes: 5 additions & 1 deletion tests/integration/test_labeler_performance.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
from datetime import datetime, timezone, timedelta
import pytest
import os


@pytest.mark.skipif(
condition=os.environ.get('LABELBOX_TEST_ENVIRON') == "onprem",
reason="longer runtime than expected for onprem. unskip when resolved.")
def test_labeler_performance(configured_project_with_label):
project, _, _, _ = configured_project_with_label

Expand All @@ -13,4 +17,4 @@ def test_labeler_performance(configured_project_with_label):
assert isinstance(my_performance.last_activity_time, datetime)
now_utc = datetime.now().astimezone(timezone.utc)
assert timedelta(0) < now_utc - my_performance.last_activity_time < \
timedelta(seconds=30)
timedelta(seconds=60)
7 changes: 7 additions & 0 deletions tests/integration/test_project.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import time
import os

import pytest
import requests
Expand Down Expand Up @@ -124,6 +125,8 @@ def test_extend_reservations(project):
project.extend_reservations("InvalidQueueType")


@pytest.mark.skipif(condition=os.environ.get('LABELBOX_TEST_ENVIRON') == "onprem",
reason="new mutation does not work for onprem")
def test_attach_instructions(client, project):
with pytest.raises(ValueError) as execinfo:
project.upsert_instructions('tests/integration/media/sample_pdf.pdf')
Expand All @@ -147,6 +150,8 @@ def test_attach_instructions(client, project):
exc_info.value)


@pytest.mark.skipif(condition=os.environ.get('LABELBOX_TEST_ENVIRON') == "onprem",
reason="new mutation does not work for onprem")
def test_html_instructions(configured_project):
html_file_path = '/tmp/instructions.html'
sample_html_str = "<html></html>"
Expand All @@ -161,6 +166,8 @@ def test_html_instructions(configured_project):
assert requests.get(instructions).text == sample_html_str


@pytest.mark.skipif(condition=os.environ.get('LABELBOX_TEST_ENVIRON') == "onprem",
reason="new mutation does not work for onprem")
def test_same_ontology_after_instructions(
configured_project_with_complex_ontology):
project, _ = configured_project_with_complex_ontology
Expand Down