diff --git a/backend/btrixcloud/profiles.py b/backend/btrixcloud/profiles.py
index 72c5203040..1a0d7e0935 100644
--- a/backend/btrixcloud/profiles.py
+++ b/backend/btrixcloud/profiles.py
@@ -270,7 +270,10 @@ async def do_commit_to_profile(
                 existing_profile.resource.size if existing_profile.resource else 0
             )

-            origins = existing_profile.origins
+            # only set origins from existing profile if browser
+            # actually launched with that profile (eg. not a reset)
+            if metadata.baseprofile == profileid:
+                origins = existing_profile.origins
         else:
             profileid = metadata.profileid

diff --git a/backend/test/conftest.py b/backend/test/conftest.py
index 72cff12467..369afd4ed6 100644
--- a/backend/test/conftest.py
+++ b/backend/test/conftest.py
@@ -639,33 +639,30 @@ def url_list_config_id(crawler_auth_headers, default_org_id):

 @pytest.fixture(scope="session")
 def profile_browser_id(admin_auth_headers, default_org_id):
-    return _create_profile_browser(admin_auth_headers, default_org_id)
+    return create_profile_browser(admin_auth_headers, default_org_id)


 @pytest.fixture(scope="session")
 def profile_browser_2_id(admin_auth_headers, default_org_id):
-    return _create_profile_browser(
+    return create_profile_browser(
         admin_auth_headers, default_org_id, "https://specs.webrecorder.net"
     )


-@pytest.fixture(scope="session")
-def profile_browser_3_id(admin_auth_headers, default_org_id):
-    return _create_profile_browser(admin_auth_headers, default_org_id)
-
-
-@pytest.fixture(scope="session")
-def profile_browser_4_id(admin_auth_headers, default_org_id):
-    return _create_profile_browser(admin_auth_headers, default_org_id)
-
-
-def _create_profile_browser(
-    headers: Dict[str, str], oid: UUID, url: str = "https://old.webrecorder.net"
+def create_profile_browser(
+    headers: Dict[str, str],
+    oid: UUID,
+    url="https://old.webrecorder.net",
+    baseprofile="",
 ):
+    data = {"url": url}
+    if baseprofile:
+        data["profileId"] = baseprofile
+
     r = requests.post(
         f"{API_PREFIX}/orgs/{oid}/profiles/browser",
         headers=headers,
-        json={"url": url},
+        json=data,
     )
     assert r.status_code == 200
     browser_id = r.json()["browserid"]
@@ -711,10 +708,7 @@ def echo_server():


 def prepare_browser_for_profile_commit(
-    browser_id: str,
-    headers: Dict[str, str],
-    oid: UUID,
-    url="https://old.webrecorder.net/tools",
+    browser_id: str, headers: Dict[str, str], oid: UUID, url=None
 ) -> None:
     # Ping to make sure it doesn't expire
     r = requests.post(
@@ -740,14 +734,15 @@ def prepare_browser_for_profile_commit(
     assert data["scale"]
     assert data["oid"] == oid

-    # Navigate to new URL
-    r = requests.post(
-        f"{API_PREFIX}/orgs/{oid}/profiles/browser/{browser_id}/navigate",
-        headers=headers,
-        json={"url": url},
-    )
-    assert r.status_code == 200
-    assert r.json()["success"]
+    # Navigate to new URL, if provided
+    if url:
+        r = requests.post(
+            f"{API_PREFIX}/orgs/{oid}/profiles/browser/{browser_id}/navigate",
+            headers=headers,
+            json={"url": url},
+        )
+        assert r.status_code == 200
+        assert r.json()["success"]

     # Ping browser until ready
     max_attempts = 20
@@ -770,7 +765,10 @@ def prepare_browser_for_profile_commit(
 @pytest.fixture(scope="session")
 def profile_id(admin_auth_headers, default_org_id, profile_browser_id):
     prepare_browser_for_profile_commit(
-        profile_browser_id, admin_auth_headers, default_org_id
+        profile_browser_id,
+        admin_auth_headers,
+        default_org_id,
+        url="https://old.webrecorder.net/tools",
     )

     # Create profile
@@ -862,7 +860,10 @@ def profile_config_id(admin_auth_headers, default_org_id, profile_id):
 @pytest.fixture(scope="session")
 def profile_2_id(admin_auth_headers, default_org_id, profile_browser_2_id):
     prepare_browser_for_profile_commit(
-        profile_browser_2_id, admin_auth_headers, default_org_id
+        profile_browser_2_id,
+        admin_auth_headers,
+        default_org_id,
+        url="https://old.webrecorder.net/tools",
     )

     # Create profile
diff --git a/backend/test/test_profiles.py b/backend/test/test_profiles.py
index 9dcff944f4..f82771cf96 100644
--- a/backend/test/test_profiles.py
+++ b/backend/test/test_profiles.py
@@ -17,6 +17,7 @@
     PROFILE_2_TAGS,
     PROFILE_TAGS_UPDATED,
     prepare_browser_for_profile_commit,
+    create_profile_browser,
 )


@@ -253,7 +254,7 @@ def test_update_profile_metadata(crawler_auth_headers, default_org_id, profile_i


 def test_commit_browser_to_existing_profile(
-    admin_auth_headers, default_org_id, profile_browser_3_id, profile_id
+    admin_auth_headers, default_org_id, profile_id
 ):
     # Get original modified time
     r = requests.get(
@@ -265,11 +266,20 @@ def test_commit_browser_to_existing_profile(
     original_created = data["created"]
     original_modified = data["modified"]

+    url = "https://example-com.webrecorder.net/"
+
+    # create browser with existing profile
+    browser_id = create_profile_browser(
+        admin_auth_headers,
+        default_org_id,
+        url=url,
+        baseprofile=profile_id,
+    )
+
     prepare_browser_for_profile_commit(
-        profile_browser_3_id,
+        browser_id,
         admin_auth_headers,
         default_org_id,
-        url="https://example-com.webrecorder.net",
     )

     time.sleep(10)
@@ -280,7 +290,7 @@ def test_commit_browser_to_existing_profile(
         f"{API_PREFIX}/orgs/{default_org_id}/profiles/{profile_id}",
         headers=admin_auth_headers,
         json={
-            "browserid": profile_browser_3_id,
+            "browserid": browser_id,
             "name": PROFILE_NAME_UPDATED,
             "description": PROFILE_DESC_UPDATED,
             "tags": PROFILE_TAGS_UPDATED,
@@ -315,6 +325,53 @@ def test_commit_browser_to_existing_profile(
     ]


+def test_commit_reset_browser_to_existing_profile(
+    admin_auth_headers, default_org_id, profile_id
+):
+    url = "https://example-com.webrecorder.net/"
+
+    # create new browser w/o existing profile to reset
+    browser_id = create_profile_browser(admin_auth_headers, default_org_id, url=url)
+
+    prepare_browser_for_profile_commit(
+        browser_id, admin_auth_headers, default_org_id, url=url
+    )
+
+    time.sleep(10)
+
+    # Commit new browser to existing profile
+    while True:
+        r = requests.patch(
+            f"{API_PREFIX}/orgs/{default_org_id}/profiles/{profile_id}",
+            headers=admin_auth_headers,
+            json={
+                "browserid": browser_id,
+                "name": PROFILE_NAME_UPDATED,
+                "description": PROFILE_DESC_UPDATED,
+                "tags": PROFILE_TAGS_UPDATED,
+            },
+        )
+        assert r.status_code == 200
+        if r.json().get("detail") == "waiting_for_browser":
+            time.sleep(5)
+            continue
+
+        break
+
+    assert r.json()["updated"]
+
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/profiles/{profile_id}",
+        headers=admin_auth_headers,
+    )
+    assert r.status_code == 200
+    data = r.json()
+
+    assert data.get("origins") == [
+        "https://example-com.webrecorder.net",
+    ]
+
+
 @pytest.mark.parametrize(
     "sort_by,sort_direction,profile_1_index,profile_2_index",
     [
@@ -419,9 +476,7 @@ def test_delete_profile(admin_auth_headers, default_org_id, profile_2_id):
     assert r.json()["detail"] == "profile_not_found"


-def test_create_profile_read_only_org(
-    admin_auth_headers, default_org_id, profile_browser_4_id
-):
+def test_create_profile_read_only_org(admin_auth_headers, default_org_id):
     # Set org to read-only
     r = requests.post(
         f"{API_PREFIX}/orgs/{default_org_id}/read-only",
         headers=admin_auth_headers,
     )
     assert r.json()["updated"]

-    prepare_browser_for_profile_commit(
-        profile_browser_4_id, admin_auth_headers, default_org_id
-    )
+    browser_id = create_profile_browser(admin_auth_headers, default_org_id)
+
+    prepare_browser_for_profile_commit(browser_id, admin_auth_headers, default_org_id)

     # Try to create profile, verify we get 403 forbidden
     start_time = time.monotonic()
@@ -443,7 +498,7 @@
         f"{API_PREFIX}/orgs/{default_org_id}/profiles",
         headers=admin_auth_headers,
         json={
-            "browserid": profile_browser_4_id,
+            "browserid": browser_id,
             "name": "uncreatable",
             "description": "because org is read-only",
         },
diff --git a/backend/test/test_run_crawl.py b/backend/test/test_run_crawl.py
index f7807be143..8668873cb0 100644
--- a/backend/test/test_run_crawl.py
+++ b/backend/test/test_run_crawl.py
@@ -25,7 +25,8 @@

 # newly started crawl for this test suite
 # (not using the fixture to be able to test running crawl)
-admin_crawl_id = None
+curr_admin_crawl_id = None
+curr_admin_config_id = None

 seed_file_crawl_id = None
@@ -86,14 +87,17 @@ def test_start_crawl(admin_auth_headers, default_org_id, profile_id):
     )
     data = r.json()

-    global admin_crawl_id
-    admin_crawl_id = data["run_now_job"]
+    global curr_admin_crawl_id
+    curr_admin_crawl_id = data["run_now_job"]
+
+    global curr_admin_config_id
+    curr_admin_config_id = data["id"]


 def test_wait_for_running(admin_auth_headers, default_org_id):
     while True:
         r = requests.get(
-            f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/replay.json",
+            f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/replay.json",
             headers=admin_auth_headers,
         )
         data = r.json()
@@ -105,14 +109,14 @@ def test_crawl_queue(admin_auth_headers, default_org_id):
     # 422 - requires offset and count
     r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/queue",
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/queue",
         headers=admin_auth_headers,
     )
     assert r.status_code == 422

     while True:
         r = requests.get(
-            f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/queue?offset=0&count=20",
+            f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/queue?offset=0&count=20",
             headers=admin_auth_headers,
         )
         assert r.status_code == 200
@@ -127,13 +131,13 @@ def test_crawl_queue_match(admin_auth_headers, default_org_id):
     # 422, regex required
     r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/queueMatchAll",
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/queueMatchAll",
         headers=admin_auth_headers,
     )
     assert r.status_code == 422

     r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/queueMatchAll?regex=webrecorder&offset=0",
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/queueMatchAll?regex=webrecorder&offset=0",
         headers=admin_auth_headers,
     )
@@ -145,7 +149,7 @@ def test_add_exclusion(admin_auth_headers, default_org_id):
     r = requests.post(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/exclusions?regex=test",
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/exclusions?regex=test",
         headers=admin_auth_headers,
     )
     assert r.json()["success"] == True
@@ -153,7 +157,7 @@ def test_add_invalid_exclusion(admin_auth_headers, default_org_id):
     r = requests.post(
- f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/exclusions?regex=[", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/exclusions?regex=[", headers=admin_auth_headers, ) assert r.status_code == 400 @@ -162,7 +166,7 @@ def test_add_invalid_exclusion(admin_auth_headers, default_org_id): def test_remove_exclusion(admin_auth_headers, default_org_id): r = requests.delete( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/exclusions?regex=test", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/exclusions?regex=test", headers=admin_auth_headers, ) assert r.json()["success"] == True @@ -173,7 +177,7 @@ def test_wait_for_complete(admin_auth_headers, default_org_id): data = None while True: r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/replay.json", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/replay.json", headers=admin_auth_headers, ) data = r.json() @@ -189,7 +193,7 @@ def test_wait_for_complete(admin_auth_headers, default_org_id): assert len(data["initialPages"]) == 4 assert data["pagesQueryUrl"].endswith( - f"/orgs/{default_org_id}/crawls/{admin_crawl_id}/pagesSearch" + f"/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/pagesSearch" ) assert data["downloadUrl"] is None @@ -211,21 +215,21 @@ def test_queue_and_exclusions_error_crawl_not_running( admin_auth_headers, default_org_id ): r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/queue?offset=0&count=20", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/queue?offset=0&count=20", headers=admin_auth_headers, ) assert r.status_code == 400 assert r.json()["detail"] == "crawl_not_running" r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/queueMatchAll?regex=webrecorder&offset=0", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/queueMatchAll?regex=webrecorder&offset=0", headers=admin_auth_headers, ) assert r.status_code == 400 assert r.json()["detail"] == "crawl_not_running" r = requests.post( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/exclusions?regex=test2", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/exclusions?regex=test2", headers=admin_auth_headers, ) assert r.status_code == 400 @@ -234,7 +238,7 @@ def test_queue_and_exclusions_error_crawl_not_running( def test_crawl_info(admin_auth_headers, default_org_id): r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) data = r.json() @@ -248,7 +252,7 @@ def test_crawl_info(admin_auth_headers, default_org_id): def test_crawls_include_seed_info(admin_auth_headers, default_org_id): r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) data = r.json() @@ -280,7 +284,7 @@ def test_crawls_include_seed_info(admin_auth_headers, default_org_id): def test_crawl_seeds_endpoint(admin_auth_headers, default_org_id): r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/seeds", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/seeds", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -294,7 +298,7 @@ def test_crawl_seeds_endpoint(admin_auth_headers, default_org_id): def test_crawls_exclude_errors(admin_auth_headers, default_org_id): # Get 
endpoint r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -303,7 +307,7 @@ def test_crawls_exclude_errors(admin_auth_headers, default_org_id): # replay.json endpoint r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/replay.json", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/replay.json", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -324,7 +328,7 @@ def test_crawls_exclude_errors(admin_auth_headers, default_org_id): def test_crawls_exclude_full_seeds(admin_auth_headers, default_org_id): # Get endpoint r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -334,7 +338,7 @@ def test_crawls_exclude_full_seeds(admin_auth_headers, default_org_id): # replay.json endpoint r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/replay.json", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/replay.json", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -355,7 +359,7 @@ def test_crawls_exclude_full_seeds(admin_auth_headers, default_org_id): def test_crawls_include_file_error_page_counts(admin_auth_headers, default_org_id): r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/replay.json", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}/replay.json", headers=admin_auth_headers, ) data = r.json() @@ -363,6 +367,25 @@ def test_crawls_include_file_error_page_counts(admin_auth_headers, default_org_i assert data["errorPageCount"] >= 0 +def test_profile_updated_by_crawl(admin_auth_headers, default_org_id, profile_id): + r = requests.get( + f"{API_PREFIX}/orgs/{default_org_id}/profiles/{profile_id}", + headers=admin_auth_headers, + ) + assert r.status_code == 200 + data = r.json() + assert data["id"] == profile_id + assert data["oid"] == default_org_id + + assert data["modifiedCrawlId"] == curr_admin_crawl_id + assert data["modifiedCrawlCid"] == curr_admin_config_id + + assert data["modifiedCrawlDate"] >= data["modified"] + + assert data["createdByName"] == "admin" + assert data["modifiedByName"] == "admin" + + def test_download_wacz(): r = requests.get(HOST_PREFIX + wacz_path) assert r.status_code == 200 @@ -407,11 +430,11 @@ def test_verify_wacz(): ], ) def test_download_wacz_crawls( - admin_auth_headers, default_org_id, admin_crawl_id, type_path + admin_auth_headers, default_org_id, type_path ): with TemporaryFile() as fh: with requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/{type_path}/{admin_crawl_id}/download", + f"{API_PREFIX}/orgs/{default_org_id}/{type_path}/{curr_admin_crawl_id}/download", headers=admin_auth_headers, stream=True, ) as r: @@ -448,11 +471,11 @@ def test_download_wacz_crawls( ], ) def test_download_wacz_crawls_as_single_wacz( - admin_auth_headers, default_org_id, admin_crawl_id, type_path + admin_auth_headers, default_org_id, type_path ): with TemporaryFile() as fh: with requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/{type_path}/{admin_crawl_id}/download?preferSingleWACZ=true", + f"{API_PREFIX}/orgs/{default_org_id}/{type_path}/{curr_admin_crawl_id}/download?preferSingleWACZ=true", headers=admin_auth_headers, stream=True, ) as r: @@ -504,16 +527,15 @@ def 
test_download_wacz_crawls_as_single_wacz( def test_update_crawl( admin_auth_headers, default_org_id, - admin_crawl_id, ): r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) assert r.status_code == 200 data = r.json() assert sorted(data["tags"]) == ["wr-test-1", "wr-test-2"] - assert len(data["collectionIds"]) == 1 + assert len(data["collectionIds"]) == 0 # Make new collection r = requests.post( @@ -529,7 +551,7 @@ def test_update_crawl( UPDATED_NAME = "Updated crawl name" UPDATED_COLLECTION_IDS = [new_coll_id] r = requests.patch( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, json={ "tags": UPDATED_TAGS, @@ -544,7 +566,7 @@ def test_update_crawl( # Verify update was successful r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -557,7 +579,7 @@ def test_update_crawl( # Update reviewStatus and verify r = requests.patch( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, json={ "reviewStatus": 5, @@ -568,7 +590,7 @@ def test_update_crawl( assert data["updated"] r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -581,7 +603,7 @@ def test_update_crawl( ) assert r.status_code == 200 crawls = r.json()["items"] - assert crawls[0]["id"] == admin_crawl_id + assert crawls[0]["id"] == curr_admin_crawl_id assert crawls[0]["reviewStatus"] == 5 r = requests.get( @@ -590,7 +612,7 @@ def test_update_crawl( ) assert r.status_code == 200 crawls = r.json()["items"] - assert crawls[-1]["id"] == admin_crawl_id + assert crawls[-1]["id"] == curr_admin_crawl_id assert crawls[-1]["reviewStatus"] == 5 # Test sorting on reviewStatus for all-crawls @@ -600,7 +622,7 @@ def test_update_crawl( ) assert r.status_code == 200 crawls = r.json()["items"] - assert crawls[0]["id"] == admin_crawl_id + assert crawls[0]["id"] == curr_admin_crawl_id assert crawls[0]["reviewStatus"] == 5 r = requests.get( @@ -609,12 +631,12 @@ def test_update_crawl( ) assert r.status_code == 200 crawls = r.json()["items"] - assert crawls[-1]["id"] == admin_crawl_id + assert crawls[-1]["id"] == curr_admin_crawl_id assert crawls[-1]["reviewStatus"] == 5 # Try to update to invalid reviewStatus r = requests.patch( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, json={ "reviewStatus": "invalid", @@ -623,7 +645,7 @@ def test_update_crawl( assert r.status_code == 422 r = requests.get( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, ) assert r.status_code == 200 @@ -631,14 +653,14 @@ def test_update_crawl( # Verify deleting works as well r = requests.patch( - f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}", + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}", headers=admin_auth_headers, json={"tags": [], "description": None}, ) 
     assert r.status_code == 200

     r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}",
         headers=admin_auth_headers,
     )
     assert r.status_code == 200
@@ -1269,7 +1291,7 @@ def test_delete_crawls_crawler(crawler_auth_headers, default_org_id, crawler_cra
     r = requests.post(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/delete",
         headers=crawler_auth_headers,
-        json={"crawl_ids": [admin_crawl_id]},
+        json={"crawl_ids": [curr_admin_crawl_id]},
     )
     assert r.status_code == 403
     data = r.json()
@@ -1333,7 +1355,6 @@ def test_delete_crawls_org_owner(
     admin_auth_headers,
     crawler_auth_headers,
     default_org_id,
-    admin_crawl_id,
     crawler_crawl_id,
     wr_specs_crawl_id,
 ):
@@ -1341,7 +1362,7 @@ def test_delete_crawls_org_owner(
     r = requests.post(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/delete",
         headers=admin_auth_headers,
-        json={"crawl_ids": [admin_crawl_id]},
+        json={"crawl_ids": [curr_admin_crawl_id]},
     )
     assert r.status_code == 200
     data = r.json()
     assert data["storageQuotaReached"] is False

     r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{curr_admin_crawl_id}",
         headers=admin_auth_headers,
     )
     assert r.status_code == 404