
Commit

fix tests
Panaetius committed Nov 18, 2021
1 parent 9d7d6fc commit d6228f4
Showing 1 changed file with 46 additions and 45 deletions.
91 changes: 46 additions & 45 deletions tests/service/views/test_cache_views.py
@@ -38,7 +38,7 @@ def test_serve_api_spec(svc_client):
"Content-Type": "application/json",
"accept": "application/json",
}
response = svc_client.get("/spec.json", headers=headers)
response = svc_client.get("spec.json", headers=headers)

assert 0 != len(response.json.keys())
assert 200 == response.status_code
@@ -47,7 +47,7 @@ def test_serve_api_spec(svc_client):
@pytest.mark.service
def test_list_upload_files_all(svc_client, identity_headers):
"""Check list uploaded files view."""
response = svc_client.get("/cache.files_list", headers=identity_headers)
response = svc_client.get("/0.9/cache.files_list", headers=identity_headers)

assert {"result"} == set(response.json.keys())

@@ -62,7 +62,7 @@ def test_list_upload_files_all_no_auth(svc_client):
"Content-Type": "application/json",
"accept": "application/json",
}
response = svc_client.get("/cache.files_list", headers=headers)
response = svc_client.get("/0.9/cache.files_list", headers=headers)

assert 200 == response.status_code

@@ -164,7 +164,9 @@ def test_file_upload_same_file(svc_client, identity_headers):
@pytest.mark.service
def test_file_upload_no_auth(svc_client):
"""Check failed file upload."""
response = svc_client.post("/cache.files_upload", data=dict(file=(io.BytesIO(b"this is a test"), "datafile.txt")))
response = svc_client.post(
"/0.9/cache.files_upload", data=dict(file=(io.BytesIO(b"this is a test"), "datafile.txt"))
)

assert response
assert 200 == response.status_code
@@ -214,7 +216,7 @@ def test_file_upload_with_users(svc_client, identity_headers):
assert response
assert {"result"} == set(response.json.keys())

response = svc_client.get("/cache.files_list", headers=headers_user1)
response = svc_client.get("/0.9/cache.files_list", headers=headers_user1)

assert response

@@ -243,7 +245,7 @@ def test_clone_projects_no_auth(svc_client, identity_headers, it_remote_repo_url
err_message = "user identification is incorrect or missing"
assert err_message == response.json["error"]["reason"]

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
assert response
assert {"result"} == set(response.json.keys())

@@ -257,7 +259,7 @@ def test_clone_projects_with_auth(svc_client, identity_headers, it_remote_repo_u
"git_url": it_remote_repo_url,
}

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
@@ -275,31 +277,31 @@ def test_clone_projects_multiple(svc_client, identity_headers, it_remote_repo_ur
"git_url": it_remote_repo_url,
}

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
assert response

assert {"result"} == set(response.json.keys())
project_ids.append(response.json["result"])

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
project_ids.append(response.json["result"])

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
project_ids.append(response.json["result"])

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
last_pid = response.json["result"]["project_id"]

response = svc_client.get("/cache.project_list", headers=identity_headers)
response = svc_client.get("/0.9/cache.project_list", headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
@@ -320,7 +322,7 @@ def test_clone_projects_list_view_errors(svc_client, identity_headers, it_remote
"git_url": it_remote_repo_url,
}

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
assert response
assert {"result"} == set(response.json.keys())

@@ -335,7 +337,7 @@ def test_clone_projects_list_view_errors(svc_client, identity_headers, it_remote
assert {"error"} == set(response.json.keys())
assert INVALID_HEADERS_ERROR_CODE == response.json["error"]["code"]

response = svc_client.get("/cache.project_list", headers=identity_headers)
response = svc_client.get("/0.9/cache.project_list", headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
@@ -355,7 +357,7 @@ def test_clone_projects_invalid_headers(svc_client, identity_headers, it_remote_
"git_url": it_remote_repo_url,
}

response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/cache.project_clone", data=json.dumps(payload), headers=identity_headers)
assert response

assert {"result"} == set(response.json.keys())
@@ -368,7 +370,7 @@ def test_clone_projects_invalid_headers(svc_client, identity_headers, it_remote_
assert {"error"} == set(response.json.keys())
assert INVALID_HEADERS_ERROR_CODE == response.json["error"]["code"]

response = svc_client.get("/cache.project_list", headers=identity_headers)
response = svc_client.get("/0.9/cache.project_list", headers=identity_headers)

assert response
assert {"result"} == set(response.json.keys())
@@ -449,7 +451,7 @@ def test_upload_tar_unpack_archive(datapack_tar, svc_client_with_repo):
assert not file_["is_archive"]
assert not file_["unpack_archive"]

response = svc_client.get("/cache.files_list", headers=headers)
response = svc_client.get("/0.9/cache.files_list", headers=headers)

assert response
assert 200 == response.status_code
@@ -515,7 +517,7 @@ def test_upload_gz_unpack_archive(datapack_gz, svc_client_with_repo):
assert not file_["is_archive"]
assert not file_["unpack_archive"]

response = svc_client.get("/cache.files_list", headers=headers)
response = svc_client.get("/0.9/cache.files_list", headers=headers)

assert response
assert 200 == response.status_code
@@ -635,37 +637,37 @@ def test_check_migrations_local(svc_client_setup):
"""Check if migrations are required for a local project."""
svc_client, headers, project_id, _, _ = svc_client_setup

response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
response = svc_client.get("/0.9/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
assert 200 == response.status_code

assert response.json["result"]["core_compatibility_status"]["migration_required"]
assert not response.json["result"]["template_status"]["newer_template_available"]
assert not response.json["result"]["dockerfile_renku_status"]["automated_dockerfile_update"]
assert response.json["result"]["migration_required"]
assert not response.json["result"]["template_update_possible"]
assert not response.json["result"]["docker_update_possible"]
assert response.json["result"]["project_supported"]
assert response.json["result"]["project_renku_version"]
assert response.json["result"]["core_renku_version"]
assert "template_source" in response.json["result"]["template_status"]
assert "template_ref" in response.json["result"]["template_status"]
assert "template_id" in response.json["result"]["template_status"]
assert "automated_template_update" in response.json["result"]["template_status"]
assert response.json["result"]["project_version"]
assert response.json["result"]["latest_version"]
assert "template_source" in response.json["result"]
assert "template_ref" in response.json["result"]
assert "template_id" in response.json["result"]
assert "automated_template_update" in response.json["result"]


@pytest.mark.service
@pytest.mark.integration
def test_check_migrations_remote(svc_client, identity_headers, it_remote_repo_url):
"""Check if migrations are required for a remote project."""
response = svc_client.get(
"/1.0/cache.migrations_check", query_string=dict(git_url=it_remote_repo_url), headers=identity_headers
"/cache.migrations_check", query_string=dict(git_url=it_remote_repo_url), headers=identity_headers
)

assert 200 == response.status_code

assert response.json["result"]["core_compatibility_status"]["migration_required"]
assert not response.json["result"]["template_status"]["newer_template_available"]
assert not response.json["result"]["dockerfile_renku_status"]["automated_dockerfile_update"]
assert response.json["result"]["migration_required"]
assert not response.json["result"]["template_update_possible"]
assert not response.json["result"]["docker_update_possible"]
assert response.json["result"]["project_supported"]
assert response.json["result"]["project_renku_version"]
assert response.json["result"]["core_renku_version"]
assert response.json["result"]["project_version"]
assert response.json["result"]["latest_version"]


@pytest.mark.service
@@ -674,13 +676,12 @@ def test_check_no_migrations(svc_client_with_repo):
"""Check if migrations are not required."""
svc_client, headers, project_id, _ = svc_client_with_repo

response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
response = svc_client.get("/0.9/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)

assert 200 == response.status_code

assert not response.json["result"]["core_compatibility_status"]["migration_required"]
assert not response.json["result"]["template_status"]["newer_template_available"]
assert not response.json["result"]["dockerfile_renku_status"]["automated_dockerfile_update"]
assert not response.json["result"]["migration_required"]
assert not response.json["result"]["template_update_possible"]
assert not response.json["result"]["docker_update_possible"]
assert response.json["result"]["project_supported"]


@@ -718,9 +719,9 @@ def test_migrating_protected_branch(svc_protected_old_repo):
"""Check migrating on a protected branch does not change cache state."""
svc_client, headers, project_id, _, _ = svc_protected_old_repo

response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
response = svc_client.get("/0.9/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
assert 200 == response.status_code
assert response.json["result"]["core_compatibility_status"]["migration_required"]
assert response.json["result"]["migration_required"]

response = svc_client.post(
"/cache.migrate", data=json.dumps(dict(project_id=project_id, skip_docker_update=True)), headers=headers
@@ -732,9 +733,9 @@ def test_migrating_protected_branch(svc_protected_old_repo):
m.startswith("Successfully applied") and m.endswith("migrations.") for m in response.json["result"]["messages"]
)

response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
response = svc_client.get("/0.9/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers)
assert 200 == response.status_code
assert response.json["result"]["core_compatibility_status"]["migration_required"]
assert response.json["result"]["migration_required"]


@pytest.mark.service
@@ -762,7 +763,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_
"project_id": project_id,
}

response = svc_client.get("/datasets.list", query_string=params, headers=identity_headers)
response = svc_client.get("/0.9/datasets.list", query_string=params, headers=identity_headers)
assert response
assert 200 == response.status_code

@@ -774,7 +775,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_
"name": uuid.uuid4().hex,
}

response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers)
response = svc_client.post("/0.9/datasets.create", data=json.dumps(payload), headers=identity_headers)

assert response
assert 200 == response.status_code