Merged
backend/btrixcloud/crawls.py (8 additions & 3 deletions)
@@ -86,6 +86,8 @@ class Crawl(BaseMongoModel):
     colls: Optional[List[str]] = []
     tags: Optional[List[str]] = []
 
+    notes: Optional[str]
+
 
 # ============================================================================
 class CrawlOut(Crawl):
@@ -124,6 +126,8 @@ class ListCrawlOut(BaseMongoModel):
     colls: Optional[List[str]] = []
     tags: Optional[List[str]] = []
 
+    notes: Optional[str]
+
 
 # ============================================================================
 class ListCrawls(BaseModel):
@@ -149,9 +153,10 @@ class CrawlCompleteIn(BaseModel):
 
 # ============================================================================
 class UpdateCrawl(BaseModel):
-    """Update crawl tags"""
+    """Update crawl"""
 
     tags: Optional[List[str]] = []
+    notes: Optional[str]
 
 
 # ============================================================================
@@ -376,8 +381,8 @@ async def add_new_crawl(self, crawl_id: str, crawlconfig):
         return False
 
     async def update_crawl(self, crawl_id: str, org: Organization, update: UpdateCrawl):
-        """Update existing crawl (tags only for now)"""
-        query = update.dict(exclude_unset=True, exclude_none=True)
+        """Update existing crawl (tags and notes only for now)"""
+        query = update.dict(exclude_unset=True)
 
         if len(query) == 0:
             raise HTTPException(status_code=400, detail="no_update_data")
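Note on the query change: dropping exclude_none=True and keeping only exclude_unset=True is what makes notes clearable. With exclude_none, an explicit "notes": null in the PATCH body would be stripped out of the update query, so an existing note could never be blanked. A minimal standalone sketch of the difference (plain Pydantic with the v1-style .dict(), not the Browsertrix code itself):

    from typing import List, Optional

    from pydantic import BaseModel


    class UpdateCrawl(BaseModel):
        tags: Optional[List[str]] = []
        notes: Optional[str]


    # A client asking to clear both fields:
    update = UpdateCrawl(tags=[], notes=None)

    # exclude_unset keeps every field the client actually sent,
    # including explicit nulls, so the None reaches the update query:
    print(update.dict(exclude_unset=True))
    # {'tags': [], 'notes': None}

    # The old combination silently drops the null, so a stored note
    # would survive a "clear" request:
    print(update.dict(exclude_unset=True, exclude_none=True))
    # {'tags': []}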
backend/test/test_run_crawl.py (13 additions & 5 deletions)
@@ -98,21 +98,27 @@ def test_verify_wacz():
     assert '"https://webrecorder.net/"' in pages
 
 
-def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
+def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
     )
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == ["wr-test-1", "wr-test-2"]
+    # Add exception handling for old crawls without notes field
+    try:
+        assert not data["notes"]
+    except KeyError:
+        pass
 
-    # Submit patch request to update tags
+    # Submit patch request to update tags and notes
     UPDATED_TAGS = ["wr-test-1-updated", "wr-test-2-updated"]
+    UPDATED_NOTES = "Lorem ipsum test note."
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": UPDATED_TAGS},
+        json={"tags": UPDATED_TAGS, "notes": UPDATED_NOTES},
     )
     assert r.status_code == 200
     data = r.json()
@@ -126,12 +132,13 @@ def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
+    assert data["notes"] == UPDATED_NOTES
 
-    # Verify deleting all tags works as well
+    # Verify deleting works as well
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": []},
+        json={"tags": [], "notes": None},
     )
     assert r.status_code == 200
 
@@ -142,3 +149,4 @@ def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert data["tags"] == []
+    assert not data["notes"]
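For reference, the same clear-on-null behavior from a client's perspective; a hedged sketch, assuming a locally running backend and a valid bearer token (the base URL, IDs, and token below are illustrative placeholders, not values from this PR; the test fixtures provide real ones):

    import requests

    API_PREFIX = "http://localhost:8000/api"  # assumed local deployment
    org_id = "<org-id>"        # hypothetical placeholder
    crawl_id = "<crawl-id>"    # hypothetical placeholder
    headers = {"Authorization": "Bearer <token>"}

    # Set a note on the crawl
    r = requests.patch(
        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}",
        headers=headers,
        json={"notes": "QA reviewed, capture looks complete."},
    )
    assert r.status_code == 200

    # Clear it again: the explicit null survives exclude_unset
    # and is written through to the stored crawl document
    r = requests.patch(
        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}",
        headers=headers,
        json={"notes": None},
    )
    assert r.status_code == 200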