Merged
1 change: 0 additions & 1 deletion backend/app/rabbitmq/listeners.py
@@ -56,7 +56,6 @@ async def submit_file_job(
parameters: dict,
user: UserOut,
rabbitmq_client: BlockingChannel,
- token: str = Depends(get_token),
):
# Create an entry in job history with unique ID
job = EventListenerJobDB(
10 changes: 4 additions & 6 deletions backend/app/routers/datasets.py
@@ -466,7 +466,6 @@ async def save_file(
file: UploadFile = File(...),
es=Depends(dependencies.get_elasticsearchclient),
rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq),
- credentials: HTTPAuthorizationCredentials = Security(security),
allow: bool = Depends(Authorization("uploader")),
):
if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None:
@@ -485,14 +484,13 @@ async def save_file(
status_code=404, detail=f"Folder {folder_id} not found"
)

- access_token = credentials.credentials
+ # access_token = credentials.credentials
await add_file_entry(
new_file,
user,
fs,
es,
rabbitmq_client,
- access_token,
file.file,
content_type=file.content_type,
)
@@ -583,8 +581,8 @@ async def download_dataset(
if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None:
current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload")
crate = ROCrate()
- user_full_name = user.first_name + " " + user.last_name
- user_crate_id = str(user.id)
+ user_full_name = user["first_name"] + " " + user["last_name"]
+ user_crate_id = str(user["id"])
crate.add(Person(crate, user_crate_id, properties={"name": user_full_name}))

manifest_path = os.path.join(current_temp_dir, "manifest-md5.txt")
@@ -676,7 +674,7 @@ async def download_dataset(
f.write("Internal-Sender-Identifier: " + dataset_id + "\n")
f.write("Internal-Sender-Description: " + dataset.description + "\n")
f.write("Contact-Name: " + user_full_name + "\n")
f.write("Contact-Email: " + user.email + "\n")
f.write("Contact-Email: " + user["email"] + "\n")
crate.add_file(
bagit_path, dest_path="bagit.txt", properties={"name": "bagit.txt"}
)
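Side note on the `user.first_name` → `user["first_name"]` change above: it implies `user` reaches `download_dataset` as a plain mapping (for example, a decoded token payload) rather than a `UserOut` model. A minimal sketch of the two access styles, with assumed field names:

```python
from pydantic import BaseModel


class UserOut(BaseModel):  # simplified stand-in for the app's UserOut model
    id: str
    email: str
    first_name: str
    last_name: str


user_model = UserOut(
    id="000000000000000000000000",
    email="ada@example.org",
    first_name="Ada",
    last_name="Lovelace",
)
user_dict = user_model.dict()  # shape the download route appears to receive now

print(user_model.first_name + " " + user_model.last_name)      # model: attribute access
print(user_dict["first_name"] + " " + user_dict["last_name"])  # dict: key access, as in the diff
```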
2 changes: 0 additions & 2 deletions backend/app/routers/feeds.py
@@ -43,7 +43,6 @@ async def check_feed_listeners(
file_out: FileOut,
user: UserOut,
rabbitmq_client: BlockingChannel,
- token: str,
):
"""Automatically submit new file to listeners on feeds that fit the search criteria."""
listener_ids_found = []
@@ -64,7 +63,6 @@ async def check_feed_listeners(
{}, # parameters
user,
rabbitmq_client,
- token,
)
return listener_ids_found

8 changes: 5 additions & 3 deletions backend/app/routers/files.py
@@ -92,7 +92,6 @@ async def add_file_entry(
fs: Minio,
es: Elasticsearch,
rabbitmq_client: BlockingChannel,
- token: str,
file: Optional[io.BytesIO] = None,
content_type: Optional[str] = None,
):
@@ -111,7 +110,7 @@

await new_file.insert()
new_file_id = new_file.id
- content_type_obj = get_content_type(content_type, file)
+ content_type_obj = get_content_type(new_file.name, content_type)

# Use unique ID as key for Minio and get initial version ID
response = fs.put_object(
@@ -146,7 +145,10 @@

# Submit file job to any qualifying feeds
await check_feed_listeners(
- es, FileOut(**new_file.dict()), user, rabbitmq_client, token
+ es,
+ FileOut(**new_file.dict()),
+ user,
+ rabbitmq_client,
)


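Taken together, the listeners.py, feeds.py, and files.py changes drop the access token from the file-to-listener submission path. A rough, self-contained sketch of the new call shape, not the app's real implementation (the classes, routing key, and `None` placeholders below are stand-ins):

```python
import asyncio


class FileOut:  # stand-in for the app's FileOut model
    def __init__(self, name: str):
        self.name = name


class UserOut:  # stand-in for the app's UserOut model
    def __init__(self, email: str):
        self.email = email


async def submit_file_job(file_out, routing_key, parameters, user, rabbitmq_client):
    # Previously this also required `token: str = Depends(get_token)`.
    print(f"queueing {file_out.name} on {routing_key} for {user.email}")


async def check_feed_listeners(es, file_out, user, rabbitmq_client):
    # Previously a `token` argument was threaded through and forwarded here.
    await submit_file_job(file_out, "example.listener", {}, user, rabbitmq_client)


asyncio.run(
    check_feed_listeners(None, FileOut("data.csv"), UserOut("ada@example.org"), None)
)
```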
5 changes: 3 additions & 2 deletions backend/app/routers/utils.py
@@ -6,7 +6,8 @@


def get_content_type(
- content_type: Optional[str] = None, file: Optional[io.BytesIO] = None
+ filename: str,
+ content_type: Optional[str] = None,
):
Returns ContentType object given a content_type, also guesses the content_type if none is provided

@@ -16,7 +17,7 @@ def get_content_type(
"""

if content_type is None:
- content_type = mimetypes.guess_type(file.name)
+ content_type = mimetypes.guess_type(filename)
content_type = content_type[0] if len(content_type) > 1 else content_type
type_main = content_type.split("/")[0] if type(content_type) is str else "N/A"
return ContentType(content_type=content_type, main_type=type_main)
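
For reference, the refactored helper can be exercised on its own. The sketch below mirrors the new signature and guessing logic; the `ContentType` dataclass is a stand-in for the app's model, and since `mimetypes.guess_type` always returns a `(type, encoding)` pair, taking its first element matches the diff's behavior:

```python
import mimetypes
from dataclasses import dataclass
from typing import Optional


@dataclass
class ContentType:  # stand-in for the app's ContentType model
    content_type: Optional[str]
    main_type: str


def get_content_type(filename: str, content_type: Optional[str] = None) -> ContentType:
    """Return a ContentType; guess it from the filename when none is provided."""
    if content_type is None:
        content_type, _encoding = mimetypes.guess_type(filename)
    main_type = content_type.split("/")[0] if isinstance(content_type, str) else "N/A"
    return ContentType(content_type=content_type, main_type=main_type)


print(get_content_type("report.csv"))               # guessed from the extension
print(get_content_type("upload.bin", "image/png"))  # explicit content type wins
```

Call sites now pass a filename first: `new_file.name` in `add_file_entry` and `file.filename` in `add_Visualization`.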
2 changes: 1 addition & 1 deletion backend/app/routers/visualization.py
@@ -60,7 +60,7 @@ async def add_Visualization(
)

await visualization_db.insert()
- visualization_db.content_type = get_content_type(file.content_type, file.file)
+ visualization_db.content_type = get_content_type(file.filename, file.content_type)
visualization_id = visualization_db.id

# Use unique ID as key for Minio