Commit
fix: issue exports in self hosted instances (#1996)
* fix: issue exports in self hosted instances

* dev: remove print logs

* dev: update url creation function

* fix: changed the presigned url for self hosted exports

---------

Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
pablohashescobar and NarayanBavisetti committed Aug 29, 2023
1 parent abcdebe commit e1ad385
Showing 2 changed files with 74 additions and 34 deletions.
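The gist of the fix: on self-hosted (Docker + MinIO) instances, export download links were presigned against the internal MinIO origin (http://plane-minio:9000), which is not reachable from outside the Docker network. The commit branches the S3 client construction on DOCKERIZED and USE_MINIO and rewrites the presigned URL onto the public domain. A minimal sketch of the client selection, assuming the settings names used in the diff; the get_s3_client helper itself is hypothetical, since the commit inlines this branch in both changed files:

import boto3
from botocore.client import Config
from django.conf import settings

def get_s3_client():
    # Hypothetical helper mirroring the branch added in this commit.
    if settings.DOCKERIZED and settings.USE_MINIO:
        # Self-hosted: sign requests against the internal MinIO endpoint.
        return boto3.client(
            "s3",
            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            config=Config(signature_version="s3v4"),
        )
    # Cloud: plain AWS client addressed by region.
    return boto3.client(
        "s3",
        region_name=settings.AWS_REGION,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        config=Config(signature_version="s3v4"),
    )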
79 changes: 54 additions & 25 deletions apiserver/plane/bgtasks/export_task.py
@@ -4,6 +4,7 @@
 import json
 import boto3
 import zipfile
+from urllib.parse import urlparse, urlunparse

 # Django imports
 from django.conf import settings
@@ -23,9 +24,11 @@ def dateTimeConverter(time):
     if time:
         return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z")

+
 def dateConverter(time):
     if time:
-        return time.strftime("%a, %d %b %Y")
+        return time.strftime("%a, %d %b %Y")
+

 def create_csv_file(data):
     csv_buffer = io.StringIO()
@@ -66,28 +69,53 @@ def create_zip_file(files):


 def upload_to_s3(zip_file, workspace_id, token_id, slug):
-    s3 = boto3.client(
-        "s3",
-        region_name=settings.AWS_REGION,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        config=Config(signature_version="s3v4"),
-    )
     file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
+    expires_in = 7 * 24 * 60 * 60

-    s3.upload_fileobj(
-        zip_file,
-        settings.AWS_S3_BUCKET_NAME,
-        file_name,
-        ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
-    )
+    if settings.DOCKERIZED and settings.USE_MINIO:
+        s3 = boto3.client(
+            "s3",
+            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+        s3.upload_fileobj(
+            zip_file,
+            settings.AWS_STORAGE_BUCKET_NAME,
+            file_name,
+            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
+        )
+        presigned_url = s3.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
+            ExpiresIn=expires_in,
+        )
+        # Create the new url with updated domain and protocol
+        presigned_url = presigned_url.replace(
+            "http://plane-minio:9000/uploads/",
+            f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/",
+        )
+    else:
+        s3 = boto3.client(
+            "s3",
+            region_name=settings.AWS_REGION,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+        s3.upload_fileobj(
+            zip_file,
+            settings.AWS_S3_BUCKET_NAME,
+            file_name,
+            ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
+        )

-    expires_in = 7 * 24 * 60 * 60
-    presigned_url = s3.generate_presigned_url(
-        "get_object",
-        Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
-        ExpiresIn=expires_in,
-    )
+        presigned_url = s3.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name},
+            ExpiresIn=expires_in,
+        )

     exporter_instance = ExporterHistory.objects.get(token=token_id)

@@ -98,7 +126,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
     else:
         exporter_instance.status = "failed"

-    exporter_instance.save(update_fields=["status", "url","key"])
+    exporter_instance.save(update_fields=["status", "url", "key"])


 def generate_table_row(issue):
@@ -145,7 +173,7 @@ def generate_json_row(issue):
         else "",
         "Labels": issue["labels__name"],
         "Cycle Name": issue["issue_cycle__cycle__name"],
-        "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
+        "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
         "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
         "Module Name": issue["issue_module__module__name"],
         "Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
@@ -242,7 +270,9 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         workspace_issues = (
             (
                 Issue.objects.filter(
-                    workspace__id=workspace_id, project_id__in=project_ids
+                    workspace__id=workspace_id,
+                    project_id__in=project_ids,
+                    project__project_projectmember__member=exporter_instance.initiated_by_id,
                 )
                 .select_related("project", "workspace", "state", "parent", "created_by")
                 .prefetch_related(
@@ -275,7 +305,7 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
                     "labels__name",
                 )
             )
-            .order_by("project__identifier","sequence_id")
+            .order_by("project__identifier", "sequence_id")
             .distinct()
         )
         # CSV header
@@ -338,7 +368,6 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
         exporter_instance.status = "failed"
         exporter_instance.reason = str(e)
         exporter_instance.save(update_fields=["status", "reason"])
-
         # Print logs if in DEBUG mode
         if settings.DEBUG:
             print(e)
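A note on the URL rewrite in upload_to_s3 above: the hunk swaps the internal origin "http://plane-minio:9000/uploads/" for the public one with a literal str.replace, even though the commit also adds urlparse and urlunparse imports (and its message mentions updating the URL creation function). A parse-based variant might look like the sketch below; rewrite_presigned_url is hypothetical, and it assumes AWS_S3_CUSTOM_DOMAIN is the public host and "uploads" is the MinIO bucket:

from urllib.parse import urlparse

def rewrite_presigned_url(presigned_url, protocol, custom_domain, bucket="uploads"):
    # Hypothetical equivalent of the str.replace above: drop the internal
    # host and the bucket prefix, then re-root the signed object path under
    # the public domain. protocol is e.g. "https:", as in AWS_S3_URL_PROTOCOL.
    parts = urlparse(presigned_url)
    path = parts.path
    if path.startswith(f"/{bucket}/"):
        path = path[len(bucket) + 1 :]
    return f"{protocol}//{custom_domain}{path}?{parts.query}"

Called as rewrite_presigned_url(url, settings.AWS_S3_URL_PROTOCOL, settings.AWS_S3_CUSTOM_DOMAIN), this keeps the signed query string intact and replaces only the scheme, host, and bucket prefix.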
29 changes: 20 additions & 9 deletions apiserver/plane/bgtasks/exporter_expired_task.py
@@ -21,18 +21,29 @@ def delete_old_s3_link():
     expired_exporter_history = ExporterHistory.objects.filter(
         Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
     ).values_list("key", "id")

-    s3 = boto3.client(
-        "s3",
-        region_name="ap-south-1",
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-        config=Config(signature_version="s3v4"),
-    )
+    if settings.DOCKERIZED and settings.USE_MINIO:
+        s3 = boto3.client(
+            "s3",
+            endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )
+    else:
+        s3 = boto3.client(
+            "s3",
+            region_name="ap-south-1",
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+            config=Config(signature_version="s3v4"),
+        )

     for file_name, exporter_id in expired_exporter_history:
         # Delete object from S3
         if file_name:
-            s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)
+            if settings.DOCKERIZED and settings.USE_MINIO:
+                s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
+            else:
+                s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name)

         ExporterHistory.objects.filter(id=exporter_id).update(url=None)
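One detail worth flagging in the hunk above: the non-MinIO branch hard-codes region_name="ap-south-1", while export_task.py reads settings.AWS_REGION for the same client. A sketch of the aligned construction, assuming AWS_REGION is configured for cloud deployments:

# Sketch: mirror export_task.py's client construction in the cleanup task.
s3 = boto3.client(
    "s3",
    region_name=settings.AWS_REGION,  # rather than the literal "ap-south-1"
    aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
    aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    config=Config(signature_version="s3v4"),
)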

1 comment on commit e1ad385

@vercel vercel bot commented on e1ad385 Aug 29, 2023


Successfully deployed to the following URLs:

plane-dev – ./apps/app

plane-dev-plane.vercel.app
plane-dev-git-develop-plane.vercel.app
plane-dev.vercel.app
