Expose the fileuploads API endpoint #894

Merged
2 commits merged on Sep 23, 2021
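This change registers the Fileuploads endpoint on the Server object, so callers drive chunked uploads through server.fileuploads instead of the old Fileuploads.upload_chunks classmethod. A minimal usage sketch of the newly exposed surface follows; the server address, credentials, and file name are placeholders, not part of this PR:

import tableauserverclient as TSC

server = TSC.Server("https://tableau.example.com", use_server_version=True)
tableau_auth = TSC.TableauAuth("username", "password", site_id="mysite")

with server.auth.sign_in(tableau_auth):
    # Upload a file in chunks and get back the upload session id;
    # publish calls append it to the request URL as uploadSessionId.
    upload_session_id = server.fileuploads.upload("large_extract.hyper")
    print(upload_session_id)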
1 change: 1 addition & 0 deletions tableauserverclient/server/endpoint/__init__.py
@@ -5,6 +5,7 @@
from .databases_endpoint import Databases
from .endpoint import Endpoint
from .favorites_endpoint import Favorites
from .fileuploads_endpoint import Fileuploads
from .flows_endpoint import Flows
from .exceptions import (
ServerResponseError,
3 changes: 1 addition & 2 deletions tableauserverclient/server/endpoint/datasources_endpoint.py
@@ -2,7 +2,6 @@
from .exceptions import InternalServerError, MissingRequiredFieldError
from .permissions_endpoint import _PermissionsEndpoint
from .dqw_endpoint import _DataQualityWarningEndpoint
from .fileuploads_endpoint import Fileuploads
from .resource_tagger import _ResourceTagger
from .. import RequestFactory, DatasourceItem, PaginationItem, ConnectionItem
from ..query import QuerySet
@@ -244,7 +243,7 @@ def publish(
# Determine if chunking is required (64MB is the limit for single upload method)
if file_size >= FILESIZE_LIMIT:
logger.info("Publishing {0} to server with chunking method (datasource over 64MB)".format(filename))
upload_session_id = Fileuploads.upload_chunks(self.parent_srv, file)
upload_session_id = self.parent_srv.fileuploads.upload(file)
url = "{0}&uploadSessionId={1}".format(url, upload_session_id)
xml_request, content_type = RequestFactory.Datasource.publish_req_chunked(
datasource_item, connection_credentials, connections
54 changes: 24 additions & 30 deletions tableauserverclient/server/endpoint/fileuploads_endpoint.py
@@ -14,7 +14,6 @@
class Fileuploads(Endpoint):
def __init__(self, parent_srv):
super(Fileuploads, self).__init__(parent_srv)
self.upload_id = ""

@property
def baseurl(self):
@@ -25,45 +24,40 @@ def initiate(self):
url = self.baseurl
server_response = self.post_request(url, "")
fileupload_item = FileuploadItem.from_response(server_response.content, self.parent_srv.namespace)
self.upload_id = fileupload_item.upload_session_id
logger.info("Initiated file upload session (ID: {0})".format(self.upload_id))
return self.upload_id
upload_id = fileupload_item.upload_session_id
logger.info("Initiated file upload session (ID: {0})".format(upload_id))
return upload_id

@api(version="2.0")
def append(self, xml_request, content_type):
if not self.upload_id:
error = "File upload session must be initiated first."
raise MissingRequiredFieldError(error)
url = "{0}/{1}".format(self.baseurl, self.upload_id)
server_response = self.put_request(url, xml_request, content_type)
logger.info("Uploading a chunk to session (ID: {0})".format(self.upload_id))
def append(self, upload_id, data, content_type):
url = "{0}/{1}".format(self.baseurl, upload_id)
server_response = self.put_request(url, data, content_type)
logger.info("Uploading a chunk to session (ID: {0})".format(upload_id))
return FileuploadItem.from_response(server_response.content, self.parent_srv.namespace)

def read_chunks(self, file):
def _read_chunks(self, file):
file_opened = False
try:
file_content = open(file, "rb")
file_opened = True
except TypeError:
file_content = file

while True:
chunked_content = file_content.read(CHUNK_SIZE)
if not chunked_content:
if file_opened:
file_content.close()
break
yield chunked_content

@classmethod
def upload_chunks(cls, parent_srv, file):
file_uploader = cls(parent_srv)
upload_id = file_uploader.initiate()

chunks = file_uploader.read_chunks(file)
for chunk in chunks:
xml_request, content_type = RequestFactory.Fileupload.chunk_req(chunk)
fileupload_item = file_uploader.append(xml_request, content_type)
try:
while True:
chunked_content = file_content.read(CHUNK_SIZE)
if not chunked_content:
break
yield chunked_content
finally:
if file_opened:
file_content.close()

def upload(self, file):
upload_id = self.initiate()
for chunk in self._read_chunks(file):
request, content_type = RequestFactory.Fileupload.chunk_req(chunk)
fileupload_item = self.append(upload_id, request, content_type)
logger.info("\tPublished {0}MB".format(fileupload_item.file_size))
logger.info("\tCommitting file upload...")
logger.info("File upload finished (ID: {0})".format(upload_id))
return upload_id
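The endpoint is now stateless: initiate() returns the upload session id instead of stashing it on the instance, and append() takes that id explicitly, so the single Fileuploads instance attached to the Server can be reused safely. Roughly, upload() composes those calls as sketched below (assumes an authenticated server from the earlier example; RequestFactory.Fileupload.chunk_req is the existing helper used in the method above):

from tableauserverclient.server import RequestFactory

upload_id = server.fileuploads.initiate()               # POST .../fileUploads
for chunk in server.fileuploads._read_chunks("large_workbook.twbx"):   # path or file object
    data, content_type = RequestFactory.Fileupload.chunk_req(chunk)
    server.fileuploads.append(upload_id, data, content_type)           # PUT .../fileUploads/<upload_id>
# upload_id is then appended to the publish URL as uploadSessionId.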
3 changes: 1 addition & 2 deletions tableauserverclient/server/endpoint/flows_endpoint.py
@@ -2,7 +2,6 @@
from .exceptions import InternalServerError, MissingRequiredFieldError
from .permissions_endpoint import _PermissionsEndpoint
from .dqw_endpoint import _DataQualityWarningEndpoint
from .fileuploads_endpoint import Fileuploads
from .resource_tagger import _ResourceTagger
from .. import RequestFactory, FlowItem, PaginationItem, ConnectionItem
from ...filesys_helpers import to_filename, make_download_path
@@ -169,7 +168,7 @@ def publish(self, flow_item, file_path, mode, connections=None):
# Determine if chunking is required (64MB is the limit for single upload method)
if os.path.getsize(file_path) >= FILESIZE_LIMIT:
logger.info("Publishing {0} to server with chunking method (flow over 64MB)".format(filename))
upload_session_id = Fileuploads.upload_chunks(self.parent_srv, file_path)
upload_session_id = self.parent_srv.fileuploads.upload(file_path)
url = "{0}&uploadSessionId={1}".format(url, upload_session_id)
xml_request, content_type = RequestFactory.Flow.publish_req_chunked(flow_item, connections)
else:
3 changes: 1 addition & 2 deletions tableauserverclient/server/endpoint/workbooks_endpoint.py
@@ -1,7 +1,6 @@
from .endpoint import QuerysetEndpoint, api, parameter_added_in
from .exceptions import InternalServerError, MissingRequiredFieldError
from .permissions_endpoint import _PermissionsEndpoint
from .fileuploads_endpoint import Fileuploads
from .resource_tagger import _ResourceTagger
from .. import RequestFactory, WorkbookItem, ConnectionItem, ViewItem, PaginationItem
from ...models.job_item import JobItem
@@ -344,7 +343,7 @@ def publish(
# Determine if chunking is required (64MB is the limit for single upload method)
if file_size >= FILESIZE_LIMIT:
logger.info("Publishing {0} to server with chunking method (workbook over 64MB)".format(workbook_item.name))
upload_session_id = Fileuploads.upload_chunks(self.parent_srv, file)
upload_session_id = self.parent_srv.fileuploads.upload(file)
url = "{0}&uploadSessionId={1}".format(url, upload_session_id)
conn_creds = connection_credentials
xml_request, content_type = RequestFactory.Workbook.publish_req_chunked(
2 changes: 2 additions & 0 deletions tableauserverclient/server/server.py
@@ -24,6 +24,7 @@
DataAccelerationReport,
Favorites,
DataAlerts,
Fileuploads,
)
from .endpoint.exceptions import (
EndpointUnavailableError,
@@ -82,6 +83,7 @@ def __init__(self, server_address, use_server_version=False):
self.webhooks = Webhooks(self)
self.data_acceleration_report = DataAccelerationReport(self)
self.data_alerts = DataAlerts(self)
self.fileuploads = Fileuploads(self)
self._namespace = Namespace()

if use_server_version:
15 changes: 4 additions & 11 deletions test/test_fileuploads.py
@@ -4,7 +4,6 @@

from ._utils import asset
from tableauserverclient.server import Server
from tableauserverclient.server.endpoint.fileuploads_endpoint import Fileuploads

TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets')
FILEUPLOAD_INITIALIZE = os.path.join(TEST_ASSET_DIR, 'fileupload_initialize.xml')
@@ -22,23 +21,18 @@ def setUp(self):
self.baseurl = '{}/sites/{}/fileUploads'.format(self.server.baseurl, self.server.site_id)

def test_read_chunks_file_path(self):
fileuploads = Fileuploads(self.server)

file_path = asset('SampleWB.twbx')
chunks = fileuploads.read_chunks(file_path)
chunks = self.server.fileuploads._read_chunks(file_path)
for chunk in chunks:
self.assertIsNotNone(chunk)

def test_read_chunks_file_object(self):
fileuploads = Fileuploads(self.server)

with open(asset('SampleWB.twbx'), 'rb') as f:
chunks = fileuploads.read_chunks(f)
chunks = self.server.fileuploads._read_chunks(f)
for chunk in chunks:
self.assertIsNotNone(chunk)

def test_upload_chunks_file_path(self):
fileuploads = Fileuploads(self.server)
file_path = asset('SampleWB.twbx')
upload_id = '7720:170fe6b1c1c7422dadff20f944d58a52-1:0'

@@ -49,12 +43,11 @@ def test_upload_chunks_file_path(self):
with requests_mock.mock() as m:
m.post(self.baseurl, text=initialize_response_xml)
m.put(self.baseurl + '/' + upload_id, text=append_response_xml)
actual = fileuploads.upload_chunks(self.server, file_path)
actual = self.server.fileuploads.upload(file_path)

self.assertEqual(upload_id, actual)

def test_upload_chunks_file_object(self):
fileuploads = Fileuploads(self.server)
upload_id = '7720:170fe6b1c1c7422dadff20f944d58a52-1:0'

with open(asset('SampleWB.twbx'), 'rb') as file_content:
@@ -65,6 +58,6 @@ def test_upload_chunks_file_object(self):
with requests_mock.mock() as m:
m.post(self.baseurl, text=initialize_response_xml)
m.put(self.baseurl + '/' + upload_id, text=append_response_xml)
actual = fileuploads.upload_chunks(self.server, file_content)
actual = self.server.fileuploads.upload(file_content)

self.assertEqual(upload_id, actual)