Skip to content

Commit

Permalink
Merge pull request #530 from microsoftgraph/shem/large_file_upload
Browse files Browse the repository at this point in the history
Large File Upload Task
  • Loading branch information
shemogumbe committed May 2, 2024
2 parents 1218c18 + 9754d1c commit eb0fd78
Show file tree
Hide file tree
Showing 9 changed files with 450 additions and 4 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -133,3 +133,4 @@ dmypy.json
.idea/

app*.py
app*
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.0.1] - 2024-04-22

### Added

### Changed
- Enabled Large File Upload and Page Iterator support


## [1.0.0] - 2023-10-31

### Added
Expand Down
2 changes: 1 addition & 1 deletion src/msgraph_core/_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,5 @@
"""
DEFAULT_REQUEST_TIMEOUT = 100
DEFAULT_CONNECTION_TIMEOUT = 30
SDK_VERSION = '1.0.0'
SDK_VERSION = '1.0.1'
MS_DEFAULT_SCOPE = 'https://graph.microsoft.com/.default'
4 changes: 3 additions & 1 deletion src/msgraph_core/models/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from .page_result import PageResult
from .large_file_upload_session import LargeFileUploadSession
from .upload_result import UploadResult, UploadSessionDataHolder

__all__ = ['PageResult']
__all__ = ['PageResult', 'LargeFileUploadSession', 'UploadResult', 'UploadSessionDataHolder']
66 changes: 66 additions & 0 deletions src/msgraph_core/models/large_file_upload_session.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
from __future__ import annotations
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
import datetime
from dataclasses import dataclass, field

from kiota_abstractions.serialization import (
AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
)


@dataclass
class LargeFileUploadSession(AdditionalDataHolder, Parsable):
    """Model for a large-file upload session returned by Microsoft Graph.

    Holds the session's upload URL, expiry, and the byte ranges the
    service still expects, plus any unmapped wire-format properties.
    """

    # Wire-format properties that do not map to a declared field.
    additional_data: Dict[str, Any] = field(default_factory=dict)
    # When the session expires and can no longer accept byte ranges.
    expiration_date_time: Optional[datetime.datetime] = None
    # Byte ranges (e.g. "0-") the service still expects for this file.
    next_expected_ranges: Optional[List[str]] = None
    # Local flag tracking whether this session has been cancelled.
    is_cancelled: Optional[bool] = False
    # OData type discriminator, if present on the wire.
    odata_type: Optional[str] = None
    # The URL endpoint that accepts PUT requests for byte ranges of the file.
    upload_url: Optional[str] = None

    @staticmethod
    def create_from_discriminator_value(
        parse_node: Optional[ParseNode] = None
    ) -> LargeFileUploadSession:
        """Create a new ``LargeFileUploadSession`` for the given parse node.

        param parse_node: the node used to read the discriminator value
        Returns: LargeFileUploadSession
        Raises: TypeError when ``parse_node`` is missing.
        """
        if not parse_node:
            raise TypeError("parse_node cannot be null.")
        return LargeFileUploadSession()

    def get_field_deserializers(self, ) -> Dict[str, Callable[[ParseNode], None]]:
        """Return the deserialization map for this model.

        Each entry maps a wire-format property name to a callable that
        reads the value from a parse node and stores it on this instance.
        Returns: Dict[str, Callable[[ParseNode], None]]
        """
        deserializers: Dict[str, Callable[[Any], None]] = {}
        deserializers["expirationDateTime"] = (
            lambda n: setattr(self, 'expiration_date_time', n.get_datetime_value())
        )
        deserializers["nextExpectedRanges"] = (
            lambda n: setattr(
                self, 'next_expected_ranges', n.get_collection_of_primitive_values(str)
            )
        )
        deserializers["@odata.type"] = (
            lambda n: setattr(self, 'odata_type', n.get_str_value())
        )
        deserializers["uploadUrl"] = (
            lambda n: setattr(self, 'upload_url', n.get_str_value())
        )
        return deserializers

    def serialize(self, writer: SerializationWriter) -> None:
        """Write this object's properties with the given writer.

        param writer: Serialization writer to use to serialize this model
        Returns: None
        Raises: TypeError when ``writer`` is missing.
        """
        if not writer:
            raise TypeError("writer cannot be null.")
        # Write declared properties first, then spill any extra data.
        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
        writer.write_collection_of_primitive_values("nextExpectedRanges", self.next_expected_ranges)
        writer.write_str_value("@odata.type", self.odata_type)
        writer.write_str_value("uploadUrl", self.upload_url)
        writer.write_additional_data_value(self.additional_data)
3 changes: 1 addition & 2 deletions src/msgraph_core/models/page_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,13 @@
PageResult: Represents a page of items in a paged response.
"""
from __future__ import annotations
from typing import List, Optional, Dict, Callable
from typing import List, Optional, Dict, Callable, TypeVar
from dataclasses import dataclass

from kiota_abstractions.serialization.parsable import Parsable
from kiota_abstractions.serialization.serialization_writer \
import SerializationWriter
from kiota_abstractions.serialization.parse_node import ParseNode
from typing import TypeVar, List, Optional

T = TypeVar('T')

Expand Down
62 changes: 62 additions & 0 deletions src/msgraph_core/models/upload_result.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
from typing import Any, Callable, Dict, List, Optional, TypeVar

from dataclasses import dataclass
from datetime import datetime

from kiota_abstractions.serialization import (
AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
)

T = TypeVar('T')


@dataclass
class UploadSessionDataHolder(AdditionalDataHolder, Parsable):
    """Carries upload-session state (URL, expiry, pending ranges) for
    an in-progress large file upload, plus unmapped wire properties."""

    # When the session expires and stops accepting byte ranges.
    expiration_date_time: Optional[datetime] = None
    # Byte ranges the service still expects for this file.
    next_expected_ranges: Optional[List[str]] = None
    # Endpoint that accepts PUT requests for byte ranges of the file.
    upload_url: Optional[str] = None
    # OData type discriminator, if present on the wire.
    odata_type: Optional[str] = None

    def get_field_deserializers(self, ) -> Dict[str, Callable[[ParseNode], None]]:
        """Return the deserialization map for this model.

        Each entry maps a wire-format property name to a callable that
        reads the value from a parse node and stores it on this instance.
        Returns: Dict[str, Callable[[ParseNode], None]]
        """
        deserializers: Dict[str, Callable[[Any], None]] = {}
        deserializers["expirationDateTime"] = (
            lambda n: setattr(self, 'expiration_date_time', n.get_datetime_value())
        )
        deserializers["nextExpectedRanges"] = (
            lambda n: setattr(
                self, 'next_expected_ranges', n.get_collection_of_primitive_values(str)
            )
        )
        deserializers["@odata.type"] = (
            lambda n: setattr(self, 'odata_type', n.get_str_value())
        )
        deserializers["uploadUrl"] = (
            lambda n: setattr(self, 'upload_url', n.get_str_value())
        )
        return deserializers

    def serialize(self, writer: SerializationWriter) -> None:
        """Write this object's properties with the given writer.

        param writer: Serialization writer to use to serialize this model
        Returns: None
        Raises: TypeError when ``writer`` is missing.
        """
        if not writer:
            raise TypeError("writer cannot be null.")
        # Write declared properties first, then spill any extra data.
        # NOTE(review): additional_data is presumably provided by the
        # AdditionalDataHolder base class — confirm in kiota_abstractions.
        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
        writer.write_collection_of_primitive_values("nextExpectedRanges", self.next_expected_ranges)
        writer.write_str_value("@odata.type", self.odata_type)
        writer.write_str_value("uploadUrl", self.upload_url)
        writer.write_additional_data_value(self.additional_data)


class UploadResult:
    """Outcome of a large file upload.

    Exposes the session state, the deserialized item returned by the
    service (if any), and the Location header value (if any).
    """

    def __init__(self):
        # Session metadata for a still-open upload, when provided.
        self.upload_session: Optional[UploadSessionDataHolder] = None
        # The deserialized item the service returned on completion.
        self.item_response: Optional[T] = None
        # Value of the Location header returned on completion.
        self.location: Optional[str] = None

    @property
    def upload_succeeded(self) -> bool:
        """True when the service returned either an item or a location."""
        if self.item_response is not None:
            return True
        return self.location is not None
3 changes: 3 additions & 0 deletions src/msgraph_core/tasks/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,4 @@
from .page_iterator import PageIterator
from .large_file_upload import LargeFileUploadTask

__all__ = ['PageIterator', 'LargeFileUploadTask']

0 comments on commit eb0fd78

Please sign in to comment.