-
Notifications
You must be signed in to change notification settings - Fork 40
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #530 from microsoftgraph/shem/large_file_upload
Large File Upload Task
- Loading branch information
Showing
9 changed files
with
450 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -133,3 +133,4 @@ dmypy.json | |
.idea/ | ||
|
||
app*.py | ||
app* |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,3 +1,5 @@ | ||
from .page_result import PageResult | ||
from .large_file_upload_session import LargeFileUploadSession | ||
from .upload_result import UploadResult, UploadSessionDataHolder | ||
|
||
__all__ = ['PageResult'] | ||
__all__ = ['PageResult', 'LargeFileUploadSession', 'UploadResult', 'UploadSessionDataHolder'] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,66 @@ | ||
from __future__ import annotations | ||
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union | ||
import datetime | ||
from dataclasses import dataclass, field | ||
|
||
from kiota_abstractions.serialization import ( | ||
AdditionalDataHolder, Parsable, ParseNode, SerializationWriter | ||
) | ||
|
||
|
||
@dataclass
class LargeFileUploadSession(AdditionalDataHolder, Parsable):
    """State of a large-file upload session.

    Mirrors the wire format used by the service: an upload URL that accepts
    PUT requests for byte ranges, an expiry timestamp, and the ranges the
    server still expects.
    """

    # Properties returned by the service that have no typed field below.
    additional_data: Dict[str, Any] = field(default_factory=dict)
    expiration_date_time: Optional[datetime.datetime] = None
    next_expected_ranges: Optional[List[str]] = None
    # Local-only flag: it is neither read by get_field_deserializers nor
    # written by serialize, so it never travels over the wire.
    is_cancelled: Optional[bool] = False
    odata_type: Optional[str] = None
    # The URL endpoint that accepts PUT requests for byte ranges of the file.
    upload_url: Optional[str] = None

    @staticmethod
    def create_from_discriminator_value(
        parse_node: Optional[ParseNode] = None
    ) -> "LargeFileUploadSession":
        """Create a new instance from a parse node.

        param parse_node: The parse node used to read the discriminator value.
        Returns: a fresh LargeFileUploadSession
        Raises: TypeError when parse_node is None/falsy.
        """
        if not parse_node:
            raise TypeError("parse_node cannot be null.")
        return LargeFileUploadSession()

    def get_field_deserializers(self) -> Dict[str, Callable[[ParseNode], None]]:
        """Return the deserializer callbacks for this model, keyed by the
        wire-format (camelCase) field name.

        Returns: Dict[str, Callable[[ParseNode], None]]
        """

        def read_expiration(n: Any) -> None:
            self.expiration_date_time = n.get_datetime_value()

        def read_ranges(n: Any) -> None:
            self.next_expected_ranges = n.get_collection_of_primitive_values(str)

        def read_odata_type(n: Any) -> None:
            self.odata_type = n.get_str_value()

        def read_upload_url(n: Any) -> None:
            self.upload_url = n.get_str_value()

        return {
            "expirationDateTime": read_expiration,
            "nextExpectedRanges": read_ranges,
            "@odata.type": read_odata_type,
            "uploadUrl": read_upload_url,
        }

    def serialize(self, writer: SerializationWriter) -> None:
        """Serialize the current object.

        param writer: Serialization writer used to serialize this model.
        Returns: None
        Raises: TypeError when writer is None/falsy.
        """
        if not writer:
            raise TypeError("writer cannot be null.")
        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
        writer.write_collection_of_primitive_values(
            "nextExpectedRanges", self.next_expected_ranges
        )
        writer.write_str_value("@odata.type", self.odata_type)
        writer.write_str_value("uploadUrl", self.upload_url)
        # Flush any untyped properties last, matching the original order.
        writer.write_additional_data_value(self.additional_data)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
from typing import Any, Callable, Dict, List, Optional, TypeVar | ||
|
||
from dataclasses import dataclass | ||
from datetime import datetime | ||
|
||
from kiota_abstractions.serialization import ( | ||
AdditionalDataHolder, Parsable, ParseNode, SerializationWriter | ||
) | ||
|
||
T = TypeVar('T') | ||
|
||
|
||
@dataclass
class UploadSessionDataHolder(AdditionalDataHolder, Parsable):
    """Lightweight holder for upload-session state carried inside an
    UploadResult: the upload URL, its expiry, and the still-expected ranges.
    """

    expiration_date_time: Optional[datetime] = None
    next_expected_ranges: Optional[List[str]] = None
    upload_url: Optional[str] = None
    odata_type: Optional[str] = None

    def get_field_deserializers(self) -> Dict[str, Callable[[ParseNode], None]]:
        """Return the deserializer callbacks for this model, keyed by the
        wire-format (camelCase) field name.

        Returns: Dict[str, Callable[[ParseNode], None]]
        """
        deserializers: Dict[str, Callable[[Any], None]] = {}
        deserializers["expirationDateTime"] = lambda n: setattr(
            self, "expiration_date_time", n.get_datetime_value()
        )
        deserializers["nextExpectedRanges"] = lambda n: setattr(
            self, "next_expected_ranges", n.get_collection_of_primitive_values(str)
        )
        deserializers["@odata.type"] = lambda n: setattr(
            self, "odata_type", n.get_str_value()
        )
        deserializers["uploadUrl"] = lambda n: setattr(
            self, "upload_url", n.get_str_value()
        )
        return deserializers

    def serialize(self, writer: SerializationWriter) -> None:
        """Serialize the current object.

        param writer: Serialization writer used to serialize this model.
        Returns: None
        Raises: TypeError when writer is None/falsy.
        """
        if not writer:
            raise TypeError("writer cannot be null.")
        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
        writer.write_collection_of_primitive_values(
            "nextExpectedRanges", self.next_expected_ranges
        )
        writer.write_str_value("@odata.type", self.odata_type)
        writer.write_str_value("uploadUrl", self.upload_url)
        # NOTE(review): additional_data is not declared as a field on this
        # dataclass — presumably provided by the AdditionalDataHolder mixin;
        # confirm it always exists before serialize is called.
        writer.write_additional_data_value(self.additional_data)
|
||
|
||
class UploadResult:
    """Outcome of a large-file upload task.

    NOTE(review): the module defines ``T = TypeVar('T')`` but this class is
    not declared ``Generic[T]``, so ``item_response`` is effectively untyped
    for checkers. Left as-is to keep the runtime interface byte-identical.
    """

    def __init__(self) -> None:
        # Session metadata while the upload is still in flight.
        self.upload_session: Optional[UploadSessionDataHolder] = None
        # Deserialized item returned once the upload completed, if any.
        self.item_response: Optional[T] = None
        # Location header value pointing at the created resource, if any.
        self.location: Optional[str] = None

    @property
    def upload_succeeded(self) -> bool:
        """True when the service returned either an item or a location."""
        has_item = self.item_response is not None
        has_location = self.location is not None
        return has_item or has_location
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1,4 @@ | ||
from .page_iterator import PageIterator | ||
from .large_file_upload import LargeFileUploadTask | ||
|
||
__all__ = ['PageIterator', 'LargeFileUploadTask'] |
Oops, something went wrong.