From 05c0472e375d188102c62e3ef13ba303e99b1124 Mon Sep 17 00:00:00 2001 From: DropboxBot Date: Thu, 3 Mar 2022 00:45:31 +0000 Subject: [PATCH] Automated Spec Update 9daf01c5b7ac6936956f085792c573c06fb3c905 Change Notes: files Namespace - Update UploadArg - content_hash doc string sharing_folders Namespace - Add traverse to AccessLevel team_log_generated Namespace - Example and docstring updates Co-authored-by: Scott Erickson 895e08a3fa941888c2c57681c8117a294f8fa4b9 Change Notes: common Namespace - Update comments files Namespace - Add UploadArg extends CommitInfo structs - Add UploadSessionAppendError extends UploadSessionLookupError unions - Remove CommitInfoWithProperties extends CommitInfo structs - Remove UploadErrorWithProperties extends UploadError unions Co-authored-by: Brent Bumann --- dropbox/base.py | 120 +++++---- dropbox/files.py | 601 +++++++++++++++++++++++++++----------------- dropbox/sharing.py | 15 ++ dropbox/team_log.py | 18 +- spec | 2 +- 5 files changed, 457 insertions(+), 299 deletions(-) diff --git a/dropbox/base.py b/dropbox/base.py index 009d47ab..a1c6089a 100644 --- a/dropbox/base.py +++ b/dropbox/base.py @@ -848,31 +848,41 @@ def files_alpha_upload(self, client_modified=None, mute=False, property_groups=None, - strict_conflict=False): + strict_conflict=False, + content_hash=None): """ Create a new file with the contents provided in the request. Note that - this endpoint is part of the properties API alpha and is slightly - different from :meth:`files_upload`. Do not use this to upload a file - larger than 150 MB. Instead, create an upload session with - :meth:`files_upload_session_start`. + the behavior of this alpha endpoint is unstable and subject to change. + Do not use this to upload a file larger than 150 MB. Instead, create an + upload session with :meth:`files_upload_session_start`. Route attributes: scope: files.content.write :param bytes f: Contents to upload. + :param Nullable[str] content_hash: A hash of the file content uploaded + in this call. If provided and the uploaded content does not match + this hash, an error will be returned. For more information see our + `Content hash + `_ page. :rtype: :class:`dropbox.files.FileMetadata` + :raises: :class:`.exceptions.ApiError` + + If this raises, ApiError will contain: + :class:`dropbox.files.UploadError` """ warnings.warn( - 'alpha/upload is deprecated. Use alpha/upload.', + 'alpha/upload is deprecated. Use upload.', DeprecationWarning, ) - arg = files.CommitInfoWithProperties(path, - mode, - autorename, - client_modified, - mute, - property_groups, - strict_conflict) + arg = files.UploadArg(path, + mode, + autorename, + client_modified, + mute, + property_groups, + strict_conflict, + content_hash) r = self.request( files.alpha_upload, 'files', @@ -3157,7 +3167,8 @@ def files_upload(self, client_modified=None, mute=False, property_groups=None, - strict_conflict=False): + strict_conflict=False, + content_hash=None): """ Create a new file with the contents provided in the request. Do not use this to upload a file larger than 150 MB. Instead, create an upload @@ -3171,43 +3182,25 @@ def files_upload(self, scope: files.content.write :param bytes f: Contents to upload. - :param str path: Path in the user's Dropbox to save the file. - :param mode: Selects what to do if the file already exists. - :type mode: :class:`dropbox.files.WriteMode` - :param bool autorename: If there's a conflict, as determined by - ``mode``, have the Dropbox server try to autorename the file to - avoid conflict. 
- :param Nullable[datetime] client_modified: The value to store as the - ``client_modified`` timestamp. Dropbox automatically records the - time at which the file was written to the Dropbox servers. It can - also record an additional timestamp, provided by Dropbox desktop - clients, mobile clients, and API apps of when the file was actually - created or modified. - :param bool mute: Normally, users are made aware of any file - modifications in their Dropbox account via notifications in the - client software. If ``True``, this tells the clients that this - modification shouldn't result in a user notification. - :param Nullable[List[:class:`dropbox.files.PropertyGroup`]] - property_groups: List of custom properties to add to file. - :param bool strict_conflict: Be more strict about how each - :class:`dropbox.files.WriteMode` detects conflict. For example, - always return a conflict error when ``mode`` = ``WriteMode.update`` - and the given "rev" doesn't match the existing file's "rev", even if - the existing file has been deleted. This also forces a conflict even - when the target path refers to a file with identical contents. + :param Nullable[str] content_hash: A hash of the file content uploaded + in this call. If provided and the uploaded content does not match + this hash, an error will be returned. For more information see our + `Content hash + `_ page. :rtype: :class:`dropbox.files.FileMetadata` :raises: :class:`.exceptions.ApiError` If this raises, ApiError will contain: :class:`dropbox.files.UploadError` """ - arg = files.CommitInfo(path, - mode, - autorename, - client_modified, - mute, - property_groups, - strict_conflict) + arg = files.UploadArg(path, + mode, + autorename, + client_modified, + mute, + property_groups, + strict_conflict, + content_hash) r = self.request( files.upload, 'files', @@ -3219,7 +3212,8 @@ def files_upload(self, def files_upload_session_append_v2(self, f, cursor, - close=False): + close=False, + content_hash=None): """ Append more data to an upload session. When the parameter close is set, this call will close the session. A single request should not upload @@ -3240,14 +3234,20 @@ def files_upload_session_append_v2(self, point you won't be able to call :meth:`files_upload_session_append_v2` anymore with the current session. + :param Nullable[str] content_hash: A hash of the file content uploaded + in this call. If provided and the uploaded content does not match + this hash, an error will be returned. For more information see our + `Content hash + `_ page. :rtype: None :raises: :class:`.exceptions.ApiError` If this raises, ApiError will contain: - :class:`dropbox.files.UploadSessionLookupError` + :class:`dropbox.files.UploadSessionAppendError` """ arg = files.UploadSessionAppendArg(cursor, - close) + close, + content_hash) r = self.request( files.upload_session_append_v2, 'files', @@ -3282,7 +3282,7 @@ def files_upload_session_append(self, :raises: :class:`.exceptions.ApiError` If this raises, ApiError will contain: - :class:`dropbox.files.UploadSessionLookupError` + :class:`dropbox.files.UploadSessionAppendError` """ warnings.warn( 'upload_session/append is deprecated. Use upload_session/append.', @@ -3301,7 +3301,8 @@ def files_upload_session_append(self, def files_upload_session_finish(self, f, cursor, - commit): + commit, + content_hash=None): """ Finish an upload session and save the uploaded data to the given file path. A single request should not upload more than 150 MB. 
The maximum @@ -3321,6 +3322,11 @@ def files_upload_session_finish(self, :param commit: Contains the path and other optional modifiers for the commit. :type commit: :class:`dropbox.files.CommitInfo` + :param Nullable[str] content_hash: A hash of the file content uploaded + in this call. If provided and the uploaded content does not match + this hash, an error will be returned. For more information see our + `Content hash + `_ page. :rtype: :class:`dropbox.files.FileMetadata` :raises: :class:`.exceptions.ApiError` @@ -3328,7 +3334,8 @@ def files_upload_session_finish(self, :class:`dropbox.files.UploadSessionFinishError` """ arg = files.UploadSessionFinishArg(cursor, - commit) + commit, + content_hash) r = self.request( files.upload_session_finish, 'files', @@ -3447,7 +3454,8 @@ def files_upload_session_finish_batch_check(self, def files_upload_session_start(self, f, close=False, - session_type=None): + session_type=None, + content_hash=None): """ Upload sessions allow you to upload a single file in one or more requests, for example where the size of the file is greater than 150 MB. @@ -3495,6 +3503,11 @@ def files_upload_session_start(self, :param Nullable[:class:`dropbox.files.UploadSessionType`] session_type: Type of upload session you want to start. If not specified, default is ``UploadSessionType.sequential``. + :param Nullable[str] content_hash: A hash of the file content uploaded + in this call. If provided and the uploaded content does not match + this hash, an error will be returned. For more information see our + `Content hash + `_ page. :rtype: :class:`dropbox.files.UploadSessionStartResult` :raises: :class:`.exceptions.ApiError` @@ -3502,7 +3515,8 @@ def files_upload_session_start(self, :class:`dropbox.files.UploadSessionStartError` """ arg = files.UploadSessionStartArg(close, - session_type) + session_type, + content_hash) r = self.request( files.upload_session_start, 'files', diff --git a/dropbox/files.py b/dropbox/files.py index 4530b1ab..bc0e9052 100644 --- a/dropbox/files.py +++ b/dropbox/files.py @@ -412,34 +412,6 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): CommitInfo_validator = bv.Struct(CommitInfo) -class CommitInfoWithProperties(CommitInfo): - - __slots__ = [ - ] - - _has_required_fields = True - - def __init__(self, - path=None, - mode=None, - autorename=None, - client_modified=None, - mute=None, - property_groups=None, - strict_conflict=None): - super(CommitInfoWithProperties, self).__init__(path, - mode, - autorename, - client_modified, - mute, - property_groups, - strict_conflict) - - def _process_custom_annotations(self, annotation_type, field_path, processor): - super(CommitInfoWithProperties, self)._process_custom_annotations(annotation_type, field_path, processor) - -CommitInfoWithProperties_validator = bv.Struct(CommitInfoWithProperties) - class ContentSyncSetting(bb.Struct): """ :ivar files.ContentSyncSetting.id: Id of the item this setting is applied @@ -9216,6 +9188,48 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UnlockFileBatchArg_validator = bv.Struct(UnlockFileBatchArg) +class UploadArg(CommitInfo): + """ + :ivar files.UploadArg.content_hash: A hash of the file content uploaded in + this call. If provided and the uploaded content does not match this + hash, an error will be returned. For more information see our `Content + hash `_ page. 
+ """ + + __slots__ = [ + '_content_hash_value', + ] + + _has_required_fields = True + + def __init__(self, + path=None, + mode=None, + autorename=None, + client_modified=None, + mute=None, + property_groups=None, + strict_conflict=None, + content_hash=None): + super(UploadArg, self).__init__(path, + mode, + autorename, + client_modified, + mute, + property_groups, + strict_conflict) + self._content_hash_value = bb.NOT_SET + if content_hash is not None: + self.content_hash = content_hash + + # Instance attribute type: str (validator is set below) + content_hash = bb.Attribute("content_hash", nullable=True) + + def _process_custom_annotations(self, annotation_type, field_path, processor): + super(UploadArg, self)._process_custom_annotations(annotation_type, field_path, processor) + +UploadArg_validator = bv.Struct(UploadArg) + class UploadError(bb.Union): """ This class acts as a tagged union. Only one of the ``is_*`` methods will @@ -9229,12 +9243,16 @@ class UploadError(bb.Union): groups. :ivar files.UploadError.payload_too_large: The request payload must be at most 150 MB. + :ivar files.UploadError.content_hash_mismatch: The content received by the + Dropbox server in this call does not match the provided content hash. """ _catch_all = 'other' # Attribute is overwritten below the class definition payload_too_large = None # Attribute is overwritten below the class definition + content_hash_mismatch = None + # Attribute is overwritten below the class definition other = None @classmethod @@ -9283,6 +9301,14 @@ def is_payload_too_large(self): """ return self._tag == 'payload_too_large' + def is_content_hash_mismatch(self): + """ + Check if the union tag is ``content_hash_mismatch``. + + :rtype: bool + """ + return self._tag == 'content_hash_mismatch' + def is_other(self): """ Check if the union tag is ``other``. @@ -9321,18 +9347,6 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UploadError_validator = bv.Union(UploadError) -class UploadErrorWithProperties(UploadError): - """ - This class acts as a tagged union. Only one of the ``is_*`` methods will - return true. To get the associated value of a tag (if one exists), use the - corresponding ``get_*`` method. - """ - - def _process_custom_annotations(self, annotation_type, field_path, processor): - super(UploadErrorWithProperties, self)._process_custom_annotations(annotation_type, field_path, processor) - -UploadErrorWithProperties_validator = bv.Union(UploadErrorWithProperties) - class UploadSessionAppendArg(bb.Struct): """ :ivar files.UploadSessionAppendArg.cursor: Contains the upload session ID @@ -9341,24 +9355,34 @@ class UploadSessionAppendArg(bb.Struct): be closed, at which point you won't be able to call :meth:`dropbox.dropbox_client.Dropbox.files_upload_session_append` anymore with the current session. + :ivar files.UploadSessionAppendArg.content_hash: A hash of the file content + uploaded in this call. If provided and the uploaded content does not + match this hash, an error will be returned. For more information see our + `Content hash + `_ page. 
""" __slots__ = [ '_cursor_value', '_close_value', + '_content_hash_value', ] _has_required_fields = True def __init__(self, cursor=None, - close=None): + close=None, + content_hash=None): self._cursor_value = bb.NOT_SET self._close_value = bb.NOT_SET + self._content_hash_value = bb.NOT_SET if cursor is not None: self.cursor = cursor if close is not None: self.close = close + if content_hash is not None: + self.content_hash = content_hash # Instance attribute type: UploadSessionCursor (validator is set below) cursor = bb.Attribute("cursor", user_defined=True) @@ -9366,11 +9390,192 @@ def __init__(self, # Instance attribute type: bool (validator is set below) close = bb.Attribute("close") + # Instance attribute type: str (validator is set below) + content_hash = bb.Attribute("content_hash", nullable=True) + def _process_custom_annotations(self, annotation_type, field_path, processor): super(UploadSessionAppendArg, self)._process_custom_annotations(annotation_type, field_path, processor) UploadSessionAppendArg_validator = bv.Struct(UploadSessionAppendArg) +class UploadSessionLookupError(bb.Union): + """ + This class acts as a tagged union. Only one of the ``is_*`` methods will + return true. To get the associated value of a tag (if one exists), use the + corresponding ``get_*`` method. + + :ivar files.UploadSessionLookupError.not_found: The upload session ID was + not found or has expired. Upload sessions are valid for 7 days. + :ivar UploadSessionOffsetError UploadSessionLookupError.incorrect_offset: + The specified offset was incorrect. See the value for the correct + offset. This error may occur when a previous request was received and + processed successfully but the client did not receive the response, e.g. + due to a network error. + :ivar files.UploadSessionLookupError.closed: You are attempting to append + data to an upload session that has already been closed (i.e. committed). + :ivar files.UploadSessionLookupError.not_closed: The session must be closed + before calling upload_session/finish_batch. + :ivar files.UploadSessionLookupError.too_large: You can not append to the + upload session because the size of a file should not reach the max file + size limit (i.e. 350GB). + :ivar files.UploadSessionLookupError.concurrent_session_invalid_offset: For + concurrent upload sessions, offset needs to be multiple of 4194304 + bytes. + :ivar files.UploadSessionLookupError.concurrent_session_invalid_data_size: + For concurrent upload sessions, only chunks with size multiple of + 4194304 bytes can be uploaded. + :ivar files.UploadSessionLookupError.payload_too_large: The request payload + must be at most 150 MB. + """ + + _catch_all = 'other' + # Attribute is overwritten below the class definition + not_found = None + # Attribute is overwritten below the class definition + closed = None + # Attribute is overwritten below the class definition + not_closed = None + # Attribute is overwritten below the class definition + too_large = None + # Attribute is overwritten below the class definition + concurrent_session_invalid_offset = None + # Attribute is overwritten below the class definition + concurrent_session_invalid_data_size = None + # Attribute is overwritten below the class definition + payload_too_large = None + # Attribute is overwritten below the class definition + other = None + + @classmethod + def incorrect_offset(cls, val): + """ + Create an instance of this class set to the ``incorrect_offset`` tag + with value ``val``. 
+ + :param UploadSessionOffsetError val: + :rtype: UploadSessionLookupError + """ + return cls('incorrect_offset', val) + + def is_not_found(self): + """ + Check if the union tag is ``not_found``. + + :rtype: bool + """ + return self._tag == 'not_found' + + def is_incorrect_offset(self): + """ + Check if the union tag is ``incorrect_offset``. + + :rtype: bool + """ + return self._tag == 'incorrect_offset' + + def is_closed(self): + """ + Check if the union tag is ``closed``. + + :rtype: bool + """ + return self._tag == 'closed' + + def is_not_closed(self): + """ + Check if the union tag is ``not_closed``. + + :rtype: bool + """ + return self._tag == 'not_closed' + + def is_too_large(self): + """ + Check if the union tag is ``too_large``. + + :rtype: bool + """ + return self._tag == 'too_large' + + def is_concurrent_session_invalid_offset(self): + """ + Check if the union tag is ``concurrent_session_invalid_offset``. + + :rtype: bool + """ + return self._tag == 'concurrent_session_invalid_offset' + + def is_concurrent_session_invalid_data_size(self): + """ + Check if the union tag is ``concurrent_session_invalid_data_size``. + + :rtype: bool + """ + return self._tag == 'concurrent_session_invalid_data_size' + + def is_payload_too_large(self): + """ + Check if the union tag is ``payload_too_large``. + + :rtype: bool + """ + return self._tag == 'payload_too_large' + + def is_other(self): + """ + Check if the union tag is ``other``. + + :rtype: bool + """ + return self._tag == 'other' + + def get_incorrect_offset(self): + """ + The specified offset was incorrect. See the value for the correct + offset. This error may occur when a previous request was received and + processed successfully but the client did not receive the response, e.g. + due to a network error. + + Only call this if :meth:`is_incorrect_offset` is true. + + :rtype: UploadSessionOffsetError + """ + if not self.is_incorrect_offset(): + raise AttributeError("tag 'incorrect_offset' not set") + return self._value + + def _process_custom_annotations(self, annotation_type, field_path, processor): + super(UploadSessionLookupError, self)._process_custom_annotations(annotation_type, field_path, processor) + +UploadSessionLookupError_validator = bv.Union(UploadSessionLookupError) + +class UploadSessionAppendError(UploadSessionLookupError): + """ + This class acts as a tagged union. Only one of the ``is_*`` methods will + return true. To get the associated value of a tag (if one exists), use the + corresponding ``get_*`` method. + + :ivar files.UploadSessionAppendError.content_hash_mismatch: The content + received by the Dropbox server in this call does not match the provided + content hash. + """ + + # Attribute is overwritten below the class definition + content_hash_mismatch = None + + def is_content_hash_mismatch(self): + """ + Check if the union tag is ``content_hash_mismatch``. + + :rtype: bool + """ + return self._tag == 'content_hash_mismatch' + + def _process_custom_annotations(self, annotation_type, field_path, processor): + super(UploadSessionAppendError, self)._process_custom_annotations(annotation_type, field_path, processor) + +UploadSessionAppendError_validator = bv.Union(UploadSessionAppendError) + class UploadSessionCursor(bb.Struct): """ :ivar files.UploadSessionCursor.session_id: The upload session ID (returned @@ -9414,24 +9619,34 @@ class UploadSessionFinishArg(bb.Struct): and the offset. :ivar files.UploadSessionFinishArg.commit: Contains the path and other optional modifiers for the commit. 
+ :ivar files.UploadSessionFinishArg.content_hash: A hash of the file content + uploaded in this call. If provided and the uploaded content does not + match this hash, an error will be returned. For more information see our + `Content hash + `_ page. """ __slots__ = [ '_cursor_value', '_commit_value', + '_content_hash_value', ] _has_required_fields = True def __init__(self, cursor=None, - commit=None): + commit=None, + content_hash=None): self._cursor_value = bb.NOT_SET self._commit_value = bb.NOT_SET + self._content_hash_value = bb.NOT_SET if cursor is not None: self.cursor = cursor if commit is not None: self.commit = commit + if content_hash is not None: + self.content_hash = content_hash # Instance attribute type: UploadSessionCursor (validator is set below) cursor = bb.Attribute("cursor", user_defined=True) @@ -9439,6 +9654,9 @@ def __init__(self, # Instance attribute type: CommitInfo (validator is set below) commit = bb.Attribute("commit", user_defined=True) + # Instance attribute type: str (validator is set below) + content_hash = bb.Attribute("content_hash", nullable=True) + def _process_custom_annotations(self, annotation_type, field_path, processor): super(UploadSessionFinishArg, self)._process_custom_annotations(annotation_type, field_path, processor) @@ -9705,6 +9923,9 @@ class UploadSessionFinishError(bb.Union): all pieces of data were uploaded before trying to finish the session. :ivar files.UploadSessionFinishError.payload_too_large: The request payload must be at most 150 MB. + :ivar files.UploadSessionFinishError.content_hash_mismatch: The content + received by the Dropbox server in this call does not match the provided + content hash. """ _catch_all = 'other' @@ -9721,6 +9942,8 @@ class UploadSessionFinishError(bb.Union): # Attribute is overwritten below the class definition payload_too_large = None # Attribute is overwritten below the class definition + content_hash_mismatch = None + # Attribute is overwritten below the class definition other = None @classmethod @@ -9828,6 +10051,14 @@ def is_payload_too_large(self): """ return self._tag == 'payload_too_large' + def is_content_hash_mismatch(self): + """ + Check if the union tag is ``content_hash_mismatch``. + + :rtype: bool + """ + return self._tag == 'content_hash_mismatch' + def is_other(self): """ Check if the union tag is ``other``. @@ -9880,157 +10111,6 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UploadSessionFinishError_validator = bv.Union(UploadSessionFinishError) -class UploadSessionLookupError(bb.Union): - """ - This class acts as a tagged union. Only one of the ``is_*`` methods will - return true. To get the associated value of a tag (if one exists), use the - corresponding ``get_*`` method. - - :ivar files.UploadSessionLookupError.not_found: The upload session ID was - not found or has expired. Upload sessions are valid for 7 days. - :ivar UploadSessionOffsetError UploadSessionLookupError.incorrect_offset: - The specified offset was incorrect. See the value for the correct - offset. This error may occur when a previous request was received and - processed successfully but the client did not receive the response, e.g. - due to a network error. - :ivar files.UploadSessionLookupError.closed: You are attempting to append - data to an upload session that has already been closed (i.e. committed). - :ivar files.UploadSessionLookupError.not_closed: The session must be closed - before calling upload_session/finish_batch. 
- :ivar files.UploadSessionLookupError.too_large: You can not append to the - upload session because the size of a file should not reach the max file - size limit (i.e. 350GB). - :ivar files.UploadSessionLookupError.concurrent_session_invalid_offset: For - concurrent upload sessions, offset needs to be multiple of 4194304 - bytes. - :ivar files.UploadSessionLookupError.concurrent_session_invalid_data_size: - For concurrent upload sessions, only chunks with size multiple of - 4194304 bytes can be uploaded. - :ivar files.UploadSessionLookupError.payload_too_large: The request payload - must be at most 150 MB. - """ - - _catch_all = 'other' - # Attribute is overwritten below the class definition - not_found = None - # Attribute is overwritten below the class definition - closed = None - # Attribute is overwritten below the class definition - not_closed = None - # Attribute is overwritten below the class definition - too_large = None - # Attribute is overwritten below the class definition - concurrent_session_invalid_offset = None - # Attribute is overwritten below the class definition - concurrent_session_invalid_data_size = None - # Attribute is overwritten below the class definition - payload_too_large = None - # Attribute is overwritten below the class definition - other = None - - @classmethod - def incorrect_offset(cls, val): - """ - Create an instance of this class set to the ``incorrect_offset`` tag - with value ``val``. - - :param UploadSessionOffsetError val: - :rtype: UploadSessionLookupError - """ - return cls('incorrect_offset', val) - - def is_not_found(self): - """ - Check if the union tag is ``not_found``. - - :rtype: bool - """ - return self._tag == 'not_found' - - def is_incorrect_offset(self): - """ - Check if the union tag is ``incorrect_offset``. - - :rtype: bool - """ - return self._tag == 'incorrect_offset' - - def is_closed(self): - """ - Check if the union tag is ``closed``. - - :rtype: bool - """ - return self._tag == 'closed' - - def is_not_closed(self): - """ - Check if the union tag is ``not_closed``. - - :rtype: bool - """ - return self._tag == 'not_closed' - - def is_too_large(self): - """ - Check if the union tag is ``too_large``. - - :rtype: bool - """ - return self._tag == 'too_large' - - def is_concurrent_session_invalid_offset(self): - """ - Check if the union tag is ``concurrent_session_invalid_offset``. - - :rtype: bool - """ - return self._tag == 'concurrent_session_invalid_offset' - - def is_concurrent_session_invalid_data_size(self): - """ - Check if the union tag is ``concurrent_session_invalid_data_size``. - - :rtype: bool - """ - return self._tag == 'concurrent_session_invalid_data_size' - - def is_payload_too_large(self): - """ - Check if the union tag is ``payload_too_large``. - - :rtype: bool - """ - return self._tag == 'payload_too_large' - - def is_other(self): - """ - Check if the union tag is ``other``. - - :rtype: bool - """ - return self._tag == 'other' - - def get_incorrect_offset(self): - """ - The specified offset was incorrect. See the value for the correct - offset. This error may occur when a previous request was received and - processed successfully but the client did not receive the response, e.g. - due to a network error. - - Only call this if :meth:`is_incorrect_offset` is true. 
- - :rtype: UploadSessionOffsetError - """ - if not self.is_incorrect_offset(): - raise AttributeError("tag 'incorrect_offset' not set") - return self._value - - def _process_custom_annotations(self, annotation_type, field_path, processor): - super(UploadSessionLookupError, self)._process_custom_annotations(annotation_type, field_path, processor) - -UploadSessionLookupError_validator = bv.Union(UploadSessionLookupError) - class UploadSessionOffsetError(bb.Struct): """ :ivar files.UploadSessionOffsetError.correct_offset: The offset up to which @@ -10066,24 +10146,34 @@ class UploadSessionStartArg(bb.Struct): :ivar files.UploadSessionStartArg.session_type: Type of upload session you want to start. If not specified, default is ``UploadSessionType.sequential``. + :ivar files.UploadSessionStartArg.content_hash: A hash of the file content + uploaded in this call. If provided and the uploaded content does not + match this hash, an error will be returned. For more information see our + `Content hash + `_ page. """ __slots__ = [ '_close_value', '_session_type_value', + '_content_hash_value', ] _has_required_fields = False def __init__(self, close=None, - session_type=None): + session_type=None, + content_hash=None): self._close_value = bb.NOT_SET self._session_type_value = bb.NOT_SET + self._content_hash_value = bb.NOT_SET if close is not None: self.close = close if session_type is not None: self.session_type = session_type + if content_hash is not None: + self.content_hash = content_hash # Instance attribute type: bool (validator is set below) close = bb.Attribute("close") @@ -10091,6 +10181,9 @@ def __init__(self, # Instance attribute type: UploadSessionType (validator is set below) session_type = bb.Attribute("session_type", nullable=True, user_defined=True) + # Instance attribute type: str (validator is set below) + content_hash = bb.Attribute("content_hash", nullable=True) + def _process_custom_annotations(self, annotation_type, field_path, processor): super(UploadSessionStartArg, self)._process_custom_annotations(annotation_type, field_path, processor) @@ -10108,6 +10201,9 @@ class UploadSessionStartError(bb.Union): Can not start a closed concurrent upload session. :ivar files.UploadSessionStartError.payload_too_large: The request payload must be at most 150 MB. + :ivar files.UploadSessionStartError.content_hash_mismatch: The content + received by the Dropbox server in this call does not match the provided + content hash. """ _catch_all = 'other' @@ -10118,6 +10214,8 @@ class UploadSessionStartError(bb.Union): # Attribute is overwritten below the class definition payload_too_large = None # Attribute is overwritten below the class definition + content_hash_mismatch = None + # Attribute is overwritten below the class definition other = None def is_concurrent_session_data_not_allowed(self): @@ -10144,6 +10242,14 @@ def is_payload_too_large(self): """ return self._tag == 'payload_too_large' + def is_content_hash_mismatch(self): + """ + Check if the union tag is ``content_hash_mismatch``. + + :rtype: bool + """ + return self._tag == 'content_hash_mismatch' + def is_other(self): """ Check if the union tag is ``other``. 
@@ -10762,9 +10868,6 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): ('strict_conflict', CommitInfo.strict_conflict.validator), ] -CommitInfoWithProperties._all_field_names_ = CommitInfo._all_field_names_.union(set([])) -CommitInfoWithProperties._all_fields_ = CommitInfo._all_fields_ + [] - ContentSyncSetting.id.validator = FileId_validator ContentSyncSetting.sync_setting.validator = SyncSetting_validator ContentSyncSetting._all_field_names_ = set([ @@ -12713,35 +12816,79 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UnlockFileBatchArg._all_field_names_ = set(['entries']) UnlockFileBatchArg._all_fields_ = [('entries', UnlockFileBatchArg.entries.validator)] +UploadArg.content_hash.validator = bv.Nullable(Sha256HexHash_validator) +UploadArg._all_field_names_ = CommitInfo._all_field_names_.union(set(['content_hash'])) +UploadArg._all_fields_ = CommitInfo._all_fields_ + [('content_hash', UploadArg.content_hash.validator)] + UploadError._path_validator = UploadWriteFailed_validator UploadError._properties_error_validator = file_properties.InvalidPropertyGroupError_validator UploadError._payload_too_large_validator = bv.Void() +UploadError._content_hash_mismatch_validator = bv.Void() UploadError._other_validator = bv.Void() UploadError._tagmap = { 'path': UploadError._path_validator, 'properties_error': UploadError._properties_error_validator, 'payload_too_large': UploadError._payload_too_large_validator, + 'content_hash_mismatch': UploadError._content_hash_mismatch_validator, 'other': UploadError._other_validator, } UploadError.payload_too_large = UploadError('payload_too_large') +UploadError.content_hash_mismatch = UploadError('content_hash_mismatch') UploadError.other = UploadError('other') -UploadErrorWithProperties._tagmap = { -} -UploadErrorWithProperties._tagmap.update(UploadError._tagmap) - UploadSessionAppendArg.cursor.validator = UploadSessionCursor_validator UploadSessionAppendArg.close.validator = bv.Boolean() +UploadSessionAppendArg.content_hash.validator = bv.Nullable(Sha256HexHash_validator) UploadSessionAppendArg._all_field_names_ = set([ 'cursor', 'close', + 'content_hash', ]) UploadSessionAppendArg._all_fields_ = [ ('cursor', UploadSessionAppendArg.cursor.validator), ('close', UploadSessionAppendArg.close.validator), + ('content_hash', UploadSessionAppendArg.content_hash.validator), ] +UploadSessionLookupError._not_found_validator = bv.Void() +UploadSessionLookupError._incorrect_offset_validator = UploadSessionOffsetError_validator +UploadSessionLookupError._closed_validator = bv.Void() +UploadSessionLookupError._not_closed_validator = bv.Void() +UploadSessionLookupError._too_large_validator = bv.Void() +UploadSessionLookupError._concurrent_session_invalid_offset_validator = bv.Void() +UploadSessionLookupError._concurrent_session_invalid_data_size_validator = bv.Void() +UploadSessionLookupError._payload_too_large_validator = bv.Void() +UploadSessionLookupError._other_validator = bv.Void() +UploadSessionLookupError._tagmap = { + 'not_found': UploadSessionLookupError._not_found_validator, + 'incorrect_offset': UploadSessionLookupError._incorrect_offset_validator, + 'closed': UploadSessionLookupError._closed_validator, + 'not_closed': UploadSessionLookupError._not_closed_validator, + 'too_large': UploadSessionLookupError._too_large_validator, + 'concurrent_session_invalid_offset': UploadSessionLookupError._concurrent_session_invalid_offset_validator, + 'concurrent_session_invalid_data_size': 
UploadSessionLookupError._concurrent_session_invalid_data_size_validator, + 'payload_too_large': UploadSessionLookupError._payload_too_large_validator, + 'other': UploadSessionLookupError._other_validator, +} + +UploadSessionLookupError.not_found = UploadSessionLookupError('not_found') +UploadSessionLookupError.closed = UploadSessionLookupError('closed') +UploadSessionLookupError.not_closed = UploadSessionLookupError('not_closed') +UploadSessionLookupError.too_large = UploadSessionLookupError('too_large') +UploadSessionLookupError.concurrent_session_invalid_offset = UploadSessionLookupError('concurrent_session_invalid_offset') +UploadSessionLookupError.concurrent_session_invalid_data_size = UploadSessionLookupError('concurrent_session_invalid_data_size') +UploadSessionLookupError.payload_too_large = UploadSessionLookupError('payload_too_large') +UploadSessionLookupError.other = UploadSessionLookupError('other') + +UploadSessionAppendError._content_hash_mismatch_validator = bv.Void() +UploadSessionAppendError._tagmap = { + 'content_hash_mismatch': UploadSessionAppendError._content_hash_mismatch_validator, +} +UploadSessionAppendError._tagmap.update(UploadSessionLookupError._tagmap) + +UploadSessionAppendError.content_hash_mismatch = UploadSessionAppendError('content_hash_mismatch') + UploadSessionCursor.session_id.validator = bv.String() UploadSessionCursor.offset.validator = bv.UInt64() UploadSessionCursor._all_field_names_ = set([ @@ -12755,13 +12902,16 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UploadSessionFinishArg.cursor.validator = UploadSessionCursor_validator UploadSessionFinishArg.commit.validator = CommitInfo_validator +UploadSessionFinishArg.content_hash.validator = bv.Nullable(Sha256HexHash_validator) UploadSessionFinishArg._all_field_names_ = set([ 'cursor', 'commit', + 'content_hash', ]) UploadSessionFinishArg._all_fields_ = [ ('cursor', UploadSessionFinishArg.cursor.validator), ('commit', UploadSessionFinishArg.commit.validator), + ('content_hash', UploadSessionFinishArg.content_hash.validator), ] UploadSessionFinishBatchArg.entries.validator = bv.List(UploadSessionFinishArg_validator, max_items=1000) @@ -12804,6 +12954,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UploadSessionFinishError._concurrent_session_not_closed_validator = bv.Void() UploadSessionFinishError._concurrent_session_missing_data_validator = bv.Void() UploadSessionFinishError._payload_too_large_validator = bv.Void() +UploadSessionFinishError._content_hash_mismatch_validator = bv.Void() UploadSessionFinishError._other_validator = bv.Void() UploadSessionFinishError._tagmap = { 'lookup_failed': UploadSessionFinishError._lookup_failed_validator, @@ -12815,6 +12966,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): 'concurrent_session_not_closed': UploadSessionFinishError._concurrent_session_not_closed_validator, 'concurrent_session_missing_data': UploadSessionFinishError._concurrent_session_missing_data_validator, 'payload_too_large': UploadSessionFinishError._payload_too_large_validator, + 'content_hash_mismatch': UploadSessionFinishError._content_hash_mismatch_validator, 'other': UploadSessionFinishError._other_validator, } @@ -12824,67 +12976,44 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): UploadSessionFinishError.concurrent_session_not_closed = UploadSessionFinishError('concurrent_session_not_closed') UploadSessionFinishError.concurrent_session_missing_data = 
UploadSessionFinishError('concurrent_session_missing_data') UploadSessionFinishError.payload_too_large = UploadSessionFinishError('payload_too_large') +UploadSessionFinishError.content_hash_mismatch = UploadSessionFinishError('content_hash_mismatch') UploadSessionFinishError.other = UploadSessionFinishError('other') -UploadSessionLookupError._not_found_validator = bv.Void() -UploadSessionLookupError._incorrect_offset_validator = UploadSessionOffsetError_validator -UploadSessionLookupError._closed_validator = bv.Void() -UploadSessionLookupError._not_closed_validator = bv.Void() -UploadSessionLookupError._too_large_validator = bv.Void() -UploadSessionLookupError._concurrent_session_invalid_offset_validator = bv.Void() -UploadSessionLookupError._concurrent_session_invalid_data_size_validator = bv.Void() -UploadSessionLookupError._payload_too_large_validator = bv.Void() -UploadSessionLookupError._other_validator = bv.Void() -UploadSessionLookupError._tagmap = { - 'not_found': UploadSessionLookupError._not_found_validator, - 'incorrect_offset': UploadSessionLookupError._incorrect_offset_validator, - 'closed': UploadSessionLookupError._closed_validator, - 'not_closed': UploadSessionLookupError._not_closed_validator, - 'too_large': UploadSessionLookupError._too_large_validator, - 'concurrent_session_invalid_offset': UploadSessionLookupError._concurrent_session_invalid_offset_validator, - 'concurrent_session_invalid_data_size': UploadSessionLookupError._concurrent_session_invalid_data_size_validator, - 'payload_too_large': UploadSessionLookupError._payload_too_large_validator, - 'other': UploadSessionLookupError._other_validator, -} - -UploadSessionLookupError.not_found = UploadSessionLookupError('not_found') -UploadSessionLookupError.closed = UploadSessionLookupError('closed') -UploadSessionLookupError.not_closed = UploadSessionLookupError('not_closed') -UploadSessionLookupError.too_large = UploadSessionLookupError('too_large') -UploadSessionLookupError.concurrent_session_invalid_offset = UploadSessionLookupError('concurrent_session_invalid_offset') -UploadSessionLookupError.concurrent_session_invalid_data_size = UploadSessionLookupError('concurrent_session_invalid_data_size') -UploadSessionLookupError.payload_too_large = UploadSessionLookupError('payload_too_large') -UploadSessionLookupError.other = UploadSessionLookupError('other') - UploadSessionOffsetError.correct_offset.validator = bv.UInt64() UploadSessionOffsetError._all_field_names_ = set(['correct_offset']) UploadSessionOffsetError._all_fields_ = [('correct_offset', UploadSessionOffsetError.correct_offset.validator)] UploadSessionStartArg.close.validator = bv.Boolean() UploadSessionStartArg.session_type.validator = bv.Nullable(UploadSessionType_validator) +UploadSessionStartArg.content_hash.validator = bv.Nullable(Sha256HexHash_validator) UploadSessionStartArg._all_field_names_ = set([ 'close', 'session_type', + 'content_hash', ]) UploadSessionStartArg._all_fields_ = [ ('close', UploadSessionStartArg.close.validator), ('session_type', UploadSessionStartArg.session_type.validator), + ('content_hash', UploadSessionStartArg.content_hash.validator), ] UploadSessionStartError._concurrent_session_data_not_allowed_validator = bv.Void() UploadSessionStartError._concurrent_session_close_not_allowed_validator = bv.Void() UploadSessionStartError._payload_too_large_validator = bv.Void() +UploadSessionStartError._content_hash_mismatch_validator = bv.Void() UploadSessionStartError._other_validator = bv.Void() UploadSessionStartError._tagmap = { 
'concurrent_session_data_not_allowed': UploadSessionStartError._concurrent_session_data_not_allowed_validator, 'concurrent_session_close_not_allowed': UploadSessionStartError._concurrent_session_close_not_allowed_validator, 'payload_too_large': UploadSessionStartError._payload_too_large_validator, + 'content_hash_mismatch': UploadSessionStartError._content_hash_mismatch_validator, 'other': UploadSessionStartError._other_validator, } UploadSessionStartError.concurrent_session_data_not_allowed = UploadSessionStartError('concurrent_session_data_not_allowed') UploadSessionStartError.concurrent_session_close_not_allowed = UploadSessionStartError('concurrent_session_close_not_allowed') UploadSessionStartError.payload_too_large = UploadSessionStartError('payload_too_large') +UploadSessionStartError.content_hash_mismatch = UploadSessionStartError('content_hash_mismatch') UploadSessionStartError.other = UploadSessionStartError('other') UploadSessionStartResult.session_id.validator = bv.String() @@ -13042,9 +13171,9 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): 'alpha/upload', 1, True, - CommitInfoWithProperties_validator, + UploadArg_validator, FileMetadata_validator, - UploadErrorWithProperties_validator, + UploadError_validator, {'auth': 'user', 'host': 'content', 'style': 'upload'}, @@ -13691,7 +13820,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): 'upload', 1, False, - CommitInfo_validator, + UploadArg_validator, FileMetadata_validator, UploadError_validator, {'auth': 'user', @@ -13704,7 +13833,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): False, UploadSessionAppendArg_validator, bv.Void(), - UploadSessionLookupError_validator, + UploadSessionAppendError_validator, {'auth': 'user', 'host': 'content', 'style': 'upload'}, @@ -13715,7 +13844,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): True, UploadSessionCursor_validator, bv.Void(), - UploadSessionLookupError_validator, + UploadSessionAppendError_validator, {'auth': 'user', 'host': 'content', 'style': 'upload'}, diff --git a/dropbox/sharing.py b/dropbox/sharing.py index 85eff028..c0a1ed73 100644 --- a/dropbox/sharing.py +++ b/dropbox/sharing.py @@ -88,6 +88,8 @@ class AccessLevel(bb.Union): folder. :ivar sharing.AccessLevel.viewer_no_comment: The collaborator can only view the shared folder and does not have any access to comments. + :ivar sharing.AccessLevel.traverse: The collaborator can only view the + shared folder that they have access to. """ _catch_all = 'other' @@ -100,6 +102,8 @@ class AccessLevel(bb.Union): # Attribute is overwritten below the class definition viewer_no_comment = None # Attribute is overwritten below the class definition + traverse = None + # Attribute is overwritten below the class definition other = None def is_owner(self): @@ -134,6 +138,14 @@ def is_viewer_no_comment(self): """ return self._tag == 'viewer_no_comment' + def is_traverse(self): + """ + Check if the union tag is ``traverse``. + + :rtype: bool + """ + return self._tag == 'traverse' + def is_other(self): """ Check if the union tag is ``other``. 
@@ -11040,12 +11052,14 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): AccessLevel._editor_validator = bv.Void() AccessLevel._viewer_validator = bv.Void() AccessLevel._viewer_no_comment_validator = bv.Void() +AccessLevel._traverse_validator = bv.Void() AccessLevel._other_validator = bv.Void() AccessLevel._tagmap = { 'owner': AccessLevel._owner_validator, 'editor': AccessLevel._editor_validator, 'viewer': AccessLevel._viewer_validator, 'viewer_no_comment': AccessLevel._viewer_no_comment_validator, + 'traverse': AccessLevel._traverse_validator, 'other': AccessLevel._other_validator, } @@ -11053,6 +11067,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): AccessLevel.editor = AccessLevel('editor') AccessLevel.viewer = AccessLevel('viewer') AccessLevel.viewer_no_comment = AccessLevel('viewer_no_comment') +AccessLevel.traverse = AccessLevel('traverse') AccessLevel.other = AccessLevel('other') AclUpdatePolicy._owner_validator = bv.Void() diff --git a/dropbox/team_log.py b/dropbox/team_log.py index 98b95650..08ca6d00 100644 --- a/dropbox/team_log.py +++ b/dropbox/team_log.py @@ -7080,7 +7080,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): class EmailIngestPolicy(bb.Union): """ - Policy for deciding whether a team can use Email to my Dropbox feature + Policy for deciding whether a team can use Email to Dropbox feature This class acts as a tagged union. Only one of the ``is_*`` methods will return true. To get the associated value of a tag (if one exists), use the @@ -7126,7 +7126,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): class EmailIngestPolicyChangedDetails(bb.Struct): """ - Changed email to my Dropbox policy for team. + Changed email to Dropbox policy for team. :ivar team_log.EmailIngestPolicyChangedDetails.new_value: To. :ivar team_log.EmailIngestPolicyChangedDetails.previous_value: From. @@ -7184,7 +7184,7 @@ def _process_custom_annotations(self, annotation_type, field_path, processor): class EmailIngestReceiveFileDetails(bb.Struct): """ - Received files via Email to my Dropbox. + Received files via Email to Dropbox. :ivar team_log.EmailIngestReceiveFileDetails.inbox_name: Inbox name. 
:ivar team_log.EmailIngestReceiveFileDetails.attachment_names: Submitted @@ -22354,7 +22354,7 @@ class EventType(bb.Union): :ivar UserTagsRemovedType EventType.user_tags_removed: (file_operations) Removed tags :ivar EmailIngestReceiveFileType EventType.email_ingest_receive_file: - (file_requests) Received files via Email to my Dropbox + (file_requests) Received files via Email to Dropbox :ivar FileRequestChangeType EventType.file_request_change: (file_requests) Changed file request :ivar FileRequestCloseType EventType.file_request_close: (file_requests) @@ -22984,7 +22984,7 @@ class EventType(bb.Union): EventType.directory_restrictions_remove_members: (team_policies) Removed members from directory restrictions list :ivar EmailIngestPolicyChangedType EventType.email_ingest_policy_changed: - (team_policies) Changed email to my Dropbox policy for team + (team_policies) Changed email to Dropbox policy for team :ivar EmmAddExceptionType EventType.emm_add_exception: (team_policies) Added members to EMM exception list :ivar EmmChangePolicyType EventType.emm_change_policy: (team_policies) @@ -33594,7 +33594,7 @@ def get_user_tags_removed(self): def get_email_ingest_receive_file(self): """ - (file_requests) Received files via Email to my Dropbox + (file_requests) Received files via Email to Dropbox Only call this if :meth:`is_email_ingest_receive_file` is true. @@ -36879,7 +36879,7 @@ def get_directory_restrictions_remove_members(self): def get_email_ingest_policy_changed(self): """ - (team_policies) Changed email to my Dropbox policy for team + (team_policies) Changed email to Dropbox policy for team Only call this if :meth:`is_email_ingest_policy_changed` is true. @@ -38425,7 +38425,7 @@ class EventTypeArg(bb.Union): :ivar team_log.EventTypeArg.user_tags_removed: (file_operations) Removed tags :ivar team_log.EventTypeArg.email_ingest_receive_file: (file_requests) - Received files via Email to my Dropbox + Received files via Email to Dropbox :ivar team_log.EventTypeArg.file_request_change: (file_requests) Changed file request :ivar team_log.EventTypeArg.file_request_close: (file_requests) Closed file @@ -38967,7 +38967,7 @@ class EventTypeArg(bb.Union): :ivar team_log.EventTypeArg.directory_restrictions_remove_members: (team_policies) Removed members from directory restrictions list :ivar team_log.EventTypeArg.email_ingest_policy_changed: (team_policies) - Changed email to my Dropbox policy for team + Changed email to Dropbox policy for team :ivar team_log.EventTypeArg.emm_add_exception: (team_policies) Added members to EMM exception list :ivar team_log.EventTypeArg.emm_change_policy: (team_policies) diff --git a/spec b/spec index 13bbef72..9daf01c5 160000 --- a/spec +++ b/spec @@ -1 +1 @@ -Subproject commit 13bbef7298d235257ee5576025b5938f748de526 +Subproject commit 9daf01c5b7ac6936956f085792c573c06fb3c905