From afadb2d6bf4dd13b747350f66d9c1c79d653929a Mon Sep 17 00:00:00 2001 From: Matt Bogosian Date: Thu, 11 May 2017 00:33:28 -0700 Subject: [PATCH] - Fix obsoleted lint errors from updated pylint. - Fix busted lint from auto-generated files. - Remove obsolete files. - Update stone. --- dropbox/babel_serializers.py | 773 ------------------ dropbox/babel_validators.py | 562 ------------- dropbox/base.py | 3 + dropbox/base_team.py | 3 + dropbox/dropbox.py | 6 +- dropbox/oauth.py | 9 +- dropbox/rest.py | 2 +- dropbox/session.py | 2 +- .../backup-and-restore-example.py | 2 +- stone | 2 +- tox.ini | 5 +- 11 files changed, 23 insertions(+), 1346 deletions(-) delete mode 100644 dropbox/babel_serializers.py delete mode 100644 dropbox/babel_validators.py diff --git a/dropbox/babel_serializers.py b/dropbox/babel_serializers.py deleted file mode 100644 index 84582e3b..00000000 --- a/dropbox/babel_serializers.py +++ /dev/null @@ -1,773 +0,0 @@ -""" -Serializers that marshal Babel data types into wire formats. - -Currently, only JSON is supported. If possible, serializers should be kept -separate from the RPC format. - -This module should be dropped into a project that requires the use of Babel. In -the future, this could be imported from a pre-installed Python package, rather -than being added to a project. - -EDITING THIS FILE? Please modify the version in the babelapi repo, -""" - -from __future__ import absolute_import - -import base64 -import collections -import datetime -import functools -import json -import six - -try: - from . import babel_validators as bv -except (SystemError, ValueError): - # Catch errors raised when importing a relative module when not in a package. - # This makes testing this file directly (outside of a package) easier. - import babel_validators as bv - - -# -------------------------------------------------------------- -# JSON Encoder - -def json_encode(data_type, obj, alias_validators=None, old_style=False): - """Encodes an object into JSON based on its type. - - Args: - data_type (Validator): Validator for obj. - obj (object): Object to be serialized. - alias_validators (Optional[Mapping[bv.Validator, Callable[[], None]]]): - Custom validation functions. These must raise bv.ValidationError on - failure. - - Returns: - str: JSON-encoded object. - - This function will also do additional validation that wasn't done by the - objects themselves: - - 1. The passed in obj may not have been validated with data_type yet. - 2. If an object that should be a Struct was assigned to a field, its - type has been validated, but the presence of all required fields - hasn't been. - 3. If an object that should be a Union was assigned to a field, whether - or not a tag has been set has not been validated. - 4. A list may have passed validation initially, but been mutated since. 
- - Example of serializing a struct to JSON: - - struct FileRef - path String - rev String - - > fr = FileRef() - > fr.path = 'a/b/c' - > fr.rev = '1234' - > JsonEncoder.encode(fr) - "{'path': 'a/b/c', 'rev': '1234'}" - - Example of serializing a union to JSON: - - union UploadMode - add - overwrite - update FileRef - - > um = UploadMode() - > um.set_add() - > JsonEncoder.encode(um) - '"add"' - > um.update = fr - > JsonEncoder.encode(um) - "{'update': {'path': 'a/b/c', 'rev': '1234'}}" - """ - return json.dumps( - json_compat_obj_encode( - data_type, obj, alias_validators, old_style)) - - -def json_compat_obj_encode( - data_type, obj, alias_validators=None, old_style=False, - for_msgpack=False): - """Encodes an object into a JSON-compatible dict based on its type. - - Args: - data_type (Validator): Validator for obj. - obj (object): Object to be serialized. - - Returns: - An object that when passed to json.dumps() will produce a string - giving the JSON-encoded object. - - See json_encode() for additional information about validation. - """ - if isinstance(data_type, (bv.Struct, bv.Union)): - # Only validate the type because fields are validated on assignment. - data_type.validate_type_only(obj) - else: - data_type.validate(obj) - return _json_compat_obj_encode_helper( - data_type, obj, alias_validators, old_style, for_msgpack) - - -def _json_compat_obj_encode_helper( - data_type, obj, alias_validators, old_style, for_msgpack): - """ - See json_encode() for argument descriptions. - """ - if isinstance(data_type, bv.List): - return _encode_list( - data_type, obj, alias_validators, old_style=old_style, - for_msgpack=for_msgpack) - elif isinstance(data_type, bv.Nullable): - return _encode_nullable( - data_type, obj, alias_validators, old_style=old_style, - for_msgpack=for_msgpack) - elif isinstance(data_type, bv.Primitive): - return _make_json_friendly( - data_type, obj, alias_validators, for_msgpack=for_msgpack) - elif isinstance(data_type, bv.StructTree): - return _encode_struct_tree( - data_type, obj, alias_validators, old_style=old_style, - for_msgpack=for_msgpack) - elif isinstance(data_type, bv.Struct): - return _encode_struct( - data_type, obj, alias_validators, old_style=old_style, - for_msgpack=for_msgpack) - elif isinstance(data_type, bv.Union): - if old_style: - return _encode_union_old( - data_type, obj, alias_validators, for_msgpack=for_msgpack) - else: - return _encode_union( - data_type, obj, alias_validators, for_msgpack=for_msgpack) - else: - raise AssertionError('Unsupported data type %r' % - type(data_type).__name__) - - -def _encode_list(data_type, obj, alias_validators, old_style, for_msgpack): - """ - The data_type argument must be a List. - See json_encode() for argument descriptions. - """ - # Because Lists are mutable, we always validate them during serialization. - obj = data_type.validate(obj) - return [ - _json_compat_obj_encode_helper( - data_type.item_validator, item, alias_validators, old_style, for_msgpack) - for item in obj - ] - - -def _encode_nullable(data_type, obj, alias_validators, old_style, for_msgpack): - """ - The data_type argument must be a Nullable. - See json_encode() for argument descriptions. - """ - if obj is not None: - return _json_compat_obj_encode_helper( - data_type.validator, obj, alias_validators, old_style, for_msgpack) - else: - return None - - -def _encode_struct(data_type, obj, alias_validators, old_style, for_msgpack): - """ - The data_type argument must be a Struct or StructTree. - See json_encode() for argument descriptions. 
- """ - # We skip validation of fields with primitive data types in structs and - # unions because they've already been validated on assignment. - d = collections.OrderedDict() - for field_name, field_data_type in data_type.definition._all_fields_: - try: - val = getattr(obj, field_name) - except AttributeError as e: - raise bv.ValidationError(e.args[0]) - presence_key = '_%s_present' % field_name - if val is not None and getattr(obj, presence_key): - # This check makes sure that we don't serialize absent struct - # fields as null, even if there is a default. - try: - d[field_name] = _json_compat_obj_encode_helper( - field_data_type, val, alias_validators, old_style, - for_msgpack) - except bv.ValidationError as e: - e.add_parent(field_name) - raise - return d - - -def _encode_union(data_type, obj, alias_validators, for_msgpack): - """ - The data_type argument must be a Union. - See json_encode() for argument descriptions. - """ - if obj._tag is None: - raise bv.ValidationError('no tag set') - field_data_type = data_type.definition._tagmap[obj._tag] - - if (isinstance(field_data_type, bv.Void) or - (isinstance(field_data_type, bv.Nullable) and obj._value is None)): - return {'.tag': obj._tag} - else: - try: - encoded_val = _json_compat_obj_encode_helper( - field_data_type, obj._value, alias_validators, False, - for_msgpack) - except bv.ValidationError as e: - e.add_parent(obj._tag) - raise - else: - if isinstance(field_data_type, bv.Nullable): - # We've already checked for the null case above, so now we're - # only interested in what the wrapped validator is. - field_data_type = field_data_type.validator - if (isinstance(field_data_type, bv.Struct) and - not isinstance(field_data_type, bv.StructTree)): - d = collections.OrderedDict() - d['.tag'] = obj._tag - d.update(encoded_val) - return d - else: - return collections.OrderedDict([ - ('.tag', obj._tag), - (obj._tag, encoded_val)]) - - -def _encode_union_old(data_type, obj, alias_validators, for_msgpack): - """ - The data_type argument must be a Union. - See json_encode() for argument descriptions. - """ - if obj._tag is None: - raise bv.ValidationError('no tag set') - field_data_type = data_type.definition._tagmap[obj._tag] - if field_data_type is None: - return obj._tag - else: - if (isinstance(field_data_type, bv.Void) or - (isinstance(field_data_type, bv.Nullable) and - obj._value is None)): - return obj._tag - else: - try: - encoded_val = _json_compat_obj_encode_helper( - field_data_type, obj._value, alias_validators, True, - for_msgpack) - except bv.ValidationError as e: - e.add_parent(obj._tag) - raise - else: - return {obj._tag: encoded_val} - - -def _encode_struct_tree( - data_type, obj, alias_validators, old_style, for_msgpack): - """ - Args: - data_type (StructTree) - as_root (bool): If a struct with enumerated subtypes is designated as a - root, then its fields including those that are inherited are - encoded in the outermost JSON object together. - - See json_encode() for other argument descriptions. - """ - assert type(obj) in data_type.definition._pytype_to_tag_and_subtype_, ( - '%r is not a serializable subtype of %r.' % - (type(obj), data_type.definition)) - tags, subtype = data_type.definition._pytype_to_tag_and_subtype_[type(obj)] - assert len(tags) == 1, tags - assert not isinstance(subtype, bv.StructTree), ( - 'Cannot serialize type %r because it enumerates subtypes.' 
% - subtype.definition) - if old_style: - return { - tags[0]: - _encode_struct( - subtype, obj, alias_validators, old_style, for_msgpack) - } - d = collections.OrderedDict() - d['.tag'] = tags[0] - d.update( - _encode_struct(subtype, obj, alias_validators, old_style, for_msgpack)) - return d - - -def _make_json_friendly(data_type, val, alias_validators, for_msgpack): - """ - Convert a primitive type to a Python type that can be serialized by the - json package. - """ - if alias_validators is not None and data_type in alias_validators: - alias_validators[data_type](val) - if isinstance(data_type, bv.Void): - return None - elif isinstance(data_type, bv.Timestamp): - return val.strftime(data_type.format) - elif isinstance(data_type, bv.Bytes): - if for_msgpack: - return val - else: - return base64.b64encode(val).decode('ascii') - elif isinstance(data_type, bv.Integer) and isinstance(val, bool): - # A bool is a subclass of an int so it passes Integer validation. But, - # we want the bool to be encoded as an Integer (1/0) rather than T/F. - return int(val) - else: - return val - - -# -------------------------------------------------------------- -# JSON Decoder - -def json_decode( - data_type, serialized_obj, alias_validators=None, strict=True, - old_style=False): - """Performs the reverse operation of json_encode. - - Args: - data_type (Validator): Validator for serialized_obj. - serialized_obj (str): The JSON string to deserialize. - alias_validators (Optional[Mapping[bv.Validator, Callable[[], None]]]): - Custom validation functions. These must raise bv.ValidationError on - failure. - strict (bool): If strict, then unknown struct fields will raise an - error, and unknown union variants will raise an error even if a - catch all field is specified. strict should only be used by a - recipient of serialized JSON if it's guaranteed that its Babel - specs are at least as recent as the senders it receives messages - from. - - Returns: - The returned object depends on the input data_type. - - Boolean -> bool - - Bytes -> bytes - - Float -> float - - Integer -> long - - List -> list - - Nullable -> None or its wrapped type. - - String -> unicode (PY2) or str (PY3) - - Struct -> An instance of its definition attribute. - - Timestamp -> datetime.datetime - - Union -> An instance of its definition attribute. - """ - try: - deserialized_obj = json.loads(serialized_obj) - except ValueError: - raise bv.ValidationError('could not decode input as JSON') - else: - return json_compat_obj_decode( - data_type, deserialized_obj, alias_validators, strict, old_style) - - -def json_compat_obj_decode( - data_type, obj, alias_validators=None, strict=True, old_style=False, - for_msgpack=False): - """ - Decodes a JSON-compatible object based on its data type into a - representative Python object. - - Args: - data_type (Validator): Validator for serialized_obj. - obj: The JSON-compatible object to decode based on data_type. - strict (bool): If strict, then unknown struct fields will raise an - error, and unknown union variants will raise an error even if a - catch all field is specified. See json_decode() for more. - - Returns: - See json_decode(). 
- """ - if isinstance(data_type, bv.Primitive): - return _make_babel_friendly( - data_type, obj, alias_validators, strict, True, for_msgpack) - else: - return _json_compat_obj_decode_helper( - data_type, obj, alias_validators, strict, old_style, for_msgpack) - - -def _json_compat_obj_decode_helper( - data_type, obj, alias_validators, strict, old_style, for_msgpack): - """ - See json_compat_obj_decode() for argument descriptions. - """ - if isinstance(data_type, bv.StructTree): - return _decode_struct_tree( - data_type, obj, alias_validators, strict, for_msgpack) - elif isinstance(data_type, bv.Struct): - return _decode_struct( - data_type, obj, alias_validators, strict, old_style, for_msgpack) - elif isinstance(data_type, bv.Union): - if old_style: - return _decode_union_old( - data_type, obj, alias_validators, strict, for_msgpack) - else: - return _decode_union( - data_type, obj, alias_validators, strict, for_msgpack) - elif isinstance(data_type, bv.List): - return _decode_list( - data_type, obj, alias_validators, strict, old_style, for_msgpack) - elif isinstance(data_type, bv.Nullable): - return _decode_nullable( - data_type, obj, alias_validators, strict, old_style, for_msgpack) - elif isinstance(data_type, bv.Primitive): - # Set validate to false because validation will be done by the - # containing struct or union when the field is assigned. - return _make_babel_friendly( - data_type, obj, alias_validators, strict, False, for_msgpack) - else: - raise AssertionError('Cannot handle type %r.' % data_type) - - -def _decode_struct( - data_type, obj, alias_validators, strict, old_style, for_msgpack): - """ - The data_type argument must be a Struct. - See json_compat_obj_decode() for argument descriptions. - """ - if obj is None and data_type.has_default(): - return data_type.get_default() - elif not isinstance(obj, dict): - raise bv.ValidationError('expected object, got %s' % - bv.generic_type_name(obj)) - if strict: - for key in obj: - if (key not in data_type.definition._all_field_names_ and - not key.startswith('.tag')): - raise bv.ValidationError("unknown field '%s'" % key) - ins = data_type.definition() - _decode_struct_fields( - ins, data_type.definition._all_fields_, obj, alias_validators, strict, - old_style, for_msgpack) - # Check that all required fields have been set. - data_type.validate_fields_only(ins) - return ins - - -def _decode_struct_fields( - ins, fields, obj, alias_validators, strict, old_style, for_msgpack): - """ - Args: - ins: An instance of the class representing the data type being decoded. - The object will have its fields set. - fields: A tuple of (field_name: str, field_validator: Validator) - obj (dict): JSON-compatible dict that is being decoded. - strict (bool): See :func:`json_compat_obj_decode`. - - Returns: - None: `ins` has its fields set based on the contents of `obj`. - """ - for name, field_data_type in fields: - if name in obj: - try: - v = _json_compat_obj_decode_helper( - field_data_type, obj[name], alias_validators, strict, - old_style, for_msgpack) - setattr(ins, name, v) - except bv.ValidationError as e: - e.add_parent(name) - raise - elif field_data_type.has_default(): - setattr(ins, name, field_data_type.get_default()) - - -def _decode_union(data_type, obj, alias_validators, strict, for_msgpack): - """ - The data_type argument must be a Union. - See json_compat_obj_decode() for argument descriptions. 
- """ - val = None - if isinstance(obj, six.string_types): - # Handles the shorthand format where the union is serialized as only - # the string of the tag. - tag = obj - if tag in data_type.definition._tagmap: - val_data_type = data_type.definition._tagmap[tag] - if not isinstance(val_data_type, (bv.Void, bv.Nullable)): - raise bv.ValidationError( - "expected object for '%s', got symbol" % tag) - if tag == data_type.definition._catch_all: - raise bv.ValidationError( - "unexpected use of the catch-all tag '%s'" % tag) - else: - if not strict and data_type.definition._catch_all: - tag = data_type.definition._catch_all - else: - raise bv.ValidationError("unknown tag '%s'" % tag) - elif isinstance(obj, dict): - tag, val = _decode_union_dict( - data_type, obj, alias_validators, strict, for_msgpack) - else: - raise bv.ValidationError("expected string or object, got %s" % - bv.generic_type_name(obj)) - return data_type.definition(tag, val) - - -def _decode_union_dict(data_type, obj, alias_validators, strict, for_msgpack): - if '.tag' not in obj: - raise bv.ValidationError("missing '.tag' key") - tag = obj['.tag'] - if not isinstance(tag, six.string_types): - raise bv.ValidationError( - 'tag must be string, got %s' % bv.generic_type_name(tag)) - - if tag not in data_type.definition._tagmap: - if not strict and data_type.definition._catch_all: - return data_type.definition._catch_all, None - else: - raise bv.ValidationError("unknown tag '%s'" % tag) - if tag == data_type.definition._catch_all: - raise bv.ValidationError( - "unexpected use of the catch-all tag '%s'" % tag) - - val_data_type = data_type.definition._tagmap[tag] - if isinstance(val_data_type, bv.Nullable): - val_data_type = val_data_type.validator - nullable = True - else: - nullable = False - - if isinstance(val_data_type, bv.Void): - if tag in obj: - if obj[tag] is not None: - raise bv.ValidationError('expected null, got %s' % - bv.generic_type_name(obj[tag])) - for key in obj: - if key != tag and key != '.tag': - raise bv.ValidationError("unexpected key '%s'" % key) - val = None - elif isinstance(val_data_type, - (bv.Primitive, bv.List, bv.StructTree, bv.Union)): - if tag in obj: - raw_val = obj[tag] - try: - val = _json_compat_obj_decode_helper( - val_data_type, raw_val, alias_validators, strict, False, for_msgpack) - except bv.ValidationError as e: - e.add_parent(tag) - raise - else: - # Check no other keys - if nullable: - val = None - else: - raise bv.ValidationError("missing '%s' key" % tag) - for key in obj: - if key != tag and key != '.tag': - raise bv.ValidationError("unexpected key '%s'" % key) - elif isinstance(val_data_type, bv.Struct): - if nullable and len(obj) == 1: # only has a .tag key - val = None - else: - # assume it's not null - raw_val = obj - try: - val = _json_compat_obj_decode_helper( - val_data_type, raw_val, alias_validators, strict, False, - for_msgpack) - except bv.ValidationError as e: - e.add_parent(tag) - raise - else: - assert False, type(val_data_type) - return tag, val - - -def _decode_union_old(data_type, obj, alias_validators, strict, for_msgpack): - """ - The data_type argument must be a Union. - See json_compat_obj_decode() for argument descriptions. 
- """ - val = None - if isinstance(obj, six.string_types): - # Union member has no associated value - tag = obj - if tag in data_type.definition._tagmap: - val_data_type = data_type.definition._tagmap[tag] - if not isinstance(val_data_type, (bv.Void, bv.Nullable)): - raise bv.ValidationError( - "expected object for '%s', got symbol" % tag) - else: - if not strict and data_type.definition._catch_all: - tag = data_type.definition._catch_all - else: - raise bv.ValidationError("unknown tag '%s'" % tag) - elif isinstance(obj, dict): - # Union member has value - if len(obj) != 1: - raise bv.ValidationError('expected 1 key, got %s' % len(obj)) - tag = list(obj)[0] - raw_val = obj[tag] - if tag in data_type.definition._tagmap: - val_data_type = data_type.definition._tagmap[tag] - if isinstance(val_data_type, bv.Nullable) and raw_val is None: - val = None - elif isinstance(val_data_type, bv.Void): - if raw_val is None or not strict: - # If raw_val is None, then this is the more verbose - # representation of a void union member. If raw_val isn't - # None, then maybe the spec has changed, so check if we're - # in strict mode. - val = None - else: - raise bv.ValidationError('expected null, got %s' % - bv.generic_type_name(raw_val)) - else: - try: - val = _json_compat_obj_decode_helper( - val_data_type, raw_val, alias_validators, strict, True, - for_msgpack) - except bv.ValidationError as e: - e.add_parent(tag) - raise - else: - if not strict and data_type.definition._catch_all: - tag = data_type.definition._catch_all - else: - raise bv.ValidationError("unknown tag '%s'" % tag) - else: - raise bv.ValidationError("expected string or object, got %s" % - bv.generic_type_name(obj)) - return data_type.definition(tag, val) - - -def _decode_struct_tree(data_type, obj, alias_validators, strict, for_msgpack): - """ - The data_type argument must be a StructTree. - See json_compat_obj_decode() for argument descriptions. - """ - subtype = _determine_struct_tree_subtype(data_type, obj, strict) - return _decode_struct( - subtype, obj, alias_validators, strict, False, for_msgpack) - - -def _determine_struct_tree_subtype(data_type, obj, strict): - """ - Searches through the JSON-object-compatible dict using the data type - definition to determine which of the enumerated subtypes `obj` is. - """ - if '.tag' not in obj: - raise bv.ValidationError("missing '.tag' key") - if not isinstance(obj['.tag'], six.string_types): - raise bv.ValidationError('expected string, got %s' % - bv.generic_type_name(obj['.tag']), - parent='.tag') - - # Find the subtype the tags refer to - full_tags_tuple = (obj['.tag'],) - if full_tags_tuple in data_type.definition._tag_to_subtype_: - subtype = data_type.definition._tag_to_subtype_[full_tags_tuple] - if isinstance(subtype, bv.StructTree): - raise bv.ValidationError("tag '%s' refers to non-leaf subtype" % - ('.'.join(full_tags_tuple))) - return subtype - else: - if strict: - # In strict mode, the entirety of the tag hierarchy should - # point to a known subtype. - raise bv.ValidationError("unknown subtype '%s'" % - '.'.join(full_tags_tuple)) - else: - # If subtype was not found, use the base. - if data_type.definition._is_catch_all_: - return data_type - else: - raise bv.ValidationError( - "unknown subtype '%s' and '%s' is not a catch-all" % - ('.'.join(full_tags_tuple), data_type.definition.__name__)) - - -def _decode_list( - data_type, obj, alias_validators, strict, old_style, for_msgpack): - """ - The data_type argument must be a List. 
- See json_compat_obj_decode() for argument descriptions. - """ - if not isinstance(obj, list): - raise bv.ValidationError( - 'expected list, got %s' % bv.generic_type_name(obj)) - return [ - _json_compat_obj_decode_helper( - data_type.item_validator, item, alias_validators, strict, - old_style, for_msgpack) - for item in obj] - - -def _decode_nullable( - data_type, obj, alias_validators, strict, old_style, for_msgpack): - """ - The data_type argument must be a Nullable. - See json_compat_obj_decode() for argument descriptions. - """ - if obj is not None: - return _json_compat_obj_decode_helper( - data_type.validator, obj, alias_validators, strict, old_style, - for_msgpack) - else: - return None - - -def _make_babel_friendly( - data_type, val, alias_validators, strict, validate, for_msgpack): - """ - Convert a Python object to a type that will pass validation by its - validator. - - Validation by ``alias_validators`` is performed even if ``validate`` is - false. - """ - if isinstance(data_type, bv.Timestamp): - try: - ret = datetime.datetime.strptime(val, data_type.format) - except ValueError as e: - raise bv.ValidationError(e.args[0]) - elif isinstance(data_type, bv.Bytes): - if for_msgpack: - if isinstance(val, six.text_type): - ret = val.encode('utf-8') - else: - ret = val - else: - try: - ret = base64.b64decode(val) - except TypeError: - raise bv.ValidationError('invalid base64-encoded bytes') - elif isinstance(data_type, bv.Void): - if strict and val is not None: - raise bv.ValidationError("expected null, got value") - return None - else: - if validate: - data_type.validate(val) - ret = val - if alias_validators is not None and data_type in alias_validators: - alias_validators[data_type](ret) - return ret - -try: - import msgpack -except ImportError: - pass -else: - msgpack_compat_obj_encode = functools.partial(json_compat_obj_encode, - for_msgpack=True) - - def msgpack_encode(data_type, obj): - return msgpack.dumps( - msgpack_compat_obj_encode(data_type, obj), encoding='utf-8') - - msgpack_compat_obj_decode = functools.partial(json_compat_obj_decode, - for_msgpack=True) - - def msgpack_decode( - data_type, serialized_obj, alias_validators=None, strict=True): - # We decode everything as utf-8 because we want all object keys to be - # unicode. Otherwise, we need to do a lot more refactoring to make - # json/msgpack share the same code. We expect byte arrays to fail - # decoding, but when they don't, we have to convert them to bytes. - deserialized_obj = msgpack.loads( - serialized_obj, encoding='utf-8', unicode_errors='ignore') - return msgpack_compat_obj_decode( - data_type, deserialized_obj, alias_validators, strict) diff --git a/dropbox/babel_validators.py b/dropbox/babel_validators.py deleted file mode 100644 index ce72424a..00000000 --- a/dropbox/babel_validators.py +++ /dev/null @@ -1,562 +0,0 @@ -""" -Defines classes to represent each Babel type in Python. These classes should -be used to validate Python objects and normalize them for a given type. - -The data types defined here should not be specific to an RPC or serialization -format. - -This module should be dropped into a project that requires the use of Babel. In -the future, this could be imported from a pre-installed Python package, rather -than being added to a project. - -EDITING THIS FILE? 
Please modify the version in the babelapi repo, -""" - -from abc import ABCMeta, abstractmethod -import datetime -import math -import numbers -import re -import six - -if six.PY3: - _binary_types = (bytes, memoryview) -else: - _binary_types = (bytes, buffer) # noqa: E501,F821; pylint: disable=undefined-variable,useless-suppression - - -class ValidationError(Exception): - """Raised when a value doesn't pass validation by its validator.""" - - def __init__(self, message, parent=None): - """ - Args: - message (str): Error message detailing validation failure. - parent (str): Adds the parent as the closest reference point for - the error. Use :meth:`add_parent` to add more. - """ - super(ValidationError, self).__init__(message) - self.message = message - self._parents = [] - if parent: - self._parents.append(parent) - - def add_parent(self, parent): - """ - Args: - parent (str): Adds the parent to the top of the tree of references - that lead to the validator that failed. - """ - self._parents.append(parent) - - def __str__(self): - """ - Returns: - str: A descriptive message of the validation error that may also - include the path to the validator that failed. - """ - if self._parents: - return '{}: {}'.format('.'.join(self._parents[::-1]), self.message) - else: - return self.message - - def __repr__(self): - # Not a perfect repr, but includes the error location information. - return 'ValidationError(%r)' % str(self) - - -def generic_type_name(v): - """Return a descriptive type name that isn't Python specific. For example, - an int value will return 'integer' rather than 'int'.""" - if isinstance(v, numbers.Integral): - # Must come before real numbers check since integrals are reals too - return 'integer' - elif isinstance(v, numbers.Real): - return 'float' - elif isinstance(v, (tuple, list)): - return 'list' - elif isinstance(v, six.string_types): - return 'string' - elif v is None: - return 'null' - else: - return type(v).__name__ - - -class Validator(object): - """All primitive and composite data types should be a subclass of this.""" - __metaclass__ = ABCMeta - - @abstractmethod - def validate(self, val): - """Validates that val is of this data type. - - Returns: A normalized value if validation succeeds. - Raises: ValidationError - """ - pass - - def has_default(self): - return False - - def get_default(self): - raise AssertionError('No default available.') - - -class Primitive(Validator): # pylint: disable=abstract-method - """A basic type that is defined by Babel.""" - pass - - -class Boolean(Primitive): - - def validate(self, val): - if not isinstance(val, bool): - raise ValidationError('%r is not a valid boolean' % val) - return val - - -class Integer(Primitive): - """ - Do not use this class directly. Extend it and specify a 'minimum' and - 'maximum' value as class variables for a more restrictive integer range. - """ - minimum = None - maximum = None - - def __init__(self, min_value=None, max_value=None): - """ - A more restrictive minimum or maximum value can be specified than the - range inherent to the defined type. 
- """ - if min_value is not None: - assert isinstance(min_value, numbers.Integral), \ - 'min_value must be an integral number' - assert min_value >= self.minimum, \ - 'min_value cannot be less than the minimum value for this ' \ - 'type (%d < %d)' % (min_value, self.minimum) - self.minimum = min_value - if max_value is not None: - assert isinstance(max_value, numbers.Integral), \ - 'max_value must be an integral number' - assert max_value <= self.maximum, \ - 'max_value cannot be greater than the maximum value for ' \ - 'this type (%d < %d)' % (max_value, self.maximum) - self.maximum = max_value - - def validate(self, val): - if not isinstance(val, numbers.Integral): - raise ValidationError('expected integer, got %s' - % generic_type_name(val)) - elif not (self.minimum <= val <= self.maximum): - raise ValidationError('%d is not within range [%d, %d]' - % (val, self.minimum, self.maximum)) - return val - - def __repr__(self): - return '%s()' % self.__class__.__name__ - - -class Int32(Integer): - minimum = -2**31 - maximum = 2**31 - 1 - - -class UInt32(Integer): - minimum = 0 - maximum = 2**32 - 1 - - -class Int64(Integer): - minimum = -2**63 - maximum = 2**63 - 1 - - -class UInt64(Integer): - minimum = 0 - maximum = 2**64 - 1 - - -class Real(Primitive): - """ - Do not use this class directly. Extend it and optionally set a 'minimum' - and 'maximum' value to enforce a range that's a subset of the Python float - implementation. Python floats are doubles. - """ - minimum = None - maximum = None - - def __init__(self, min_value=None, max_value=None): - """ - A more restrictive minimum or maximum value can be specified than the - range inherent to the defined type. - """ - if min_value is not None: - assert isinstance(min_value, numbers.Real), \ - 'min_value must be a real number' - if not isinstance(min_value, float): - try: - min_value = float(min_value) - except OverflowError: - raise AssertionError('min_value is too small for a float') - if self.minimum is not None and min_value < self.minimum: - raise AssertionError('min_value cannot be less than the ' - 'minimum value for this type (%f < %f)' % - (min_value, self.minimum)) - self.minimum = min_value - if max_value is not None: - assert isinstance(max_value, numbers.Real), \ - 'max_value must be a real number' - if not isinstance(max_value, float): - try: - max_value = float(max_value) - except OverflowError: - raise AssertionError('max_value is too large for a float') - if self.maximum is not None and max_value > self.maximum: - raise AssertionError('max_value cannot be greater than the ' - 'maximum value for this type (%f < %f)' % - (max_value, self.maximum)) - self.maximum = max_value - - def validate(self, val): - if not isinstance(val, numbers.Real): - raise ValidationError('expected real number, got %s' % - generic_type_name(val)) - if not isinstance(val, float): - # This checks for the case where a number is passed in with a - # magnitude larger than supported by float64. 
- try: - val = float(val) - except OverflowError: - raise ValidationError('too large for float') - if math.isnan(val) or math.isinf(val): - raise ValidationError('%f values are not supported' % val) - if self.minimum is not None and val < self.minimum: - raise ValidationError('%f is not greater than %f' % - (val, self.minimum)) - if self.maximum is not None and val > self.maximum: - raise ValidationError('%f is not less than %f' % - (val, self.maximum)) - return val - - def __repr__(self): - return '%s()' % self.__class__.__name__ - - -class Float32(Real): - # Maximum and minimums from the IEEE 754-1985 standard - minimum = -3.40282 * 10**38 - maximum = 3.40282 * 10**38 - - -class Float64(Real): - pass - - -class String(Primitive): - """Represents a unicode string.""" - - def __init__(self, min_length=None, max_length=None, pattern=None): - if min_length is not None: - assert isinstance(min_length, numbers.Integral), \ - 'min_length must be an integral number' - assert min_length >= 0, 'min_length must be >= 0' - if max_length is not None: - assert isinstance(max_length, numbers.Integral), \ - 'max_length must be an integral number' - assert max_length > 0, 'max_length must be > 0' - if min_length and max_length: - assert max_length >= min_length, 'max_length must be >= min_length' - if pattern is not None: - assert isinstance(pattern, six.string_types), \ - 'pattern must be a string' - - self.min_length = min_length - self.max_length = max_length - self.pattern = pattern - self.pattern_re = None - - if pattern: - try: - self.pattern_re = re.compile(r"\A(?:" + pattern + r")\Z") - except re.error as e: - raise AssertionError('Regex {!r} failed: {}'.format( - pattern, e.args[0])) - - def validate(self, val): - """ - A unicode string of the correct length and pattern will pass validation. - In PY2, we enforce that a str type must be valid utf-8, and a unicode - string will be returned. 
- """ - if not isinstance(val, six.string_types): - raise ValidationError("'%s' expected to be a string, got %s" - % (val, generic_type_name(val))) - if not six.PY3 and isinstance(val, str): - try: - val = val.decode('utf-8') - except UnicodeDecodeError: - raise ValidationError("'%s' was not valid utf-8") - - if self.max_length is not None and len(val) > self.max_length: - raise ValidationError("'%s' must be at most %d characters, got %d" - % (val, self.max_length, len(val))) - if self.min_length is not None and len(val) < self.min_length: - raise ValidationError("'%s' must be at least %d characters, got %d" - % (val, self.min_length, len(val))) - - if self.pattern and not self.pattern_re.match(val): - raise ValidationError("'%s' did not match pattern '%s'" - % (val, self.pattern)) - return val - - -class Bytes(Primitive): - - def __init__(self, min_length=None, max_length=None): - if min_length is not None: - assert isinstance(min_length, numbers.Integral), \ - 'min_length must be an integral number' - assert min_length >= 0, 'min_length must be >= 0' - if max_length is not None: - assert isinstance(max_length, numbers.Integral), \ - 'max_length must be an integral number' - assert max_length > 0, 'max_length must be > 0' - if min_length is not None and max_length is not None: - assert max_length >= min_length, 'max_length must be >= min_length' - - self.min_length = min_length - self.max_length = max_length - - def validate(self, val): - if not isinstance(val, _binary_types): - raise ValidationError("expected bytes type, got %s" - % generic_type_name(val)) - elif self.max_length is not None and len(val) > self.max_length: - raise ValidationError("'%s' must have at most %d bytes, got %d" - % (val, self.max_length, len(val))) - elif self.min_length is not None and len(val) < self.min_length: - raise ValidationError("'%s' has fewer than %d bytes, got %d" - % (val, self.min_length, len(val))) - return val - - -class Timestamp(Primitive): - """Note that while a format is specified, it isn't used in validation - since a native Python datetime object is preferred. 
The format, however, - can and should be used by serializers.""" - - def __init__(self, format): # pylint: disable=redefined-builtin - """format must be composed of format codes that the C standard (1989) - supports, most notably in its strftime() function.""" - assert isinstance(format, six.text_type), 'format must be a string' - self.format = format - - def validate(self, val): - if not isinstance(val, datetime.datetime): - raise ValidationError('expected timestamp, got %s' - % generic_type_name(val)) - elif val.tzinfo is not None and \ - val.tzinfo.utcoffset(val).total_seconds() != 0: - raise ValidationError('timestamp should have either a UTC ' - 'timezone or none set at all') - return val - - -class Composite(Validator): # pylint: disable=abstract-method - """Validator for a type that builds on other primitive and composite - types.""" - pass - - -class List(Composite): - """Assumes list contents are homogeneous with respect to types.""" - - def __init__(self, item_validator, min_items=None, max_items=None): - """Every list item will be validated with item_validator.""" - self.item_validator = item_validator - if min_items is not None: - assert isinstance(min_items, numbers.Integral), \ - 'min_items must be an integral number' - assert min_items >= 0, 'min_items must be >= 0' - if max_items is not None: - assert isinstance(max_items, numbers.Integral), \ - 'max_items must be an integral number' - assert max_items > 0, 'max_items must be > 0' - if min_items is not None and max_items is not None: - assert max_items >= min_items, 'max_items must be >= min_items' - - self.min_items = min_items - self.max_items = max_items - - def validate(self, val): - if not isinstance(val, (tuple, list)): - raise ValidationError('%r is not a valid list' % val) - elif self.max_items is not None and len(val) > self.max_items: - raise ValidationError('%r has more than %s items' - % (val, self.max_items)) - elif self.min_items is not None and len(val) < self.min_items: - raise ValidationError('%r has fewer than %s items' - % (val, self.min_items)) - return [self.item_validator.validate(item) for item in val] - - -class Struct(Composite): - - def __init__(self, definition): - """ - Args: - definition (class): A generated class representing a Babel struct - from a spec. Must have a _fields_ attribute with the following - structure: - - _fields_ = [(field_name, validator), ...] - - where - field_name: Name of the field (str). - validator: Validator object. - """ - self.definition = definition - - def validate(self, val): - """ - For a val to pass validation, val must be of the correct type and have - all required fields present. - """ - self.validate_type_only(val) - self.validate_fields_only(val) - return val - - def validate_fields_only(self, val): - """ - To pass field validation, no required field should be missing. - - This method assumes that the contents of each field have already been - validated on assignment, so it's merely a presence check. - - FIXME(kelkabany): Since the definition object does not maintain a list - of which fields are required, all fields are scanned. - """ - for field_name, _ in self.definition._all_fields_: - if not hasattr(val, field_name): - raise ValidationError("missing required field '%s'" % - field_name) - - def validate_type_only(self, val): - """ - Use this when you only want to validate that the type of an object - is correct, but not yet validate each field. 
- """ - # Since the definition maintains the list of fields for serialization, - # we're okay with a subclass that might have extra information. This - # makes it easier to return one subclass for two routes, one of which - # relies on the parent class. - if not isinstance(val, self.definition): - raise ValidationError('expected type %s, got %s' % - (self.definition.__name__, generic_type_name(val))) - - def has_default(self): - return not self.definition._has_required_fields - - def get_default(self): - assert not self.definition._has_required_fields, 'No default available.' - return self.definition() - - -class StructTree(Struct): - """Validator for structs with enumerated subtypes. - - NOTE: validate_fields_only() validates the fields known to this base - struct, but does not do any validation specific to the subtype. - """ - - def __init__(self, definition): - super(StructTree, self).__init__(definition) - - -class Union(Composite): - - def __init__(self, definition): - """ - Args: - definition (class): A generated class representing a Babel union - from a spec. Must have a _tagmap attribute with the following - structure: - - _tagmap = {field_name: validator, ...} - - where - field_name (str): Tag name. - validator (Validator): Tag value validator. - """ - self.definition = definition - - def validate(self, val): - """ - For a val to pass validation, it must have a _tag set. This assumes - that the object validated that _tag is a valid tag, and that any - associated value has also been validated. - """ - self.validate_type_only(val) - if not hasattr(val, '_tag') or val._tag is None: - raise ValidationError('no tag set') - return val - - def validate_type_only(self, val): - """ - Use this when you only want to validate that the type of an object - is correct, but not yet validate each field. - - We check whether val is a Python parent class of the definition. This - is because Union subtyping works in the opposite direction of Python - inheritance. For example, if a union U2 extends U1 in Python, this - validator will accept U1 in places where U2 is expected. - """ - if not issubclass(self.definition, type(val)): - raise ValidationError('expected type %s or subtype, got %s' % - (self.definition.__name__, generic_type_name(val))) - - -class Void(Primitive): - - def validate(self, val): - if val is not None: - raise ValidationError('expected NoneType, got %s' % - generic_type_name(val)) - - def has_default(self): - return True - - def get_default(self): - return None - - -class Nullable(Validator): - - def __init__(self, validator): - assert isinstance(validator, (Primitive, Composite)), \ - 'validator must be for a primitive or composite type' - assert not isinstance(validator, Nullable), \ - 'nullables cannot be stacked' - assert not isinstance(validator, Void), \ - 'void cannot be made nullable' - self.validator = validator - - def validate(self, val): - if val is None: - return - else: - return self.validator.validate(val) - - def validate_type_only(self, val): - """Use this only if Nullable is wrapping a Composite.""" - if val is None: - return - else: - return self.validator.validate_type_only(val) - - def has_default(self): - return True - - def get_default(self): - return None diff --git a/dropbox/base.py b/dropbox/base.py index 33d96bc0..9c081340 100644 --- a/dropbox/base.py +++ b/dropbox/base.py @@ -1,4 +1,7 @@ +# -*- coding: utf-8 -*- # Auto-generated by Stone, do not modify. 
+# flake8: noqa +# pylint: skip-file from abc import ABCMeta, abstractmethod import warnings diff --git a/dropbox/base_team.py b/dropbox/base_team.py index 1ea3fd79..e745ceda 100644 --- a/dropbox/base_team.py +++ b/dropbox/base_team.py @@ -1,4 +1,7 @@ +# -*- coding: utf-8 -*- # Auto-generated by Stone, do not modify. +# flake8: noqa +# pylint: skip-file from abc import ABCMeta, abstractmethod import warnings diff --git a/dropbox/dropbox.py b/dropbox/dropbox.py index 2ede06b1..bb0c9302 100644 --- a/dropbox/dropbox.py +++ b/dropbox/dropbox.py @@ -4,7 +4,9 @@ 'create_session', ] -__version__ = '7.3.0' +# This should always be 0.0.0 in master. Only update this after tagging +# before release. +__version__ = '0.0.0' import contextlib import json @@ -429,7 +431,7 @@ def request_json_string(self, if r.headers.get('content-type') == 'application/json': err = stone_serializers.json_compat_obj_decode( RateLimitError_validator, r.json()['error']) - retry_after = err.retry_after # pylint: disable=no-member + retry_after = err.retry_after else: retry_after_str = r.headers.get('retry-after') if retry_after_str is not None: diff --git a/dropbox/oauth.py b/dropbox/oauth.py index ac3c2df2..a670c5f0 100644 --- a/dropbox/oauth.py +++ b/dropbox/oauth.py @@ -207,9 +207,12 @@ def __init__(self, consumer_key, consumer_secret, locale=None): error messages; this setting tells the server which locale to use. By default, the server uses "en_US". """ - super(DropboxOAuth2FlowNoRedirect, self).__init__(consumer_key, - consumer_secret, - locale) + # pylint: disable=useless-super-delegation + super(DropboxOAuth2FlowNoRedirect, self).__init__( + consumer_key, + consumer_secret, + locale, + ) def start(self): """ diff --git a/dropbox/rest.py b/dropbox/rest.py index 88559cef..301bb7b1 100644 --- a/dropbox/rest.py +++ b/dropbox/rest.py @@ -209,7 +209,7 @@ def request(self, method, url, post_params=None, body=None, headers=None, raw_re # Handle StringIO/BytesIO instances, because urllib3 doesn't. if hasattr(body, 'getvalue'): - body = body.getvalue() # pylint: disable=no-member + body = body.getvalue() # Reject any headers containing newlines; the error from the server isn't pretty. for key, value in headers.items(): diff --git a/dropbox/session.py b/dropbox/session.py index 0ddc2822..544a2218 100644 --- a/dropbox/session.py +++ b/dropbox/session.py @@ -13,7 +13,7 @@ # TODO(kelkabany): We probably only want to instantiate this once so that even # if multiple Dropbox objects are instantiated, they all share the same pool. 
class _SSLAdapter(HTTPAdapter): - def init_poolmanager(self, connections, maxsize, block=False): + def init_poolmanager(self, connections, maxsize, block=False): # noqa: E501; pylint: disable=arguments-differ self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, diff --git a/example/back-up-and-restore/backup-and-restore-example.py b/example/back-up-and-restore/backup-and-restore-example.py index a85646f8..dffc00f8 100644 --- a/example/back-up-and-restore/backup-and-restore-example.py +++ b/example/back-up-and-restore/backup-and-restore-example.py @@ -58,7 +58,7 @@ def restore(rev=None): def select_revision(): # Get the revisions for a file (and sort by the datetime object, "server_modified") print("Finding available revisions on Dropbox...") - entries = dbx.files_list_revisions(BACKUPPATH, limit=30).entries # pylint: disable=no-member + entries = dbx.files_list_revisions(BACKUPPATH, limit=30).entries revisions = sorted(entries, key=lambda entry: entry.server_modified) for revision in revisions: diff --git a/stone b/stone index cafc3b3e..cf2a43d7 160000 --- a/stone +++ b/stone @@ -1 +1 @@ -Subproject commit cafc3b3ee6cb2aa6f9f7c167ae3cf31e06060169 +Subproject commit cf2a43d7b94c950237c1dbd4680a0f0d3fdebd27 diff --git a/tox.ini b/tox.ini index 1c441e88..4206141f 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ pypy = lint 3.3 = lint 3.4 = lint 3.5 = lint -3.6 = check # add lint back once pylint > 1.6.5 is released; see +3.6 = check, lint pypy3 = lint @@ -55,7 +55,8 @@ commands = test/test_dropbox.py deps = flake8 - pylint + # Needed to support Python 3.6 + pylint >= 1.7.0 # This probably breaks on Windows. See # . -rtest/requirements.txt
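
For reference, the public entry points of the serializers removed above are json_encode/json_decode (and their *_compat_obj_* variants); the equivalent code now ships with the stone submodule as stone_serializers/stone_validators, which dropbox/dropbox.py already uses. A minimal round trip, assuming the removed modules are importable under their in-file fallback names babel_validators/babel_serializers:

    import datetime
    import babel_serializers as bs
    import babel_validators as bv

    names = bv.List(bv.String(min_length=1), max_items=3)
    encoded = bs.json_encode(names, [u'alice', u'bob'])   # '["alice", "bob"]'
    decoded = bs.json_decode(names, encoded)              # [u'alice', u'bob']

    # The Timestamp format string is applied by the serializer, not the validator.
    ts = bv.Timestamp(u'%Y-%m-%dT%H:%M:%SZ')
    bs.json_encode(ts, datetime.datetime(2017, 5, 11))    # '"2017-05-11T00:00:00Z"'
    bs.json_decode(ts, u'"2017-05-11T00:00:00Z"')         # datetime.datetime(2017, 5, 11, 0, 0)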
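
The union wire format implemented by _encode_union and _decode_union_dict keys each variant under '.tag': void variants carry only the tag, struct-valued variants are flattened into the same object, and old_style=True falls back to the bare-tag or one-key-object form. A sketch using a hand-written stand-in for a stone-generated union class (hypothetical; generated classes carry more machinery such as catch-all handling and validation on assignment):

    import babel_serializers as bs
    import babel_validators as bv

    class UploadMode(object):
        # Hypothetical minimal union definition: only the attributes the
        # Union validator and the serializer actually touch.
        _catch_all = None
        _tagmap = {'add': bv.Void(), 'overwrite': bv.Void()}

        def __init__(self, tag, value=None):
            self._tag = tag
            self._value = value

    mode = bv.Union(UploadMode)
    bs.json_encode(mode, UploadMode('add'))                  # '{".tag": "add"}'
    bs.json_encode(mode, UploadMode('add'), old_style=True)  # '"add"'
    bs.json_decode(mode, u'{".tag": "overwrite"}')._tag      # 'overwrite'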
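
The validators being dropped from babel_validators.py are also usable on their own: each validate() call returns a normalized value or raises ValidationError, whose __str__ prefixes the dotted path built up by add_parent(). A small sketch under the same import assumption:

    import babel_validators as bv

    port = bv.Int32(min_value=1, max_value=65535)
    port.validate(8080)                  # returns 8080
    try:
        port.validate(70000)
    except bv.ValidationError as exc:
        print(exc)                       # 70000 is not within range [1, 65535]

    maybe_name = bv.Nullable(bv.String(min_length=1))
    maybe_name.validate(None)            # None passes through
    maybe_name.validate(u'docs')         # returns u'docs'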