Commit: correct types

blablatdinov committed Oct 15, 2022
1 parent cb6d6f3 commit 2da746f
Showing 2 changed files with 130 additions and 107 deletions.
boltons/dictutils.py: 84 changes (51 additions, 33 deletions)
@@ -83,15 +83,24 @@

try:
from typeutils import make_sentinel
_MISSING = make_sentinel(var_name='_MISSING')

_MISSING = make_sentinel(var_name="_MISSING")
except ImportError:
_MISSING = object()


PREV, NEXT, KEY, VALUE, SPREV, SNEXT = range(6)


__all__ = ['MultiDict', 'OMD', 'OrderedMultiDict', 'OneToOne', 'ManyToMany', 'subdict', 'FrozenDict']
__all__ = [
"MultiDict",
"OMD",
"OrderedMultiDict",
"OneToOne",
"ManyToMany",
"subdict",
"FrozenDict",
]

try:
profile
@@ -173,10 +182,13 @@ class OrderedMultiDict(dict):
behavior, just use :meth:`~OrderedMultiDict.todict()`.
"""

def __init__(self, *args, **kwargs):
if len(args) > 1:
raise TypeError('%s expected at most 1 argument, got %s'
% (self.__class__.__name__, len(args)))
raise TypeError(
"%s expected at most 1 argument, got %s"
% (self.__class__.__name__, len(args))
)
super(OrderedMultiDict, self).__init__()

self._clear_ll()
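
As context for the OrderedMultiDict hunks in this commit, a minimal usage sketch (illustrative, not part of the diff; outputs marked "expected" assume the usual OMD behavior described in the class docstring above):

from boltons.dictutils import OrderedMultiDict

omd = OrderedMultiDict()
omd.add("a", 1)
omd.add("a", 2)                 # duplicate keys are kept in insertion order
omd["b"] = 3
print(omd.getlist("a"))         # expected: [1, 2]
print(omd.todict())             # expected: {'a': 2, 'b': 3} -- collapses to one value per key
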
@@ -293,7 +305,7 @@ def update(self, E, **F):
del self[k]
for k, v in E.iteritems(multi=True):
self_add(k, v)
elif callable(getattr(E, 'keys', None)):
elif callable(getattr(E, "keys", None)):
for k in E.keys():
self[k] = E[k]
else:
@@ -318,7 +330,7 @@ def update_extend(self, E, **F):
iterator = iter(E.items())
elif isinstance(E, OrderedMultiDict):
iterator = E.iteritems(multi=True)
elif hasattr(E, 'keys'):
elif hasattr(E, "keys"):
iterator = ((k, E[k]) for k in E.keys())
else:
iterator = E
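
The two hunks above preserve the distinction between update() and update_extend(): update() replaces any existing values for a key, while update_extend() appends to them. A small sketch of that difference (expected outputs, not part of the diff):

from boltons.dictutils import OrderedMultiDict

omd = OrderedMultiDict([("a", 1)])
omd.update({"a": 2})            # existing values for "a" are dropped, then 2 is set
omd2 = OrderedMultiDict([("a", 1)])
omd2.update_extend({"a": 2})    # 2 is added alongside the existing 1
print(omd.getlist("a"))         # expected: [2]
print(omd2.getlist("a"))        # expected: [1, 2]
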
@@ -355,12 +367,13 @@ def __eq__(self, other):
for (selfk, selfv), (otherk, otherv) in zipped_items:
if selfk != otherk or selfv != otherv:
return False
if not(next(selfi, _MISSING) is _MISSING
and next(otheri, _MISSING) is _MISSING):
if not (
next(selfi, _MISSING) is _MISSING and next(otheri, _MISSING) is _MISSING
):
# leftovers (TODO: watch for StopIteration?)
return False
return True
elif hasattr(other, 'keys'):
elif hasattr(other, "keys"):
for selfk in self:
try:
other[selfk] == self[selfk]
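
The __eq__ reformatted above compares items pairwise with multi=True, so equality between two OMDs is order-sensitive. An illustrative sketch (not part of the diff):

from boltons.dictutils import OrderedMultiDict

a = OrderedMultiDict([("x", 1), ("x", 2)])
b = OrderedMultiDict([("x", 1), ("x", 2)])
c = OrderedMultiDict([("x", 2), ("x", 1)])
print(a == b)                   # expected: True  -- same pairs, same order
print(a == c)                   # expected: False -- same pairs, different order
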
@@ -408,7 +421,7 @@ def poplast(self, k=_MISSING, default=_MISSING):
k = self.root[PREV][KEY]
else:
if default is _MISSING:
raise KeyError('empty %r' % type(self))
raise KeyError("empty %r" % type(self))
return default
try:
self._remove(k)
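
poplast(), touched above, pops the most recently added key's value when called without a key, and raises KeyError("empty ...") on an empty OMD unless a default is given. Illustrative sketch (not part of the diff):

from boltons.dictutils import OrderedMultiDict

omd = OrderedMultiDict([("a", 1), ("b", 2)])
print(omd.poplast())            # expected: 2 -- value of the most recently added key
print(omd.poplast("a"))         # expected: 1
print(OrderedMultiDict().poplast(default=None))  # expected: None instead of KeyError
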
@@ -555,8 +568,12 @@ def sortedvalues(self, key=None, reverse=False):
except AttributeError:
superself_iteritems = super(OrderedMultiDict, self).items()
# (not reverse) because they pop off in reverse order for reinsertion
sorted_val_map = dict([(k, sorted(v, key=key, reverse=(not reverse)))
for k, v in superself_iteritems])
sorted_val_map = dict(
[
(k, sorted(v, key=key, reverse=(not reverse)))
for k, v in superself_iteritems
]
)
ret = self.__class__()
for k in self.iterkeys(multi=True):
ret.add(k, sorted_val_map[k].pop())
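
sortedvalues(), reformatted above, keeps key order but sorts each key's values; the (not reverse) flip exists because values are popped off the sorted lists in reverse for reinsertion. A short sketch (expected behavior, not part of the diff):

from boltons.dictutils import OrderedMultiDict

omd = OrderedMultiDict()
for v in (3, 1, 2):
    omd.add("k", v)
print(omd.sortedvalues().getlist("k"))  # expected: [1, 2, 3]
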
@@ -626,8 +643,8 @@ def __reversed__(self):

def __repr__(self):
cn = self.__class__.__name__
kvs = ', '.join([repr((k, v)) for k, v in self.iteritems(multi=True)])
return '%s([%s])' % (cn, kvs)
kvs = ", ".join([repr((k, v)) for k, v in self.iteritems(multi=True)])
return "%s([%s])" % (cn, kvs)

def viewkeys(self):
"OMD.viewkeys() -> a set-like object providing a view on OMD's keys"
@@ -652,6 +669,7 @@ class FastIterOrderedMultiDict(OrderedMultiDict):
is faster and uses constant memory but adding duplicate key-value
pairs is slower. Brainchild of Mark Williams.
"""

def _clear_ll(self):
# TODO: always reset objects? (i.e., no else block below)
try:
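
Per the docstring above, FastIterOrderedMultiDict is a drop-in subclass of OrderedMultiDict that trades slower duplicate insertion for faster, constant-memory iteration. A minimal sketch (expected output assumes the usual insertion-order iteration, not part of the diff):

from boltons.dictutils import FastIterOrderedMultiDict

fomd = FastIterOrderedMultiDict([("a", 1), ("b", 2)])
fomd.add("a", 3)
print(list(fomd.iteritems(multi=True)))  # expected: [('a', 1), ('b', 2), ('a', 3)]
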
@@ -660,9 +678,7 @@ def _clear_ll(self):
_map = self._map = {}
self.root = []
_map.clear()
self.root[:] = [self.root, self.root,
None, None,
self.root, self.root]
self.root[:] = [self.root, self.root, None, None, self.root, self.root]

def _insert(self, k, v):
root = self.root
@@ -671,9 +687,7 @@ def _insert(self, k, v):
last = root[PREV]

if cells is empty:
cell = [last, root,
k, v,
last, root]
cell = [last, root, k, v, last, root]
# was the last one skipped?
if last[SPREV][SNEXT] is root:
last[SPREV][SNEXT] = cell
@@ -683,9 +697,7 @@ def _insert(self, k, v):
# if the previous was skipped, go back to the cell that
# skipped it
sprev = last[SPREV] if (last[SPREV][SNEXT] is not last) else last
cell = [last, root,
k, v,
sprev, root]
cell = [last, root, k, v, sprev, root]
# skip me
last[SNEXT] = root
last[NEXT] = root[PREV] = root[SPREV] = cell
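
For orientation (not part of the diff): each cell reformatted in _insert() above is a 6-slot list indexed by the PREV, NEXT, KEY, VALUE, SPREV, SNEXT constants from the top of the file; SPREV/SNEXT are extra links used to skip cells during key iteration. A hypothetical traversal over the NEXT links:

PREV, NEXT, KEY, VALUE, SPREV, SNEXT = range(6)

def iter_cells(root):
    # Walk the circular doubly linked list starting at the sentinel root cell,
    # yielding every (key, value) pair in insertion order.
    cell = root[NEXT]
    while cell is not root:
        yield cell[KEY], cell[VALUE]
        cell = cell[NEXT]
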
@@ -777,7 +789,8 @@ class OneToOne(dict):
For a very similar project, with even more one-to-one
functionality, check out `bidict <https://github.com/jab/bidict>`_.
"""
__slots__ = ('inv',)

__slots__ = ("inv",)

def __init__(self, *a, **kw):
raise_on_dupe = False
@@ -807,11 +820,14 @@ def __init__(self, *a, **kw):
for k, v in self.items():
val_multidict.setdefault(v, []).append(k)

dupes = dict([(v, k_list) for v, k_list in
val_multidict.items() if len(k_list) > 1])
dupes = dict(
[(v, k_list) for v, k_list in val_multidict.items() if len(k_list) > 1]
)

raise ValueError('expected unique values, got multiple keys for'
' the following values: %r' % dupes)
raise ValueError(
"expected unique values, got multiple keys for"
" the following values: %r" % dupes
)

@classmethod
def unique(cls, *a, **kw):
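
A usage sketch of OneToOne (illustrative, not part of the commit): inv holds the inverted mapping, and the unique() constructor turns on the duplicate-value check whose error message is reformatted above:

from boltons.dictutils import OneToOne

oto = OneToOne({"en": "hello", "fr": "bonjour"})
print(oto.inv["hello"])         # expected: en

try:
    OneToOne.unique({"a": 1, "b": 1})   # two keys map to the same value
except ValueError as exc:
    print(exc)                  # expected: "expected unique values, got multiple keys for ..."
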
@@ -907,6 +923,7 @@ class ManyToMany(object):
also, can be used as a directed graph among hashable python objects
"""

def __init__(self, items=None):
self.data = {}
if type(items) is tuple and items and items[0] is _PAIRING:
@@ -956,7 +973,7 @@ def update(self, iterable):
self.inv.data[k] = other.inv.data[k]
else:
self.inv.data[k].update(other.inv.data[k])
elif callable(getattr(iterable, 'keys', None)):
elif callable(getattr(iterable, "keys", None)):
for k in iterable.keys():
self.add(k, iterable[k])
else:
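
An illustrative ManyToMany sketch (not part of the diff): per the docstring above it can model a directed graph over hashable objects, and update(), shown above, accepts another ManyToMany, a mapping, or an iterable of pairs:

from boltons.dictutils import ManyToMany

follows = ManyToMany([("alice", "bob"), ("alice", "carol")])
follows.add("bob", "carol")
print(sorted(follows.iteritems()))
# expected: [('alice', 'bob'), ('alice', 'carol'), ('bob', 'carol')]
print(follows.inv.data["carol"])
# expected: {'alice', 'bob'} -- the reversed edges (set order may vary)
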
@@ -1014,7 +1031,7 @@ def __eq__(self, other):

def __repr__(self):
cn = self.__class__.__name__
return '%s(%r)' % (cn, list(self.iteritems()))
return "%s(%r)" % (cn, list(self.iteritems()))


def subdict(d, keep=None, drop=None):
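
A small sketch of subdict(), whose definition starts in the hunk above (behavior assumed from its keep/drop parameters, not part of the diff):

from boltons.dictutils import subdict

d = {"a": 1, "b": 2, "c": 3}
print(subdict(d, keep=["a", "c"]))  # expected: {'a': 1, 'c': 3}
print(subdict(d, drop=["a"]))       # expected: {'b': 2, 'c': 3}
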
@@ -1066,7 +1083,7 @@ class FrozenDict(dict):
works everywhere a dict would, including JSON serialization.
"""
__slots__ = ('_hash',)

__slots__ = ("_hash",)

def updated(self, *a, **kw):
"""Make a copy and add items from a dictionary or iterable (and/or
@@ -1084,7 +1102,7 @@ def fromkeys(cls, keys, value=None):

def __repr__(self):
cn = self.__class__.__name__
return '%s(%s)' % (cn, dict.__repr__(self))
return "%s(%s)" % (cn, dict.__repr__(self))

def __reduce_ex__(self, protocol):
return type(self), (dict(self),)
Expand All @@ -1109,7 +1127,7 @@ def __copy__(self):
# block everything else
def _raise_frozen_typeerror(self, *a, **kw):
"raises a TypeError, because FrozenDicts are immutable"
raise TypeError('%s object is immutable' % self.__class__.__name__)
raise TypeError("%s object is immutable" % self.__class__.__name__)

__ior__ = __setitem__ = __delitem__ = update = _raise_frozen_typeerror
setdefault = pop = popitem = clear = _raise_frozen_typeerror
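
Finally, an illustrative FrozenDict sketch (not part of the diff): the cached _hash slot makes it hashable, updated() returns a new copy, and the mutating methods aliased above all raise TypeError:

from boltons.dictutils import FrozenDict

fd = FrozenDict({"a": 1})
cache = {fd: "cached"}          # hashable, so it works as a dict key
fd2 = fd.updated({"b": 2})      # new FrozenDict; fd itself is unchanged
print(fd2)                      # expected: FrozenDict({'a': 1, 'b': 2})

try:
    fd["a"] = 2                 # __setitem__ is aliased to _raise_frozen_typeerror
except TypeError as exc:
    print(exc)                  # expected: "FrozenDict object is immutable"
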