code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
iq = aioxmpp.stanza.IQ( type_=aioxmpp.structs.IQType.SET, to=jid, payload=pubsub_xso.OwnerRequest( pubsub_xso.OwnerDelete( node, redirect_uri=redirect_uri ) ) ) yield from self.client.send(iq)
def delete(self, jid, node, *, redirect_uri=None)
Delete an existing node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the PubSub node to delete. :type node: :class:`str` or :data:`None` :param redirect_uri: A URI to send to subscribers to indicate a replacement for the deleted node. :type redirect_uri: :class:`str` or :data:`None` :raises aioxmpp.errors.XMPPError: as returned by the service Optionally, a `redirect_uri` can be given. The `redirect_uri` will be sent to subscribers in the message notifying them about the node deletion.
6.257749
5.511424
1.135414
response = yield from self._disco.query_items( jid, node=node, ) result = [] for item in response.items: if item.jid != jid: continue result.append(( item.node, item.name, )) return result
def get_nodes(self, jid, node=None)
Request all nodes at a service or collection node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the collection node to query :type node: :class:`str` or :data:`None` :raises aioxmpp.errors.XMPPError: as returned by the service :return: The list of nodes at the service or collection node. :rtype: :class:`~collections.abc.Sequence` of tuples consisting of the node name and its description. Request the nodes available at `jid`. If `node` is not :data:`None`, the request returns the children of the :xep:`248` collection node `node`. Make sure to check for the appropriate server feature first. Return a list of tuples consisting of the node names and their description (if available, otherwise :data:`None`). If more information is needed, use :meth:`.DiscoClient.get_items` directly. Only nodes whose :attr:`~.disco.xso.Item.jid` match the `jid` are returned.
4.236388
3.340887
1.268043
iq = aioxmpp.stanza.IQ( type_=aioxmpp.structs.IQType.GET, to=jid, payload=pubsub_xso.OwnerRequest( pubsub_xso.OwnerAffiliations(node), ) ) return (yield from self.client.send(iq))
def get_node_affiliations(self, jid, node)
Return the affiliations of other jids at a node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the node to query :type node: :class:`str` :raises aioxmpp.errors.XMPPError: as returned by the service :return: The response from the service. :rtype: :class:`.xso.OwnerRequest` The affiliations are returned as :class:`.xso.OwnerRequest` instance whose :attr:`~.xso.OwnerRequest.payload` is a :class:`.xso.OwnerAffiliations` instance.
8.045102
5.273764
1.525495
iq = aioxmpp.stanza.IQ( type_=aioxmpp.structs.IQType.SET, to=jid, payload=pubsub_xso.OwnerRequest( pubsub_xso.OwnerAffiliations( node, affiliations=[ pubsub_xso.OwnerAffiliation( jid, affiliation ) for jid, affiliation in affiliations_to_set ] ) ) ) yield from self.client.send(iq)
def change_node_affiliations(self, jid, node, affiliations_to_set)
Update the affiliations at a node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the node to modify :type node: :class:`str` :param affiliations_to_set: The affiliations to set at the node. :type affiliations_to_set: :class:`~collections.abc.Iterable` of tuples consisting of the JID to affiliate and the affiliation to use. :raises aioxmpp.errors.XMPPError: as returned by the service `affiliations_to_set` must be an iterable of pairs (`jid`, `affiliation`), where the `jid` indicates the JID for which the `affiliation` is to be set.
4.217258
4.309057
0.978696
iq = aioxmpp.stanza.IQ( type_=aioxmpp.structs.IQType.SET, to=jid, payload=pubsub_xso.OwnerRequest( pubsub_xso.OwnerSubscriptions( node, subscriptions=[ pubsub_xso.OwnerSubscription( jid, subscription ) for jid, subscription in subscriptions_to_set ] ) ) ) yield from self.client.send(iq)
def change_node_subscriptions(self, jid, node, subscriptions_to_set)
Update the subscriptions at a node. :param jid: Address of the PubSub service. :type jid: :class:`aioxmpp.JID` :param node: Name of the node to modify :type node: :class:`str` :param subscriptions_to_set: The subscriptions to set at the node. :type subscriptions_to_set: :class:`~collections.abc.Iterable` of tuples consisting of the JID to (un)subscribe and the subscription level to use. :raises aioxmpp.errors.XMPPError: as returned by the service `subscriptions_to_set` must be an iterable of pairs (`jid`, `subscription`), where the `jid` indicates the JID for which the `subscription` is to be set.
4.581012
4.740987
0.966257
iq = aioxmpp.stanza.IQ( type_=aioxmpp.structs.IQType.SET, to=jid, payload=pubsub_xso.OwnerRequest( pubsub_xso.OwnerPurge( node ) ) ) yield from self.client.send(iq)
def purge(self, jid, node)
Delete all items from a node. :param jid: JID of the PubSub service :param node: Name of the PubSub node :type node: :class:`str` Requires :attr:`.xso.Feature.PURGE`.
7.59138
6.20207
1.224007
# the following code is a copy of the formal definition of `yield from` # in PEP 380, with modifications to capture the value sent during yield _i = iter(receiver) try: _y = next(_i) except StopIteration as _e: return _e.value try: while True: try: _s = yield _y except GeneratorExit as _e: try: _m = _i.close except AttributeError: pass else: _m() raise _e except BaseException as _e: _x = sys.exc_info() try: _m = _i.throw except AttributeError: raise _e else: try: _y = _m(*_x) except StopIteration as _e: _r = _e.value break else: dest.append(_s) try: if _s is None: _y = next(_i) else: _y = _i.send(_s) except StopIteration as _e: _r = _e.value break except: # NOQA dest.clear() raise return _r
def capture_events(receiver, dest)
Capture all events sent to `receiver` in the sequence `dest`. This is a generator, and it is best used with ``yield from``. The observable effect of using this generator with ``yield from`` is identical to the effect of using `receiver` with ``yield from`` directly (including the return value), but in addition, the values which are *sent* to the receiver are captured in `dest`. If `receiver` raises an exception or the generator is closed prematurely using its :meth:`close`, `dest` is cleared. This is used to implement :class:`CapturingXSO`. See the documentation there for use cases. .. versionadded:: 0.5
2.9076
2.783391
1.044625
name_stack = [] for ev_type, *ev_args in events: if ev_type == "start": name = (ev_args[0], ev_args[1]) dest.startElementNS(name, None, ev_args[2]) name_stack.append(name) elif ev_type == "end": name = name_stack.pop() dest.endElementNS(name, None) elif ev_type == "text": dest.characters(ev_args[0])
def events_to_sax(events, dest)
Convert an iterable `events` of XSO events to SAX events by calling the matching SAX methods on `dest`
2.060775
2.139733
0.963099
result = self if type_ is not None: result = self._filter_type(result, type_) if lang is not None: result = self._filter_lang(result, lang) if attrs: result = self._filter_attrs(result, attrs) return result
def filter(self, *, type_=None, lang=None, attrs={})
Return an iterable which produces a sequence of the elements inside this :class:`XSOList`, filtered by the criteria given as arguments. The function starts with a working sequence consisting of the whole list. If `type_` is not :data:`None`, elements which are not an instance of the given type are excluded from the working sequence. If `lang` is not :data:`None`, it must be either a :class:`~.structs.LanguageRange` or an iterable of language ranges. The set of languages present among the working sequence is determined and used for a call to :class:`~.structs.lookup_language`. If the lookup returns a language, all elements whose :attr:`lang` is different from that value are excluded from the working sequence. .. note:: If an iterable of language ranges is given, it is evaluated into a list. This may be of concern if a huge iterable is about to be used for language ranges, but it is an requirement of the :class:`~.structs.lookup_language` function which is used under the hood. .. note:: Filtering by language assumes that the elements have a :class:`~aioxmpp.xso.LangAttr` descriptor named ``lang``. If `attrs` is not empty, the filter iterates over each `key`-`value` pair. For each iteration, all elements which do not have an attribute of the name in `key` or where that attribute has a value not equal to `value` are excluded from the working sequence. In general, the iterable returned from :meth:`filter` can only be used once. It is dynamic in the sense that changes to elements which are in the list *behind* the last element returned from the iterator will still be picked up when the iterator is resumed.
1.840064
2.150239
0.855749
return list(self.filter(type_=type_, lang=lang, attrs=attrs))
def filtered(self, *, type_=None, lang=None, attrs={})
This method is a convencience wrapper around :meth:`filter` which evaluates the result into a list and returns that list.
3.92281
3.05316
1.284836
try: parsed = self.type_.parse(value) except (TypeError, ValueError): if self.erroneous_as_absent: return False raise self._set_from_recv(instance, parsed) return True
def from_value(self, instance, value)
Convert the given value using the set `type_` and store it into `instance`’ attribute.
6.569059
6.59119
0.996642
value = self.__get__(instance, type(instance)) if value is None: return dest.characters(self.type_.format(value))
def to_sax(self, instance, dest)
Assign the formatted value stored at `instance`’ attribute to the text of `el`. If the `value` is :data:`None`, no text is generated.
6.309982
6.661621
0.947214
obj = yield from self._process(instance, ev_args, ctx) self.__set__(instance, obj) return obj
def from_events(self, instance, ev_args, ctx)
Detect the object to instanciate from the arguments `ev_args` of the ``"start"`` event. The new object is stored at the corresponding descriptor attribute on `instance`. This method is suspendable.
5.939099
7.220523
0.82253
obj = self.__get__(instance, type(instance)) if obj is None: return obj.unparse_to_sax(dest)
def to_sax(self, instance, dest)
Take the object associated with this descriptor on `instance` and serialize it as child into the given :class:`lxml.etree.Element` `parent`. If the object is :data:`None`, no content is generated.
5.641298
6.018829
0.937275
obj = yield from self._process(instance, ev_args, ctx) self.__get__(instance, type(instance)).append(obj) return obj
def from_events(self, instance, ev_args, ctx)
Like :meth:`.Child.from_events`, but instead of replacing the attribute value, the new object is appended to the list.
6.283134
5.139566
1.222503
# goal: collect all elements starting with the element for which we got # the start-ev_args in a lxml.etree.Element. def make_from_args(ev_args, parent): el = etree.SubElement(parent, tag_to_str((ev_args[0], ev_args[1]))) for key, value in ev_args[2].items(): el.set(tag_to_str(key), value) return el root_el = make_from_args(ev_args, self.__get__(instance, type(instance))) # create an element stack stack = [root_el] while stack: # we get send all sax-ish events until we return. we return when # the stack is empty, i.e. when our top element ended. ev_type, *ev_args = yield if ev_type == "start": # new element started, create and push to stack stack.append(make_from_args(ev_args, stack[-1])) elif ev_type == "text": # text for current element curr = stack[-1] if curr.text is not None: curr.text += ev_args[0] else: curr.text = ev_args[0] elif ev_type == "end": # element ended, remove from stack (it is already appended to # the current element) stack.pop() else: # not in coverage -- this is more like an assertion raise ValueError(ev_type)
def from_events(self, instance, ev_args, ctx)
Collect the events and convert them to a single XML subtree, which then gets appended to the list at `instance`. `ev_args` must be the arguments of the ``"start"`` event of the new child. This method is suspendable.
4.692746
4.511832
1.040098
if self.missing is not None: value = self.missing(instance, ctx) if value is not None: self._set_from_code(instance, value) return if self.default is _PropBase.NO_DEFAULT: raise ValueError("missing attribute {} on {}".format( tag_to_str(self.tag), tag_to_str(instance.TAG), ))
def handle_missing(self, instance, ctx)
Handle a missing attribute on `instance`. This is called whenever no value for the attribute is found during parsing. The call to :meth:`missing` is independent of the value of `required`. If the `missing` callback is not :data:`None`, it is called with the `instance` and the `ctx` as arguments. If the returned value is not :data:`None`, it is used as the value of the attribute (validation takes place as if the value had been set from the code, not as if the value had been received from XML) and the handler returns. If the `missing` callback is :data:`None` or returns :data:`None`, the handling continues as normal: if `required` is true, a :class:`ValueError` is raised.
4.776878
4.161955
1.147749
value = self.__get__(instance, type(instance)) if value == self.default: return d[self.tag] = self.type_.format(value)
def to_dict(self, instance, d)
Override the implementation from :class:`Text` by storing the formatted value in the XML attribute instead of the character data. If the value is :data:`None`, no element is generated.
5.593468
5.900886
0.947903
# goal: take all text inside the child element and collect it as # attribute value attrs = ev_args[2] if attrs and self.attr_policy == UnknownAttrPolicy.FAIL: raise ValueError("unexpected attribute (at text only node)") parts = [] while True: ev_type, *ev_args = yield if ev_type == "text": # collect ALL TEH TEXT! parts.append(ev_args[0]) elif ev_type == "start": # ok, a child inside the child was found, we look at our policy # to see what to do yield from enforce_unknown_child_policy( self.child_policy, ev_args) elif ev_type == "end": # end of our element, return break joined = "".join(parts) try: parsed = self.type_.parse(joined) except (ValueError, TypeError): if self.erroneous_as_absent: return raise self._set_from_recv(instance, parsed)
def from_events(self, instance, ev_args, ctx)
Starting with the element to which the start event information in `ev_args` belongs, parse text data. If any children are encountered, :attr:`child_policy` is enforced (see :class:`UnknownChildPolicy`). Likewise, if the start event contains attributes, :attr:`attr_policy` is enforced (c.f. :class:`UnknownAttrPolicy`). The extracted text is passed through :attr:`type_` and :attr:`validator` and if it passes, stored in the attribute on the `instance` with which the property is associated. This method is suspendable.
8.112682
6.585277
1.231943
value = self.__get__(instance, type(instance)) if value == self.default: return if self.declare_prefix is not False and self.tag[0]: dest.startPrefixMapping(self.declare_prefix, self.tag[0]) dest.startElementNS(self.tag, None, {}) try: dest.characters(self.type_.format(value)) finally: dest.endElementNS(self.tag, None) if self.declare_prefix is not False and self.tag[0]: dest.endPrefixMapping(self.declare_prefix)
def to_sax(self, instance, dest)
Create a child node at `parent` with the tag :attr:`tag`. Set the text contents to the value of the attribute which this descriptor represents at `instance`. If the value is :data:`None`, no element is generated.
3.213165
3.057589
1.050882
for item in items: dest[self.key(item)].append(item)
def fill_into_dict(self, items, dest)
Take an iterable of `items` and group it into the given `dest` dict, using the :attr:`key` function. The `dest` dict must either already contain the keys which are generated by the :attr:`key` function for the items in `items`, or must default them suitably. The values of the affected keys must be sequences or objects with an :meth:`append` method which does what you want it to do.
4.881105
4.750308
1.027534
tag = ev_args[0], ev_args[1] cls = self._tag_map[tag] obj = yield from cls.parse_events(ev_args, ctx) mapping = self.__get__(instance, type(instance)) mapping[self.key(obj)].append(obj)
def from_events(self, instance, ev_args, ctx)
Like :meth:`.ChildList.from_events`, but the object is appended to the list associated with its tag in the dict.
6.031914
5.250205
1.148891
with parent_ctx as ctx: obj = cls.__new__(cls) attrs = ev_args[2] attr_map = cls.ATTR_MAP.copy() for key, value in attrs.items(): try: prop = attr_map.pop(key) except KeyError: if cls.UNKNOWN_ATTR_POLICY == UnknownAttrPolicy.DROP: continue else: raise ValueError( "unexpected attribute {!r} on {}".format( key, tag_to_str((ev_args[0], ev_args[1])) )) from None try: if not prop.from_value(obj, value): # assignment failed due to recoverable error, treat as # absent attr_map[key] = prop except Exception: prop.mark_incomplete(obj) _mark_attributes_incomplete(attr_map.values(), obj) logger.debug("while parsing XSO %s (%r)", cls, value, exc_info=True) # true means suppress if not obj.xso_error_handler( prop, value, sys.exc_info()): raise for key, prop in attr_map.items(): try: prop.handle_missing(obj, ctx) except Exception: logger.debug("while parsing XSO %s", cls, exc_info=True) # true means suppress if not obj.xso_error_handler( prop, None, sys.exc_info()): raise try: prop = cls.ATTR_MAP[namespaces.xml, "lang"] except KeyError: pass else: lang = prop.__get__(obj, cls) if lang is not None: ctx.lang = lang collected_text = [] while True: ev_type, *ev_args = yield if ev_type == "end": break elif ev_type == "text": if not cls.TEXT_PROPERTY: if ev_args[0].strip(): # true means suppress if not obj.xso_error_handler( None, ev_args[0], None): raise ValueError("unexpected text") else: collected_text.append(ev_args[0]) elif ev_type == "start": try: handler = cls.CHILD_MAP[ev_args[0], ev_args[1]] except KeyError: if cls.COLLECTOR_PROPERTY: handler = cls.COLLECTOR_PROPERTY.xq_descriptor else: yield from enforce_unknown_child_policy( cls.UNKNOWN_CHILD_POLICY, ev_args, obj.xso_error_handler) continue try: yield from guard( handler.from_events(obj, ev_args, ctx), ev_args ) except Exception: logger.debug("while parsing XSO %s", type(obj), exc_info=True) # true means suppress if not obj.xso_error_handler( handler, ev_args, sys.exc_info()): raise if collected_text: collected_text = 
"".join(collected_text) try: cls.TEXT_PROPERTY.xq_descriptor.from_value( obj, collected_text ) except Exception: logger.debug("while parsing XSO", exc_info=True) # true means suppress if not obj.xso_error_handler( cls.TEXT_PROPERTY.xq_descriptor, collected_text, sys.exc_info()): raise obj.validate() obj.xso_after_load() return obj
def parse_events(cls, ev_args, parent_ctx)
Create an instance of this class, using the events sent into this function. `ev_args` must be the event arguments of the ``"start"`` event. .. seealso:: You probably should not call this method directly, but instead use :class:`XSOParser` with a :class:`SAXDriver`. .. note:: While this method creates an instance of the class, ``__init__`` is not called. See the documentation of :meth:`.xso.XSO` for details. This method is suspendable.
3.006614
2.967309
1.013246
if cls.__subclasses__(): raise TypeError( "register_child is forbidden on classes with subclasses" " (subclasses: {})".format( ", ".join(map(str, cls.__subclasses__())) )) if child_cls.TAG in cls.CHILD_MAP: raise ValueError("ambiguous Child") prop.xq_descriptor._register(child_cls) cls.CHILD_MAP[child_cls.TAG] = prop.xq_descriptor
def register_child(cls, prop, child_cls)
Register a new :class:`XMLStreamClass` instance `child_cls` for a given :class:`Child` descriptor `prop`. .. warning:: This method cannot be used after a class has been derived from this class. This is for consistency: the method modifies the bookkeeping attributes of the class. There would be two ways to deal with the situation: 1. Updating all the attributes at all the subclasses and re-evaluate the constraints of inheritance. This is simply not implemented, although it would be the preferred way. 2. Only update the bookkeeping attributes on *this* class, hiding the change from any existing subclasses. New subclasses would pick the change up, however, which is inconsistent. This is the way which was previously documented here and is not supported anymore. Obviously, (2) is bad, which is why it is not supported anymore. (1) might be supported at some point in the future. Attempting to use :meth:`register_child` on a class which already has subclasses results in a :class:`TypeError`. Note that *first* using :meth:`register_child` and only *then* deriving clasess is a valid use: it will still lead to a consistent inheritance hierarchy and is a convenient way to break reference cycles (e.g. if an XSO may be its own child).
5.58797
5.673323
0.984955
dest = [("start", )+tuple(ev_args)] result = yield from capture_events( super().parse_events(ev_args, parent_ctx), dest ) result._set_captured_events(dest) return result
def parse_events(cls, ev_args, parent_ctx)
Capture the events sent to :meth:`.XSO.parse_events`, including the initial `ev_args` to a list and call :meth:`_set_captured_events` on the result of :meth:`.XSO.parse_events`. Like the method it overrides, :meth:`parse_events` is suspendable.
9.521303
6.530398
1.457997
if cls.TAG in self._tag_map: raise ValueError( "duplicate tag: {!r} is already handled by {}".format( cls.TAG, self._tag_map[cls.TAG])) self._class_map[cls] = callback self._tag_map[cls.TAG] = (cls, callback)
def add_class(self, cls, callback)
Add a class `cls` for parsing as root level element. When an object of `cls` type has been completely parsed, `callback` is called with the object as argument.
3.35236
3.218288
1.041659
del self._tag_map[cls.TAG] del self._class_map[cls]
def remove_class(self, cls)
Remove a XSO class `cls` from parsing. This method raises :class:`KeyError` with the classes :attr:`TAG` attribute as argument if removing fails because the class is not registered.
6.877102
4.525895
1.519501
for c in chars: if any(in_table(c) for in_table in tables): return c return None
def check_against_tables(chars, tables)
Perform a check against the table predicates in `tables`. `tables` must be a reusable iterable containing characteristic functions of character sets, that is, functions which return :data:`True` if the character is in the table. The function returns the first character occuring in any of the tables or :data:`None` if no character matches.
4.827956
4.209559
1.146903
# the empty string is valid, as it cannot violate the RandALCat constraints if not chars: return # first_is_RorAL = unicodedata.bidirectional(chars[0]) in {"R", "AL"} # if first_is_RorAL: has_RandALCat = any(is_RandALCat(c) for c in chars) if not has_RandALCat: return has_LCat = any(is_LCat(c) for c in chars) if has_LCat: raise ValueError("L and R/AL characters must not occur in the same" " string") if not is_RandALCat(chars[0]) or not is_RandALCat(chars[-1]): raise ValueError("R/AL string must start and end with R/AL character.")
def check_bidi(chars)
Check proper bidirectionality as per stringprep. Operates on a list of unicode characters provided in `chars`.
4.418962
4.396209
1.005176
violator = check_against_tables(chars, bad_tables) if violator is not None: raise ValueError("Input contains invalid unicode codepoint: " "U+{:04x}".format(ord(violator)))
def check_prohibited_output(chars, bad_tables)
Check against prohibited output, by checking whether any of the characters from `chars` are in any of the `bad_tables`. Operates in-place on a list of code points from `chars`.
5.337888
6.650119
0.802676
bad_tables = ( stringprep.in_table_a1,) violator = check_against_tables(chars, bad_tables) if violator is not None: raise ValueError("Input contains unassigned code point: " "U+{:04x}".format(ord(violator)))
def check_unassigned(chars, bad_tables)
Check that `chars` does not contain any unassigned code points as per the given list of `bad_tables`. Operates on a list of unicode code points provided in `chars`.
7.614756
8.503756
0.895458
chars = list(string) _nodeprep_do_mapping(chars) do_normalization(chars) check_prohibited_output( chars, ( stringprep.in_table_c11, stringprep.in_table_c12, stringprep.in_table_c21, stringprep.in_table_c22, stringprep.in_table_c3, stringprep.in_table_c4, stringprep.in_table_c5, stringprep.in_table_c6, stringprep.in_table_c7, stringprep.in_table_c8, stringprep.in_table_c9, lambda x: x in _nodeprep_prohibited )) check_bidi(chars) if not allow_unassigned: check_unassigned( chars, ( stringprep.in_table_a1, ) ) return "".join(chars)
def nodeprep(string, allow_unassigned=False)
Process the given `string` using the Nodeprep (`RFC 6122`_) profile. In the error cases defined in `RFC 3454`_ (stringprep), a :class:`ValueError` is raised.
2.738091
2.673373
1.024208
chars = list(string) _resourceprep_do_mapping(chars) do_normalization(chars) check_prohibited_output( chars, ( stringprep.in_table_c12, stringprep.in_table_c21, stringprep.in_table_c22, stringprep.in_table_c3, stringprep.in_table_c4, stringprep.in_table_c5, stringprep.in_table_c6, stringprep.in_table_c7, stringprep.in_table_c8, stringprep.in_table_c9, )) check_bidi(chars) if not allow_unassigned: check_unassigned( chars, ( stringprep.in_table_a1, ) ) return "".join(chars)
def resourceprep(string, allow_unassigned=False)
Process the given `string` using the Resourceprep (`RFC 6122`_) profile. In the error cases defined in `RFC 3454`_ (stringprep), a :class:`ValueError` is raised.
2.720873
2.657094
1.024003
if mime_type == "image/png": if image_bytes is not None: if self._image_bytes is not None: raise RuntimeError( "Only one avatar image may be published directly." ) sha1 = hashlib.sha1() sha1.update(image_bytes) id_computed = normalize_id(sha1.hexdigest()) if id_ is not None: id_ = normalize_id(id_) if id_ != id_computed: raise RuntimeError( "The given id does not match the SHA1 of " "the image data." ) else: id_ = id_computed nbytes_computed = len(image_bytes) if nbytes is not None: if nbytes != nbytes_computed: raise RuntimeError( "The given length does not match the length " "of the image data." ) else: nbytes = nbytes_computed self._image_bytes = image_bytes self._png_id = id_ if image_bytes is None and url is None: raise RuntimeError( "Either the image bytes or an url to retrieve the avatar " "image must be given." ) if nbytes is None: raise RuntimeError( "Image data length is not given an not inferable " "from the other arguments." ) if id_ is None: raise RuntimeError( "The SHA1 of the image data is not given an not inferable " "from the other arguments." ) if image_bytes is not None and mime_type != "image/png": raise RuntimeError( "The image bytes can only be given for image/png data." ) self._metadata.info[mime_type].append( avatar_xso.Info( id_=id_, mime_type=mime_type, nbytes=nbytes, width=width, height=height, url=url ) )
def add_avatar_image(self, mime_type, *, id_=None, image_bytes=None, width=None, height=None, url=None, nbytes=None)
Add a source of the avatar image. All sources of an avatar image added to an avatar set must be *the same image*, in different formats and sizes. :param mime_type: The MIME type of the avatar image. :param id_: The SHA1 of the image data. :param nbytes: The size of the image data in bytes. :param image_bytes: The image data, this must be supplied only in one call. :param url: The URL of the avatar image. :param height: The height of the image in pixels (optional). :param width: The width of the image in pixels (optional). `id_` and `nbytes` may be omitted if and only if `image_data` is given and `mime_type` is ``image/png``. If they are supplied *and* image data is given, they are checked to match the image data. It is the caller's responsibility to assure that the provided links exist and the files have the correct SHA1 sums.
2.569891
2.451095
1.048466
if require_fresh: self._metadata_cache.pop(jid, None) else: try: return self._metadata_cache[jid] except KeyError: pass if disable_pep: metadata = [] else: metadata = yield from self._get_avatar_metadata_pep(jid) # try the vcard fallback, note: we don't try this # if the PEP avatar is disabled! if not metadata and jid not in self._has_pep_avatar: metadata = yield from self._get_avatar_metadata_vcard(jid) # if a notify was fired while we waited for the results, then # use the version in the cache, this will mitigate the race # condition because if our version is actually newer we will # soon get another notify for this version change! if jid not in self._metadata_cache: self._update_metadata(jid, metadata) return self._metadata_cache[jid]
def get_avatar_metadata(self, jid, *, require_fresh=False, disable_pep=False)
Retrieve a list of avatar descriptors. :param jid: the JID for which to retrieve the avatar metadata. :type jid: :class:`aioxmpp.JID` :param require_fresh: if true, do not return results from the avatar metadata chache, but retrieve them again from the server. :type require_fresh: :class:`bool` :param disable_pep: if true, do not try to retrieve the avatar via pep, only try the vCard fallback. This usually only useful when querying avatars via MUC, where the PEP request would be invalid (since it would be for a full jid). :type disable_pep: :class:`bool` :returns: an iterable of avatar descriptors. :rtype: a :class:`list` of :class:`~aioxmpp.avatar.service.AbstractAvatarDescriptor` instances Returning an empty list means that the avatar not set. We mask a :class:`XMPPCancelError` in the case that it is ``feature-not-implemented`` or ``item-not-found`` and return an empty list of avatar descriptors, since this is semantically equivalent to not having an avatar. .. note:: It is usually an error to get the avatar for a full jid, normally, the avatar is set for the bare jid of a user. The exception are vCard avatars over MUC, where the IQ requests for the vCard may be translated by the MUC server. It is recommended to use the `disable_pep` option in that case.
5.062148
4.989407
1.014579
id_ = avatar_set.png_id done = False with (yield from self._publish_lock): if (yield from self._pep.available()): yield from self._pep.publish( namespaces.xep0084_data, avatar_xso.Data(avatar_set.image_bytes), id_=id_ ) yield from self._pep.publish( namespaces.xep0084_metadata, avatar_set.metadata, id_=id_ ) done = True if self._synchronize_vcard: my_vcard = yield from self._vcard.get_vcard() my_vcard.set_photo_data("image/png", avatar_set.image_bytes) self._vcard_id = avatar_set.png_id yield from self._vcard.set_vcard(my_vcard) self._presence_server.resend_presence() done = True if not done: raise RuntimeError( "failed to publish avatar: no protocol available" )
def publish_avatar_set(self, avatar_set)
Make `avatar_set` the current avatar of the jid associated with this connection. If :attr:`synchronize_vcard` is true and PEP is available the vCard is only synchronized if the PEP update is successful. This means publishing the ``image/png`` avatar data and the avatar metadata set in pubsub. The `avatar_set` must be an instance of :class:`AvatarSet`. If :attr:`synchronize_vcard` is true the avatar is additionally published in the user vCard.
5.18503
4.215149
1.230094
def disable_avatar(self):
    """
    Temporarily disable the avatar.

    The avatar metadata node is set empty and, if
    :attr:`synchronize_vcard` is true, the vCard avatar is disabled as
    well (even if disabling the PEP avatar fails).

    This method does not error if neither protocol is active.

    :raises aioxmpp.errors.GatherError: if an exception is raised by
        the spawned tasks.
    """
    with (yield from self._publish_lock):
        todo = []
        if self._synchronize_vcard:
            todo.append(self._disable_vcard_avatar())
        if (yield from self._pep.available()):
            # an empty metadata node signals "no avatar" to
            # subscribers
            todo.append(self._pep.publish(
                namespaces.xep0084_metadata,
                avatar_xso.Metadata()
            ))
        # run both protocols even if one fails; failures are gathered
        # into a single GatherError
        yield from gather_reraise_multi(*todo, message="disable_avatar")
15.547877
10.061385
1.545302
def wipe_avatar(self):
    """
    Remove all avatar data stored on the server.

    Equivalent to :meth:`disable_avatar` for vCard-based avatars, but
    also removes the data PubSub node for PEP avatars.  If
    :attr:`synchronize_vcard` is true, the vCard avatar is disabled
    even if wiping the PEP avatar fails.

    This method does not error if neither protocol is active.

    :raises aioxmpp.errors.GatherError: if an exception is raised by
        the spawned tasks.
    """
    @asyncio.coroutine
    def _wipe_pep_avatar():
        # clear the metadata node first so subscribers stop fetching,
        # then blank the data node
        yield from self._pep.publish(
            namespaces.xep0084_metadata,
            avatar_xso.Metadata()
        )
        yield from self._pep.publish(
            namespaces.xep0084_data,
            avatar_xso.Data(b'')
        )

    with (yield from self._publish_lock):
        todo = []
        if self._synchronize_vcard:
            todo.append(self._disable_vcard_avatar())
        if (yield from self._pep.available()):
            todo.append(_wipe_pep_avatar())
        yield from gather_reraise_multi(*todo, message="wipe_avatar")
7.636015
5.264443
1.450489
def block_jids(self, jids_to_block):
    """
    Add the JIDs in the sequence `jids_to_block` to the client's
    blocklist.

    A no-op when `jids_to_block` is empty.
    """
    yield from self._check_for_blocking()
    if not jids_to_block:
        return

    request = aioxmpp.IQ(
        type_=aioxmpp.IQType.SET,
        payload=blocking_xso.BlockCommand(jids_to_block),
    )
    yield from self.client.send(request)
6.236922
6.317786
0.987201
def unblock_jids(self, jids_to_unblock):
    """
    Remove the JIDs in the sequence `jids_to_unblock` from the
    client's blocklist.

    A no-op when `jids_to_unblock` is empty.
    """
    yield from self._check_for_blocking()
    if not jids_to_unblock:
        return

    request = aioxmpp.IQ(
        type_=aioxmpp.IQType.SET,
        payload=blocking_xso.UnblockCommand(jids_to_unblock),
    )
    yield from self.client.send(request)
5.927799
6.08243
0.974577
def _register_descriptor_keys(self, descriptor, keys):
    """
    Register the given descriptor keys for the given descriptor at
    the class.

    :param descriptor: The descriptor for which the `keys` shall be
        registered.
    :type descriptor: :class:`AbstractDescriptor` instance
    :param keys: An iterable of descriptor keys
    :raises TypeError: if the specified keys are already handled by a
        descriptor.
    :raises TypeError: if this class has subclasses or if it is not
        the :attr:`~AbstractDescriptor.root_class` of the given
        descriptor.

    If the method raises, the caller must assume that registration
    was not successful.
    """
    # modifying the map on a class with subclasses would desynchronise
    # the descriptor maps the subclasses were created from
    if descriptor.root_class is not self or self.__subclasses__():
        raise TypeError(
            "descriptors cannot be modified on classes with subclasses"
        )

    meta = type(self)

    descriptor_info = meta._upcast_descriptor_map(
        self.DESCRIPTOR_MAP,
        "{}.{}".format(self.__module__, self.__qualname__),
    )

    # this would raise on conflict
    meta._merge_descriptors(
        descriptor_info,
        [
            (key, (descriptor, "<added via _register_descriptor_keys>"))
            for key in keys
        ]
    )

    # merge succeeded without raising: safe to commit the keys
    for key in keys:
        self.DESCRIPTOR_MAP[key] = descriptor
7.011401
6.274813
1.117388
def from_xso(self, xso):
    """
    Construct and return an instance from the given `xso`.

    .. note::

       This is a static method (classmethod), even though sphinx does
       not document it as such.

    :param xso: A :xep:`4` data form
    :type xso: :class:`~.Data`
    :raises ValueError: if the ``FORM_TYPE`` mismatches
    :raises ValueError: if field types mismatch
    :return: newly created instance of this class

    The fields from the given `xso` are matched against the fields on
    the form; matching fields load their data from the `xso` field.
    Fields present on only one side are skipped (unmatched `xso`
    fields are re-emitted by :meth:`~.Form.render_reply`).
    """
    my_form_type = getattr(self, "FORM_TYPE", None)

    f = self()
    for field in xso.fields:
        if field.var == "FORM_TYPE":
            # only validate FORM_TYPE when both sides declare one and
            # the incoming field has the conventional hidden shape
            if (my_form_type is not None and
                    field.type_ == forms_xso.FieldType.HIDDEN and
                    field.values):
                if my_form_type != field.values[0]:
                    raise ValueError(
                        "mismatching FORM_TYPE ({!r} != {!r})".format(
                            field.values[0],
                            my_form_type,
                        )
                    )
            continue
        if field.var is None:
            continue

        key = fields.descriptor_ns, field.var
        try:
            descriptor = self.DESCRIPTOR_MAP[key]
        except KeyError:
            # field not known to this form template: skip
            continue

        # an incoming field may only be loaded into a descriptor whose
        # declared type it can be upcast to
        if (field.type_ is not None and
                not field.type_.allow_upcast(descriptor.FIELD_TYPE)):
            raise ValueError(
                "mismatching type ({!r} != {!r}) on field var={!r}".format(
                    field.type_,
                    descriptor.FIELD_TYPE,
                    field.var,
                )
            )

        data = descriptor.__get__(f, self)
        data.load(field)

    # keep the original XSO around for render_reply()
    f._recv_xso = xso

    return f
3.647419
3.100134
1.176536
def render_reply(self):
    """
    Create a :class:`~.Data` object equal to the object from which
    the form was created through :meth:`from_xso`, except that the
    values of fields accessible through form descriptors are
    exchanged with the values set on the form.  Fields which have no
    corresponding form descriptor are left untouched.

    This method only works on forms created through :meth:`from_xso`.
    The resulting :class:`~.Data` instance has :attr:`~.Data.type_`
    set to :attr:`~.DataType.SUBMIT`.
    """
    data = copy.copy(self._recv_xso)
    data.type_ = forms_xso.DataType.SUBMIT
    # shallow-copy the field list so the received XSO stays untouched
    # when we replace entries below
    data.fields = list(self._recv_xso.fields)

    for i, field_xso in enumerate(data.fields):
        if field_xso.var is None:
            continue
        if field_xso.var == "FORM_TYPE":
            continue
        key = fields.descriptor_ns, field_xso.var
        try:
            descriptor = self.DESCRIPTOR_MAP[key]
        except KeyError:
            # no descriptor: re-emit the original field unchanged
            continue

        bound_field = descriptor.__get__(self, type(self))
        data.fields[i] = bound_field.render(
            use_local_metadata=False
        )

    return data
5.879687
4.408779
1.333632
def render_request(self):
    """
    Create a :class:`Data` object containing all fields known to the
    :class:`Form`.

    If the :class:`Form` has a :attr:`LAYOUT` attribute, it is used
    during generation; plain strings in the layout are rendered as
    fixed (display-only) fields.
    """
    data = forms_xso.Data(type_=forms_xso.DataType.FORM)

    try:
        layout = self.LAYOUT
    except AttributeError:
        layout = list(self.DESCRIPTORS)

    my_form_type = getattr(self, "FORM_TYPE", None)
    if my_form_type is not None:
        form_type_field = forms_xso.Field()
        form_type_field.var = "FORM_TYPE"
        form_type_field.type_ = forms_xso.FieldType.HIDDEN
        form_type_field.values[:] = [my_form_type]
        data.fields.append(form_type_field)

    for entry in layout:
        if isinstance(entry, str):
            rendered = forms_xso.Field()
            rendered.type_ = forms_xso.FieldType.FIXED
            rendered.values[:] = [entry]
        else:
            rendered = entry.__get__(self, type(self)).render()
        data.fields.append(rendered)

    return data
3.374109
3.03184
1.112891
def lookup(self, key):
    """
    Look up the given `key`, first in the database and then by
    waiting on the futures created with :meth:`create_query_future`
    for that key.

    If the key is not in the database, :meth:`lookup` iterates as
    long as there are pending futures for the given `key`.  If there
    are no pending futures, :class:`KeyError` is raised.  If a future
    raises a :class:`ValueError`, it is ignored.  If the future
    returns a value, it is used as the result.
    """
    try:
        result = self.lookup_in_database(key)
    except KeyError:
        pass
    else:
        return result

    while True:
        # the dict lookup raises KeyError once no query is pending,
        # which terminates the loop and propagates to the caller
        fut = self._lookup_cache[key]
        try:
            result = yield from fut
        except ValueError:
            # that query failed; retry with the next pending future,
            # if any
            continue
        else:
            return result
4.569356
3.467741
1.317675
def create_query_future(self, key):
    """
    Create and return a :class:`asyncio.Future` for the given `key`.

    The future is referenced internally and used by any calls to
    :meth:`lookup` which are made while the future is pending.  It is
    removed from the internal storage automatically when a result or
    exception is set for it; this allows deduplication of queries for
    the same key.
    """
    pending = asyncio.Future()
    # drop the cache entry as soon as the query completes
    pending.add_done_callback(
        functools.partial(self._erase_future, key)
    )
    self._lookup_cache[key] = pending
    return pending
3.988406
4.323087
0.922583
def add_cache_entry(self, key, entry):
    """
    Add the given `entry` (which must be a
    :class:`~.disco.xso.InfoQuery` instance) to the user-level
    database keyed with `key`.

    The `entry` is **not** validated to actually map to `key`; it is
    expected that the caller performs the validation.
    """
    copied_entry = copy.copy(entry)
    # serve future lookups from memory immediately …
    self._memory_overlay[key] = copied_entry
    # … and persist asynchronously in an executor, off the event loop
    # thread
    if self._user_db_path is not None:
        asyncio.ensure_future(asyncio.get_event_loop().run_in_executor(
            None,
            writeback,
            self._user_db_path / key.path,
            entry.captured_events))
6.202803
6.0534
1.024681
def claim_pep_node(self, node_namespace, *,
                   register_feature=True, notify=False):
    """
    Claim node `node_namespace`.

    :param node_namespace: the pubsub node whose events shall be
        handled.
    :param register_feature: Whether to publish the `node_namespace`
        as feature.
    :param notify: Whether to register the ``+notify`` feature to
        receive notification without explicit subscription.
    :raises RuntimeError: if a handler for `node_namespace` is
        already set.
    :returns: a :class:`~aioxmpp.pep.service.RegisteredPEPNode`
        instance representing the claim.
    """
    if node_namespace in self._pep_node_claims:
        raise RuntimeError(
            "claiming already claimed node"
        )
    registered_node = RegisteredPEPNode(
        self,
        node_namespace,
        register_feature=register_feature,
        notify=notify,
    )

    # WeakMethod avoids the finalizer keeping the node alive.
    # NOTE(review): calling a WeakMethod returns the bound method
    # instead of invoking it — verify that this finalizer actually
    # runs _unregister when the node is collected.
    finalizer = weakref.finalize(
        registered_node,
        weakref.WeakMethod(registered_node._unregister)
    )
    # we cannot guarantee that disco is not cleared up already,
    # so we do not unclaim the feature on exit
    finalizer.atexit = False

    self._pep_node_claims[node_namespace] = registered_node
    return registered_node
4.735201
4.468795
1.059615
def available(self):
    """
    Check whether we have a PEP identity associated with our account.

    Queries service discovery on the bare local JID and reports
    whether a ``pubsub``/``pep`` identity is present.
    """
    info = yield from self._disco_client.query_info(
        self.client.local_jid.bare()
    )
    pubsub_identities = info.identities.filter(
        attrs={"category": "pubsub"}
    )
    return any(identity.type_ == "pep"
               for identity in pubsub_identities)
9.911722
7.84935
1.262744
def publish(self, node, data, *, id_=None, access_model=None):
    """
    Publish an item `data` in the PubSub node `node` on the PEP
    service associated with the user's JID.

    :param node: The PubSub node to publish to.
    :param data: The item to publish.
    :type data: An XSO representing the payload.
    :param id_: The id the published item shall have.
    :param access_model: The access model to enforce on the node.
        Defaults to not enforcing any access model.
    :returns: The PubSub id of the published item or :data:`None` if
        it is unknown.
    :raises RuntimeError: if PEP is not supported.
    :raises RuntimeError: if `access_model` is set and
        `publish_options` is not supported by the server.

    If no `id_` is given it is generated by the server (and may be
    returned).
    """
    publish_options = None

    def autocreate_publish_options():
        # lazily build the publish-options form; it is only sent
        # when at least one pre-condition is actually requested
        nonlocal publish_options
        if publish_options is None:
            publish_options = aioxmpp.forms.Data(
                aioxmpp.forms.DataType.SUBMIT
            )
            publish_options.fields.append(
                aioxmpp.forms.Field(
                    type_=aioxmpp.forms.FieldType.HIDDEN,
                    var="FORM_TYPE",
                    values=[
                        "http://jabber.org/protocol/pubsub#publish-options"
                    ]
                )
            )
        return publish_options

    if access_model is not None:
        autocreate_publish_options()
        publish_options.fields.append(aioxmpp.forms.Field(
            var="pubsub#access_model",
            values=[access_model],
        ))

    yield from self._check_for_pep()
    return (yield from self._pubsub.publish(
        None,
        node,
        data,
        id_=id_,
        publish_options=publish_options
    ))
3.279744
3.235919
1.013543
def close(self):
    """
    Unclaim the PEP node and unregister the registered features.

    Idempotent: calling this more than once has no further effect.
    It is not necessary to call close if this claim is managed by
    :class:`~aioxmpp.pep.register_pep_node`.
    """
    if not self._closed:
        self._closed = True
        self._pep_service._unclaim(self.node_namespace)
        self._unregister()
16.228786
7.672322
2.115238
def full_clean(self, exclude, validate_unique=False):
    """
    Validate the node against its neomodel property definitions,
    raising :class:`ValidationError` (which django forms can handle)
    on failure.

    :param exclude: accepted for signature compatibility with
        Django's ``Model.full_clean``; currently unused.
    :param validate_unique: Check if conflicting node exists in the
        labels indexes; currently unused.
    :raises ValidationError: mapping the offending property name to
        an error message.
    :return: None
    """
    # validate against neomodel; deflation applies each property's
    # validators and reports the first failure
    try:
        self.deflate(self.__properties__, self)
    except DeflateError as e:
        # chain the cause so the original neomodel error is not lost
        raise ValidationError({e.property_name: e.msg}) from e
    except RequiredProperty as e:
        raise ValidationError({e.property_name: 'is required'}) from e
7.698587
8.064616
0.954613
def email_user(self, subject, message, from_email=None, **kwargs):
    """
    Send an email to this User.

    :param subject: email subject line.
    :param message: plain-text message body.
    :param from_email: sender address; ``None`` falls back to
        Django's ``DEFAULT_FROM_EMAIL``.
    :param kwargs: extra keyword arguments forwarded to
        ``django.core.mail.send_mail`` (e.g. ``fail_silently``,
        ``html_message``), matching Django's ``AbstractUser``
        convention.
    """
    send_mail(subject, message, from_email, [self.email], **kwargs)
2.555168
2.494806
1.024195
def activate_users(self, request, queryset):
    """
    Activates the selected users, if they are not already activated,
    and reports the number of activations via the admin messages
    framework.
    """
    activated = 0
    for user in queryset:
        if user.is_active:
            continue
        user.activate()
        activated += 1
    self.message_user(
        request,
        _('Successfully activated %(count)d %(items)s.') % {
            'count': activated,
            'items': model_ngettext(self.opts, activated),
        },
        messages.SUCCESS,
    )
2.596697
2.518561
1.031024
def send_activation_email(self, request, queryset):
    """
    Send activation emails for the selected users, if they are not
    already activated, and report the number sent via the admin
    messages framework.
    """
    sent = 0
    for user in queryset:
        # skip active users; also skip everyone when email
        # verification is disabled in settings
        if user.is_active or not settings.USERS_VERIFY_EMAIL:
            continue
        send_activation_email(user=user, request=request)
        sent += 1
    self.message_user(
        request,
        _('Activation emails sent to %(count)d %(items)s.') % {
            'count': sent,
            'items': model_ngettext(self.opts, sent),
        },
        messages.SUCCESS,
    )
3.084356
2.897843
1.064363
def get_queryset(self):
    """Fixes get_query_set vs get_queryset for Django <1.6."""
    try:
        qs = super(UserManager, self).get_queryset()
    except AttributeError:  # pragma: no cover
        # Django < 1.6 only provides get_query_set()
        qs = super(UserManager, self).get_query_set()
    return qs
2.80292
2.201268
1.27332
def is_ready(self):
    """Is Socket Ready.

    Polls the file descriptor for readability, waiting at most
    ``POLL_TIMEOUT``.

    :rtype: bool
    """
    try:
        ready, _, _ = self.select.select([self.fileno], [], [],
                                         POLL_TIMEOUT)
        return bool(ready)
    except self.select.error as why:
        # EINTR just means the select call was interrupted by a
        # signal; anything else is reported as a connection error
        if why.args[0] != EINTR:
            self._exceptions.append(AMQPConnectionError(why))
        return False
5.079247
4.936975
1.028818
def close(self):
    """Close Socket.

    Stops the inbound thread and tears down the socket and poller.

    :return:
    """
    # acquire write before read lock; open() uses the same order to
    # avoid lock-ordering deadlocks
    self._wr_lock.acquire()
    self._rd_lock.acquire()
    try:
        self._running.clear()
        if self.socket:
            self._close_socket()
        if self._inbound_thread:
            self._inbound_thread.join(
                timeout=self._parameters['timeout'])
        self._inbound_thread = None
        self.poller = None
        self.socket = None
    finally:
        self._wr_lock.release()
        self._rd_lock.release()
3.294316
3.288371
1.001808
def open(self):
    """Open Socket and establish a connection.

    :raises AMQPConnectionError: Raises if the connection
        encountered an error.
    :return:
    """
    # same lock order as close() to avoid deadlocks
    self._wr_lock.acquire()
    self._rd_lock.acquire()
    try:
        self.data_in = EMPTY_BUFFER
        self._running.set()
        sock_addresses = self._get_socket_addresses()
        self.socket = self._find_address_and_connect(sock_addresses)
        self.poller = Poller(self.socket.fileno(), self._exceptions,
                             timeout=self._parameters['timeout'])
        # reader thread is started last, once socket and poller are
        # in place
        self._inbound_thread = self._create_inbound_thread()
    finally:
        self._wr_lock.release()
        self._rd_lock.release()
4.592281
4.412899
1.04065
def write_to_socket(self, frame_data):
    """Write data to the socket.

    Retries partial sends until the whole frame is written; a fatal
    socket error is appended to ``self._exceptions`` and the frame
    is abandoned.

    :param str frame_data:
    :return:
    """
    self._wr_lock.acquire()
    try:
        total_bytes_written = 0
        bytes_to_send = len(frame_data)
        while total_bytes_written < bytes_to_send:
            try:
                if not self.socket:
                    raise socket.error('connection/socket error')
                bytes_written = (
                    self.socket.send(frame_data[total_bytes_written:])
                )
                if bytes_written == 0:
                    # send() returning 0 means the peer closed the
                    # connection
                    raise socket.error('connection/socket error')
                total_bytes_written += bytes_written
            except socket.timeout:
                # transient; retry the remaining bytes
                pass
            except socket.error as why:
                if why.args[0] in (EWOULDBLOCK, EAGAIN):
                    # non-blocking socket not ready yet; retry
                    continue
                # fatal: report and give up on this frame
                self._exceptions.append(AMQPConnectionError(why))
                return
    finally:
        self._wr_lock.release()
2.467165
2.523484
0.977682
def _close_socket(self):
    """Shutdown and close the Socket.

    :return:
    """
    try:
        self.socket.shutdown(socket.SHUT_RDWR)
    except (OSError, socket.error):
        # the peer may already have closed the connection; shutdown
        # failing is not an error here
        pass
    self.socket.close()
2.505672
3.222481
0.77756
def _get_socket_addresses(self):
    """Get Socket address information.

    Resolves the configured hostname/port; IPv6 is considered only
    when the local Python build supports it.

    :raises AMQPConnectionError: if name resolution fails.
    :rtype: list
    """
    family = socket.AF_UNSPEC
    if not socket.has_ipv6:
        family = socket.AF_INET
    try:
        addresses = socket.getaddrinfo(self._parameters['hostname'],
                                       self._parameters['port'],
                                       family, socket.SOCK_STREAM)
    except socket.gaierror as why:
        raise AMQPConnectionError(why)
    return addresses
3.159858
3.618511
0.873248
def _find_address_and_connect(self, addresses):
    """Find and connect to the appropriate address.

    Tries each resolved address in order and returns the first
    socket that connects; the last error message is included in the
    raised exception.

    :param addresses: entries as returned by ``socket.getaddrinfo``.
    :raises AMQPConnectionError: Raises if the connection
        encountered an error.
    :rtype: socket.socket
    """
    error_message = None
    for address in addresses:
        sock = self._create_socket(socket_family=address[0])
        try:
            sock.connect(address[4])
        except (IOError, OSError) as why:
            # NOTE(review): the failed socket is not closed before
            # trying the next address — confirm whether leaking it
            # until GC is intentional.
            error_message = why.strerror
            continue
        return sock
    raise AMQPConnectionError(
        'Could not connect to %s:%d error: %s' % (
            self._parameters['hostname'],
            self._parameters['port'],
            error_message
        )
    )
3.677111
3.582413
1.026434
def _create_socket(self, socket_family):
    """Create Socket.

    :param int socket_family:
    :raises AMQPConnectionError: if TLS is requested but not
        supported by this Python build.
    :rtype: socket.socket
    """
    sock = socket.socket(socket_family, socket.SOCK_STREAM, 0)
    # a configured timeout of 0 means "no timeout" (blocking mode)
    sock.settimeout(self._parameters['timeout'] or None)
    if self.use_ssl:
        if not compatibility.SSL_SUPPORTED:
            raise AMQPConnectionError(
                'Python not compiled with support for TLSv1 or higher'
            )
        sock = self._ssl_wrap_socket(sock)
    return sock
4.585487
4.992661
0.918445
def _ssl_wrap_socket(self, sock):
    """Wrap SSLSocket around the Socket.

    A caller-supplied ``context`` in ``ssl_options`` takes
    precedence; otherwise a default ``ssl_version`` is filled in and
    the module-level wrapper is used.

    :param socket.socket sock:
    :rtype: SSLSocket
    """
    context = self._parameters['ssl_options'].get('context')
    if context is not None:
        hostname = self._parameters['ssl_options'].get(
            'server_hostname')
        return context.wrap_socket(
            sock, do_handshake_on_connect=True,
            server_hostname=hostname
        )
    if 'ssl_version' not in self._parameters['ssl_options']:
        self._parameters['ssl_options']['ssl_version'] = (
            compatibility.DEFAULT_SSL_VERSION
        )
    # NOTE(review): ssl.wrap_socket is deprecated (removed in
    # Python 3.12) — consider constructing an SSLContext instead.
    return ssl.wrap_socket(
        sock, do_handshake_on_connect=True,
        **self._parameters['ssl_options']
    )
2.316607
2.434252
0.951671
inbound_thread = threading.Thread(target=self._process_incoming_data, name=__name__) inbound_thread.daemon = True inbound_thread.start() return inbound_thread
def _create_inbound_thread(self)
Internal Thread that handles all incoming traffic. :rtype: threading.Thread
3.161567
3.28877
0.961322
def _process_incoming_data(self):
    """Retrieve and process any incoming data.

    Runs on the inbound thread until ``self._running`` is cleared
    (see ``close``).  The poller's select call blocks up to its
    timeout, so this loop does not busy-wait.

    :return:
    """
    while self._running.is_set():
        if self.poller.is_ready:
            self.data_in += self._receive()
            self.data_in = self._on_read_impl(self.data_in)
7.984606
8.428329
0.947353
def _receive(self):
    """Receive any incoming socket data.

    If an error is thrown, handle it and return an empty string.

    :return: data_in
    :rtype: bytes
    """
    data_in = EMPTY_BUFFER
    try:
        data_in = self._read_from_socket()
    except socket.timeout:
        # no data within the socket timeout; not an error
        pass
    except (IOError, OSError) as why:
        # EWOULDBLOCK/EAGAIN are transient; anything else is fatal
        # and stops the inbound loop
        if why.args[0] not in (EWOULDBLOCK, EAGAIN):
            self._exceptions.append(AMQPConnectionError(why))
            self._running.clear()
    return data_in
4.241589
4.210531
1.007376
def _read_from_socket(self):
    """Read data from the socket.

    :raises socket.error: if the socket is gone.
    :rtype: bytes
    """
    if not self.use_ssl:
        if not self.socket:
            raise socket.error('connection/socket error')
        return self.socket.recv(MAX_FRAME_SIZE)

    # NOTE(review): only the SSL path takes the read lock — confirm
    # that the plain-socket path is safe without it.
    with self._rd_lock:
        if not self.socket:
            raise socket.error('connection/socket error')
        return self.socket.read(MAX_FRAME_SIZE)
3.542485
3.471297
1.020508
def start(self, exceptions):
    """Start the Heartbeat Checker.

    :param list exceptions: shared list that connection errors are
        appended to.
    :return: True when the first timer was started; False when
        heartbeats are disabled (falsy interval).
    :rtype: bool
    """
    if not self._interval:
        return False
    self._running.set()
    # reset the counters under the lock shared with
    # _check_for_life_signs
    with self._lock:
        self._threshold = 0
        self._reads_since_check = 0
        self._writes_since_check = 0
    self._exceptions = exceptions
    LOGGER.debug('Heartbeat Checker Started')
    return self._start_new_timer()
6.210983
5.712902
1.087185
def stop(self):
    """Stop the Heartbeat Checker.

    Clears the running flag and cancels any pending timer.

    :return:
    """
    self._running.clear()
    with self._lock:
        timer, self._timer = self._timer, None
        if timer:
            timer.cancel()
3.910264
4.606174
0.848918
def _check_for_life_signs(self):
    """Check Connection for life signs.

    First check if any data has been sent, if not send a heartbeat
    to the remote server.

    If we have not received any data what so ever within two
    intervals, we need to raise an exception so that we can close
    the connection.

    :rtype: bool
    """
    if not self._running.is_set():
        return False
    if self._writes_since_check == 0:
        self.send_heartbeat_impl()
    self._lock.acquire()
    try:
        if self._reads_since_check == 0:
            self._threshold += 1
            if self._threshold >= 2:
                # two silent intervals: give up on the connection
                self._running.clear()
                self._raise_or_append_exception()
                return False
        else:
            self._threshold = 0
    finally:
        # counters are reset even if the exception above propagates
        self._reads_since_check = 0
        self._writes_since_check = 0
        self._lock.release()
    return self._start_new_timer()
4.179149
3.61602
1.155732
def _raise_or_append_exception(self):
    """
    The connection is presumably dead and we need to raise or
    append an exception.

    If we have a list for exceptions, append the exception and let
    the connection handle it, if not raise the exception here.

    :return:
    """
    why = AMQPConnectionError(
        'Connection dead, no heartbeat or data received in >= '
        '%ds' % (self._interval * 2)
    )
    if self._exceptions is None:
        raise why
    self._exceptions.append(why)
9.912939
8.606533
1.151792
if not self._running.is_set(): return False self._timer = self.timer_impl( interval=self._interval, function=self._check_for_life_signs ) self._timer.daemon = True self._timer.start() return True
def _start_new_timer(self)
Create a timer that will be used to periodically check the connection for heartbeats. :return:
4.911111
5.016604
0.978971
def declare(self, exchange='', exchange_type='direct', passive=False,
            durable=False, auto_delete=False, arguments=None):
    """Declare an Exchange.

    :param str exchange: Exchange name
    :param str exchange_type: Exchange type
    :param bool passive: Do not create
    :param bool durable: Durable exchange
    :param bool auto_delete: Automatically delete when not in use
    :param dict arguments: Exchange key/value arguments
    :raises AMQPInvalidArgument: Invalid Parameters
    :raises AMQPChannelError: Raises if the channel encountered an
        error.
    :raises AMQPConnectionError: Raises if the connection encountered
        an error.
    :rtype: dict
    """
    # validate eagerly so the server never sees a malformed frame
    if not compatibility.is_string(exchange):
        raise AMQPInvalidArgument('exchange should be a string')
    elif not compatibility.is_string(exchange_type):
        raise AMQPInvalidArgument('exchange_type should be a string')
    elif not isinstance(passive, bool):
        raise AMQPInvalidArgument('passive should be a boolean')
    elif not isinstance(durable, bool):
        raise AMQPInvalidArgument('durable should be a boolean')
    elif not isinstance(auto_delete, bool):
        raise AMQPInvalidArgument('auto_delete should be a boolean')
    elif arguments is not None and not isinstance(arguments, dict):
        raise AMQPInvalidArgument('arguments should be a dict or None')

    declare_frame = pamqp_exchange.Declare(exchange=exchange,
                                           exchange_type=exchange_type,
                                           passive=passive,
                                           durable=durable,
                                           auto_delete=auto_delete,
                                           arguments=arguments)
    return self._channel.rpc_request(declare_frame)
1.749635
1.791847
0.976442
def delete(self, exchange='', if_unused=False):
    """Delete an Exchange.

    :param str exchange: Exchange name
    :param bool if_unused: Delete only if unused
    :raises AMQPInvalidArgument: Invalid Parameters
    :raises AMQPChannelError: Raises if the channel encountered an
        error.
    :raises AMQPConnectionError: Raises if the connection
        encountered an error.
    :rtype: dict
    """
    if not compatibility.is_string(exchange):
        raise AMQPInvalidArgument('exchange should be a string')
    return self._channel.rpc_request(
        pamqp_exchange.Delete(exchange=exchange, if_unused=if_unused)
    )
5.400298
5.594097
0.965356
def bind(self, destination='', source='', routing_key='',
         arguments=None):
    """Bind an Exchange.

    :param str destination: Exchange name
    :param str source: Exchange to bind to
    :param str routing_key: The routing key to use
    :param dict arguments: Bind key/value arguments
    :raises AMQPInvalidArgument: Invalid Parameters
    :raises AMQPChannelError: Raises if the channel encountered an
        error.
    :raises AMQPConnectionError: Raises if the connection encountered
        an error.
    :rtype: dict
    """
    # validate eagerly so the server never sees a malformed frame
    if not compatibility.is_string(destination):
        raise AMQPInvalidArgument('destination should be a string')
    elif not compatibility.is_string(source):
        raise AMQPInvalidArgument('source should be a string')
    elif not compatibility.is_string(routing_key):
        raise AMQPInvalidArgument('routing_key should be a string')
    elif arguments is not None and not isinstance(arguments, dict):
        raise AMQPInvalidArgument('arguments should be a dict or None')

    bind_frame = pamqp_exchange.Bind(destination=destination,
                                     source=source,
                                     routing_key=routing_key,
                                     arguments=arguments)
    return self._channel.rpc_request(bind_frame)
2.157025
2.240693
0.962659
def unbind(self, destination='', source='', routing_key='',
           arguments=None):
    """Unbind an Exchange.

    :param str destination: Exchange name
    :param str source: Exchange to unbind from
    :param str routing_key: The routing key used
    :param dict arguments: Unbind key/value arguments
    :raises AMQPInvalidArgument: Invalid Parameters
    :raises AMQPChannelError: Raises if the channel encountered an
        error.
    :raises AMQPConnectionError: Raises if the connection encountered
        an error.
    :rtype: dict
    """
    # validate eagerly so the server never sees a malformed frame
    if not compatibility.is_string(destination):
        raise AMQPInvalidArgument('destination should be a string')
    elif not compatibility.is_string(source):
        raise AMQPInvalidArgument('source should be a string')
    elif not compatibility.is_string(routing_key):
        raise AMQPInvalidArgument('routing_key should be a string')
    elif arguments is not None and not isinstance(arguments, dict):
        raise AMQPInvalidArgument('arguments should be a dict or None')

    unbind_frame = pamqp_exchange.Unbind(destination=destination,
                                         source=source,
                                         routing_key=routing_key,
                                         arguments=arguments)
    return self._channel.rpc_request(unbind_frame)
2.145588
2.198849
0.975778
def get(self, queue, virtual_host='/'):
    """Get Queue details.

    :param queue: Queue name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: dict
    """
    # the virtual host must be URL-quoted ('/' becomes '%2F')
    return self.http_client.get(
        API_QUEUE % (quote(virtual_host, ''), queue)
    )
5.409891
8.153623
0.663495
def list(self, virtual_host='/', show_all=False):
    """List Queues.

    :param str virtual_host: Virtual host name
    :param bool show_all: List all Queues
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: list
    """
    if show_all:
        return self.http_client.get(API_QUEUES)
    return self.http_client.get(
        API_QUEUES_VIRTUAL_HOST % quote(virtual_host, '')
    )
3.835666
4.882979
0.785517
def declare(self, queue='', virtual_host='/', passive=False,
            durable=False, auto_delete=False, arguments=None):
    """Declare a Queue.

    :param str queue: Queue name
    :param str virtual_host: Virtual host name
    :param bool passive: Do not create
    :param bool durable: Durable queue
    :param bool auto_delete: Automatically delete when not in use
    :param dict|None arguments: Queue key/value arguments
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: dict
    """
    # passive means "only look it up, do not create"
    if passive:
        return self.get(queue, virtual_host=virtual_host)
    payload = json.dumps({
        'durable': durable,
        'auto_delete': auto_delete,
        'arguments': arguments or {},
        'vhost': virtual_host
    })
    return self.http_client.put(
        API_QUEUE % (quote(virtual_host, ''), queue),
        payload=payload
    )
3.498232
3.701629
0.945052
def delete(self, queue, virtual_host='/'):
    """Delete a Queue.

    :param str queue: Queue name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: dict
    """
    return self.http_client.delete(
        API_QUEUE % (quote(virtual_host, ''), queue)
    )
7.650171
11.634325
0.657552
def purge(self, queue, virtual_host='/'):
    """Purge a Queue.

    :param str queue: Queue name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: None
    """
    return self.http_client.delete(
        API_QUEUE_PURGE % (quote(virtual_host, ''), queue)
    )
6.724941
9.330789
0.720726
def bindings(self, queue, virtual_host='/'):
    """Get Queue bindings.

    :param str queue: Queue name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: list
    """
    return self.http_client.get(
        API_QUEUE_BINDINGS % (quote(virtual_host, ''), queue)
    )
7.311872
9.107108
0.802875
def bind(self, queue='', exchange='', routing_key='',
         virtual_host='/', arguments=None):
    """Bind a Queue.

    :param str queue: Queue name
    :param str exchange: Exchange name
    :param str routing_key: The routing key to use
    :param str virtual_host: Virtual host name
    :param dict|None arguments: Bind key/value arguments
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: None
    """
    # the JSON body carries the raw vhost; only the URL is quoted
    payload = json.dumps({
        'destination': queue,
        'destination_type': 'q',
        'routing_key': routing_key,
        'source': exchange,
        'arguments': arguments or {},
        'vhost': virtual_host
    })
    return self.http_client.post(
        API_QUEUE_BIND % (quote(virtual_host, ''), exchange, queue),
        payload=payload
    )
3.560263
4.026745
0.884154
def unbind(self, queue='', exchange='', routing_key='',
           virtual_host='/', properties_key=None):
    """Unbind a Queue.

    :param str queue: Queue name
    :param str exchange: Exchange name
    :param str routing_key: The routing key to use
    :param str virtual_host: Virtual host name
    :param str properties_key: binding properties key; defaults to
        the routing key
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: None
    """
    props = properties_key or routing_key
    # the JSON body carries the raw vhost; only the URL is quoted
    payload = json.dumps({
        'destination': queue,
        'destination_type': 'q',
        'properties_key': props,
        'source': exchange,
        'vhost': virtual_host
    })
    return self.http_client.delete(
        API_QUEUE_UNBIND % (quote(virtual_host, ''), exchange, queue,
                            props),
        payload=payload
    )
3.601259
4.050325
0.889128
def get(self, exchange, virtual_host='/'):
    """Get Exchange details.

    :param str exchange: Exchange name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an
        error.
    :raises ApiConnectionError: Raises if there was a connectivity
        issue.
    :rtype: dict
    """
    return self.http_client.get(
        API_EXCHANGE % (quote(virtual_host, ''), exchange)
    )
7.385724
9.431609
0.783082
def list(self, virtual_host='/', show_all=False):
    """List Exchanges.

    :param str virtual_host: Virtual host name
    :param bool show_all: List Exchanges across all virtual hosts

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: list
    """
    if show_all:
        # Cluster-wide listing ignores the virtual_host argument.
        return self.http_client.get(API_EXCHANGES)
    return self.http_client.get(
        API_EXCHANGES_VIRTUAL_HOST % quote(virtual_host, ''))
3.8307
4.737185
0.808645
def declare(self, exchange='', exchange_type='direct', virtual_host='/',
            passive=False, durable=False, auto_delete=False,
            internal=False, arguments=None):
    """Declare an Exchange.

    :param str exchange: Exchange name
    :param str exchange_type: Exchange type
    :param str virtual_host: Virtual host name
    :param bool passive: Do not create; only look the Exchange up
    :param bool durable: Durable exchange
    :param bool auto_delete: Automatically delete when not in use
    :param bool internal: Is the exchange for use by the broker only.
    :param dict|None arguments: Exchange key/value arguments

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: None
    """
    if passive:
        # Passive declare is a plain lookup; nothing is created.
        return self.get(exchange, virtual_host=virtual_host)
    payload = json.dumps({
        'durable': durable,
        'auto_delete': auto_delete,
        'internal': internal,
        'type': exchange_type,
        'arguments': arguments or {},
        'vhost': virtual_host
    })
    return self.http_client.put(
        API_EXCHANGE % (quote(virtual_host, ''), exchange),
        payload=payload)
3.487544
3.399412
1.025926
def delete(self, exchange, virtual_host='/'):
    """Delete an Exchange.

    :param str exchange: Exchange name
    :param str virtual_host: Virtual host name

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: dict
    """
    return self.http_client.delete(
        API_EXCHANGE % (quote(virtual_host, ''), exchange))
8.068014
10.852618
0.743416
def bindings(self, exchange, virtual_host='/'):
    """Get Exchange bindings.

    :param str exchange: Exchange name
    :param str virtual_host: Virtual host name

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: list
    """
    return self.http_client.get(
        API_EXCHANGE_BINDINGS % (quote(virtual_host, ''), exchange))
7.124067
8.796122
0.80991
def bind(self, destination='', source='', routing_key='', virtual_host='/',
         arguments=None):
    """Bind an Exchange to an Exchange.

    :param str source: Source Exchange name
    :param str destination: Destination Exchange name
    :param str routing_key: The routing key to use
    :param str virtual_host: Virtual host name
    :param dict|None arguments: Bind key/value arguments

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: None
    """
    payload = json.dumps({
        'destination': destination,
        'destination_type': 'e',
        'routing_key': routing_key,
        'source': source,
        'arguments': arguments or {},
        'vhost': virtual_host
    })
    return self.http_client.post(
        API_EXCHANGE_BIND % (quote(virtual_host, ''), source, destination),
        payload=payload)
3.543997
3.700279
0.957765
def unbind(self, destination='', source='', routing_key='',
           virtual_host='/', properties_key=None):
    """Unbind an Exchange from an Exchange.

    :param str source: Source Exchange name
    :param str destination: Destination Exchange name
    :param str routing_key: The routing key to use
    :param str virtual_host: Virtual host name
    :param str properties_key: Binding properties key; falls back to the
                               routing key when not provided.

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: None
    """
    binding_key = properties_key or routing_key
    payload = json.dumps({
        'destination': destination,
        'destination_type': 'e',
        'properties_key': binding_key,
        'source': source,
        'vhost': virtual_host
    })
    return self.http_client.delete(
        API_EXCHANGE_UNBIND % (quote(virtual_host, ''), source,
                               destination, binding_key),
        payload=payload)
3.521569
3.624768
0.971529
def channel(self, rpc_timeout=60, lazy=False):
    """Open a Channel on this Connection.

    :param int rpc_timeout: Timeout before we give up waiting for an RPC
                            response from the server.
    :param bool lazy: Register the channel without opening it on the
                      server yet.

    :raises AMQPInvalidArgument: Invalid Parameters
    :raises AMQPChannelError: Raises if the channel encountered an error.
    :raises AMQPConnectionError: Raises if the connection
                                 encountered an error.
    """
    LOGGER.debug('Opening a new Channel')
    if not compatibility.is_integer(rpc_timeout):
        raise AMQPInvalidArgument('rpc_timeout should be an integer')
    elif self.is_closed:
        raise AMQPConnectionError('socket/connection closed')
    # Channel-id allocation and registration must be atomic with respect
    # to other threads opening channels on this connection.
    with self.lock:
        channel_id = self._get_next_available_channel_id()
        channel = Channel(channel_id, self, rpc_timeout,
                          on_close_impl=self._cleanup_channel)
        self._channels[channel_id] = channel
        if not lazy:
            channel.open()
    LOGGER.debug('Channel #%d Opened', channel_id)
    return self._channels[channel_id]
3.907703
3.575563
1.092892
def check_for_errors(self):
    """Check the Connection for errors.

    :raises AMQPConnectionError: Raises if the connection
                                 encountered an error.
    :return:
    """
    if not self.exceptions:
        if not self.is_closed:
            # Healthy: no recorded errors and the connection is up.
            return
        # The connection dropped without a recorded cause; record one so
        # there is always an exception to raise below.
        self.exceptions.append(
            AMQPConnectionError('connection was closed'))
    self.set_state(self.CLOSED)
    self.close()
    raise self.exceptions[0]
6.384198
5.552339
1.149821
def close(self):
    """Close the connection, shutting down remaining channels first.

    :raises AMQPConnectionError: Raises if the connection
                                 encountered an error.
    :return:
    """
    LOGGER.debug('Connection Closing')
    if not self.is_closed:
        self.set_state(self.CLOSING)
    self.heartbeat.stop()
    try:
        if not self.is_closed and self.socket:
            # Attempt a graceful AMQP close handshake.
            self._channel0.send_close_connection()
            self._wait_for_connection_state(state=Stateful.CLOSED)
    except AMQPConnectionError:
        # Best-effort: the socket may already be gone.
        pass
    finally:
        # Always tear down channels and the I/O layer, handshake or not.
        self._close_remaining_channels()
        self._io.close()
        self.set_state(self.CLOSED)
    LOGGER.debug('Connection Closed')
5.199408
4.980036
1.04405
def open(self):
    """Open the Connection.

    :raises AMQPConnectionError: Raises if the connection
                                 encountered an error.
    """
    LOGGER.debug('Connection Opening')
    self.set_state(self.OPENING)
    # Reset per-connection state before (re-)connecting.
    self._exceptions = []
    self._channels = {}
    self._last_channel_id = None
    self._io.open()
    self._send_handshake()
    self._wait_for_connection_state(state=Stateful.OPEN)
    self.heartbeat.start(self._exceptions)
    LOGGER.debug('Connection Opened')
6.452983
6.081353
1.06111
def write_frame(self, channel_id, frame_out):
    """Marshal and write an outgoing pamqp frame to the Socket.

    :param int channel_id: Channel ID.
    :param specification.Frame frame_out: Amqp frame.
    :return:
    """
    data = pamqp_frame.marshal(frame_out, channel_id)
    # Record the write so the heartbeat monitor knows the link is active.
    self.heartbeat.register_write()
    self._io.write_to_socket(data)
8.644195
8.910188
0.970147
def write_frames(self, channel_id, frames_out):
    """Marshal and write multiple outgoing pamqp frames to the Socket.

    :param int channel_id: Channel ID.
    :param list frames_out: Amqp frames.
    :return:
    """
    # Join once instead of repeated `+=` concatenation, which re-copies
    # the accumulated buffer on every iteration (quadratic for many
    # frames). EMPTY_BUFFER is the same empty bytes-like the original
    # used as its accumulator seed, so the result is identical.
    data_out = EMPTY_BUFFER.join(
        pamqp_frame.marshal(single_frame, channel_id)
        for single_frame in frames_out
    )
    # Record the write so the heartbeat monitor knows the link is active.
    self.heartbeat.register_write()
    self._io.write_to_socket(data_out)
7.245027
6.992193
1.03616
def _close_remaining_channels(self):
    """Forcefully close all open channels.

    :return:
    """
    # Iterate over a snapshot of the ids: _cleanup_channel mutates
    # self._channels while we walk it.
    for channel_id in list(self._channels):
        channel = self._channels[channel_id]
        channel.set_state(Channel.CLOSED)
        channel.close()
        self._cleanup_channel(channel_id)
3.298438
3.823617
0.862649