code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
msg = aioxmpp.stanza.Message( to=self.to, from_=self.sender, type_=aioxmpp.MessageType.CHAT, ) msg.body[None] = self.body # Send metadata using xep-0004: Data Forms (https://xmpp.org/extensions/xep-0004.html) if len(self.metadata): data = forms_xso.Data(type_=forms_xso.DataType.FORM) for name, value in self.metadata.items(): data.fields.append( forms_xso.Field( var=name, type_=forms_xso.FieldType.TEXT_SINGLE, values=[value], ) ) if self.thread: data.fields.append(forms_xso.Field(var="_thread_node", type_=forms_xso.FieldType.TEXT_SINGLE, values=[self.thread])) data.title = SPADE_X_METADATA msg.xep0004_data = [data] return msg
def prepare(self)
Returns an aioxmpp.stanza.Message built from the Message and prepared to be sent. Returns: aioxmpp.stanza.Message: the message prepared to be sent
3.568941
3.485494
1.023941
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.bind((hostname, 0)) return s.getsockname()[1]
def unused_port(hostname)
Return a port that is unused on the current host.
1.796214
1.776591
1.011045
await runner.setup() agent.web.server = aioweb.TCPSite(runner, hostname, port) await agent.web.server.start() logger.info(f"Serving on http://{hostname}:{port}/")
async def start_server_in_loop(runner, hostname, port, agent)
Listens to http requests and sends them to the webapp. Args: runner: AppRunner to process the http requests hostname: host name to listen from. port: port to listen from. agent: agent that owns the web app.
3.874291
3.711265
1.043927
self.hostname = hostname if hostname else "localhost" if port: self.port = port elif not self.port: self.port = unused_port(self.hostname) if templates_path: self.loaders.insert(0, jinja2.FileSystemLoader(templates_path)) self._set_loaders() self.setup_routes() self.runner = aioweb.AppRunner(self.app) return self.agent.submit(start_server_in_loop(self.runner, self.hostname, self.port, self.agent))
def start(self, hostname=None, port=None, templates_path=None)
Starts the web interface. Args: hostname (str, optional): host name to listen from. (Default value = None) port (int, optional): port to listen from. (Default value = None) templates_path (str, optional): path to look for templates. (Default value = None)
4.220976
4.464782
0.945394
if raw: fn = controller else: fn = self._prepare_controller(controller, template) self.app.router.add_get(path, fn)
def add_get(self, path, controller, template, raw=False)
Setup a route of type GET Args: path (str): URL to listen to controller (coroutine): the coroutine to handle the request template (str): the template to render the response or None if it is a JSON response raw (bool): indicates if post-processing (jinja, json, etc) is needed or not
3.562404
4.076369
0.873916
if template: fn = aiohttp_jinja2.template(template_name=template)(controller) else: fn = self._parse_json_response(controller) return fn
def _prepare_controller(self, controller, template)
Wraps the controller to either render a jinja template or to return a json response (if template is None) Args: controller (coroutine): the coroutine to be wrapped template (str): the name of the template or None Returns: coroutine: a wrapped coroutine of the controller
7.09617
5.723289
1.239876
self.agent = agent self.queue = asyncio.Queue(loop=self.agent.loop) self.presence = agent.presence self.web = agent.web
def set_agent(self, agent)
Links behaviour with its owner agent Args: agent (spade.agent.Agent): the agent who owns the behaviour
5.118396
5.577527
0.917682
if self.template: return self.template.match(message) return True
def match(self, message: Message) -> bool
Matches a message with the behaviour's template Args: message(spade.message.Message): the message to match with Returns: bool: whether the message matches or not
7.484697
5.931438
1.261869
self.agent.set(name, value)
def set(self, name: str, value: Any) -> None
Stores a knowledge item in the agent knowledge base. Args: name (str): name of the item value (Any): value of the item
18.133505
7.401462
2.44999
self.agent.submit(self._start()) self.is_running = True
def start(self)
starts behaviour in the event loop
15.880044
11.807118
1.344955
self.agent._alive.wait() try: await self.on_start() except Exception as e: logger.error("Exception running on_start in behaviour {}: {}".format(self, e)) self.kill(exit_code=e) await self._step() self._is_done.clear()
async def _start(self)
Start coroutine. runs on_start coroutine and then runs the _step coroutine where the body of the behaviour is called.
6.758598
4.668417
1.447728
self._force_kill.set() if exit_code is not None: self._exit_code = exit_code logger.info("Killing behavior {0} with exit code: {1}".format(self, exit_code))
def kill(self, exit_code: Any = None)
Stops the behaviour Args: exit_code (object, optional): the exit code of the behaviour (Default value = None)
4.509064
3.97882
1.133267
if self._done() or self.is_killed(): return self._exit_code else: raise BehaviourNotFinishedException
def exit_code(self) -> Any
Returns the exit_code of the behaviour. It only works when the behaviour is done or killed, otherwise it raises an exception. Returns: object: the exit code of the behaviour
10.223904
5.771909
1.771321
while not self._done() and not self.is_killed(): try: await self._run() await asyncio.sleep(0) # relinquish cpu except Exception as e: logger.error("Exception running behaviour {}: {}".format(self, e)) self.kill(exit_code=e) try: await self.on_end() except Exception as e: logger.error("Exception running on_end in behaviour {}: {}".format(self, e)) self.kill(exit_code=e)
async def _step(self)
Main loop of the behaviour. Checks whether behaviour is done or killed, otherwise it calls run() coroutine.
3.415324
2.70841
1.261007
if not msg.sender: msg.sender = str(self.agent.jid) logger.debug(f"Adding agent's jid as sender to message: {msg}") await self.agent.container.send(msg, self) msg.sent = True self.agent.traces.append(msg, category=str(self))
async def send(self, msg: Message)
Sends a message. Args: msg (spade.message.Message): the message to be sent.
6.835614
6.127772
1.115514
if timeout: coro = self.queue.get() try: msg = await asyncio.wait_for(coro, timeout=timeout) except asyncio.TimeoutError: msg = None else: try: msg = self.queue.get_nowait() except asyncio.QueueEmpty: msg = None return msg
async def receive(self, timeout: float = None) -> Union[Message, None]
Receives a message for this behaviour. If timeout is not None it returns the message or "None" after timeout is done. Args: timeout (float): number of seconds until return Returns: spade.message.Message: a Message or None
1.798833
2.166511
0.830291
if value < 0: raise ValueError("Period must be greater or equal than zero.") self._period = timedelta(seconds=value)
def period(self, value: float)
Set the period. Args: value (float): seconds
4.963177
5.670765
0.875222
if not issubclass(state.__class__, State): raise AttributeError("state must be subclass of spade.behaviour.State") self._states[name] = state if initial: self.current_state = name
def add_state(self, name: str, state: State, initial: bool = False)
Adds a new state to the FSM. Args: name (str): the name of the state, which is used as its identifier. state (spade.behaviour.State): The state class initial (bool, optional): whether the state is the initial state or not. (Only one initial state is allowed) (Default value = False)
4.691043
3.360365
1.395992
self._transitions[source].append(dest)
def add_transition(self, source: str, dest: str)
Adds a transition from one state to another. Args: source (str): the name of the state from where the transition starts dest (str): the name of the state where the transition ends
11.69897
8.799541
1.329498
if dest not in self._states or source not in self._states: raise NotValidState elif dest not in self._transitions[source]: raise NotValidTransition return True
def is_valid_transition(self, source: str, dest: str) -> bool
Checks if a transition is registered in the FSM Args: source (str): the source state name dest (str): the destination state name Returns: bool: whether the transition is valid or not
4.941098
4.500643
1.097865
graph = "digraph finite_state_machine { rankdir=LR; node [fixedsize=true];" for origin, dest in self._transitions.items(): origin = origin.replace(" ", "_") for d in dest: d = d.replace(" ", "_") graph += "{0} -> {1};".format(origin, d) graph += "}" return graph
def to_graphviz(self) -> str
Converts the FSM behaviour structure to Graphviz syntax Returns: str: the graph in Graphviz syntax
3.353281
3.239137
1.035239
'''Retrieve a window given its name or function handle. Parameters ---------- name_or_function : str or callable If a function, returns `name_or_function(**kwargs)`. If a string, and it matches the name of one of the defined filter functions, the corresponding function is called with `**kwargs`. If a string, and it matches the name of a pre-computed filter, the corresponding filter is retrieved, and kwargs is ignored. Valid pre-computed filter names are: - 'kaiser_fast' - 'kaiser_best' Returns ------- half_window : np.ndarray The right wing of the interpolation filter precision : int > 0 The number of samples between zero-crossings of the filter rolloff : float > 0 The roll-off frequency of the filter as a fraction of Nyquist Raises ------ NotImplementedError If `name_or_function` cannot be found as a filter. ''' if name_or_function in FILTER_FUNCTIONS: return getattr(sys.modules[__name__], name_or_function)(**kwargs) elif six.callable(name_or_function): return name_or_function(**kwargs) else: try: return load_filter(name_or_function) except (IOError, ValueError): raise NotImplementedError('Cannot load filter definition for ' '{}'.format(name_or_function))
def get_filter(name_or_function, **kwargs)
Retrieve a window given its name or function handle. Parameters ---------- name_or_function : str or callable If a function, returns `name_or_function(**kwargs)`. If a string, and it matches the name of one of the defined filter functions, the corresponding function is called with `**kwargs`. If a string, and it matches the name of a pre-computed filter, the corresponding filter is retrieved, and kwargs is ignored. Valid pre-computed filter names are: - 'kaiser_fast' - 'kaiser_best' Returns ------- half_window : np.ndarray The right wing of the interpolation filter precision : int > 0 The number of samples between zero-crossings of the filter rolloff : float > 0 The roll-off frequency of the filter as a fraction of Nyquist Raises ------ NotImplementedError If `name_or_function` cannot be found as a filter.
3.794114
1.409223
2.692345
'''Retrieve a pre-computed filter. Parameters ---------- filter_name : str The key of the filter, e.g., 'kaiser_fast' Returns ------- half_window : np.ndarray The right wing of the interpolation filter precision : int > 0 The number of samples between zero-crossings of the fitler rolloff : float > 0 The roll-off frequency of the filter, as a fraction of Nyquist ''' fname = os.path.join('data', os.path.extsep.join([filter_name, 'npz'])) data = np.load(pkg_resources.resource_filename(__name__, fname)) return data['half_window'], data['precision'], data['rolloff']
def load_filter(filter_name)
Retrieve a pre-computed filter. Parameters ---------- filter_name : str The key of the filter, e.g., 'kaiser_fast' Returns ------- half_window : np.ndarray The right wing of the interpolation filter precision : int > 0 The number of samples between zero-crossings of the filter rolloff : float > 0 The roll-off frequency of the filter, as a fraction of Nyquist
4.649641
1.752094
2.653762
signature = inspect.signature(func) return [k for k, v in signature.parameters.items() if v.default is not inspect.Parameter.empty or v.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD]
def default_values_of(func)
Return the defaults of the function `func`.
3.056693
2.722693
1.122672
defaults = default_values_of(func) args = arguments_of(func) if defaults: args = args[:-len(defaults)] return args
def required_arguments(func)
Return all arguments of a function that do not have a default value.
5.990284
4.778828
1.253505
merged_style = merge_styles([DEFAULT_STYLE, style]) validator = build_validator(validate) def get_prompt_tokens(): return [("class:qmark", qmark), ("class:question", ' {} '.format(message))] p = PromptSession(get_prompt_tokens, style=merged_style, validator=validator, **kwargs) p.default_buffer.reset(Document(default)) return Question(p.app)
def text(message: Text, default: Text = "", validate: Union[Type[Validator], Callable[[Text], bool], None] = None, # noqa qmark: Text = DEFAULT_QUESTION_PREFIX, style: Optional[Style] = None, **kwargs: Any) -> Question
Prompt the user to enter a free text message. This question type can be used to prompt the user for some text input. Args: message: Question text default: Default value will be returned if the user just hits enter. validate: Require the entered value to pass a validation. The value can not be submitted until the validator accepts it (e.g. to check minimum password length). This can either be a function accepting the input and returning a boolean, or a class reference to a subclass of the prompt toolkit Validator class. qmark: Question prefix displayed in front of the question. By default this is a `?` style: A custom color and style for the question parts. You can configure colors as well as font types for different elements. Returns: Question: Question instance, ready to be prompted (using `.ask()`).
4.609854
5.325562
0.865609
if self.should_skip_question: return self.default try: sys.stdout.flush() return await self.unsafe_ask_async(patch_stdout) except KeyboardInterrupt: print("\n{}\n".format(kbi_msg)) return None
async def ask_async(self, patch_stdout: bool = False, kbi_msg: str = DEFAULT_KBI_MESSAGE) -> Any
Ask the question using asyncio and return user response.
5.184104
4.548072
1.139847
if self.should_skip_question: return self.default try: return self.unsafe_ask(patch_stdout) except KeyboardInterrupt: print("\n{}\n".format(kbi_msg)) return None
def ask(self, patch_stdout: bool = False, kbi_msg: str = DEFAULT_KBI_MESSAGE) -> Any
Ask the question synchronously and return user response.
5.516662
4.629075
1.191742
if patch_stdout: with prompt_toolkit.patch_stdout.patch_stdout(): return self.application.run() else: return self.application.run()
def unsafe_ask(self, patch_stdout: bool = False) -> Any
Ask the question synchronously and return user response. Does not catch keyboard interrupts.
4.605752
3.904461
1.179613
self.should_skip_question = condition self.default = default return self
def skip_if(self, condition: bool, default: Any = None) -> 'Question'
Skip the question if flag is set and return the default instead.
6.019691
4.901636
1.228099
if not utils.ACTIVATED_ASYNC_MODE: await utils.activate_prompt_toolkit_async_mode() if patch_stdout: # with prompt_toolkit.patch_stdout.patch_stdout(): return await self.application.run_async().to_asyncio_future() else: return await self.application.run_async().to_asyncio_future()
async def unsafe_ask_async(self, patch_stdout: bool = False) -> Any
Ask the question using asyncio and return user response. Does not catch keyboard interrupts.
5.662023
5.41754
1.045128
default_container = ps.layout.container default_buffer_window = \ default_container.get_children()[0].content.get_children()[1].content assert isinstance(default_buffer_window, Window) # this forces the main window to stay as small as possible, avoiding # empty lines in selections default_buffer_window.dont_extend_height = Always()
def _fix_unecessary_blank_lines(ps: PromptSession) -> None
This is a fix for additional empty lines added by prompt toolkit. This assumes the layout of the default session doesn't change, if it does, this needs an update.
10.141758
8.323491
1.21845
ps = PromptSession(get_prompt_tokens, reserve_space_for_menu=0, **kwargs) _fix_unecessary_blank_lines(ps) return Layout(HSplit([ ps.layout.container, ConditionalContainer( Window(ic), filter=~IsDone() ) ]))
def create_inquirer_layout( ic: InquirerControl, get_prompt_tokens: Callable[[], List[Tuple[Text, Text]]], **kwargs) -> Layout
Create a layout combining question and inquirer selection.
6.534598
6.083591
1.074135
if isinstance(c, Choice): return c elif isinstance(c, str): return Choice(c, c) else: return Choice(c.get('name'), c.get('value'), c.get('disabled', None), c.get('checked'), c.get('key'))
def build(c: Union[Text, 'Choice', Dict[Text, Any]]) -> 'Choice'
Create a choice object from different representations.
2.634748
2.66104
0.99012
return text.text(message, default, validate, qmark, style, is_password=True, **kwargs)
def password(message: Text, default: Text = "", validate: Union[Type[Validator], Callable[[Text], bool], None] = None, # noqa qmark: Text = DEFAULT_QUESTION_PREFIX, style: Optional[Style] = None, **kwargs: Any) -> Question
Question the user to enter a secret text not displayed in the prompt. This question type can be used to prompt the user for information that should not be shown in the command line. The typed text will be replaced with `*`. Args: message: Question text default: Default value will be returned if the user just hits enter. validate: Require the entered value to pass a validation. The value can not be submitted until the validator accepts it (e.g. to check minimum password length). This can either be a function accepting the input and returning a boolean, or a class reference to a subclass of the prompt toolkit Validator class. qmark: Question prefix displayed in front of the question. By default this is a `?` style: A custom color and style for the question parts. You can configure colors as well as font types for different elements. Returns: Question: Question instance, ready to be prompted (using `.ask()`).
6.128166
7.630172
0.803149
if choices is None or len(choices) == 0: raise ValueError('A list of choices needs to be provided.') if use_shortcuts and len(choices) > len(InquirerControl.SHORTCUT_KEYS): raise ValueError('A list with shortcuts supports a maximum of {} ' 'choices as this is the maximum number ' 'of keyboard shortcuts that are available. You' 'provided {} choices!' ''.format(len(InquirerControl.SHORTCUT_KEYS), len(choices))) merged_style = merge_styles([DEFAULT_STYLE, style]) ic = InquirerControl(choices, default, use_indicator=use_indicator, use_shortcuts=use_shortcuts) def get_prompt_tokens(): # noinspection PyListCreation tokens = [("class:qmark", qmark), ("class:question", ' {} '.format(message))] if ic.is_answered: tokens.append(("class:answer", ' ' + ic.get_pointed_at().title)) else: if use_shortcuts: tokens.append(("class:instruction", ' (Use shortcuts)')) else: tokens.append(("class:instruction", ' (Use arrow keys)')) return tokens layout = common.create_inquirer_layout(ic, get_prompt_tokens, **kwargs) bindings = KeyBindings() @bindings.add(Keys.ControlQ, eager=True) @bindings.add(Keys.ControlC, eager=True) def _(event): event.app.exit(exception=KeyboardInterrupt, style='class:aborting') if use_shortcuts: # add key bindings for choices for i, c in enumerate(ic.choices): if isinstance(c, Separator): continue # noinspection PyShadowingNames def _reg_binding(i, keys): # trick out late evaluation with a "function factory": # https://stackoverflow.com/a/3431699 @bindings.add(keys, eager=True) def select_choice(event): ic.pointed_at = i _reg_binding(i, c.shortcut_key) else: @bindings.add(Keys.Down, eager=True) @bindings.add("j", eager=True) def move_cursor_down(event): ic.select_next() while not ic.is_selection_valid(): ic.select_next() @bindings.add(Keys.Up, eager=True) @bindings.add("k", eager=True) def move_cursor_up(event): ic.select_previous() while not ic.is_selection_valid(): ic.select_previous() @bindings.add(Keys.ControlM, eager=True) def set_answer(event): 
ic.is_answered = True event.app.exit(result=ic.get_pointed_at().value) @bindings.add(Keys.Any) def other(event): pass return Question(Application( layout=layout, key_bindings=bindings, style=merged_style, **kwargs ))
def select(message: Text, choices: List[Union[Text, Choice, Dict[Text, Any]]], default: Optional[Text] = None, qmark: Text = DEFAULT_QUESTION_PREFIX, style: Optional[Style] = None, use_shortcuts: bool = False, use_indicator: bool = False, **kwargs: Any) -> Question
Prompt the user to select one item from the list of choices. The user can only select one option. Args: message: Question text choices: Items shown in the selection, this can contain `Choice` or `Separator` objects or simple items as strings. Passing `Choice` objects, allows you to configure the item more (e.g. preselecting it or disabling it). default: Default return value (single value). qmark: Question prefix displayed in front of the question. By default this is a `?` style: A custom color and style for the question parts. You can configure colors as well as font types for different elements. use_indicator: Flag to enable the small indicator in front of the list highlighting the current location of the selection cursor. use_shortcuts: Allow the user to select items from the list using shortcuts. The shortcuts will be displayed in front of the list items. Returns: Question: Question instance, ready to be prompted (using `.ask()`).
3.01674
3.039133
0.992632
return Form(*(FormField(k, q) for k, q in kwargs.items()))
def form(**kwargs: Question)
Create a form with multiple questions. The parameter name of a question will be the key for the answer in the returned dict.
9.520675
13.125999
0.72533
if isinstance(questions, dict): questions = [questions] answers = answers or {} for question_config in questions: # import the question if 'type' not in question_config: raise PromptParameterException('type') if 'name' not in question_config: raise PromptParameterException('name') choices = question_config.get('choices') if choices is not None and callable(choices): question_config['choices'] = choices(answers) _kwargs = kwargs.copy() _kwargs.update(question_config) _type = _kwargs.pop('type') _filter = _kwargs.pop('filter', None) name = _kwargs.pop('name') when = _kwargs.pop('when', None) if true_color: _kwargs["color_depth"] = ColorDepth.TRUE_COLOR try: if when: # at least a little sanity check! if callable(question_config['when']): try: if not question_config['when'](answers): continue except Exception as e: raise ValueError("Problem in 'when' check of {} " "question: {}".format(name, e)) else: raise ValueError("'when' needs to be function that " "accepts a dict argument") if _filter: # at least a little sanity check! if not callable(_filter): raise ValueError("'filter' needs to be function that " "accepts an argument") if callable(question_config.get('default')): _kwargs['default'] = question_config['default'](answers) create_question_func = prompt_by_name(_type) if not create_question_func: raise ValueError("No question type '{}' found. " "Known question types are {}." "".format(_type, ", ".join(AVAILABLE_PROMPTS))) missing_args = list(utils.missing_arguments(create_question_func, _kwargs)) if missing_args: raise PromptParameterException(missing_args[0]) question = create_question_func(**_kwargs) answer = question.unsafe_ask(patch_stdout) if answer is not None: if _filter: try: answer = _filter(answer) except Exception as e: raise ValueError("Problem processing 'filter' of {} " "question: {}".format(name, e)) answers[name] = answer except KeyboardInterrupt: print('') print(kbi_msg) print('') return {} return answers
def prompt(questions: List[Dict[Text, Any]], answers: Optional[Dict[Text, Any]] = None, patch_stdout: bool = False, true_color: bool = False, kbi_msg: Text = DEFAULT_KBI_MESSAGE, **kwargs)
Prompt the user for input on all the questions.
2.712409
2.668319
1.016524
merged_style = merge_styles([DEFAULT_STYLE, style]) status = {'answer': None} def get_prompt_tokens(): tokens = [] tokens.append(("class:qmark", qmark)) tokens.append(("class:question", ' {} '.format(message))) if status['answer'] is not None: answer = ' {}'.format(YES if status['answer'] else NO) tokens.append(("class:answer", answer)) else: instruction = ' {}'.format(YES_OR_NO if default else NO_OR_YES) tokens.append(("class:instruction", instruction)) return to_formatted_text(tokens) bindings = KeyBindings() @bindings.add(Keys.ControlQ, eager=True) @bindings.add(Keys.ControlC, eager=True) def _(event): event.app.exit(exception=KeyboardInterrupt, style='class:aborting') @bindings.add('n') @bindings.add('N') def key_n(event): status['answer'] = False event.app.exit(result=False) @bindings.add('y') @bindings.add('Y') def key_y(event): status['answer'] = True event.app.exit(result=True) @bindings.add(Keys.ControlM, eager=True) def set_answer(event): status['answer'] = default event.app.exit(result=default) @bindings.add(Keys.Any) def other(event): pass return Question(PromptSession(get_prompt_tokens, key_bindings=bindings, style=merged_style, **kwargs).app)
def confirm(message: Text, default: bool = True, qmark: Text = DEFAULT_QUESTION_PREFIX, style: Optional[Style] = None, **kwargs: Any) -> Question
Prompt the user to confirm or reject. This question type can be used to prompt the user for a confirmation of a yes-or-no question. If the user just hits enter, the default value will be returned. Args: message: Question text default: Default value will be returned if the user just hits enter. qmark: Question prefix displayed in front of the question. By default this is a `?` style: A custom color and style for the question parts. You can configure colors as well as font types for different elements. Returns: Question: Question instance, ready to be prompted (using `.ask()`).
2.543222
2.642639
0.96238
return select.select(message, choices, default, qmark, style, use_shortcuts=True, **kwargs)
def rawselect(message: Text, choices: List[Union[Text, Choice, Dict[Text, Any]]], default: Optional[Text] = None, qmark: Text = DEFAULT_QUESTION_PREFIX, style: Optional[Style] = None, **kwargs: Any) -> Question
Ask the user to select one item from a list of choices using shortcuts. The user can only select one option. Args: message: Question text choices: Items shown in the selection, this can contain `Choice` or `Separator` objects or simple items as strings. Passing `Choice` objects, allows you to configure the item more (e.g. preselecting it or disabling it). default: Default return value (single value). qmark: Question prefix displayed in front of the question. By default this is a `?` style: A custom color and style for the question parts. You can configure colors as well as font types for different elements. Returns: Question: Question instance, ready to be prompted (using `.ask()`).
6.463267
10.950788
0.59021
apps = get_installed_apps() connection = self.connection.connection.alias keyspace = self.connection.connection.keyspace for app in apps: self._cql_models[app.__name__] = get_cql_models( app, connection=connection, keyspace=keyspace)
def _discover_models(self)
Return a dict containing a list of cassandra.cqlengine.Model classes within installed App.
5.874783
4.261144
1.378687
all_models = list(chain.from_iterable(self.cql_models.values())) tables = [model.column_family_name(include_keyspace=False) for model in all_models] return tables
def django_table_names(self, only_existing=False, **kwargs)
Returns a list of all table names that have associated cqlengine models and are present in settings.INSTALLED_APPS.
5.750349
4.816297
1.193936
# Avoid migration code being executed if cursor: return [] connection = self.connection.connection keyspace_name = connection.keyspace if not connection.cluster.schema_metadata_enabled and \ keyspace_name not in connection.cluster.metadata.keyspaces: connection.cluster.refresh_schema_metadata() keyspace = connection.cluster.metadata.keyspaces[keyspace_name] return keyspace.tables
def table_names(self, cursor=None, **kwargs)
Returns all table names in current keyspace
4.193095
3.77596
1.110471
for models in self.connection.introspection.cql_models.values(): for model in models: model.__keyspace__ = keyspace
def set_models_keyspace(self, keyspace)
Set keyspace for all connection models
7.394964
6.096973
1.212891
return self.model( session_key=self._get_or_create_session_key(), session_data=self.encode(data), expire_date=self.get_expiry_date(), )
def create_model_instance(self, data)
Return a new instance of the session model object, which represents the current session state. Intended to be used for saving the session data to the database. :param data:
4.643691
3.955486
1.173987
if self.session_key is None: return self.create() data = self._get_session(no_load=must_create) obj = self.create_model_instance(data) obj.save()
def save(self, must_create=False)
Saves the current session data to the database. If 'must_create' is True, a database error will be raised if the saving operation doesn't create a *new* entry (as opposed to possibly updating an existing entry). :param must_create:
5.597275
6.774402
0.826239
from importlib import import_module for app_name in settings.INSTALLED_APPS: try: import_module('.management', app_name) except SystemError: # We get SystemError if INSTALLED_APPS contains the # name of a class rather than a module pass except ImportError as exc: # This is slightly hackish. We want to ignore ImportErrors # if the "management" module itself is missing -- but we don't # want to ignore the exception if the management module exists # but raises an ImportError for some reason. The only way we # can do this is to check the text of the exception. Note that # we're a bit broad in how we check the text, because different # Python implementations may not use the same text. # CPython uses the text "No module named management" # PyPy uses "No module named myproject.myapp.management" msg = exc.args[0] if not msg.startswith('No module named') \ or 'management' not in msg: raise
def _import_management()
Import the 'management' module within each installed app, to register dispatcher events.
4.972851
4.73415
1.050421
if django.VERSION >= (1, 7): from django.apps import apps return [a.models_module for a in apps.get_app_configs() if a.models_module is not None] else: from django.db import models return models.get_apps()
def get_installed_apps()
Return list of all installed apps
2.332401
2.257026
1.033396
from .models import DjangoCassandraModel models = [] single_cassandra_connection = len(list(get_cassandra_connections())) == 1 is_default_connection = connection == DEFAULT_DB_ALIAS or \ single_cassandra_connection for name, obj in inspect.getmembers(app): cql_model_types = ( cqlengine.models.Model, DjangoCassandraModel ) if ( inspect.isclass(obj) and issubclass(obj, cql_model_types) and not obj.__abstract__ ): if obj.__connection__ == connection or \ (obj.__connection__ is None and is_default_connection) or \ obj.__connection__ is None and obj.__keyspace__ is not None and obj.__keyspace__ == keyspace: models.append(obj) return models
def get_cql_models(app, connection=None, keyspace=None)
:param app: django models module :param connection: connection name :param keyspace: keyspace :return: list of all cassandra.cqlengine.Model within app that should be synced to keyspace.
3.177635
3.112829
1.020819
from django.db import connections for alias in connections: engine = connections[alias].settings_dict.get('ENGINE', '') if engine == 'django_cassandra_engine': yield alias, connections[alias]
def get_cassandra_connections()
:return: List of tuples (db_alias, connection) for all cassandra connections in DATABASES dict.
3.332091
2.744583
1.214061
for alias, conn in get_cassandra_connections(): if conn.connection.default: return alias, conn return list(get_cassandra_connections())[0]
def get_default_cassandra_connection()
Return first default cassandra connection :return:
6.21391
6.621999
0.938374
for _alias, connection in get_cassandra_connections(): if alias is not None: if alias == _alias: return connection elif name is not None: if name == connection.settings_dict['NAME']: return connection else: return connection
def get_cassandra_connection(alias=None, name=None)
:return: cassandra connection matching alias or name or just first found.
2.764229
2.679268
1.03171
self._change_cassandra_engine_name('django.db.backends.dummy') try: super(Command, self).handle(*args, **options) finally: self._change_cassandra_engine_name('django_cassandra_engine')
def handle(self, *args, **options)
Pretend django_cassandra_engine to be dummy database backend with no support for migrations.
4.243947
2.478718
1.712154
for table in tables: qs = "TRUNCATE {}".format(table) self.connection.connection.execute(qs) return []
def sql_flush(self, style, tables, sequences, allow_cascade=False)
Truncate all existing tables in current keyspace. :returns: an empty list
5.353479
6.446927
0.830392
pk_field_names = tuple(f.name for f in model._get_primary_key_columns()) def append_field(field_name): if field_name not in real_field_names: real_field_names.append(field_name) real_field_names = [] for name in field_names: if name == 'pk': for real_pk_field_name in pk_field_names: append_field(real_pk_field_name) elif name == '-pk': for real_pk_field_name in pk_field_names: append_field('-' + real_pk_field_name) else: append_field(name) return real_field_names
def convert_pk_field_names_to_real(model, field_names)
Convert field names including 'pk' to the real field names: >>> convert_pk_field_names_to_real(['pk', 'another_field']) ['real_pk_field', 'another_field']
2.041292
2.218511
0.920118
getattr(self, self._private_fields_name).append(field) self._expire_cache(reverse=True) self._expire_cache(reverse=False)
def add_field(self, field, **kwargs)
Add each field as a private field.
8.160274
6.540664
1.247622
methods_to_add = ( django_field_methods.value_from_object, django_field_methods.value_to_string, django_field_methods.get_attname, django_field_methods.get_cache_name, django_field_methods.pre_save, django_field_methods.get_prep_value, django_field_methods.get_choices, django_field_methods.get_choices_default, django_field_methods.save_form_data, django_field_methods.formfield, django_field_methods.get_db_prep_value, django_field_methods.get_db_prep_save, django_field_methods.db_type_suffix, django_field_methods.select_format, django_field_methods.get_internal_type, django_field_methods.get_attname_column, django_field_methods.check, django_field_methods._check_field_name, django_field_methods._check_db_index, django_field_methods.deconstruct, django_field_methods.run_validators, django_field_methods.clean, django_field_methods.get_db_converters, django_field_methods.get_prep_lookup, django_field_methods.get_db_prep_lookup, django_field_methods.get_filter_kwargs_for_object, django_field_methods.set_attributes_from_name, django_field_methods.db_parameters, django_field_methods.get_pk_value_on_save, django_field_methods.get_col, ) for name, cql_column in six.iteritems(self._defined_columns): self._set_column_django_attributes(cql_column=cql_column, name=name) for method in methods_to_add: try: method_name = method.func_name except AttributeError: # python 3 method_name = method.__name__ new_method = six.create_bound_method(method, cql_column) setattr(cql_column, method_name, new_method)
def _give_columns_django_field_attributes(self)
Add Django Field attributes to each cqlengine.Column instance. So that the Django Options class may interact with it as if it were a Django Field.
2.116593
2.05044
1.032263
if name == 'pk': return cls._meta.get_field(cls._meta.pk.name) return cls._columns[name]
def _get_column(cls, name)
Based on cqlengine.models.BaseModel._get_column. But to work with 'pk'
5.023652
4.096198
1.226418
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', None))) if not next: next = request.path return next
def _get_next(request)
The part that's the least straightforward about views in this module is how they determine their redirects after they have finished computation. In short, they will try and determine the next place to go in the following order: 1. If there is a variable named ``next`` in the *POST* parameters, the view will redirect to that variable's value. 2. If there is a variable named ``next`` in the *GET* parameters, the view will redirect to that variable's value. 3. If Django can determine the previous page from the HTTP headers, the view will redirect to that previous page.
2.780921
3.189746
0.871831
EXIF_ORIENTATION_STEPS = { 1: [], 2: ['FLIP_LEFT_RIGHT'], 3: ['ROTATE_180'], 4: ['FLIP_TOP_BOTTOM'], 5: ['ROTATE_270', 'FLIP_LEFT_RIGHT'], 6: ['ROTATE_270'], 7: ['ROTATE_90', 'FLIP_LEFT_RIGHT'], 8: ['ROTATE_90'], } try: orientation = image._getexif()[0x0112] ops = EXIF_ORIENTATION_STEPS[orientation] except: ops = [] for method in ops: image = image.transpose(getattr(Image, method)) return image
def transpose_image(self, image)
Transpose based on EXIF information. Borrowed from django-imagekit: imagekit.processors.Transpose
2.056371
1.952463
1.053219
if isinstance(user_or_username, get_user_model()): user_or_username = get_username(user_or_username) key = six.u('%s_%s_%s') % (prefix, user_or_username, size) return six.u('%s_%s') % (slugify(key)[:100], hashlib.md5(force_bytes(key)).hexdigest())
def get_cache_key(user_or_username, size, prefix)
Returns a cache key consisten of a username and image size.
2.875692
2.90023
0.991539
if not settings.AVATAR_CACHE_ENABLED: def decorator(func): return func return decorator def decorator(func): def cached_func(user, size=None, **kwargs): prefix = func.__name__ cached_funcs.add(prefix) key = get_cache_key(user, size or default_size, prefix=prefix) result = cache.get(key) if result is None: result = func(user, size or default_size, **kwargs) cache_set(key, result) return result return cached_func return decorator
def cache_result(default_size=settings.AVATAR_DEFAULT_SIZE)
Decorator to cache the result of functions that take a ``user`` and a ``size`` value.
2.392862
2.368182
1.010421
sizes = set(settings.AVATAR_AUTO_GENERATE_SIZES) if size is not None: sizes.add(size) for prefix in cached_funcs: for size in sizes: cache.delete(get_cache_key(user, size, prefix))
def invalidate_cache(user, size=None)
Function to be called when saving or changing an user's avatars.
4.348341
3.992032
1.089255
alt = six.text_type(user) url = reverse('avatar_render_primary', kwargs={'user': user, 'size': size}) return ( % (url, alt, size, size))
def primary_avatar(user, size=settings.AVATAR_DEFAULT_SIZE)
This tag tries to get the default avatar for a user without doing any db requests. It achieve this by linking to a special view that will do all the work for us. If that special view is then cached by a CDN for instance, we will avoid many db calls.
9.291212
10.440113
0.889953
''' Generate a public token for sandbox testing. :param str institution_id: :param [str] initial_products: :param str webhook: ''' options = _options or {} if webhook is not None: options['webhook'] = webhook transaction_options = {} transaction_options.update(options.get('transactions', {})) if transactions__start_date is not None: transaction_options['start_date'] = transactions__start_date if transactions__end_date is not None: transaction_options['end_date'] = transactions__end_date if transaction_options: options['transactions'] = transaction_options return self.client.post_public_key('/sandbox/public_token/create', { 'institution_id': institution_id, 'initial_products': initial_products, 'options': options, })
def create(self, institution_id, initial_products, _options=None, webhook=None, transactions__start_date=None, transactions__end_date=None, )
Generate a public token for sandbox testing. :param str institution_id: :param [str] initial_products: :param str webhook:
2.442744
1.829457
1.335229
''' Return accounts and transactions for an item. (`HTTP docs <https://plaid.com/docs/api/#transactions>`__) The transactions in the response are paginated -- compare the number of transactions received so far against response['total_transactions'] to determine whether to fetch another page. :param str access_token: :param str start_date: The earliest date for transactions. :param str end_date: The latest date for transactions. :param [str] account_ids: A list of account_ids to retrieve for the item. Optional. :param int count: The number of transactions to fetch. Optional. :param int offset: The number of transactions to skip from the beginning of the fetch. Optional. All date should be formatted as ``YYYY-MM-DD``. ''' options = _options or {} if account_ids is not None: options['account_ids'] = account_ids if count is not None: options['count'] = count if offset is not None: options['offset'] = offset return self.client.post('/transactions/get', { 'access_token': access_token, 'start_date': start_date, 'end_date': end_date, 'options': options, })
def get(self, access_token, start_date, end_date, _options=None, account_ids=None, count=None, offset=None, )
Return accounts and transactions for an item. (`HTTP docs <https://plaid.com/docs/api/#transactions>`__) The transactions in the response are paginated -- compare the number of transactions received so far against response['total_transactions'] to determine whether to fetch another page. :param str access_token: :param str start_date: The earliest date for transactions. :param str end_date: The latest date for transactions. :param [str] account_ids: A list of account_ids to retrieve for the item. Optional. :param int count: The number of transactions to fetch. Optional. :param int offset: The number of transactions to skip from the beginning of the fetch. Optional. All date should be formatted as ``YYYY-MM-DD``.
3.026066
1.259172
2.403218
''' Create an error of the right class from an API response. :param response dict Response JSON ''' cls = PLAID_ERROR_TYPE_MAP.get(response['error_type'], PlaidError) return cls(response['error_message'], response['error_type'], response['error_code'], response['display_message'], response['request_id'], response.get('causes'))
def from_response(response)
Create an error of the right class from an API response. :param response dict Response JSON
5.228945
2.966356
1.76275
''' Create an asset report. :param [str] access_tokens: A list of access tokens, one token for each Item to be included in the Asset Report. :param int days_requested: Days of transaction history requested to be included in the Asset Report. :param dict options: An optional dictionary. For more information on the options object, see the documentation site listed above. ''' options = options or {} return self.client.post('/asset_report/create', { 'access_tokens': access_tokens, 'days_requested': days_requested, 'options': options, })
def create(self, access_tokens, days_requested, options=None)
Create an asset report. :param [str] access_tokens: A list of access tokens, one token for each Item to be included in the Asset Report. :param int days_requested: Days of transaction history requested to be included in the Asset Report. :param dict options: An optional dictionary. For more information on the options object, see the documentation site listed above.
4.309103
1.461546
2.948319
''' Create a new, refreshed asset report based on an existing asset report. :param str asset_report_token: The existing Asset Report's asset report token. :param int days_requested: Days of transaction history requested to be included in the Asset Report. :param dict options: An optional dictionary. This is the same object used in `create`. ''' options = options or {} return self.client.post('/asset_report/refresh', { 'asset_report_token': asset_report_token, 'days_requested': days_requested, 'options': options, })
def refresh(self, asset_report_token, days_requested, options=None)
Create a new, refreshed asset report based on an existing asset report. :param str asset_report_token: The existing Asset Report's asset report token. :param int days_requested: Days of transaction history requested to be included in the Asset Report. :param dict options: An optional dictionary. This is the same object used in `create`.
3.839099
1.477182
2.598935
''' Retrieves an asset report. :param str asset_report_token: The asset report token for the asset report you created. :param bool include_insights: An optional boolean specifying whether we should retrieve the report as an Asset Report with Insights. For more, see https://plaid.com/docs/#retrieve-json-report-request. ''' return self.client.post('/asset_report/get', { 'asset_report_token': asset_report_token, 'include_insights': include_insights, })
def get(self, asset_report_token, include_insights=False)
Retrieves an asset report. :param str asset_report_token: The asset report token for the asset report you created. :param bool include_insights: An optional boolean specifying whether we should retrieve the report as an Asset Report with Insights. For more, see https://plaid.com/docs/#retrieve-json-report-request.
4.534155
1.555583
2.914762
'''Make a post request with client_id and secret key.''' post_data = { 'client_id': self.client_id, 'secret': self.secret, } post_data.update(data) return self._post(path, post_data, is_json)
def post(self, path, data, is_json=True)
Make a post request with client_id and secret key.
3.328311
2.469205
1.347928
'''Make a post request requiring no auth.''' return self._post(path, data, is_json)
def post_public(self, path, data, is_json=True)
Make a post request requiring no auth.
10.886977
5.342875
2.037663
'''Make a post request using a public key.''' post_data = { 'public_key': self.public_key } post_data.update(data) return self._post(path, post_data, is_json)
def post_public_key(self, path, data, is_json=True)
Make a post request using a public key.
3.385743
3.079648
1.099393
''' Fetch all Plaid institutions, using /institutions/all. :param int count: Number of institutions to fetch. :param int offset: Number of institutions to skip. ''' options = _options or {} return self.client.post('/institutions/get', { 'count': count, 'offset': offset, 'options': options, })
def get(self, count, offset=0, _options=None)
Fetch all Plaid institutions, using /institutions/all. :param int count: Number of institutions to fetch. :param int offset: Number of institutions to skip.
4.536448
2.090951
2.169562
''' Fetch a single institution by id. :param str institution_id: ''' options = _options or {} return self.client.post_public_key('/institutions/get_by_id', { 'institution_id': institution_id, 'options': options, })
def get_by_id(self, institution_id, _options=None)
Fetch a single institution by id. :param str institution_id:
4.881257
3.641219
1.340556
''' Search all institutions by name. :param str query: Query against the full list of institutions. :param [str] products: Filter FIs by available products. Optional. ''' options = _options or {} return self.client.post_public_key('/institutions/search', { 'query': query, 'products': products, 'options': options, })
def search(self, query, _options={}, products=None)
Search all institutions by name. :param str query: Query against the full list of institutions. :param [str] products: Filter FIs by available products. Optional.
7.58118
2.458407
3.083777
''' Retrieve account and routing numbers for checking and savings accounts. (`HTTP docs <https://plaid.com/docs/api/#auth>`__) :param str access_token: :param [str] account_ids: A list of account_ids to retrieve for the item. Optional. ''' options = _options or {} if account_ids is not None: options['account_ids'] = account_ids return self.client.post('/auth/get', { 'access_token': access_token, 'options': options, })
def get(self, access_token, _options=None, account_ids=None)
Retrieve account and routing numbers for checking and savings accounts. (`HTTP docs <https://plaid.com/docs/api/#auth>`__) :param str access_token: :param [str] account_ids: A list of account_ids to retrieve for the item. Optional.
4.814608
1.817686
2.648756
if isinstance(file_path_or_buffer, six.string_types): with open(file_path_or_buffer, 'rb') as f: return __file_to_dataframe(f, schema, **kwargs) else: return __file_to_dataframe(file_path_or_buffer, schema, **kwargs)
def read_avro(file_path_or_buffer, schema=None, **kwargs)
Avro file reader. Args: file_path_or_buffer: Input file path or file-like object. schema: Avro schema. **kwargs: Keyword argument to pandas.DataFrame.from_records. Returns: Class of pd.DataFrame.
1.815956
2.035071
0.892331
if schema is None: schema = __schema_infer(df) open_mode = 'wb' if not append else 'a+b' if isinstance(file_path_or_buffer, six.string_types): with open(file_path_or_buffer, open_mode) as f: fastavro.writer(f, schema=schema, records=df.to_dict('records'), codec=codec) else: fastavro.writer(file_path_or_buffer, schema=schema, records=df.to_dict('records'), codec=codec)
def to_avro(file_path_or_buffer, df, schema=None, codec='null', append=False)
Avro file writer. Args: file_path_or_buffer: Output file path or file-like object. df: pd.DataFrame. schema: Dict of Avro schema. If it's set None, inferring schema. append: Boolean to control if will append to existing file codec: A string indicating the compression codec to use. Default is no compression ("null"), other acceptable values are "snappy" and "deflate". You must have python-snappy installed to use the snappy codec.
2.098104
2.157275
0.972572
self.n += w if len(self) == 0: self._add_centroid(Centroid(x, w)) return S = self._find_closest_centroids(x) while len(S) != 0 and w > 0: j = choice(list(range(len(S)))) c_j = S[j] q = self._compute_centroid_quantile(c_j) # This filters the out centroids that do not satisfy the second part # of the definition of S. See original paper by Dunning. if c_j.count + w > self._threshold(q): S.pop(j) continue delta_w = min(self._threshold(q) - c_j.count, w) self._update_centroid(c_j, x, delta_w) w -= delta_w S.pop(j) if w > 0: self._add_centroid(Centroid(x, w)) if len(self) > self.K / self.delta: self.compress() return
def update(self, x, w=1)
Update the t-digest with value x and weight w.
4.352075
4.085862
1.065155
for x in values: self.update(x, w) self.compress() return
def batch_update(self, values, w=1)
Update the t-digest with an iterable of values. This assumes all points have the same weight.
6.70834
5.134424
1.306542
if not (0 <= p <= 100): raise ValueError("p must be between 0 and 100, inclusive.") p = float(p)/100. p *= self.n c_i = None t = 0 if p == 0: return self.C.min_item()[1].mean for i, key in enumerate(self.C.keys()): c_i_plus_one = self.C[key] if i == 0: k = c_i_plus_one.count / 2 else: k = (c_i_plus_one.count + c_i.count) / 2. if p < t + k: z1 = p - t z2 = t + k - p return (c_i.mean * z2 + c_i_plus_one.mean * z1) / (z1 + z2) c_i = c_i_plus_one t += k return self.C.max_item()[1].mean
def percentile(self, p)
Computes the percentile of a specific value in [0,100].
2.855312
2.830459
1.00878
t = 0 N = float(self.n) if len(self) == 1: # only one centroid return int(x >= self.C.min_key()) for i, key in enumerate(self.C.keys()): c_i = self.C[key] if i == len(self) - 1: delta = (c_i.mean - self.C.prev_item(key)[1].mean) / 2. else: delta = (self.C.succ_item(key)[1].mean - c_i.mean) / 2. z = max(-1, (x - c_i.mean) / delta) if z < 1: return t / N + c_i.count / N * (z + 1) / 2 t += c_i.count return 1
def cdf(self, x)
Computes the cdf of a specific value, ie. computes F(x) where F denotes the CDF of the distribution.
4.005075
4.074811
0.982886
if not (p1 < p2): raise ValueError("p1 must be between 0 and 100 and less than p2.") min_count = p1 / 100. * self.n max_count = p2 / 100. * self.n trimmed_sum = trimmed_count = curr_count = 0 for i, c in enumerate(self.C.values()): next_count = curr_count + c.count if next_count <= min_count: curr_count = next_count continue count = c.count if curr_count < min_count: count = next_count - min_count if next_count > max_count: count -= next_count - max_count trimmed_sum += count * c.mean trimmed_count += count if next_count >= max_count: break curr_count = next_count if trimmed_count == 0: return 0 return trimmed_sum / trimmed_count
def trimmed_mean(self, p1, p2)
Computes the mean of the distribution between the two percentiles p1 and p2. This is a modified algorithm than the one presented in the original t-Digest paper.
2.634289
2.495607
1.05557
centroids = [] for key in self.C.keys(): tree_values = self.C.get_value(key) centroids.append({'m':tree_values.mean, 'c':tree_values.count}) return centroids
def centroids_to_list(self)
Returns a Python list of the TDigest object's Centroid values.
5.07012
4.215997
1.202591
return {'n':self.n, 'delta':self.delta, 'K':self.K, 'centroids':self.centroids_to_list()}
def to_dict(self)
Returns a Python dictionary of the TDigest and internal Centroid values. Or use centroids_to_list() for a list of only the Centroid values.
6.464594
3.163805
2.043297
self.delta = dict_values.get('delta', self.delta) self.K = dict_values.get('K', self.K) self.update_centroids_from_list(dict_values['centroids']) return self
def update_from_dict(self, dict_values)
Updates TDigest object with dictionary values. The digest delta and K values are optional if you would like to update them, but the n value is not required because it is computed from the centroid weights. For example, you can initalize a new TDigest: digest = TDigest() Then load dictionary values into the digest: digest.update_from_dict({'K': 25, 'delta': 0.01, 'centroids': [{'c': 1.0, 'm': 1.0}, {'c': 1.0, 'm': 2.0}, {'c': 1.0, 'm': 3.0}]}) Or update an existing digest where the centroids will be appropriately merged: digest = TDigest() digest.update(1) digest.update(2) digest.update(3) digest.update_from_dict({'K': 25, 'delta': 0.01, 'centroids': [{'c': 1.0, 'm': 1.0}, {'c': 1.0, 'm': 2.0}, {'c': 1.0, 'm': 3.0}]}) Resulting in the digest having merged similar centroids by increasing their weight: {'K': 25, 'delta': 0.01, 'centroids': [{'c': 2.0, 'm': 1.0}, {'c': 2.0, 'm': 2.0}, {'c': 2.0, 'm': 3.0}], 'n': 6.0} Alternative you can provide only a list of centroid values with update_centroids_from_list()
3.33191
2.487129
1.339661
[self.update(value['m'], value['c']) for value in list_values] return self
def update_centroids_from_list(self, list_values)
Add or update Centroids from a Python list. Any existing centroids in the digest object are appropriately updated. Example: digest.update_centroids([{'c': 1.0, 'm': 1.0}, {'c': 1.0, 'm': 2.0}, {'c': 1.0, 'm': 3.0}])
8.512331
7.800449
1.091262
if isspmatrix(matrix): return sparse_allclose(matrix, matrix.transpose()) return np.allclose(matrix, matrix.T)
def is_undirected(matrix)
Determine if the matrix reprensents a directed graph :param matrix: The matrix to tested :returns: boolean
4.996195
9.54945
0.523192
for i in range(matrix.shape[0]): if isspmatrix(matrix): col = find(matrix[:,i])[2] else: col = matrix[:,i].T.tolist()[0] coeff = max( Fraction(c).limit_denominator().denominator for c in col ) matrix[:,i] *= coeff return matrix
def convert_to_adjacency_matrix(matrix)
Converts transition matrix into adjacency matrix :param matrix: The matrix to be converted :returns: adjacency matrix
6.583613
8.01639
0.821269
if isspmatrix(matrix): delta = dok_matrix(matrix.shape) else: delta = np.zeros(matrix.shape) for i in clusters : for j in permutations(i, 2): delta[j] = 1 return delta
def delta_matrix(matrix, clusters)
Compute delta matrix where delta[i,j]=1 if i and j belong to same cluster and i!=j :param matrix: The adjacency matrix :param clusters: The clusters returned by get_clusters :returns: delta matrix
3.713074
3.797228
0.977838
matrix = convert_to_adjacency_matrix(matrix) m = matrix.sum() if isspmatrix(matrix): matrix_2 = matrix.tocsr(copy=True) else : matrix_2 = matrix if is_undirected(matrix): expected = lambda i,j : (( matrix_2[i,:].sum() + matrix[:,i].sum() )* ( matrix[:,j].sum() + matrix_2[j,:].sum() )) else: expected = lambda i,j : ( matrix_2[i,:].sum()*matrix[:,j].sum() ) delta = delta_matrix(matrix, clusters) indices = np.array(delta.nonzero()) Q = sum( matrix[i, j] - expected(i, j)/m for i, j in indices.T )/m return Q
def modularity(matrix, clusters)
Compute the modularity :param matrix: The adjacency matrix :param clusters: The clusters returned by get_clusters :returns: modularity value
3.903414
4.197409
0.929958
# make a networkx graph from the adjacency matrix graph = nx.Graph(matrix) # map node to cluster id for colors cluster_map = {node: i for i, cluster in enumerate(clusters) for node in cluster} colors = [cluster_map[i] for i in range(len(graph.nodes()))] # if colormap not specified in kwargs, use a default if not kwargs.get("cmap", False): kwargs["cmap"] = cm.tab20 # draw nx.draw_networkx(graph, node_color=colors, **kwargs) axis("off") show(block=False)
def draw_graph(matrix, clusters, **kwargs)
Visualize the clustering :param matrix: The unprocessed adjacency matrix :param clusters: list of tuples containing clusters as returned by 'get_clusters' :param kwargs: Additional keyword arguments to be passed to networkx.draw_networkx
3.520653
3.531262
0.996996
c = np.abs(a - b) - rtol * np.abs(b) # noinspection PyUnresolvedReferences return c.max() <= atol
def sparse_allclose(a, b, rtol=1e-5, atol=1e-8)
Version of np.allclose for use with sparse matrices
3.903954
3.853239
1.013162
if isspmatrix(matrix): return normalize(matrix.power(power)) return normalize(np.power(matrix, power))
def inflate(matrix, power)
Apply cluster inflation to the given matrix by raising each element to the given power. :param matrix: The matrix to be inflated :param power: Cluster inflation parameter :returns: The inflated matrix
6.491347
8.233554
0.788402
if isspmatrix(matrix): return matrix ** power return np.linalg.matrix_power(matrix, power)
def expand(matrix, power)
Apply cluster expansion to the given matrix by raising the matrix to the given power. :param matrix: The matrix to be expanded :param power: Cluster expansion parameter :returns: The expanded matrix
4.865572
6.669152
0.729564
shape = matrix.shape assert shape[0] == shape[1], "Error, matrix is not square" if isspmatrix(matrix): new_matrix = matrix.todok() else: new_matrix = matrix.copy() for i in range(shape[0]): new_matrix[i, i] = loop_value if isspmatrix(matrix): return new_matrix.tocsc() return new_matrix
def add_self_loops(matrix, loop_value)
Add self-loops to the matrix by setting the diagonal to loop_value :param matrix: The matrix to add loops to :param loop_value: Value to use for self-loops :returns: The matrix with self-loops
2.413348
2.839844
0.849817
if isspmatrix(matrix): pruned = dok_matrix(matrix.shape) pruned[matrix >= threshold] = matrix[matrix >= threshold] pruned = pruned.tocsc() else: pruned = matrix.copy() pruned[pruned < threshold] = 0 # keep max value in each column. same behaviour for dense/sparse num_cols = matrix.shape[1] row_indices = matrix.argmax(axis=0).reshape((num_cols,)) col_indices = np.arange(num_cols) pruned[row_indices, col_indices] = matrix[row_indices, col_indices] return pruned
def prune(matrix, threshold)
Prune the matrix so that very small edges are removed. The maximum value in each column is never pruned. :param matrix: The matrix to be pruned :param threshold: The value below which edges will be removed :returns: The pruned matrix
2.890798
2.866343
1.008532
if isspmatrix(matrix1) or isspmatrix(matrix2): return sparse_allclose(matrix1, matrix2) return np.allclose(matrix1, matrix2)
def converged(matrix1, matrix2)
Check for convergence by determining if matrix1 and matrix2 are approximately equal. :param matrix1: The matrix to compare with matrix2 :param matrix2: The matrix to compare with matrix1 :returns: True if matrix1 and matrix2 approximately equal
2.978349
4.538366
0.65626
# Expansion matrix = expand(matrix, expansion) # Inflation matrix = inflate(matrix, inflation) return matrix
def iterate(matrix, expansion, inflation)
Run a single iteration (expansion + inflation) of the mcl algorithm :param matrix: The matrix to perform the iteration on :param expansion: Cluster expansion factor :param inflation: Cluster inflation factor
3.200221
4.009157
0.798228