Dataset schema (10k rows; ranges shown as min / max, or number of distinct classes):

    Unnamed: 0                  int64            0 / 10k
    repository_name             string lengths   7 / 54
    func_path_in_repository     string lengths   5 / 223
    func_name                   string lengths   1 / 134
    whole_func_string           string lengths   100 / 30.3k
    language                    string classes   1 value
    func_code_string            string lengths   100 / 30.3k
    func_code_tokens            string lengths   138 / 33.2k
    func_documentation_string   string lengths   1 / 15k
    func_documentation_tokens   string lengths   5 / 5.14k
    split_name                  string classes   1 value
    func_code_url               string lengths   91 / 315
6,200
biolink/biolink-model
metamodel/generators/pythongen.py
PythonGenerator.all_slots_for
def all_slots_for(self, cls: ClassDefinition) -> List[SlotDefinitionName]:
    """ Return all slots for class cls """
    if not cls.is_a:
        return cls.slots
    else:
        return [sn for sn in self.all_slots_for(self.schema.classes[cls.is_a]) if sn not in cls.slot_usage] \
               + cls.slots
python
def all_slots_for(self, cls: ClassDefinition) -> List[SlotDefinitionName]:
    """ Return all slots for class cls """
    if not cls.is_a:
        return cls.slots
    else:
        return [sn for sn in self.all_slots_for(self.schema.classes[cls.is_a]) if sn not in cls.slot_usage] \
               + cls.slots
['def', 'all_slots_for', '(', 'self', ',', 'cls', ':', 'ClassDefinition', ')', '->', 'List', '[', 'SlotDefinitionName', ']', ':', 'if', 'not', 'cls', '.', 'is_a', ':', 'return', 'cls', '.', 'slots', 'else', ':', 'return', '[', 'sn', 'for', 'sn', 'in', 'self', '.', 'all_slots_for', '(', 'self', '.', 'schema', '.', 'classes', '[', 'cls', '.', 'is_a', ']', ')', 'if', 'sn', 'not', 'in', 'cls', '.', 'slot_usage', ']', '+', 'cls', '.', 'slots']
Return all slots for class cls
['Return', 'all', 'slots', 'for', 'class', 'cls']
train
https://github.com/biolink/biolink-model/blob/f379e28d5d4085e1115798c6cb28e5acc4dba8b4/metamodel/generators/pythongen.py#L242-L248
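A minimal, self-contained sketch of the same inheritance walk, using a hypothetical FakeClass stand-in instead of biolink's ClassDefinition (all names below are illustrative):

```python
from dataclasses import dataclass, field
from typing import Dict, List, Optional

@dataclass
class FakeClass:                      # hypothetical stand-in for ClassDefinition
    slots: List[str]
    is_a: Optional[str] = None        # name of the parent class, if any
    slot_usage: Dict[str, str] = field(default_factory=dict)

def all_slots_for(classes: Dict[str, FakeClass], name: str) -> List[str]:
    cls = classes[name]
    if not cls.is_a:
        return cls.slots
    # inherited slots come first, minus any the child overrides in slot_usage
    inherited = [s for s in all_slots_for(classes, cls.is_a)
                 if s not in cls.slot_usage]
    return inherited + cls.slots

classes = {
    "entity": FakeClass(slots=["id", "name"]),
    "gene":   FakeClass(slots=["symbol"], is_a="entity", slot_usage={"name": "..."}),
}
print(all_slots_for(classes, "gene"))   # ['id', 'symbol']
```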
6,201
igorcoding/asynctnt-queue
asynctnt_queue/tube.py
Tube.release
async def release(self, task_id, *, delay=None):
    """
    Release task (return to queue) with delay if specified

    :param task_id: Task id
    :param delay: Time in seconds before task will become ready again
    :return: Task instance
    """
    opts = {}
    if delay is not None:
        opts['delay'] = delay
    args = (task_id, opts)
    res = await self.conn.call(self.__funcs['release'], args)
    return self._create_task(res.body)
python
async def release(self, task_id, *, delay=None):
    """
    Release task (return to queue) with delay if specified

    :param task_id: Task id
    :param delay: Time in seconds before task will become ready again
    :return: Task instance
    """
    opts = {}
    if delay is not None:
        opts['delay'] = delay
    args = (task_id, opts)
    res = await self.conn.call(self.__funcs['release'], args)
    return self._create_task(res.body)
['async', 'def', 'release', '(', 'self', ',', 'task_id', ',', '*', ',', 'delay', '=', 'None', ')', ':', 'opts', '=', '{', '}', 'if', 'delay', 'is', 'not', 'None', ':', 'opts', '[', "'delay'", ']', '=', 'delay', 'args', '=', '(', 'task_id', ',', 'opts', ')', 'res', '=', 'await', 'self', '.', 'conn', '.', 'call', '(', 'self', '.', '__funcs', '[', "'release'", ']', ',', 'args', ')', 'return', 'self', '.', '_create_task', '(', 'res', '.', 'body', ')']
Release task (return to queue) with delay if specified

:param task_id: Task id
:param delay: Time in seconds before task will become ready again
:return: Task instance
['Release', 'task', '(', 'return', 'to', 'queue', ')', 'with', 'delay', 'if', 'specified']
train
https://github.com/igorcoding/asynctnt-queue/blob/75719b2dd27e8314ae924aea6a7a85be8f48ecc5/asynctnt_queue/tube.py#L141-L154
6,202
bram85/topydo
topydo/lib/Todo.py
Todo.days_till_due
def days_till_due(self):
    """
    Returns the number of days till the due date.

    Returns a negative number of days when the due date is in the past.
    Returns 0 when the task has no due date.
    """
    due = self.due_date()
    if due:
        diff = due - date.today()
        return diff.days
    return 0
python
def days_till_due(self):
    """
    Returns the number of days till the due date.

    Returns a negative number of days when the due date is in the past.
    Returns 0 when the task has no due date.
    """
    due = self.due_date()
    if due:
        diff = due - date.today()
        return diff.days
    return 0
['def', 'days_till_due', '(', 'self', ')', ':', 'due', '=', 'self', '.', 'due_date', '(', ')', 'if', 'due', ':', 'diff', '=', 'due', '-', 'date', '.', 'today', '(', ')', 'return', 'diff', '.', 'days', 'return', '0']
Returns the number of days till the due date. Returns a negative number of days when the due date is in the past. Returns 0 when the task has no due date.
['Returns', 'the', 'number', 'of', 'days', 'till', 'the', 'due', 'date', '.', 'Returns', 'a', 'negative', 'number', 'of', 'days', 'when', 'the', 'due', 'date', 'is', 'in', 'the', 'past', '.', 'Returns', '0', 'when', 'the', 'task', 'has', 'no', 'due', 'date', '.']
train
https://github.com/bram85/topydo/blob/b59fcfca5361869a6b78d4c9808c7c6cd0a18b58/topydo/lib/Todo.py#L73-L83
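The underlying arithmetic is plain datetime.date subtraction; a standalone sketch (the days_till name and signature are illustrative, not topydo's API):

```python
from datetime import date
from typing import Optional

def days_till(due: Optional[date], today: Optional[date] = None) -> int:
    """Days until `due`; negative if overdue; 0 if there is no due date."""
    if due is None:
        return 0
    return (due - (today or date.today())).days

print(days_till(date(2024, 1, 10), today=date(2024, 1, 3)))  # 7
print(days_till(date(2024, 1, 1), today=date(2024, 1, 3)))   # -2
print(days_till(None))                                       # 0
```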
6,203
timothyb0912/pylogit
pylogit/base_multinomial_cm_v2.py
check_type_of_param_list_elements
def check_type_of_param_list_elements(param_list):
    """
    Ensures that all elements of param_list are ndarrays or None. Raises a
    helpful ValueError if otherwise.
    """
    try:
        assert isinstance(param_list[0], np.ndarray)
        assert all([(x is None or isinstance(x, np.ndarray))
                    for x in param_list])
    except AssertionError:
        msg = "param_list[0] must be a numpy array."
        msg_2 = "All other elements must be numpy arrays or None."
        total_msg = msg + "\n" + msg_2
        raise TypeError(total_msg)

    return None
python
def check_type_of_param_list_elements(param_list):
    """
    Ensures that all elements of param_list are ndarrays or None. Raises a
    helpful ValueError if otherwise.
    """
    try:
        assert isinstance(param_list[0], np.ndarray)
        assert all([(x is None or isinstance(x, np.ndarray))
                    for x in param_list])
    except AssertionError:
        msg = "param_list[0] must be a numpy array."
        msg_2 = "All other elements must be numpy arrays or None."
        total_msg = msg + "\n" + msg_2
        raise TypeError(total_msg)

    return None
['def', 'check_type_of_param_list_elements', '(', 'param_list', ')', ':', 'try', ':', 'assert', 'isinstance', '(', 'param_list', '[', '0', ']', ',', 'np', '.', 'ndarray', ')', 'assert', 'all', '(', '[', '(', 'x', 'is', 'None', 'or', 'isinstance', '(', 'x', ',', 'np', '.', 'ndarray', ')', ')', 'for', 'x', 'in', 'param_list', ']', ')', 'except', 'AssertionError', ':', 'msg', '=', '"param_list[0] must be a numpy array."', 'msg_2', '=', '"All other elements must be numpy arrays or None."', 'total_msg', '=', 'msg', '+', '"\\n"', '+', 'msg_2', 'raise', 'TypeError', '(', 'total_msg', ')', 'return', 'None']
Ensures that all elements of param_list are ndarrays or None. Raises a helpful ValueError if otherwise.
['Ensures', 'that', 'all', 'elements', 'of', 'param_list', 'are', 'ndarrays', 'or', 'None', '.', 'Raises', 'a', 'helpful', 'ValueError', 'if', 'otherwise', '.']
train
https://github.com/timothyb0912/pylogit/blob/f83b0fd6debaa7358d87c3828428f6d4ead71357/pylogit/base_multinomial_cm_v2.py#L425-L440
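Two things worth noting: the docstring promises a ValueError but the code raises TypeError, and assert-based validation disappears under `python -O`. A self-contained variant with explicit checks (the check_param_list name is made up for illustration):

```python
import numpy as np

def check_param_list(param_list):
    """Validate that param_list[0] is an ndarray and the rest are ndarray or None.

    Explicit `if` tests instead of `assert`, so the check survives `python -O`.
    """
    if not isinstance(param_list[0], np.ndarray):
        raise TypeError("param_list[0] must be a numpy array.")
    if not all(x is None or isinstance(x, np.ndarray) for x in param_list):
        raise TypeError("All other elements must be numpy arrays or None.")

check_param_list([np.zeros(3), None])           # passes silently
try:
    check_param_list([np.zeros(3), [1, 2, 3]])  # plain list -> TypeError
except TypeError as e:
    print(e)
```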
6,204
cloudtools/stacker
stacker/dag/__init__.py
DAG.walk
def walk(self, walk_func):
    """ Walks each node of the graph in reverse topological order.

    This can be used to perform a set of operations, where the next
    operation depends on the previous operation. It's important to note
    that walking happens serially, and is not paralellized.

    Args:
        walk_func (:class:`types.FunctionType`): The function to be called
            on each node of the graph.
    """
    nodes = self.topological_sort()
    # Reverse so we start with nodes that have no dependencies.
    nodes.reverse()
    for n in nodes:
        walk_func(n)
python
def walk(self, walk_func):
    """ Walks each node of the graph in reverse topological order.

    This can be used to perform a set of operations, where the next
    operation depends on the previous operation. It's important to note
    that walking happens serially, and is not paralellized.

    Args:
        walk_func (:class:`types.FunctionType`): The function to be called
            on each node of the graph.
    """
    nodes = self.topological_sort()
    # Reverse so we start with nodes that have no dependencies.
    nodes.reverse()
    for n in nodes:
        walk_func(n)
['def', 'walk', '(', 'self', ',', 'walk_func', ')', ':', 'nodes', '=', 'self', '.', 'topological_sort', '(', ')', '# Reverse so we start with nodes that have no dependencies.', 'nodes', '.', 'reverse', '(', ')', 'for', 'n', 'in', 'nodes', ':', 'walk_func', '(', 'n', ')']
Walks each node of the graph in reverse topological order.

This can be used to perform a set of operations, where the next
operation depends on the previous operation. It's important to note
that walking happens serially, and is not paralellized.

Args:
    walk_func (:class:`types.FunctionType`): The function to be called
        on each node of the graph.
['Walks', 'each', 'node', 'of', 'the', 'graph', 'in', 'reverse', 'topological', 'order', '.', 'This', 'can', 'be', 'used', 'to', 'perform', 'a', 'set', 'of', 'operations', 'where', 'the', 'next', 'operation', 'depends', 'on', 'the', 'previous', 'operation', '.', 'It', 's', 'important', 'to', 'note', 'that', 'walking', 'happens', 'serially', 'and', 'is', 'not', 'paralellized', '.']
train
https://github.com/cloudtools/stacker/blob/ad6013a03a560c46ba3c63c4d153336273e6da5d/stacker/dag/__init__.py#L152-L167
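A self-contained sketch of the same idea built on the standard library's graphlib rather than stacker's DAG class. With a node -> dependencies mapping, static_order() already yields dependency-free nodes first, which matches the reversed order walk() produces:

```python
from graphlib import TopologicalSorter

# node -> set of nodes it depends on
graph = {
    "app":     {"db", "network"},
    "db":      {"network"},
    "network": set(),
}

def walk(graph, walk_func):
    # static_order() emits each node only after all of its dependencies,
    # i.e. dependency-free nodes come first -- stacker reverses its own
    # topological_sort() to get the same ordering.
    for node in TopologicalSorter(graph).static_order():
        walk_func(node)

walk(graph, print)   # network, db, app -- serially, in dependency order
```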
6,205
django-json-api/django-rest-framework-json-api
rest_framework_json_api/utils.py
_format_object
def _format_object(obj, format_type=None):
    """Depending on settings calls either `format_keys` or `format_field_names`"""
    if json_api_settings.FORMAT_KEYS is not None:
        return format_keys(obj, format_type)

    return format_field_names(obj, format_type)
python
def _format_object(obj, format_type=None):
    """Depending on settings calls either `format_keys` or `format_field_names`"""
    if json_api_settings.FORMAT_KEYS is not None:
        return format_keys(obj, format_type)

    return format_field_names(obj, format_type)
['def', '_format_object', '(', 'obj', ',', 'format_type', '=', 'None', ')', ':', 'if', 'json_api_settings', '.', 'FORMAT_KEYS', 'is', 'not', 'None', ':', 'return', 'format_keys', '(', 'obj', ',', 'format_type', ')', 'return', 'format_field_names', '(', 'obj', ',', 'format_type', ')']
Depending on settings calls either `format_keys` or `format_field_names`
['Depending', 'on', 'settings', 'calls', 'either', 'format_keys', 'or', 'format_field_names']
train
https://github.com/django-json-api/django-rest-framework-json-api/blob/de7021f9e011615ce8b65d0cb38227c6c12721b6/rest_framework_json_api/utils.py#L121-L127
6,206
LonamiWebs/Telethon
telethon/tl/custom/message.py
Message.edit
async def edit(self, *args, **kwargs):
    """
    Edits the message iff it's outgoing. Shorthand for
    `telethon.client.messages.MessageMethods.edit_message`
    with both ``entity`` and ``message`` already set.

    Returns ``None`` if the message was incoming,
    or the edited `Message` otherwise.

    .. note::

        This is different from `client.edit_message
        <telethon.client.messages.MessageMethods.edit_message>`
        and **will respect** the previous state of the message.
        For example, if the message didn't have a link preview,
        the edit won't add one by default, and you should force
        it by setting it to ``True`` if you want it.

        This is generally the most desired and convenient behaviour,
        and will work for link previews and message buttons.
    """
    if self.fwd_from or not self.out:
        return None  # We assume self.out was patched for our chat

    if 'link_preview' not in kwargs:
        kwargs['link_preview'] = bool(self.web_preview)

    if 'buttons' not in kwargs:
        kwargs['buttons'] = self.reply_markup

    return await self._client.edit_message(
        await self.get_input_chat(), self.id,
        *args, **kwargs
    )
python
async def edit(self, *args, **kwargs):
    """
    Edits the message iff it's outgoing. Shorthand for
    `telethon.client.messages.MessageMethods.edit_message`
    with both ``entity`` and ``message`` already set.

    Returns ``None`` if the message was incoming,
    or the edited `Message` otherwise.

    .. note::

        This is different from `client.edit_message
        <telethon.client.messages.MessageMethods.edit_message>`
        and **will respect** the previous state of the message.
        For example, if the message didn't have a link preview,
        the edit won't add one by default, and you should force
        it by setting it to ``True`` if you want it.

        This is generally the most desired and convenient behaviour,
        and will work for link previews and message buttons.
    """
    if self.fwd_from or not self.out:
        return None  # We assume self.out was patched for our chat

    if 'link_preview' not in kwargs:
        kwargs['link_preview'] = bool(self.web_preview)

    if 'buttons' not in kwargs:
        kwargs['buttons'] = self.reply_markup

    return await self._client.edit_message(
        await self.get_input_chat(), self.id,
        *args, **kwargs
    )
['async', 'def', 'edit', '(', 'self', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'if', 'self', '.', 'fwd_from', 'or', 'not', 'self', '.', 'out', ':', 'return', 'None', '# We assume self.out was patched for our chat', 'if', "'link_preview'", 'not', 'in', 'kwargs', ':', 'kwargs', '[', "'link_preview'", ']', '=', 'bool', '(', 'self', '.', 'web_preview', ')', 'if', "'buttons'", 'not', 'in', 'kwargs', ':', 'kwargs', '[', "'buttons'", ']', '=', 'self', '.', 'reply_markup', 'return', 'await', 'self', '.', '_client', '.', 'edit_message', '(', 'await', 'self', '.', 'get_input_chat', '(', ')', ',', 'self', '.', 'id', ',', '*', 'args', ',', '*', '*', 'kwargs', ')']
Edits the message iff it's outgoing. Shorthand for
`telethon.client.messages.MessageMethods.edit_message`
with both ``entity`` and ``message`` already set.

Returns ``None`` if the message was incoming,
or the edited `Message` otherwise.

.. note::

    This is different from `client.edit_message
    <telethon.client.messages.MessageMethods.edit_message>`
    and **will respect** the previous state of the message.
    For example, if the message didn't have a link preview,
    the edit won't add one by default, and you should force
    it by setting it to ``True`` if you want it.

    This is generally the most desired and convenient behaviour,
    and will work for link previews and message buttons.
['Edits', 'the', 'message', 'iff', 'it', 's', 'outgoing', '.', 'Shorthand', 'for', 'telethon', '.', 'client', '.', 'messages', '.', 'MessageMethods', '.', 'edit_message', 'with', 'both', 'entity', 'and', 'message', 'already', 'set', '.']
train
https://github.com/LonamiWebs/Telethon/blob/1ead9757d366b58c1e0567cddb0196e20f1a445f/telethon/tl/custom/message.py#L651-L684
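A hedged usage sketch (Telethon's public API as I understand it; the credentials are placeholders):

```python
import asyncio
from telethon import TelegramClient

api_id, api_hash = 12345, "0123456789abcdef"   # placeholder credentials

async def main():
    async with TelegramClient("session", api_id, api_hash) as client:
        msg = await client.send_message("me", "draft")
        edited = await msg.edit("final text")   # None if the message were incoming
        print(edited.text if edited else "message was not editable")

asyncio.run(main())
```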
6,207
skioo/django-customer-billing
billing/actions/credit_cards.py
reactivate
def reactivate(credit_card_id: str) -> None:
    """
    Reactivates a credit card.
    """
    logger.info('reactivating-credit-card', credit_card_id=credit_card_id)
    with transaction.atomic():
        cc = CreditCard.objects.get(pk=credit_card_id)
        cc.reactivate()
        cc.save()
python
def reactivate(credit_card_id: str) -> None:
    """
    Reactivates a credit card.
    """
    logger.info('reactivating-credit-card', credit_card_id=credit_card_id)
    with transaction.atomic():
        cc = CreditCard.objects.get(pk=credit_card_id)
        cc.reactivate()
        cc.save()
['def', 'reactivate', '(', 'credit_card_id', ':', 'str', ')', '->', 'None', ':', 'logger', '.', 'info', '(', "'reactivating-credit-card'", ',', 'credit_card_id', '=', 'credit_card_id', ')', 'with', 'transaction', '.', 'atomic', '(', ')', ':', 'cc', '=', 'CreditCard', '.', 'objects', '.', 'get', '(', 'pk', '=', 'credit_card_id', ')', 'cc', '.', 'reactivate', '(', ')', 'cc', '.', 'save', '(', ')']
Reactivates a credit card.
['Reactivates', 'a', 'credit', 'card', '.']
train
https://github.com/skioo/django-customer-billing/blob/6ac1ed9ef9d1d7eee0379de7f0c4b76919ae1f2d/billing/actions/credit_cards.py#L20-L28
6,208
LuminosoInsight/python-ftfy
ftfy/fixes.py
fix_surrogates
def fix_surrogates(text):
    """
    Replace 16-bit surrogate codepoints with the characters they represent
    (when properly paired), or with \ufffd otherwise.

        >>> high_surrogate = chr(0xd83d)
        >>> low_surrogate = chr(0xdca9)
        >>> print(fix_surrogates(high_surrogate + low_surrogate))
        💩
        >>> print(fix_surrogates(low_surrogate + high_surrogate))
        ��

    The above doctest had to be very carefully written, because even putting
    the Unicode escapes of the surrogates in the docstring was causing
    various tools to fail, which I think just goes to show why this fixer is
    necessary.
    """
    if SURROGATE_RE.search(text):
        text = SURROGATE_PAIR_RE.sub(convert_surrogate_pair, text)
        text = SURROGATE_RE.sub('\ufffd', text)
    return text
python
def fix_surrogates(text):
    """
    Replace 16-bit surrogate codepoints with the characters they represent
    (when properly paired), or with \ufffd otherwise.

        >>> high_surrogate = chr(0xd83d)
        >>> low_surrogate = chr(0xdca9)
        >>> print(fix_surrogates(high_surrogate + low_surrogate))
        💩
        >>> print(fix_surrogates(low_surrogate + high_surrogate))
        ��

    The above doctest had to be very carefully written, because even putting
    the Unicode escapes of the surrogates in the docstring was causing
    various tools to fail, which I think just goes to show why this fixer is
    necessary.
    """
    if SURROGATE_RE.search(text):
        text = SURROGATE_PAIR_RE.sub(convert_surrogate_pair, text)
        text = SURROGATE_RE.sub('\ufffd', text)
    return text
['def', 'fix_surrogates', '(', 'text', ')', ':', 'if', 'SURROGATE_RE', '.', 'search', '(', 'text', ')', ':', 'text', '=', 'SURROGATE_PAIR_RE', '.', 'sub', '(', 'convert_surrogate_pair', ',', 'text', ')', 'text', '=', 'SURROGATE_RE', '.', 'sub', '(', "'\\ufffd'", ',', 'text', ')', 'return', 'text']
Replace 16-bit surrogate codepoints with the characters they represent
(when properly paired), or with \ufffd otherwise.

    >>> high_surrogate = chr(0xd83d)
    >>> low_surrogate = chr(0xdca9)
    >>> print(fix_surrogates(high_surrogate + low_surrogate))
    💩
    >>> print(fix_surrogates(low_surrogate + high_surrogate))
    ��

The above doctest had to be very carefully written, because even putting
the Unicode escapes of the surrogates in the docstring was causing
various tools to fail, which I think just goes to show why this fixer is
necessary.
['Replace', '16', '-', 'bit', 'surrogate', 'codepoints', 'with', 'the', 'characters', 'they', 'represent', '(', 'when', 'properly', 'paired', ')', 'or', 'with', '\\', 'ufffd', 'otherwise', '.']
train
https://github.com/LuminosoInsight/python-ftfy/blob/476acc6ad270bffe07f97d4f7cf2139acdc69633/ftfy/fixes.py#L469-L489
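SURROGATE_RE, SURROGATE_PAIR_RE, and convert_surrogate_pair are defined elsewhere in ftfy; a self-contained approximation of the same repair, using the standard UTF-16 pair-combining formula:

```python
import re

SURROGATE_RE = re.compile('[\ud800-\udfff]')
SURROGATE_PAIR_RE = re.compile('[\ud800-\udbff][\udc00-\udfff]')

def _combine(match):
    hi, lo = match.group(0)
    # standard UTF-16 decoding of a surrogate pair
    return chr(0x10000 + (ord(hi) - 0xD800) * 0x400 + (ord(lo) - 0xDC00))

def fix_surrogates(text):
    if SURROGATE_RE.search(text):
        text = SURROGATE_PAIR_RE.sub(_combine, text)   # valid pairs first
        text = SURROGATE_RE.sub('\ufffd', text)        # strays become U+FFFD
    return text

print(fix_surrogates('\ud83d\udca9'))   # 💩
print(fix_surrogates('\udca9\ud83d'))   # ��
```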
6,209
senaite/senaite.core
bika/lims/jsonapi/update.py
Update.require
def require(self, fieldname, allow_blank=False):
    """fieldname is required"""
    if self.request.form and fieldname not in self.request.form.keys():
        raise Exception("Required field not found in request: %s" % fieldname)
    if self.request.form and (not self.request.form[fieldname] or allow_blank):
        raise Exception("Required field %s may not have blank value")
python
def require(self, fieldname, allow_blank=False):
    """fieldname is required"""
    if self.request.form and fieldname not in self.request.form.keys():
        raise Exception("Required field not found in request: %s" % fieldname)
    if self.request.form and (not self.request.form[fieldname] or allow_blank):
        raise Exception("Required field %s may not have blank value")
['def', 'require', '(', 'self', ',', 'fieldname', ',', 'allow_blank', '=', 'False', ')', ':', 'if', 'self', '.', 'request', '.', 'form', 'and', 'fieldname', 'not', 'in', 'self', '.', 'request', '.', 'form', '.', 'keys', '(', ')', ':', 'raise', 'Exception', '(', '"Required field not found in request: %s"', '%', 'fieldname', ')', 'if', 'self', '.', 'request', '.', 'form', 'and', '(', 'not', 'self', '.', 'request', '.', 'form', '[', 'fieldname', ']', 'or', 'allow_blank', ')', ':', 'raise', 'Exception', '(', '"Required field %s may not have blank value"', ')']
fieldname is required
['fieldname', 'is', 'required']
train
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/jsonapi/update.py#L167-L172
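Two apparent bugs here: the second message never interpolates fieldname (the `% fieldname` is missing), and the `or allow_blank` condition makes the blank check raise precisely when blanks are allowed. A standalone sketch of what the intent seems to be, taking the form dict directly for testability:

```python
def require(form, fieldname, allow_blank=False):
    """Raise unless `form` carries a non-blank value for `fieldname`."""
    if form and fieldname not in form:
        raise Exception("Required field not found in request: %s" % fieldname)
    if form and not form[fieldname] and not allow_blank:
        raise Exception("Required field %s may not have blank value" % fieldname)

require({"title": "ok"}, "title")                   # passes
require({"title": ""}, "title", allow_blank=True)   # passes
# require({"title": ""}, "title")                   # raises: blank value
```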
6,210
daviddrysdale/python-phonenumbers
python/phonenumbers/asyoutypeformatter.py
AsYouTypeFormatter._normalize_and_accrue_digits_and_plus_sign
def _normalize_and_accrue_digits_and_plus_sign(self, next_char, remember_position):
    """Accrues digits and the plus sign to _accrued_input_without_formatting
    for later use. If next_char contains a digit in non-ASCII format (e.g.
    the full-width version of digits), it is first normalized to the ASCII
    version. The return value is next_char itself, or its normalized
    version, if next_char is a digit in non-ASCII format.

    This method assumes its input is either a digit or the plus sign."""
    if next_char == _PLUS_SIGN:
        normalized_char = next_char
        self._accrued_input_without_formatting += next_char
    else:
        next_digit = unicode_digit(next_char, -1)
        if next_digit != -1:
            normalized_char = unicod(next_digit)
        else:  # pragma no cover
            normalized_char = next_char
        self._accrued_input_without_formatting += normalized_char
    self._national_number += normalized_char
    if remember_position:
        self._position_to_remember = len(self._accrued_input_without_formatting)
    return normalized_char
python
def _normalize_and_accrue_digits_and_plus_sign(self, next_char, remember_position):
    """Accrues digits and the plus sign to _accrued_input_without_formatting
    for later use. If next_char contains a digit in non-ASCII format (e.g.
    the full-width version of digits), it is first normalized to the ASCII
    version. The return value is next_char itself, or its normalized
    version, if next_char is a digit in non-ASCII format.

    This method assumes its input is either a digit or the plus sign."""
    if next_char == _PLUS_SIGN:
        normalized_char = next_char
        self._accrued_input_without_formatting += next_char
    else:
        next_digit = unicode_digit(next_char, -1)
        if next_digit != -1:
            normalized_char = unicod(next_digit)
        else:  # pragma no cover
            normalized_char = next_char
        self._accrued_input_without_formatting += normalized_char
    self._national_number += normalized_char
    if remember_position:
        self._position_to_remember = len(self._accrued_input_without_formatting)
    return normalized_char
['def', '_normalize_and_accrue_digits_and_plus_sign', '(', 'self', ',', 'next_char', ',', 'remember_position', ')', ':', 'if', 'next_char', '==', '_PLUS_SIGN', ':', 'normalized_char', '=', 'next_char', 'self', '.', '_accrued_input_without_formatting', '+=', 'next_char', 'else', ':', 'next_digit', '=', 'unicode_digit', '(', 'next_char', ',', '-', '1', ')', 'if', 'next_digit', '!=', '-', '1', ':', 'normalized_char', '=', 'unicod', '(', 'next_digit', ')', 'else', ':', '# pragma no cover', 'normalized_char', '=', 'next_char', 'self', '.', '_accrued_input_without_formatting', '+=', 'normalized_char', 'self', '.', '_national_number', '+=', 'normalized_char', 'if', 'remember_position', ':', 'self', '.', '_position_to_remember', '=', 'len', '(', 'self', '.', '_accrued_input_without_formatting', ')', 'return', 'normalized_char']
Accrues digits and the plus sign to _accrued_input_without_formatting for later use. If next_char contains a digit in non-ASCII format (e.g. the full-width version of digits), it is first normalized to the ASCII version. The return value is next_char itself, or its normalized version, if next_char is a digit in non-ASCII format. This method assumes its input is either a digit or the plus sign.
['Accrues', 'digits', 'and', 'the', 'plus', 'sign', 'to', '_accrued_input_without_formatting', 'for', 'later', 'use', '.', 'If', 'next_char', 'contains', 'a', 'digit', 'in', 'non', '-', 'ASCII', 'format', '(', 'e', '.', 'g', '.', 'the', 'full', '-', 'width', 'version', 'of', 'digits', ')', 'it', 'is', 'first', 'normalized', 'to', 'the', 'ASCII', 'version', '.', 'The', 'return', 'value', 'is', 'next_char', 'itself', 'or', 'its', 'normalized', 'version', 'if', 'next_char', 'is', 'a', 'digit', 'in', 'non', '-', 'ASCII', 'format', '.', 'This', 'method', 'assumes', 'its', 'input', 'is', 'either', 'a', 'digit', 'or', 'the', 'plus', 'sign', '.']
train
https://github.com/daviddrysdale/python-phonenumbers/blob/9cc5bb4ab5e661e70789b4c64bf7a9383c7bdc20/python/phonenumbers/asyoutypeformatter.py#L550-L571
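phonenumbers' unicode_digit helper wraps the standard library's unicodedata; a standalone sketch of just the normalization step (the normalize_digit name is illustrative):

```python
import unicodedata

def normalize_digit(ch: str) -> str:
    """Return the ASCII form of `ch` if it is a digit in any script, else `ch`."""
    d = unicodedata.digit(ch, -1)   # -1 when ch is not a digit
    return str(d) if d != -1 else ch

print(normalize_digit("３"))   # FULLWIDTH DIGIT THREE -> '3'
print(normalize_digit("٧"))   # ARABIC-INDIC DIGIT SEVEN -> '7'
print(normalize_digit("+"))   # not a digit -> '+'
```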
6,211
wonambi-python/wonambi
wonambi/trans/filter.py
filter_
def filter_(data, axis='time', low_cut=None, high_cut=None, order=4,
            ftype='butter', Rs=None, notchfreq=50, notchquality=25):
    """Design filter and apply it.

    Parameters
    ----------
    ftype : str
        'butter', 'cheby1', 'cheby2', 'ellip', 'bessel', 'diff', or 'notch'
    axis : str, optional
        axis to apply the filter on.
    low_cut : float, optional (not for notch)
        low cutoff for high-pass filter
    high_cut : float, optional (not for notch)
        high cutoff for low-pass filter
    order : int, optional (not for notch)
        filter order
    data : instance of Data (not for notch)
        the data to filter.
    notchfreq : float (only for notch)
        frequency to apply notch filter to (+ harmonics)
    notchquality : int (only for notch)
        Quality factor (see scipy.signal.iirnotch)

    Returns
    -------
    filtered_data : instance of DataRaw
        filtered data

    Notes
    -----
    You can specify any filter type as defined by iirfilter.

    If you specify low_cut only, it generates a high-pass filter.
    If you specify high_cut only, it generates a low-pass filter.
    If you specify both, it generates a band-pass filter.

    low_cut and high_cut should be given as ratio of the Nyquist. But if you
    specify s_freq, then the ratio will be computed automatically.

    Raises
    ------
    ValueError
        if the cutoff frequency is larger than the Nyquist frequency.
    """
    nyquist = data.s_freq / 2.

    btype = None
    if low_cut is not None and high_cut is not None:
        if low_cut > nyquist or high_cut > nyquist:
            raise ValueError('cutoff has to be less than Nyquist '
                             'frequency')
        btype = 'bandpass'
        Wn = (low_cut / nyquist, high_cut / nyquist)
    elif low_cut is not None:
        if low_cut > nyquist:
            raise ValueError('cutoff has to be less than Nyquist '
                             'frequency')
        btype = 'highpass'
        Wn = low_cut / nyquist
    elif high_cut is not None:
        if high_cut > nyquist:
            raise ValueError('cutoff has to be less than Nyquist '
                             'frequency')
        btype = 'lowpass'
        Wn = high_cut / nyquist

    if btype is None and ftype != 'notch':
        raise TypeError('You should specify at least low_cut or high_cut')

    if Rs is None:
        Rs = 40

    if ftype == 'notch':
        b_a = [iirnotch(w0 / nyquist, notchquality)
               for w0 in arange(notchfreq, nyquist, notchfreq)]
    else:
        lg.debug('order {0: 2}, Wn {1}, btype {2}, ftype {3}'
                 ''.format(order, str(Wn), btype, ftype))
        b_a = [iirfilter(order, Wn, btype=btype, ftype=ftype, rs=Rs), ]

    fdata = data._copy()
    for i in range(data.number_of('trial')):
        x = data.data[i]
        for b, a in b_a:
            x = filtfilt(b, a, x, axis=data.index_of(axis))
        fdata.data[i] = x

    return fdata
python
def filter_(data, axis='time', low_cut=None, high_cut=None, order=4,
            ftype='butter', Rs=None, notchfreq=50, notchquality=25):
    """Design filter and apply it.

    Parameters
    ----------
    ftype : str
        'butter', 'cheby1', 'cheby2', 'ellip', 'bessel', 'diff', or 'notch'
    axis : str, optional
        axis to apply the filter on.
    low_cut : float, optional (not for notch)
        low cutoff for high-pass filter
    high_cut : float, optional (not for notch)
        high cutoff for low-pass filter
    order : int, optional (not for notch)
        filter order
    data : instance of Data (not for notch)
        the data to filter.
    notchfreq : float (only for notch)
        frequency to apply notch filter to (+ harmonics)
    notchquality : int (only for notch)
        Quality factor (see scipy.signal.iirnotch)

    Returns
    -------
    filtered_data : instance of DataRaw
        filtered data

    Notes
    -----
    You can specify any filter type as defined by iirfilter.

    If you specify low_cut only, it generates a high-pass filter.
    If you specify high_cut only, it generates a low-pass filter.
    If you specify both, it generates a band-pass filter.

    low_cut and high_cut should be given as ratio of the Nyquist. But if you
    specify s_freq, then the ratio will be computed automatically.

    Raises
    ------
    ValueError
        if the cutoff frequency is larger than the Nyquist frequency.
    """
    nyquist = data.s_freq / 2.

    btype = None
    if low_cut is not None and high_cut is not None:
        if low_cut > nyquist or high_cut > nyquist:
            raise ValueError('cutoff has to be less than Nyquist '
                             'frequency')
        btype = 'bandpass'
        Wn = (low_cut / nyquist, high_cut / nyquist)
    elif low_cut is not None:
        if low_cut > nyquist:
            raise ValueError('cutoff has to be less than Nyquist '
                             'frequency')
        btype = 'highpass'
        Wn = low_cut / nyquist
    elif high_cut is not None:
        if high_cut > nyquist:
            raise ValueError('cutoff has to be less than Nyquist '
                             'frequency')
        btype = 'lowpass'
        Wn = high_cut / nyquist

    if btype is None and ftype != 'notch':
        raise TypeError('You should specify at least low_cut or high_cut')

    if Rs is None:
        Rs = 40

    if ftype == 'notch':
        b_a = [iirnotch(w0 / nyquist, notchquality)
               for w0 in arange(notchfreq, nyquist, notchfreq)]
    else:
        lg.debug('order {0: 2}, Wn {1}, btype {2}, ftype {3}'
                 ''.format(order, str(Wn), btype, ftype))
        b_a = [iirfilter(order, Wn, btype=btype, ftype=ftype, rs=Rs), ]

    fdata = data._copy()
    for i in range(data.number_of('trial')):
        x = data.data[i]
        for b, a in b_a:
            x = filtfilt(b, a, x, axis=data.index_of(axis))
        fdata.data[i] = x

    return fdata
['def', 'filter_', '(', 'data', ',', 'axis', '=', "'time'", ',', 'low_cut', '=', 'None', ',', 'high_cut', '=', 'None', ',', 'order', '=', '4', ',', 'ftype', '=', "'butter'", ',', 'Rs', '=', 'None', ',', 'notchfreq', '=', '50', ',', 'notchquality', '=', '25', ')', ':', 'nyquist', '=', 'data', '.', 's_freq', '/', '2.', 'btype', '=', 'None', 'if', 'low_cut', 'is', 'not', 'None', 'and', 'high_cut', 'is', 'not', 'None', ':', 'if', 'low_cut', '>', 'nyquist', 'or', 'high_cut', '>', 'nyquist', ':', 'raise', 'ValueError', '(', "'cutoff has to be less than Nyquist '", "'frequency'", ')', 'btype', '=', "'bandpass'", 'Wn', '=', '(', 'low_cut', '/', 'nyquist', ',', 'high_cut', '/', 'nyquist', ')', 'elif', 'low_cut', 'is', 'not', 'None', ':', 'if', 'low_cut', '>', 'nyquist', ':', 'raise', 'ValueError', '(', "'cutoff has to be less than Nyquist '", "'frequency'", ')', 'btype', '=', "'highpass'", 'Wn', '=', 'low_cut', '/', 'nyquist', 'elif', 'high_cut', 'is', 'not', 'None', ':', 'if', 'high_cut', '>', 'nyquist', ':', 'raise', 'ValueError', '(', "'cutoff has to be less than Nyquist '", "'frequency'", ')', 'btype', '=', "'lowpass'", 'Wn', '=', 'high_cut', '/', 'nyquist', 'if', 'btype', 'is', 'None', 'and', 'ftype', '!=', "'notch'", ':', 'raise', 'TypeError', '(', "'You should specify at least low_cut or high_cut'", ')', 'if', 'Rs', 'is', 'None', ':', 'Rs', '=', '40', 'if', 'ftype', '==', "'notch'", ':', 'b_a', '=', '[', 'iirnotch', '(', 'w0', '/', 'nyquist', ',', 'notchquality', ')', 'for', 'w0', 'in', 'arange', '(', 'notchfreq', ',', 'nyquist', ',', 'notchfreq', ')', ']', 'else', ':', 'lg', '.', 'debug', '(', "'order {0: 2}, Wn {1}, btype {2}, ftype {3}'", "''", '.', 'format', '(', 'order', ',', 'str', '(', 'Wn', ')', ',', 'btype', ',', 'ftype', ')', ')', 'b_a', '=', '[', 'iirfilter', '(', 'order', ',', 'Wn', ',', 'btype', '=', 'btype', ',', 'ftype', '=', 'ftype', ',', 'rs', '=', 'Rs', ')', ',', ']', 'fdata', '=', 'data', '.', '_copy', '(', ')', 'for', 'i', 'in', 'range', '(', 'data', '.', 'number_of', '(', "'trial'", ')', ')', ':', 'x', '=', 'data', '.', 'data', '[', 'i', ']', 'for', 'b', ',', 'a', 'in', 'b_a', ':', 'x', '=', 'filtfilt', '(', 'b', ',', 'a', ',', 'x', ',', 'axis', '=', 'data', '.', 'index_of', '(', 'axis', ')', ')', 'fdata', '.', 'data', '[', 'i', ']', '=', 'x', 'return', 'fdata']
Design filter and apply it.

Parameters
----------
ftype : str
    'butter', 'cheby1', 'cheby2', 'ellip', 'bessel', 'diff', or 'notch'
axis : str, optional
    axis to apply the filter on.
low_cut : float, optional (not for notch)
    low cutoff for high-pass filter
high_cut : float, optional (not for notch)
    high cutoff for low-pass filter
order : int, optional (not for notch)
    filter order
data : instance of Data (not for notch)
    the data to filter.
notchfreq : float (only for notch)
    frequency to apply notch filter to (+ harmonics)
notchquality : int (only for notch)
    Quality factor (see scipy.signal.iirnotch)

Returns
-------
filtered_data : instance of DataRaw
    filtered data

Notes
-----
You can specify any filter type as defined by iirfilter.

If you specify low_cut only, it generates a high-pass filter.
If you specify high_cut only, it generates a low-pass filter.
If you specify both, it generates a band-pass filter.

low_cut and high_cut should be given as ratio of the Nyquist. But if you
specify s_freq, then the ratio will be computed automatically.

Raises
------
ValueError
    if the cutoff frequency is larger than the Nyquist frequency.
['Design', 'filter', 'and', 'apply', 'it', '.']
train
https://github.com/wonambi-python/wonambi/blob/1d8e3d7e53df8017c199f703bcab582914676e76/wonambi/trans/filter.py#L18-L108
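A self-contained sketch of the core design-and-apply step on a synthetic signal, using scipy directly rather than wonambi's Data object (cutoffs expressed as a ratio of Nyquist, as the docstring describes):

```python
import numpy as np
from scipy.signal import filtfilt, iirfilter

s_freq = 256.0                 # sampling rate, Hz
nyquist = s_freq / 2.0
t = np.arange(0, 2, 1 / s_freq)
x = np.sin(2 * np.pi * 10 * t) + np.sin(2 * np.pi * 80 * t)   # 10 Hz + 80 Hz

# band-pass 5-30 Hz: cutoffs are given as a ratio of the Nyquist frequency
b, a = iirfilter(4, (5 / nyquist, 30 / nyquist), btype='bandpass', ftype='butter')
y = filtfilt(b, a, x)          # zero-phase: one forward and one backward pass

print(y.shape)                 # same shape as the input
```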
6,212
quantopian/zipline
zipline/pipeline/data/dataset.py
DataSetFamily.slice
def slice(cls, *args, **kwargs):
    """Take a slice of a DataSetFamily to produce a dataset
    indexed by asset and date.

    Parameters
    ----------
    *args
    **kwargs
        The coordinates to fix along each extra dimension.

    Returns
    -------
    dataset : DataSet
        A regular pipeline dataset indexed by asset and date.

    Notes
    -----
    The extra dimensions coords used to produce the result are available
    under the ``extra_coords`` attribute.
    """
    coords, hash_key = cls._canonical_key(args, kwargs)
    try:
        return cls._slice_cache[hash_key]
    except KeyError:
        pass

    Slice = cls._make_dataset(coords)
    cls._slice_cache[hash_key] = Slice
    return Slice
python
def slice(cls, *args, **kwargs):
    """Take a slice of a DataSetFamily to produce a dataset
    indexed by asset and date.

    Parameters
    ----------
    *args
    **kwargs
        The coordinates to fix along each extra dimension.

    Returns
    -------
    dataset : DataSet
        A regular pipeline dataset indexed by asset and date.

    Notes
    -----
    The extra dimensions coords used to produce the result are available
    under the ``extra_coords`` attribute.
    """
    coords, hash_key = cls._canonical_key(args, kwargs)
    try:
        return cls._slice_cache[hash_key]
    except KeyError:
        pass

    Slice = cls._make_dataset(coords)
    cls._slice_cache[hash_key] = Slice
    return Slice
['def', 'slice', '(', 'cls', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'coords', ',', 'hash_key', '=', 'cls', '.', '_canonical_key', '(', 'args', ',', 'kwargs', ')', 'try', ':', 'return', 'cls', '.', '_slice_cache', '[', 'hash_key', ']', 'except', 'KeyError', ':', 'pass', 'Slice', '=', 'cls', '.', '_make_dataset', '(', 'coords', ')', 'cls', '.', '_slice_cache', '[', 'hash_key', ']', '=', 'Slice', 'return', 'Slice']
Take a slice of a DataSetFamily to produce a dataset
indexed by asset and date.

Parameters
----------
*args
**kwargs
    The coordinates to fix along each extra dimension.

Returns
-------
dataset : DataSet
    A regular pipeline dataset indexed by asset and date.

Notes
-----
The extra dimensions coords used to produce the result are available
under the ``extra_coords`` attribute.
['Take', 'a', 'slice', 'of', 'a', 'DataSetFamily', 'to', 'produce', 'a', 'dataset', 'indexed', 'by', 'asset', 'and', 'date', '.']
train
https://github.com/quantopian/zipline/blob/77ad15e6dc4c1cbcdc133653bac8a63fc704f7fe/zipline/pipeline/data/dataset.py#L826-L854
6,213
DataONEorg/d1_python
gmn/src/d1_gmn/app/sysmeta_extract.py
assert_invalid_field_list
def assert_invalid_field_list(field_list):
    """raise d1_common.types.exceptions.InvalidRequest() if ``field_list``
    contains any invalid field names. A list of the invalid fields is
    included in the exception.

    - Implicitly called by ``extract_values()``.
    """
    if field_list is not None:
        invalid_field_list = [
            v for v in field_list if v not in get_valid_field_name_list()
        ]
        if invalid_field_list:
            raise d1_common.types.exceptions.InvalidRequest(
                0, "Invalid fields: {}".format(", ".join(invalid_field_list))
            )
python
def assert_invalid_field_list(field_list):
    """raise d1_common.types.exceptions.InvalidRequest() if ``field_list``
    contains any invalid field names. A list of the invalid fields is
    included in the exception.

    - Implicitly called by ``extract_values()``.
    """
    if field_list is not None:
        invalid_field_list = [
            v for v in field_list if v not in get_valid_field_name_list()
        ]
        if invalid_field_list:
            raise d1_common.types.exceptions.InvalidRequest(
                0, "Invalid fields: {}".format(", ".join(invalid_field_list))
            )
['def', 'assert_invalid_field_list', '(', 'field_list', ')', ':', 'if', 'field_list', 'is', 'not', 'None', ':', 'invalid_field_list', '=', '[', 'v', 'for', 'v', 'in', 'field_list', 'if', 'v', 'not', 'in', 'get_valid_field_name_list', '(', ')', ']', 'if', 'invalid_field_list', ':', 'raise', 'd1_common', '.', 'types', '.', 'exceptions', '.', 'InvalidRequest', '(', '0', ',', '"Invalid fields: {}"', '.', 'format', '(', '", "', '.', 'join', '(', 'invalid_field_list', ')', ')', ')']
raise d1_common.types.exceptions.InvalidRequest() if ``field_list`` contains
any invalid field names. A list of the invalid fields is included in the
exception.

- Implicitly called by ``extract_values()``.
['raise', 'd1_common', '.', 'types', '.', 'exceptions', '.', 'InvalidRequest', '()', 'if', 'field_list', 'contains', 'any', 'invalid', 'field', 'names', '.', 'A', 'list', 'of', 'the', 'invalid', 'fields', 'is', 'included', 'in', 'the', 'exception', '.']
train
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/sysmeta_extract.py#L130-L144
6,214
awslabs/aws-sam-cli
samcli/local/docker/lambda_build_container.py
LambdaBuildContainer._convert_to_container_dirs
def _convert_to_container_dirs(host_paths_to_convert, host_to_container_path_mapping):
    """
    Use this method to convert a list of host paths to a list of equivalent paths within the container
    where the given host path is mounted. This is necessary when SAM CLI needs to pass path information to
    the Lambda Builder running within the container.

    If a host path is not mounted within the container, then this method simply passes the path to the result
    without any changes.

    Ex:
        ["/home/foo", "/home/bar", "/home/not/mounted"]  =>  ["/tmp/source", "/tmp/manifest", "/home/not/mounted"]

    Parameters
    ----------
    host_paths_to_convert : list
        List of paths in host that needs to be converted

    host_to_container_path_mapping : dict
        Mapping of paths in host to the equivalent paths within the container

    Returns
    -------
    list
        Equivalent paths within the container
    """

    if not host_paths_to_convert:
        # Nothing to do
        return host_paths_to_convert

    # Make sure the key is absolute host path. Relative paths are tricky to work with because two different
    # relative paths can point to the same directory ("../foo", "../../foo")
    mapping = {str(pathlib.Path(p).resolve()): v for p, v in host_to_container_path_mapping.items()}

    result = []
    for original_path in host_paths_to_convert:
        abspath = str(pathlib.Path(original_path).resolve())

        if abspath in mapping:
            result.append(mapping[abspath])
        else:
            result.append(original_path)
            LOG.debug("Cannot convert host path '%s' to its equivalent path within the container. "
                      "Host path is not mounted within the container", abspath)

    return result
python
def _convert_to_container_dirs(host_paths_to_convert, host_to_container_path_mapping):
    """
    Use this method to convert a list of host paths to a list of equivalent paths within the container
    where the given host path is mounted. This is necessary when SAM CLI needs to pass path information to
    the Lambda Builder running within the container.

    If a host path is not mounted within the container, then this method simply passes the path to the result
    without any changes.

    Ex:
        ["/home/foo", "/home/bar", "/home/not/mounted"]  =>  ["/tmp/source", "/tmp/manifest", "/home/not/mounted"]

    Parameters
    ----------
    host_paths_to_convert : list
        List of paths in host that needs to be converted

    host_to_container_path_mapping : dict
        Mapping of paths in host to the equivalent paths within the container

    Returns
    -------
    list
        Equivalent paths within the container
    """

    if not host_paths_to_convert:
        # Nothing to do
        return host_paths_to_convert

    # Make sure the key is absolute host path. Relative paths are tricky to work with because two different
    # relative paths can point to the same directory ("../foo", "../../foo")
    mapping = {str(pathlib.Path(p).resolve()): v for p, v in host_to_container_path_mapping.items()}

    result = []
    for original_path in host_paths_to_convert:
        abspath = str(pathlib.Path(original_path).resolve())

        if abspath in mapping:
            result.append(mapping[abspath])
        else:
            result.append(original_path)
            LOG.debug("Cannot convert host path '%s' to its equivalent path within the container. "
                      "Host path is not mounted within the container", abspath)

    return result
['def', '_convert_to_container_dirs', '(', 'host_paths_to_convert', ',', 'host_to_container_path_mapping', ')', ':', 'if', 'not', 'host_paths_to_convert', ':', '# Nothing to do', 'return', 'host_paths_to_convert', '# Make sure the key is absolute host path. Relative paths are tricky to work with because two different', '# relative paths can point to the same directory ("../foo", "../../foo")', 'mapping', '=', '{', 'str', '(', 'pathlib', '.', 'Path', '(', 'p', ')', '.', 'resolve', '(', ')', ')', ':', 'v', 'for', 'p', ',', 'v', 'in', 'host_to_container_path_mapping', '.', 'items', '(', ')', '}', 'result', '=', '[', ']', 'for', 'original_path', 'in', 'host_paths_to_convert', ':', 'abspath', '=', 'str', '(', 'pathlib', '.', 'Path', '(', 'original_path', ')', '.', 'resolve', '(', ')', ')', 'if', 'abspath', 'in', 'mapping', ':', 'result', '.', 'append', '(', 'mapping', '[', 'abspath', ']', ')', 'else', ':', 'result', '.', 'append', '(', 'original_path', ')', 'LOG', '.', 'debug', '(', '"Cannot convert host path \'%s\' to its equivalent path within the container. "', '"Host path is not mounted within the container"', ',', 'abspath', ')', 'return', 'result']
Use this method to convert a list of host paths to a list of equivalent paths within the container
where the given host path is mounted. This is necessary when SAM CLI needs to pass path information to
the Lambda Builder running within the container.

If a host path is not mounted within the container, then this method simply passes the path to the result
without any changes.

Ex:
    ["/home/foo", "/home/bar", "/home/not/mounted"]  =>  ["/tmp/source", "/tmp/manifest", "/home/not/mounted"]

Parameters
----------
host_paths_to_convert : list
    List of paths in host that needs to be converted

host_to_container_path_mapping : dict
    Mapping of paths in host to the equivalent paths within the container

Returns
-------
list
    Equivalent paths within the container
['Use', 'this', 'method', 'to', 'convert', 'a', 'list', 'of', 'host', 'paths', 'to', 'a', 'list', 'of', 'equivalent', 'paths', 'within', 'the', 'container', 'where', 'the', 'given', 'host', 'path', 'is', 'mounted', '.', 'This', 'is', 'necessary', 'when', 'SAM', 'CLI', 'needs', 'to', 'pass', 'path', 'information', 'to', 'the', 'Lambda', 'Builder', 'running', 'within', 'the', 'container', '.']
train
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/lambda_build_container.py#L183-L228
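A runnable reduction of the same normalization trick: resolve both the mapping keys and the query paths to absolute form before lookup. The mount mapping below is invented for illustration:

```python
import pathlib

def to_container_dirs(host_paths, host_to_container):
    # resolve() collapses ".." and relative prefixes, so differently spelled
    # paths that point at the same directory hit the same mapping key
    mapping = {str(pathlib.Path(p).resolve()): v
               for p, v in host_to_container.items()}
    return [mapping.get(str(pathlib.Path(p).resolve()), p) for p in host_paths]

mounts = {"./src": "/tmp/source", "./src/../manifest": "/tmp/manifest"}
print(to_container_dirs(["src", "manifest", "/not/mounted"], mounts))
# ['/tmp/source', '/tmp/manifest', '/not/mounted']
```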
6,215
ConsenSys/mythril-classic
mythril/analysis/modules/delegatecall.py
_analyze_states
def _analyze_states(state: GlobalState) -> List[Issue]:
    """
    :param state: the current state
    :return: returns the issues for that corresponding state
    """
    call = get_call_from_state(state)
    if call is None:
        return []
    issues = []  # type: List[Issue]

    if call.type is not "DELEGATECALL":
        return []
    if state.environment.active_function_name is not "fallback":
        return []

    state = call.state
    address = state.get_current_instruction()["address"]
    meminstart = get_variable(state.mstate.stack[-3])

    if meminstart.type == VarType.CONCRETE:
        issues += _concrete_call(call, state, address, meminstart)

    return issues
python
def _analyze_states(state: GlobalState) -> List[Issue]:
    """
    :param state: the current state
    :return: returns the issues for that corresponding state
    """
    call = get_call_from_state(state)
    if call is None:
        return []
    issues = []  # type: List[Issue]

    if call.type is not "DELEGATECALL":
        return []
    if state.environment.active_function_name is not "fallback":
        return []

    state = call.state
    address = state.get_current_instruction()["address"]
    meminstart = get_variable(state.mstate.stack[-3])

    if meminstart.type == VarType.CONCRETE:
        issues += _concrete_call(call, state, address, meminstart)

    return issues
['def', '_analyze_states', '(', 'state', ':', 'GlobalState', ')', '->', 'List', '[', 'Issue', ']', ':', 'call', '=', 'get_call_from_state', '(', 'state', ')', 'if', 'call', 'is', 'None', ':', 'return', '[', ']', 'issues', '=', '[', ']', '# type: List[Issue]', 'if', 'call', '.', 'type', 'is', 'not', '"DELEGATECALL"', ':', 'return', '[', ']', 'if', 'state', '.', 'environment', '.', 'active_function_name', 'is', 'not', '"fallback"', ':', 'return', '[', ']', 'state', '=', 'call', '.', 'state', 'address', '=', 'state', '.', 'get_current_instruction', '(', ')', '[', '"address"', ']', 'meminstart', '=', 'get_variable', '(', 'state', '.', 'mstate', '.', 'stack', '[', '-', '3', ']', ')', 'if', 'meminstart', '.', 'type', '==', 'VarType', '.', 'CONCRETE', ':', 'issues', '+=', '_concrete_call', '(', 'call', ',', 'state', ',', 'address', ',', 'meminstart', ')', 'return', 'issues']
:param state: the current state
:return: returns the issues for that corresponding state
[':', 'param', 'state', ':', 'the', 'current', 'state', ':', 'return', ':', 'returns', 'the', 'issues', 'for', 'that', 'corresponding', 'state']
train
https://github.com/ConsenSys/mythril-classic/blob/27af71c34b2ce94f4fae5613ec457f93df1a8f56/mythril/analysis/modules/delegatecall.py#L41-L63
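Note the `is not "DELEGATECALL"` comparisons: `is` tests identity, not equality, so these guards work only by the accident of string interning (CPython 3.8+ emits a SyntaxWarning for them). A tiny demonstration, with the presumably intended `!=` form in comments:

```python
a = "".join(["DELEGATE", "CALL"])   # built at runtime, so not interned
print(a == "DELEGATECALL")          # True  -- value equality
print(a is "DELEGATECALL")          # typically False, plus a SyntaxWarning on 3.8+

# the guards as they presumably should read:
# if call.type != "DELEGATECALL": return []
# if state.environment.active_function_name != "fallback": return []
```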
6,216
SheffieldML/GPy
GPy/util/netpbmfile.py
NetpbmFile._header
def _header(self, pam=False):
    """Return file header as byte string."""
    if pam or self.magicnum == b'P7':
        header = "\n".join((
            "P7",
            "HEIGHT %i" % self.height,
            "WIDTH %i" % self.width,
            "DEPTH %i" % self.depth,
            "MAXVAL %i" % self.maxval,
            "\n".join("TUPLTYPE %s" % unicode(i) for i in self.tupltypes),
            "ENDHDR\n"))
    elif self.maxval == 1:
        header = "P4 %i %i\n" % (self.width, self.height)
    elif self.depth == 1:
        header = "P5 %i %i %i\n" % (self.width, self.height, self.maxval)
    else:
        header = "P6 %i %i %i\n" % (self.width, self.height, self.maxval)
    if sys.version_info[0] > 2:
        header = bytes(header, 'ascii')
    return header
python
def _header(self, pam=False):
    """Return file header as byte string."""
    if pam or self.magicnum == b'P7':
        header = "\n".join((
            "P7",
            "HEIGHT %i" % self.height,
            "WIDTH %i" % self.width,
            "DEPTH %i" % self.depth,
            "MAXVAL %i" % self.maxval,
            "\n".join("TUPLTYPE %s" % unicode(i) for i in self.tupltypes),
            "ENDHDR\n"))
    elif self.maxval == 1:
        header = "P4 %i %i\n" % (self.width, self.height)
    elif self.depth == 1:
        header = "P5 %i %i %i\n" % (self.width, self.height, self.maxval)
    else:
        header = "P6 %i %i %i\n" % (self.width, self.height, self.maxval)
    if sys.version_info[0] > 2:
        header = bytes(header, 'ascii')
    return header
['def', '_header', '(', 'self', ',', 'pam', '=', 'False', ')', ':', 'if', 'pam', 'or', 'self', '.', 'magicnum', '==', "b'P7'", ':', 'header', '=', '"\\n"', '.', 'join', '(', '(', '"P7"', ',', '"HEIGHT %i"', '%', 'self', '.', 'height', ',', '"WIDTH %i"', '%', 'self', '.', 'width', ',', '"DEPTH %i"', '%', 'self', '.', 'depth', ',', '"MAXVAL %i"', '%', 'self', '.', 'maxval', ',', '"\\n"', '.', 'join', '(', '"TUPLTYPE %s"', '%', 'unicode', '(', 'i', ')', 'for', 'i', 'in', 'self', '.', 'tupltypes', ')', ',', '"ENDHDR\\n"', ')', ')', 'elif', 'self', '.', 'maxval', '==', '1', ':', 'header', '=', '"P4 %i %i\\n"', '%', '(', 'self', '.', 'width', ',', 'self', '.', 'height', ')', 'elif', 'self', '.', 'depth', '==', '1', ':', 'header', '=', '"P5 %i %i %i\\n"', '%', '(', 'self', '.', 'width', ',', 'self', '.', 'height', ',', 'self', '.', 'maxval', ')', 'else', ':', 'header', '=', '"P6 %i %i %i\\n"', '%', '(', 'self', '.', 'width', ',', 'self', '.', 'height', ',', 'self', '.', 'maxval', ')', 'if', 'sys', '.', 'version_info', '[', '0', ']', '>', '2', ':', 'header', '=', 'bytes', '(', 'header', ',', "'ascii'", ')', 'return', 'header']
Return file header as byte string.
['Return', 'file', 'header', 'as', 'byte', 'string', '.']
train
https://github.com/SheffieldML/GPy/blob/54c32d79d289d622fb18b898aee65a2a431d90cf/GPy/util/netpbmfile.py#L274-L293
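To make the header formats concrete, a minimal standalone writer for the binary greyscale case (the 'P5' branch above); write_pgm is a made-up helper, not part of netpbmfile:

```python
import numpy as np

def write_pgm(path, image, maxval=255):
    """Write a 2-D uint8 array as a binary PGM (magic 'P5')."""
    height, width = image.shape
    header = b"P5 %i %i %i\n" % (width, height, maxval)
    with open(path, "wb") as f:
        f.write(header)
        f.write(np.ascontiguousarray(image, dtype=np.uint8).tobytes())

gradient = np.tile(np.arange(256, dtype=np.uint8), (64, 1))   # 64x256 ramp
write_pgm("gradient.pgm", gradient)
```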
6,217
binux/pyspider
pyspider/run.py
processor
def processor(ctx, processor_cls, process_time_limit, enable_stdout_capture=True, get_object=False):
    """
    Run Processor.
    """
    g = ctx.obj
    Processor = load_cls(None, None, processor_cls)

    processor = Processor(projectdb=g.projectdb,
                          inqueue=g.fetcher2processor, status_queue=g.status_queue,
                          newtask_queue=g.newtask_queue, result_queue=g.processor2result,
                          enable_stdout_capture=enable_stdout_capture,
                          process_time_limit=process_time_limit)

    g.instances.append(processor)
    if g.get('testing_mode') or get_object:
        return processor

    processor.run()
python
def processor(ctx, processor_cls, process_time_limit, enable_stdout_capture=True, get_object=False):
    """
    Run Processor.
    """
    g = ctx.obj
    Processor = load_cls(None, None, processor_cls)

    processor = Processor(projectdb=g.projectdb,
                          inqueue=g.fetcher2processor, status_queue=g.status_queue,
                          newtask_queue=g.newtask_queue, result_queue=g.processor2result,
                          enable_stdout_capture=enable_stdout_capture,
                          process_time_limit=process_time_limit)

    g.instances.append(processor)
    if g.get('testing_mode') or get_object:
        return processor

    processor.run()
['def', 'processor', '(', 'ctx', ',', 'processor_cls', ',', 'process_time_limit', ',', 'enable_stdout_capture', '=', 'True', ',', 'get_object', '=', 'False', ')', ':', 'g', '=', 'ctx', '.', 'obj', 'Processor', '=', 'load_cls', '(', 'None', ',', 'None', ',', 'processor_cls', ')', 'processor', '=', 'Processor', '(', 'projectdb', '=', 'g', '.', 'projectdb', ',', 'inqueue', '=', 'g', '.', 'fetcher2processor', ',', 'status_queue', '=', 'g', '.', 'status_queue', ',', 'newtask_queue', '=', 'g', '.', 'newtask_queue', ',', 'result_queue', '=', 'g', '.', 'processor2result', ',', 'enable_stdout_capture', '=', 'enable_stdout_capture', ',', 'process_time_limit', '=', 'process_time_limit', ')', 'g', '.', 'instances', '.', 'append', '(', 'processor', ')', 'if', 'g', '.', 'get', '(', "'testing_mode'", ')', 'or', 'get_object', ':', 'return', 'processor', 'processor', '.', 'run', '(', ')']
Run Processor.
['Run', 'Processor', '.']
train
https://github.com/binux/pyspider/blob/3fccfabe2b057b7a56d4a4c79dc0dd6cd2239fe9/pyspider/run.py#L277-L294
6,218
tumblr/pytumblr
pytumblr/__init__.py
TumblrRestClient.create_photo
def create_photo(self, blogname, **kwargs):
    """
    Create a photo post or photoset on a blog

    :param blogname: a string, the url of the blog you want to post to.
    :param state: a string, The state of the post.
    :param tags: a list of tags that you want applied to the post
    :param tweet: a string, the customized tweet that you want
    :param date: a string, the GMT date and time of the post
    :param format: a string, sets the format type of the post. html or markdown
    :param slug: a string, a short text summary to the end of the post url
    :param caption: a string, the caption that you want applied to the photo
    :param link: a string, the 'click-through' url you want on the photo
    :param source: a string, the photo source url
    :param data: a string or a list of the path of photo(s)

    :returns: a dict created from the JSON response
    """
    kwargs.update({"type": "photo"})
    return self._send_post(blogname, kwargs)
python
def create_photo(self, blogname, **kwargs):
    """
    Create a photo post or photoset on a blog

    :param blogname: a string, the url of the blog you want to post to.
    :param state: a string, The state of the post.
    :param tags: a list of tags that you want applied to the post
    :param tweet: a string, the customized tweet that you want
    :param date: a string, the GMT date and time of the post
    :param format: a string, sets the format type of the post. html or markdown
    :param slug: a string, a short text summary to the end of the post url
    :param caption: a string, the caption that you want applied to the photo
    :param link: a string, the 'click-through' url you want on the photo
    :param source: a string, the photo source url
    :param data: a string or a list of the path of photo(s)

    :returns: a dict created from the JSON response
    """
    kwargs.update({"type": "photo"})
    return self._send_post(blogname, kwargs)
['def', 'create_photo', '(', 'self', ',', 'blogname', ',', '*', '*', 'kwargs', ')', ':', 'kwargs', '.', 'update', '(', '{', '"type"', ':', '"photo"', '}', ')', 'return', 'self', '.', '_send_post', '(', 'blogname', ',', 'kwargs', ')']
Create a photo post or photoset on a blog

:param blogname: a string, the url of the blog you want to post to.
:param state: a string, The state of the post.
:param tags: a list of tags that you want applied to the post
:param tweet: a string, the customized tweet that you want
:param date: a string, the GMT date and time of the post
:param format: a string, sets the format type of the post. html or markdown
:param slug: a string, a short text summary to the end of the post url
:param caption: a string, the caption that you want applied to the photo
:param link: a string, the 'click-through' url you want on the photo
:param source: a string, the photo source url
:param data: a string or a list of the path of photo(s)

:returns: a dict created from the JSON response
['Create', 'a', 'photo', 'post', 'or', 'photoset', 'on', 'a', 'blog']
train
https://github.com/tumblr/pytumblr/blob/4a5cd7c4b8ae78d12811d9fd52620afa1692a415/pytumblr/__init__.py#L289-L308
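A hedged usage sketch following the pattern in the pytumblr README (credentials and paths are placeholders):

```python
import pytumblr

# OAuth credentials are placeholders
client = pytumblr.TumblrRestClient(
    "consumer_key", "consumer_secret", "oauth_token", "oauth_secret"
)

client.create_photo(
    "myblog.tumblr.com",
    state="published",
    tags=["testing", "ok"],
    data="/path/to/photo.jpg",   # or a list of paths for a photoset
)
```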
6,219
david-cortes/costsensitive
costsensitive/__init__.py
FilterTree.fit
def fit(self, X, C):
    """
    Fit a filter tree classifier

    Note
    ----
    Shifting the order of the classes within the cost array will produce different
    results, as it will build a different binary tree comparing different classes
    at each node.

    Parameters
    ----------
    X : array (n_samples, n_features)
        The data on which to fit a cost-sensitive classifier.
    C : array (n_samples, n_classes)
        The cost of predicting each label for each observation (more means worse).
    """
    X,C = _check_fit_input(X,C)
    C = np.asfortranarray(C)
    nclasses=C.shape[1]
    self.tree=_BinTree(nclasses)
    self.classifiers=[deepcopy(self.base_classifier) for c in range(nclasses-1)]
    classifier_queue=self.tree.is_at_bottom
    next_round=list()
    already_fitted=set()
    labels_take=-np.ones((X.shape[0],len(self.classifiers)))
    while True:
        for c in classifier_queue:
            if c in already_fitted or (c is None):
                continue
            child1, child2 = self.tree.childs[c]
            if (child1>0) and (child1 not in already_fitted):
                continue
            if (child2>0) and (child2 not in already_fitted):
                continue
            if child1<=0:
                class1=-np.repeat(child1,X.shape[0]).astype("int64")
            else:
                class1=labels_take[:, child1].astype("int64")
            if child2<=0:
                class2=-np.repeat(child2,X.shape[0]).astype("int64")
            else:
                class2=labels_take[:, child2].astype("int64")
            cost1=C[np.arange(X.shape[0]),np.clip(class1,a_min=0,a_max=None)]
            cost2=C[np.arange(X.shape[0]),np.clip(class2,a_min=0,a_max=None)]
            y=(cost1<cost2).astype('uint8')
            w=np.abs(cost1-cost2)
            valid_obs=w>0
            if child1>0:
                valid_obs=valid_obs&(labels_take[:,child1]>=0)
            if child2>0:
                valid_obs=valid_obs&(labels_take[:,child2]>=0)
            X_take=X[valid_obs,:]
            y_take=y[valid_obs]
            w_take=w[valid_obs]
            w_take=_standardize_weights(w_take)
            self.classifiers[c].fit(X_take,y_take,sample_weight=w_take)
            labels_arr=np.c_[class1,class2].astype("int64")
            labels_take[valid_obs,c]=labels_arr[np.repeat(0,X_take.shape[0]),\
                        self.classifiers[c].predict(X_take).reshape(-1).astype('uint8')]
            already_fitted.add(c)
            next_round.append(self.tree.parents[c])
            if c==0 or (len(classifier_queue)==0):
                break
        classifier_queue=list(set(next_round))
        next_round=list()
        if (len(classifier_queue)==0):
            break
    return self
python
def fit(self, X, C):
    """
    Fit a filter tree classifier

    Note
    ----
    Shifting the order of the classes within the cost array will produce different
    results, as it will build a different binary tree comparing different classes
    at each node.

    Parameters
    ----------
    X : array (n_samples, n_features)
        The data on which to fit a cost-sensitive classifier.
    C : array (n_samples, n_classes)
        The cost of predicting each label for each observation (more means worse).
    """
    X,C = _check_fit_input(X,C)
    C = np.asfortranarray(C)
    nclasses=C.shape[1]
    self.tree=_BinTree(nclasses)
    self.classifiers=[deepcopy(self.base_classifier) for c in range(nclasses-1)]
    classifier_queue=self.tree.is_at_bottom
    next_round=list()
    already_fitted=set()
    labels_take=-np.ones((X.shape[0],len(self.classifiers)))
    while True:
        for c in classifier_queue:
            if c in already_fitted or (c is None):
                continue
            child1, child2 = self.tree.childs[c]
            if (child1>0) and (child1 not in already_fitted):
                continue
            if (child2>0) and (child2 not in already_fitted):
                continue
            if child1<=0:
                class1=-np.repeat(child1,X.shape[0]).astype("int64")
            else:
                class1=labels_take[:, child1].astype("int64")
            if child2<=0:
                class2=-np.repeat(child2,X.shape[0]).astype("int64")
            else:
                class2=labels_take[:, child2].astype("int64")
            cost1=C[np.arange(X.shape[0]),np.clip(class1,a_min=0,a_max=None)]
            cost2=C[np.arange(X.shape[0]),np.clip(class2,a_min=0,a_max=None)]
            y=(cost1<cost2).astype('uint8')
            w=np.abs(cost1-cost2)
            valid_obs=w>0
            if child1>0:
                valid_obs=valid_obs&(labels_take[:,child1]>=0)
            if child2>0:
                valid_obs=valid_obs&(labels_take[:,child2]>=0)
            X_take=X[valid_obs,:]
            y_take=y[valid_obs]
            w_take=w[valid_obs]
            w_take=_standardize_weights(w_take)
            self.classifiers[c].fit(X_take,y_take,sample_weight=w_take)
            labels_arr=np.c_[class1,class2].astype("int64")
            labels_take[valid_obs,c]=labels_arr[np.repeat(0,X_take.shape[0]),\
                        self.classifiers[c].predict(X_take).reshape(-1).astype('uint8')]
            already_fitted.add(c)
            next_round.append(self.tree.parents[c])
            if c==0 or (len(classifier_queue)==0):
                break
        classifier_queue=list(set(next_round))
        next_round=list()
        if (len(classifier_queue)==0):
            break
    return self
['def', 'fit', '(', 'self', ',', 'X', ',', 'C', ')', ':', 'X', ',', 'C', '=', '_check_fit_input', '(', 'X', ',', 'C', ')', 'C', '=', 'np', '.', 'asfortranarray', '(', 'C', ')', 'nclasses', '=', 'C', '.', 'shape', '[', '1', ']', 'self', '.', 'tree', '=', '_BinTree', '(', 'nclasses', ')', 'self', '.', 'classifiers', '=', '[', 'deepcopy', '(', 'self', '.', 'base_classifier', ')', 'for', 'c', 'in', 'range', '(', 'nclasses', '-', '1', ')', ']', 'classifier_queue', '=', 'self', '.', 'tree', '.', 'is_at_bottom', 'next_round', '=', 'list', '(', ')', 'already_fitted', '=', 'set', '(', ')', 'labels_take', '=', '-', 'np', '.', 'ones', '(', '(', 'X', '.', 'shape', '[', '0', ']', ',', 'len', '(', 'self', '.', 'classifiers', ')', ')', ')', 'while', 'True', ':', 'for', 'c', 'in', 'classifier_queue', ':', 'if', 'c', 'in', 'already_fitted', 'or', '(', 'c', 'is', 'None', ')', ':', 'continue', 'child1', ',', 'child2', '=', 'self', '.', 'tree', '.', 'childs', '[', 'c', ']', 'if', '(', 'child1', '>', '0', ')', 'and', '(', 'child1', 'not', 'in', 'already_fitted', ')', ':', 'continue', 'if', '(', 'child2', '>', '0', ')', 'and', '(', 'child2', 'not', 'in', 'already_fitted', ')', ':', 'continue', 'if', 'child1', '<=', '0', ':', 'class1', '=', '-', 'np', '.', 'repeat', '(', 'child1', ',', 'X', '.', 'shape', '[', '0', ']', ')', '.', 'astype', '(', '"int64"', ')', 'else', ':', 'class1', '=', 'labels_take', '[', ':', ',', 'child1', ']', '.', 'astype', '(', '"int64"', ')', 'if', 'child2', '<=', '0', ':', 'class2', '=', '-', 'np', '.', 'repeat', '(', 'child2', ',', 'X', '.', 'shape', '[', '0', ']', ')', '.', 'astype', '(', '"int64"', ')', 'else', ':', 'class2', '=', 'labels_take', '[', ':', ',', 'child2', ']', '.', 'astype', '(', '"int64"', ')', 'cost1', '=', 'C', '[', 'np', '.', 'arange', '(', 'X', '.', 'shape', '[', '0', ']', ')', ',', 'np', '.', 'clip', '(', 'class1', ',', 'a_min', '=', '0', ',', 'a_max', '=', 'None', ')', ']', 'cost2', '=', 'C', '[', 'np', '.', 'arange', '(', 'X', '.', 'shape', '[', '0', ']', ')', ',', 'np', '.', 'clip', '(', 'class2', ',', 'a_min', '=', '0', ',', 'a_max', '=', 'None', ')', ']', 'y', '=', '(', 'cost1', '<', 'cost2', ')', '.', 'astype', '(', "'uint8'", ')', 'w', '=', 'np', '.', 'abs', '(', 'cost1', '-', 'cost2', ')', 'valid_obs', '=', 'w', '>', '0', 'if', 'child1', '>', '0', ':', 'valid_obs', '=', 'valid_obs', '&', '(', 'labels_take', '[', ':', ',', 'child1', ']', '>=', '0', ')', 'if', 'child2', '>', '0', ':', 'valid_obs', '=', 'valid_obs', '&', '(', 'labels_take', '[', ':', ',', 'child2', ']', '>=', '0', ')', 'X_take', '=', 'X', '[', 'valid_obs', ',', ':', ']', 'y_take', '=', 'y', '[', 'valid_obs', ']', 'w_take', '=', 'w', '[', 'valid_obs', ']', 'w_take', '=', '_standardize_weights', '(', 'w_take', ')', 'self', '.', 'classifiers', '[', 'c', ']', '.', 'fit', '(', 'X_take', ',', 'y_take', ',', 'sample_weight', '=', 'w_take', ')', 'labels_arr', '=', 'np', '.', 'c_', '[', 'class1', ',', 'class2', ']', '.', 'astype', '(', '"int64"', ')', 'labels_take', '[', 'valid_obs', ',', 'c', ']', '=', 'labels_arr', '[', 'np', '.', 'repeat', '(', '0', ',', 'X_take', '.', 'shape', '[', '0', ']', ')', ',', 'self', '.', 'classifiers', '[', 'c', ']', '.', 'predict', '(', 'X_take', ')', '.', 'reshape', '(', '-', '1', ')', '.', 'astype', '(', "'uint8'", ')', ']', 'already_fitted', '.', 'add', '(', 'c', ')', 'next_round', '.', 'append', '(', 'self', '.', 'tree', '.', 'parents', '[', 'c', ']', ')', 'if', 'c', '==', '0', 'or', '(', 'len', '(', 'classifier_queue', ')', '==', '0', ')', ':', 'break', 
'classifier_queue', '=', 'list', '(', 'set', '(', 'next_round', ')', ')', 'next_round', '=', 'list', '(', ')', 'if', '(', 'len', '(', 'classifier_queue', ')', '==', '0', ')', ':', 'break', 'return', 'self']
Fit a filter tree classifier Note ---- Shifting the order of the classes within the cost array will produce different results, as it will build a different binary tree comparing different classes at each node. Parameters ---------- X : array (n_samples, n_features) The data on which to fit a cost-sensitive classifier. C : array (n_samples, n_classes) The cost of predicting each label for each observation (more means worse).
['Fit', 'a', 'filter', 'tree', 'classifier', 'Note', '----', 'Shifting', 'the', 'order', 'of', 'the', 'classes', 'within', 'the', 'cost', 'array', 'will', 'produce', 'different', 'results', 'as', 'it', 'will', 'build', 'a', 'different', 'binary', 'tree', 'comparing', 'different', 'classes', 'at', 'each', 'node', '.', 'Parameters', '----------', 'X', ':', 'array', '(', 'n_samples', 'n_features', ')', 'The', 'data', 'on', 'which', 'to', 'fit', 'a', 'cost', '-', 'sensitive', 'classifier', '.', 'C', ':', 'array', '(', 'n_samples', 'n_classes', ')', 'The', 'cost', 'of', 'predicting', 'each', 'label', 'for', 'each', 'observation', '(', 'more', 'means', 'worse', ')', '.']
train
https://github.com/david-cortes/costsensitive/blob/355fbf20397ce673ce9e22048b6c52dbeeb354cc/costsensitive/__init__.py#L387-L462
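A minimal usage sketch for the fit method in the record above; the costsensitive import path and the idea that the constructor takes the base binary classifier as its first argument are assumptions for illustration, not confirmed by the record.

import numpy as np
from sklearn.linear_model import LogisticRegression
from costsensitive import FilterTree  # assumed import path

rng = np.random.default_rng(0)
X = rng.normal(size=(200, 5))   # (n_samples, n_features)
C = rng.uniform(size=(200, 4))  # (n_samples, n_classes); higher cost = worse
model = FilterTree(LogisticRegression())  # assumed constructor signature
model.fit(X, C)                 # internally fits nclasses - 1 binary classifiers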
6,220
NICTA/revrand
revrand/mathfun/special.py
softplus
def softplus(X): """ Pass X through a soft-plus function in a numerically stable way (using the log-sum-exp trick). The softplus transformation is: .. math:: \log(1 + \exp\{X\}) Parameters ---------- X: ndarray shape (N,) array or shape (N, D) array of data. Returns ------- spX: ndarray array of the same shape as X with the result of softplus(X). """ if np.isscalar(X): return logsumexp(np.vstack((np.zeros(1), [X])).T, axis=1)[0] N = X.shape[0] if X.ndim == 1: return logsumexp(np.vstack((np.zeros(N), X)).T, axis=1) elif X.ndim == 2: sftX = np.empty(X.shape, dtype=float) for d in range(X.shape[1]): sftX[:, d] = logsumexp(np.vstack((np.zeros(N), X[:, d])).T, axis=1) return sftX else: raise ValueError("This only works on up to 2D arrays.")
python
def softplus(X): """ Pass X through a soft-plus function in a numerically stable way (using the log-sum-exp trick). The softplus transformation is: .. math:: \log(1 + \exp\{X\}) Parameters ---------- X: ndarray shape (N,) array or shape (N, D) array of data. Returns ------- spX: ndarray array of the same shape as X with the result of softplus(X). """ if np.isscalar(X): return logsumexp(np.vstack((np.zeros(1), [X])).T, axis=1)[0] N = X.shape[0] if X.ndim == 1: return logsumexp(np.vstack((np.zeros(N), X)).T, axis=1) elif X.ndim == 2: sftX = np.empty(X.shape, dtype=float) for d in range(X.shape[1]): sftX[:, d] = logsumexp(np.vstack((np.zeros(N), X[:, d])).T, axis=1) return sftX else: raise ValueError("This only works on up to 2D arrays.")
['def', 'softplus', '(', 'X', ')', ':', 'if', 'np', '.', 'isscalar', '(', 'X', ')', ':', 'return', 'logsumexp', '(', 'np', '.', 'vstack', '(', '(', 'np', '.', 'zeros', '(', '1', ')', ',', '[', 'X', ']', ')', ')', '.', 'T', ',', 'axis', '=', '1', ')', '[', '0', ']', 'N', '=', 'X', '.', 'shape', '[', '0', ']', 'if', 'X', '.', 'ndim', '==', '1', ':', 'return', 'logsumexp', '(', 'np', '.', 'vstack', '(', '(', 'np', '.', 'zeros', '(', 'N', ')', ',', 'X', ')', ')', '.', 'T', ',', 'axis', '=', '1', ')', 'elif', 'X', '.', 'ndim', '==', '2', ':', 'sftX', '=', 'np', '.', 'empty', '(', 'X', '.', 'shape', ',', 'dtype', '=', 'float', ')', 'for', 'd', 'in', 'range', '(', 'X', '.', 'shape', '[', '1', ']', ')', ':', 'sftX', '[', ':', ',', 'd', ']', '=', 'logsumexp', '(', 'np', '.', 'vstack', '(', '(', 'np', '.', 'zeros', '(', 'N', ')', ',', 'X', '[', ':', ',', 'd', ']', ')', ')', '.', 'T', ',', 'axis', '=', '1', ')', 'return', 'sftX', 'else', ':', 'raise', 'ValueError', '(', '"This only works on up to 2D arrays."', ')']
Pass X through a soft-plus function in a numerically stable way (using the log-sum-exp trick). The softplus transformation is: .. math:: \log(1 + \exp\{X\}) Parameters ---------- X: ndarray shape (N,) array or shape (N, D) array of data. Returns ------- spX: ndarray array of the same shape as X with the result of softplus(X).
['Pass', 'X', 'through', 'a', 'soft', '-', 'plus', 'function', 'in', 'a', 'numerically', 'stable', 'way', '(', 'using', 'the', 'log', '-', 'sum', '-', 'exp', 'trick', ')', '.']
train
https://github.com/NICTA/revrand/blob/4c1881b6c1772d2b988518e49dde954f165acfb6/revrand/mathfun/special.py#L91-L124
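A quick numerical check of the stability claim in the record above; the import path is taken from the record's file path.

import numpy as np
from revrand.mathfun.special import softplus

softplus(0.0)                 # log(2) ~ 0.69314718
softplus(np.array([1000.0]))  # array([1000.]) -- a naive log(1 + exp(1000)) overflows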
6,221
TylerGubala/bpy-build
setup.py
InstallCMakeLibs.run
def run(self): """ Copy libraries from the bin directory and place them as appropriate """ self.announce("Moving library files", level=3) # We have already built the libraries in the previous build_ext step self.skip_build = True bin_dir = self.distribution.bin_dir libs = [os.path.join(bin_dir, _lib) for _lib in os.listdir(bin_dir) if os.path.isfile(os.path.join(bin_dir, _lib)) and os.path.splitext(_lib)[1] in [".dll", ".so"] and not (_lib.startswith("python") or _lib.startswith("bpy"))] for lib in libs: shutil.move(lib, os.path.join(self.build_dir, os.path.basename(lib))) # Mark the libs for installation, adding them to # distribution.data_files seems to ensure that setuptools' record # writer appends them to installed-files.txt in the package's egg-info # # Also tried adding the libraries to the distribution.libraries list, # but that never seemed to add them to the installed-files.txt in the # egg-info, and the online recommendation seems to be adding libraries # into eager_resources in the call to setup(), which I think puts them # in data_files anyways. # # What is the best way? self.distribution.data_files = [os.path.join(self.install_dir, os.path.basename(lib)) for lib in libs] # Must be forced to run after adding the libs to data_files self.distribution.run_command("install_data") super().run()
python
def run(self): """ Copy libraries from the bin directory and place them as appropriate """ self.announce("Moving library files", level=3) # We have already built the libraries in the previous build_ext step self.skip_build = True bin_dir = self.distribution.bin_dir libs = [os.path.join(bin_dir, _lib) for _lib in os.listdir(bin_dir) if os.path.isfile(os.path.join(bin_dir, _lib)) and os.path.splitext(_lib)[1] in [".dll", ".so"] and not (_lib.startswith("python") or _lib.startswith("bpy"))] for lib in libs: shutil.move(lib, os.path.join(self.build_dir, os.path.basename(lib))) # Mark the libs for installation, adding them to # distribution.data_files seems to ensure that setuptools' record # writer appends them to installed-files.txt in the package's egg-info # # Also tried adding the libraries to the distribution.libraries list, # but that never seemed to add them to the installed-files.txt in the # egg-info, and the online recommendation seems to be adding libraries # into eager_resources in the call to setup(), which I think puts them # in data_files anyways. # # What is the best way? self.distribution.data_files = [os.path.join(self.install_dir, os.path.basename(lib)) for lib in libs] # Must be forced to run after adding the libs to data_files self.distribution.run_command("install_data") super().run()
['def', 'run', '(', 'self', ')', ':', 'self', '.', 'announce', '(', '"Moving library files"', ',', 'level', '=', '3', ')', '# We have already built the libraries in the previous build_ext step', 'self', '.', 'skip_build', '=', 'True', 'bin_dir', '=', 'self', '.', 'distribution', '.', 'bin_dir', 'libs', '=', '[', 'os', '.', 'path', '.', 'join', '(', 'bin_dir', ',', '_lib', ')', 'for', '_lib', 'in', 'os', '.', 'listdir', '(', 'bin_dir', ')', 'if', 'os', '.', 'path', '.', 'isfile', '(', 'os', '.', 'path', '.', 'join', '(', 'bin_dir', ',', '_lib', ')', ')', 'and', 'os', '.', 'path', '.', 'splitext', '(', '_lib', ')', '[', '1', ']', 'in', '[', '".dll"', ',', '".so"', ']', 'and', 'not', '(', '_lib', '.', 'startswith', '(', '"python"', ')', 'or', '_lib', '.', 'startswith', '(', '"bpy"', ')', ')', ']', 'for', 'lib', 'in', 'libs', ':', 'shutil', '.', 'move', '(', 'lib', ',', 'os', '.', 'path', '.', 'join', '(', 'self', '.', 'build_dir', ',', 'os', '.', 'path', '.', 'basename', '(', 'lib', ')', ')', ')', '# Mark the libs for installation, adding them to ', "# distribution.data_files seems to ensure that setuptools' record ", "# writer appends them to installed-files.txt in the package's egg-info", '#', '# Also tried adding the libraries to the distribution.libraries list, ', '# but that never seemed to add them to the installed-files.txt in the ', '# egg-info, and the online recommendation seems to be adding libraries ', '# into eager_resources in the call to setup(), which I think puts them ', '# in data_files anyways. ', '# ', '# What is the best way?', 'self', '.', 'distribution', '.', 'data_files', '=', '[', 'os', '.', 'path', '.', 'join', '(', 'self', '.', 'install_dir', ',', 'os', '.', 'path', '.', 'basename', '(', 'lib', ')', ')', 'for', 'lib', 'in', 'libs', ']', '# Must be forced to run after adding the libs to data_files', 'self', '.', 'distribution', '.', 'run_command', '(', '"install_data"', ')', 'super', '(', ')', '.', 'run', '(', ')']
Copy libraries from the bin directory and place them as appropriate
['Copy', 'libraries', 'from', 'the', 'bin', 'directory', 'and', 'place', 'them', 'as', 'appropriate']
train
https://github.com/TylerGubala/bpy-build/blob/667d41526a346cfa271e26c5d675689c7ab1a254/setup.py#L181-L225
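The library-selection rule from run() in the record above, isolated so it can be inspected without a build; the bin directory path is hypothetical.

import os

bin_dir = "build/bin"  # hypothetical location of the built binaries
libs = [os.path.join(bin_dir, f) for f in os.listdir(bin_dir)
        if os.path.isfile(os.path.join(bin_dir, f))
        and os.path.splitext(f)[1] in (".dll", ".so")
        and not (f.startswith("python") or f.startswith("bpy"))]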
6,222
MycroftAI/adapt
adapt/intent.py
find_first_tag
def find_first_tag(tags, entity_type, after_index=-1): """Searches tags for an entity type after the given index Args: tags(list): a list of tags with entity types to be compared to entity_type entity_type(str): This is the entity type to be looking for in tags after_index(int): the start token must be greater than this. Returns: ( tag, v, confidence ): tag(str): is the tag that matched v(str): the word that matched confidence(float): is a measure of accuracy. 1 is full confidence and 0 is none. """ for tag in tags: for entity in tag.get('entities'): for v, t in entity.get('data'): if t.lower() == entity_type.lower() and tag.get('start_token', 0) > after_index: return tag, v, entity.get('confidence') return None, None, None
python
def find_first_tag(tags, entity_type, after_index=-1): """Searches tags for an entity type after the given index Args: tags(list): a list of tags with entity types to be compared to entity_type entity_type(str): This is the entity type to be looking for in tags after_index(int): the start token must be greater than this. Returns: ( tag, v, confidence ): tag(str): is the tag that matched v(str): the word that matched confidence(float): is a measure of accuracy. 1 is full confidence and 0 is none. """ for tag in tags: for entity in tag.get('entities'): for v, t in entity.get('data'): if t.lower() == entity_type.lower() and tag.get('start_token', 0) > after_index: return tag, v, entity.get('confidence') return None, None, None
['def', 'find_first_tag', '(', 'tags', ',', 'entity_type', ',', 'after_index', '=', '-', '1', ')', ':', 'for', 'tag', 'in', 'tags', ':', 'for', 'entity', 'in', 'tag', '.', 'get', '(', "'entities'", ')', ':', 'for', 'v', ',', 't', 'in', 'entity', '.', 'get', '(', "'data'", ')', ':', 'if', 't', '.', 'lower', '(', ')', '==', 'entity_type', '.', 'lower', '(', ')', 'and', 'tag', '.', 'get', '(', "'start_token'", ',', '0', ')', '>', 'after_index', ':', 'return', 'tag', ',', 'v', ',', 'entity', '.', 'get', '(', "'confidence'", ')', 'return', 'None', ',', 'None', ',', 'None']
Searches tags for an entity type after the given index Args: tags(list): a list of tags with entity types to be compared to entity_type entity_type(str): This is the entity type to be looking for in tags after_index(int): the start token must be greater than this. Returns: ( tag, v, confidence ): tag(str): is the tag that matched v(str): the word that matched confidence(float): is a measure of accuracy. 1 is full confidence and 0 is none.
['Searches', 'tags', 'for', 'entity', 'type', 'after', 'given', 'index']
train
https://github.com/MycroftAI/adapt/blob/334f23248b8e09fb9d84a88398424ec5bd3bae4c/adapt/intent.py#L29-L49
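A small, self-contained check of find_first_tag from the record above, using the import path from the record; the tag dictionary is made up to match the structure the loops expect (entities -> data -> (word, type) pairs, with confidence on the entity).

from adapt.intent import find_first_tag  # module path taken from the record

tags = [{"start_token": 2, "entities": [{"data": [("turn on", "OnOffKeyword")], "confidence": 1.0}]}]
tag, word, conf = find_first_tag(tags, "OnOffKeyword")
# tag is the matching dict, word == "turn on", conf == 1.0
# with after_index=2 the same call returns (None, None, None)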
6,223
saltstack/salt
salt/modules/keystone.py
service_create
def service_create(name, service_type, description=None, profile=None, **connection_args): ''' Add service to Keystone service catalog CLI Examples: .. code-block:: bash salt '*' keystone.service_create nova compute \ 'OpenStack Compute Service' ''' kstone = auth(profile, **connection_args) service = kstone.services.create(name, service_type, description=description) return service_get(service.id, profile=profile, **connection_args)
python
def service_create(name, service_type, description=None, profile=None, **connection_args): ''' Add service to Keystone service catalog CLI Examples: .. code-block:: bash salt '*' keystone.service_create nova compute \ 'OpenStack Compute Service' ''' kstone = auth(profile, **connection_args) service = kstone.services.create(name, service_type, description=description) return service_get(service.id, profile=profile, **connection_args)
['def', 'service_create', '(', 'name', ',', 'service_type', ',', 'description', '=', 'None', ',', 'profile', '=', 'None', ',', '*', '*', 'connection_args', ')', ':', 'kstone', '=', 'auth', '(', 'profile', ',', '*', '*', 'connection_args', ')', 'service', '=', 'kstone', '.', 'services', '.', 'create', '(', 'name', ',', 'service_type', ',', 'description', '=', 'description', ')', 'return', 'service_get', '(', 'service', '.', 'id', ',', 'profile', '=', 'profile', ',', '*', '*', 'connection_args', ')']
Add service to Keystone service catalog CLI Examples: .. code-block:: bash salt '*' keystone.service_create nova compute \ 'OpenStack Compute Service'
['Add', 'service', 'to', 'Keystone', 'service', 'catalog']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/keystone.py#L525-L539
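Beyond the CLI example in the docstring, the execution module can also be driven from Python through Salt's local client; the target minion and service names here are illustrative.

import salt.client

local = salt.client.LocalClient()
ret = local.cmd('keystone-node', 'keystone.service_create', ['nova', 'compute', 'OpenStack Compute Service'])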
6,224
apache/incubator-mxnet
example/rnn/large_word_lm/model.py
generate_samples
def generate_samples(label, num_splits, sampler): """ Split labels into `num_splits` and generate candidates based on log-uniform distribution. """ def listify(x): return x if isinstance(x, list) else [x] label_splits = listify(label.split(num_splits, axis=0)) prob_samples = [] prob_targets = [] samples = [] for label_split in label_splits: label_split_2d = label_split.reshape((-1,1)) sampled_value = sampler.draw(label_split_2d) sampled_classes, exp_cnt_true, exp_cnt_sampled = sampled_value samples.append(sampled_classes.astype(np.float32)) prob_targets.append(exp_cnt_true.astype(np.float32).reshape((-1,1))) prob_samples.append(exp_cnt_sampled.astype(np.float32)) return samples, prob_samples, prob_targets
python
def generate_samples(label, num_splits, sampler): """ Split labels into `num_splits` and generate candidates based on log-uniform distribution. """ def listify(x): return x if isinstance(x, list) else [x] label_splits = listify(label.split(num_splits, axis=0)) prob_samples = [] prob_targets = [] samples = [] for label_split in label_splits: label_split_2d = label_split.reshape((-1,1)) sampled_value = sampler.draw(label_split_2d) sampled_classes, exp_cnt_true, exp_cnt_sampled = sampled_value samples.append(sampled_classes.astype(np.float32)) prob_targets.append(exp_cnt_true.astype(np.float32).reshape((-1,1))) prob_samples.append(exp_cnt_sampled.astype(np.float32)) return samples, prob_samples, prob_targets
['def', 'generate_samples', '(', 'label', ',', 'num_splits', ',', 'sampler', ')', ':', 'def', 'listify', '(', 'x', ')', ':', 'return', 'x', 'if', 'isinstance', '(', 'x', ',', 'list', ')', 'else', '[', 'x', ']', 'label_splits', '=', 'listify', '(', 'label', '.', 'split', '(', 'num_splits', ',', 'axis', '=', '0', ')', ')', 'prob_samples', '=', '[', ']', 'prob_targets', '=', '[', ']', 'samples', '=', '[', ']', 'for', 'label_split', 'in', 'label_splits', ':', 'label_split_2d', '=', 'label_split', '.', 'reshape', '(', '(', '-', '1', ',', '1', ')', ')', 'sampled_value', '=', 'sampler', '.', 'draw', '(', 'label_split_2d', ')', 'sampled_classes', ',', 'exp_cnt_true', ',', 'exp_cnt_sampled', '=', 'sampled_value', 'samples', '.', 'append', '(', 'sampled_classes', '.', 'astype', '(', 'np', '.', 'float32', ')', ')', 'prob_targets', '.', 'append', '(', 'exp_cnt_true', '.', 'astype', '(', 'np', '.', 'float32', ')', '.', 'reshape', '(', '(', '-', '1', ',', '1', ')', ')', ')', 'prob_samples', '.', 'append', '(', 'exp_cnt_sampled', '.', 'astype', '(', 'np', '.', 'float32', ')', ')', 'return', 'samples', ',', 'prob_samples', ',', 'prob_targets']
Split labels into `num_splits` and generate candidates based on log-uniform distribution.
['Split', 'labels', 'into', 'num_splits', 'and', 'generate', 'candidates', 'based', 'on', 'log', '-', 'uniform', 'distribution', '.']
train
https://github.com/apache/incubator-mxnet/blob/1af29e9c060a4c7d60eeaacba32afdb9a7775ba7/example/rnn/large_word_lm/model.py#L130-L147
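A stub sampler showing the draw() contract generate_samples relies on: a triple of (sampled classes, expected counts for the true labels, expected counts for the samples). The stub's values are placeholders only; run with generate_samples in scope.

import numpy as np
import mxnet as mx

class StubSampler:
    # draw() must return (sampled_classes, exp_cnt_true, exp_cnt_sampled)
    def draw(self, label_2d):
        n = label_2d.shape[0]
        sampled = mx.nd.arange(5)  # five fake candidate classes
        return sampled, mx.nd.ones((n,)), mx.nd.ones((5,))

label = mx.nd.array([1, 2, 3, 4])
samples, prob_samples, prob_targets = generate_samples(label, 2, StubSampler())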
6,225
rjdkmr/do_x3dna
dnaMD/dnaMD/dnaEY.py
dnaEY.getGlobalDeformationEnergy
def getGlobalDeformationEnergy(self, bp, complexDna, freeDnaFrames=None, boundDnaFrames=None, paxis='Z', which='all', masked=False, outFile=None): r"""Deformation energy of the input DNA using Global elastic properties It can be used to calculated deformation energy of a input DNA with reference to the DNA present in the current object. The deformation free energy is calculated using elastic matrix as follows .. math:: G = \frac{1}{2L_0}\mathbf{xKx^T} .. math:: \mathbf{x} = \begin{bmatrix} (\theta^{x} - \theta^{x}_0) & (\theta^{y} - \theta^{y}_0) & (L - L_0) & (\phi - \phi_0) \end{bmatrix} .. currentmodule:: dnaMD Parameters ---------- bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. complexDna : :class:`dnaMD.DNA` Input :class:`dnaMD.DNA` instance for which deformation energy will be calculated. freeDnaFrames : list To select a trajectory segment of current (free) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. boundDnaFrames : list To select a trajectory segment of input (bound) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. which : str or list For which motions, energy should be calculated. It should be either a list containing terms listed below or "all" for all energy terms. Following keywords are available: * ``'full'`` : Use entire elastic matrix -- all motions with their coupling * ``'diag'`` : Use diagonal of elastic matrix -- all motions but no coupling * ``'b1'`` : Only bending-1 motion * ``'b2'`` : Only bending-2 motion * ``'stretch'`` : Only stretching motion * ``'twist'`` : Only Twisting motions * ``'st_coupling'`` : Only stretch-twist coupling motion * ``'bs_coupling'`` : Only Bending and stretching coupling * ``'bt_coupling'`` : Only Bending and Twisting coupling * ``'bb_coupling'`` : Only bending-1 and bending-2 coupling * ``'bend'`` : Both bending motions with their coupling * ``'st'`` : Stretching and twisting motions with their coupling * ``'bs'`` : Bending (b1, b2) and stretching motions with their coupling * ``'bt'`` : Bending (b1, b2) and twisting motions with their coupling masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values. energy : OrderedDict of numpy.ndarray Dictionary of 1D array of shape (nframes) containing energy terms requested for DNA. 
""" if self.esType == 'BST': energyTerms = self.enGlobalTypes else: energyTerms = self.enGlobalTypes[:5] if isinstance(which, str): if which != 'all': raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format(energyTerms)) else: which = energyTerms elif isinstance(which, list): for key in which: if key not in energyTerms: raise ValueError('{0} is not a supported keyword.\n Use from the following list: \n{1}'.format( which, energyTerms)) else: raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format( energyTerms)) if self.esType == 'BST': means, esMatrix = self.getStretchTwistBendModulus(bp, frames=freeDnaFrames, masked=masked, matrix=True, paxis=paxis) else: means, esMatrix = self.getStretchTwistModulus(bp, frames=freeDnaFrames, masked=masked, matrix=True) esMatrix = 2.5 * esMatrix # Convert kT to kJ/mol time, array = self.extractGlobalParameters(complexDna, bp, frames=boundDnaFrames, paxis=paxis, masked=masked) # Initialize energy dictionary energyOut = OrderedDict() for key in which: energyOut[key] = [] for i in range(array[0].shape[0]): vec = array[:, i] diff = vec - means for key in which: if self.esType == 'BST': t_energy = self._calcEnergyBendStretchTwist(diff, esMatrix, key) else: t_energy = self._calcEnergyStretchTwist(diff, esMatrix, key) energyOut[key].append(t_energy) for key in which: energyOut[key] = np.asarray(energyOut[key]) # Write output file if outFile is not None: with open(outFile, 'w') as fout: fout.write('#Time') for name in which: fout.write(', {0}'.format(name)) fout.write('\n') for t in range(len(time)): fout.write('{0:.3f}'.format(time[t])) for name in which: fout.write(', {0:.5f}'.format(energyOut[name][t])) fout.write('\n') return time, energyOut
python
def getGlobalDeformationEnergy(self, bp, complexDna, freeDnaFrames=None, boundDnaFrames=None, paxis='Z', which='all', masked=False, outFile=None): r"""Deformation energy of the input DNA using Global elastic properties It can be used to calculated deformation energy of a input DNA with reference to the DNA present in the current object. The deformation free energy is calculated using elastic matrix as follows .. math:: G = \frac{1}{2L_0}\mathbf{xKx^T} .. math:: \mathbf{x} = \begin{bmatrix} (\theta^{x} - \theta^{x}_0) & (\theta^{y} - \theta^{y}_0) & (L - L_0) & (\phi - \phi_0) \end{bmatrix} .. currentmodule:: dnaMD Parameters ---------- bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. complexDna : :class:`dnaMD.DNA` Input :class:`dnaMD.DNA` instance for which deformation energy will be calculated. freeDnaFrames : list To select a trajectory segment of current (free) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. boundDnaFrames : list To select a trajectory segment of input (bound) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. which : str or list For which motions, energy should be calculated. It should be either a list containing terms listed below or "all" for all energy terms. Following keywords are available: * ``'full'`` : Use entire elastic matrix -- all motions with their coupling * ``'diag'`` : Use diagonal of elastic matrix -- all motions but no coupling * ``'b1'`` : Only bending-1 motion * ``'b2'`` : Only bending-2 motion * ``'stretch'`` : Only stretching motion * ``'twist'`` : Only Twisting motions * ``'st_coupling'`` : Only stretch-twist coupling motion * ``'bs_coupling'`` : Only Bending and stretching coupling * ``'bt_coupling'`` : Only Bending and Twisting coupling * ``'bb_coupling'`` : Only bending-1 and bending-2 coupling * ``'bend'`` : Both bending motions with their coupling * ``'st'`` : Stretching and twisting motions with their coupling * ``'bs'`` : Bending (b1, b2) and stretching motions with their coupling * ``'bt'`` : Bending (b1, b2) and twisting motions with their coupling masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values. energy : OrderedDict of numpy.ndarray Dictionary of 1D array of shape (nframes) containing energy terms requested for DNA. 
""" if self.esType == 'BST': energyTerms = self.enGlobalTypes else: energyTerms = self.enGlobalTypes[:5] if isinstance(which, str): if which != 'all': raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format(energyTerms)) else: which = energyTerms elif isinstance(which, list): for key in which: if key not in energyTerms: raise ValueError('{0} is not a supported keyword.\n Use from the following list: \n{1}'.format( which, energyTerms)) else: raise ValueError('Either use "all" or use list of terms from this {0} list \n.'.format( energyTerms)) if self.esType == 'BST': means, esMatrix = self.getStretchTwistBendModulus(bp, frames=freeDnaFrames, masked=masked, matrix=True, paxis=paxis) else: means, esMatrix = self.getStretchTwistModulus(bp, frames=freeDnaFrames, masked=masked, matrix=True) esMatrix = 2.5 * esMatrix # Convert kT to kJ/mol time, array = self.extractGlobalParameters(complexDna, bp, frames=boundDnaFrames, paxis=paxis, masked=masked) # Initialize energy dictionary energyOut = OrderedDict() for key in which: energyOut[key] = [] for i in range(array[0].shape[0]): vec = array[:, i] diff = vec - means for key in which: if self.esType == 'BST': t_energy = self._calcEnergyBendStretchTwist(diff, esMatrix, key) else: t_energy = self._calcEnergyStretchTwist(diff, esMatrix, key) energyOut[key].append(t_energy) for key in which: energyOut[key] = np.asarray(energyOut[key]) # Write output file if outFile is not None: with open(outFile, 'w') as fout: fout.write('#Time') for name in which: fout.write(', {0}'.format(name)) fout.write('\n') for t in range(len(time)): fout.write('{0:.3f}'.format(time[t])) for name in which: fout.write(', {0:.5f}'.format(energyOut[name][t])) fout.write('\n') return time, energyOut
['def', 'getGlobalDeformationEnergy', '(', 'self', ',', 'bp', ',', 'complexDna', ',', 'freeDnaFrames', '=', 'None', ',', 'boundDnaFrames', '=', 'None', ',', 'paxis', '=', "'Z'", ',', 'which', '=', "'all'", ',', 'masked', '=', 'False', ',', 'outFile', '=', 'None', ')', ':', 'if', 'self', '.', 'esType', '==', "'BST'", ':', 'energyTerms', '=', 'self', '.', 'enGlobalTypes', 'else', ':', 'energyTerms', '=', 'self', '.', 'enGlobalTypes', '[', ':', '5', ']', 'if', 'isinstance', '(', 'which', ',', 'str', ')', ':', 'if', 'which', '!=', "'all'", ':', 'raise', 'ValueError', '(', '\'Either use "all" or use list of terms from this {0} list \\n.\'', '.', 'format', '(', 'energyTerms', ')', ')', 'else', ':', 'which', '=', 'energyTerms', 'elif', 'isinstance', '(', 'which', ',', 'list', ')', ':', 'for', 'key', 'in', 'which', ':', 'if', 'key', 'not', 'in', 'energyTerms', ':', 'raise', 'ValueError', '(', "'{0} is not a supported keyword.\\n Use from the following list: \\n{1}'", '.', 'format', '(', 'which', ',', 'energyTerms', ')', ')', 'else', ':', 'raise', 'ValueError', '(', '\'Either use "all" or use list of terms from this {0} list \\n.\'', '.', 'format', '(', 'energyTerms', ')', ')', 'if', 'self', '.', 'esType', '==', "'BST'", ':', 'means', ',', 'esMatrix', '=', 'self', '.', 'getStretchTwistBendModulus', '(', 'bp', ',', 'frames', '=', 'freeDnaFrames', ',', 'masked', '=', 'masked', ',', 'matrix', '=', 'True', ',', 'paxis', '=', 'paxis', ')', 'else', ':', 'means', ',', 'esMatrix', '=', 'self', '.', 'getStretchTwistModulus', '(', 'bp', ',', 'frames', '=', 'freeDnaFrames', ',', 'masked', '=', 'masked', ',', 'matrix', '=', 'True', ')', 'esMatrix', '=', '2.5', '*', 'esMatrix', '# Convert kT to kJ/mol', 'time', ',', 'array', '=', 'self', '.', 'extractGlobalParameters', '(', 'complexDna', ',', 'bp', ',', 'frames', '=', 'boundDnaFrames', ',', 'paxis', '=', 'paxis', ',', 'masked', '=', 'masked', ')', '# Initialize energy dictionary', 'energyOut', '=', 'OrderedDict', '(', ')', 'for', 'key', 'in', 'which', ':', 'energyOut', '[', 'key', ']', '=', '[', ']', 'for', 'i', 'in', 'range', '(', 'array', '[', '0', ']', '.', 'shape', '[', '0', ']', ')', ':', 'vec', '=', 'array', '[', ':', ',', 'i', ']', 'diff', '=', 'vec', '-', 'means', 'for', 'key', 'in', 'which', ':', 'if', 'self', '.', 'esType', '==', "'BST'", ':', 't_energy', '=', 'self', '.', '_calcEnergyBendStretchTwist', '(', 'diff', ',', 'esMatrix', ',', 'key', ')', 'else', ':', 't_energy', '=', 'self', '.', '_calcEnergyStretchTwist', '(', 'diff', ',', 'esMatrix', ',', 'key', ')', 'energyOut', '[', 'key', ']', '.', 'append', '(', 't_energy', ')', 'for', 'key', 'in', 'which', ':', 'energyOut', '[', 'key', ']', '=', 'np', '.', 'asarray', '(', 'energyOut', '[', 'key', ']', ')', '# Write output file', 'if', 'outFile', 'is', 'not', 'None', ':', 'with', 'open', '(', 'outFile', ',', "'w'", ')', 'as', 'fout', ':', 'fout', '.', 'write', '(', "'#Time'", ')', 'for', 'name', 'in', 'which', ':', 'fout', '.', 'write', '(', "', {0}'", '.', 'format', '(', 'name', ')', ')', 'fout', '.', 'write', '(', "'\\n'", ')', 'for', 't', 'in', 'range', '(', 'len', '(', 'time', ')', ')', ':', 'fout', '.', 'write', '(', "'{0:.3f}'", '.', 'format', '(', 'time', '[', 't', ']', ')', ')', 'for', 'name', 'in', 'which', ':', 'fout', '.', 'write', '(', "', {0:.5f}'", '.', 'format', '(', 'energyOut', '[', 'name', ']', '[', 't', ']', ')', ')', 'fout', '.', 'write', '(', "'\\n'", ')', 'return', 'time', ',', 'energyOut']
r"""Deformation energy of the input DNA using Global elastic properties It can be used to calculated deformation energy of a input DNA with reference to the DNA present in the current object. The deformation free energy is calculated using elastic matrix as follows .. math:: G = \frac{1}{2L_0}\mathbf{xKx^T} .. math:: \mathbf{x} = \begin{bmatrix} (\theta^{x} - \theta^{x}_0) & (\theta^{y} - \theta^{y}_0) & (L - L_0) & (\phi - \phi_0) \end{bmatrix} .. currentmodule:: dnaMD Parameters ---------- bp : list List of two base-steps forming the DNA segment. For example: with ``bp=[5, 50]``, 5-50 base-step segment will be considered. complexDna : :class:`dnaMD.DNA` Input :class:`dnaMD.DNA` instance for which deformation energy will be calculated. freeDnaFrames : list To select a trajectory segment of current (free) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. boundDnaFrames : list To select a trajectory segment of input (bound) DNA data. List of two trajectory frames between which parameters will be extracted. It can be used to select portions of the trajectory. For example, with ``frames=[100, 1000]``, 100th to 1000th frame of the trajectory will be considered. paxis : str Axis parallel to global helical-axis(``'X'``, or ``'Y'`` or ``'Z'``). Only require when bending motions are included in the calculation. which : str or list For which motions, energy should be calculated. It should be either a list containing terms listed below or "all" for all energy terms. Following keywords are available: * ``'full'`` : Use entire elastic matrix -- all motions with their coupling * ``'diag'`` : Use diagonal of elastic matrix -- all motions but no coupling * ``'b1'`` : Only bending-1 motion * ``'b2'`` : Only bending-2 motion * ``'stretch'`` : Only stretching motion * ``'twist'`` : Only Twisting motions * ``'st_coupling'`` : Only stretch-twist coupling motion * ``'bs_coupling'`` : Only Bending and stretching coupling * ``'bt_coupling'`` : Only Bending and Twisting coupling * ``'bb_coupling'`` : Only bending-1 and bending-2 coupling * ``'bend'`` : Both bending motions with their coupling * ``'st'`` : Stretching and twisting motions with their coupling * ``'bs'`` : Bending (b1, b2) and stretching motions with their coupling * ``'bt'`` : Bending (b1, b2) and twisting motions with their coupling masked : bool ``Default=False``. To skip specific frames/snapshots. ``DNA.mask`` array should be set to use this functionality. This array contains boolean (either ``True`` or ``False``) value for each frame to mask the frames. Presently, mask array is automatically generated during :meth:`dnaMD.DNA.generate_smooth_axis` to skip those frames where 3D fitting curve was not successful within the given criteria. outFile : str Output file in csv format. Returns ------- time : numpy.ndarray 1D array containing time values. energy : OrderedDict of numpy.ndarray Dictionary of 1D array of shape (nframes) containing energy terms requested for DNA.
['r', 'Deformation', 'energy', 'of', 'the', 'input', 'DNA', 'using', 'Global', 'elastic', 'properties']
train
https://github.com/rjdkmr/do_x3dna/blob/fe910335eefcada76737f9e7cd6f25036cd32ab6/dnaMD/dnaMD/dnaEY.py#L653-L795
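The quadratic form from the docstring in the record above, evaluated in isolation; K, x, and L0 below are illustrative numbers, not real elastic constants.

import numpy as np

K = np.diag([70.0, 70.0, 1100.0, 400.0])  # hypothetical 4x4 elastic matrix
x = np.array([0.05, -0.02, 0.8, 3.0])     # deviations (bend-1, bend-2, length, twist)
L0 = 34.0                                  # mean contour length of the free DNA segment
G = 0.5 * (x @ K @ x) / L0                 # G = (1 / 2 L0) * x K x^T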
6,226
CiscoTestAutomation/yang
connector/src/yang/connector/__init__.py
Netconf.request
def request(self, msg, timeout=30): '''request High-level api: sends message through NetConf session and returns with a reply. Exception is thrown out either the reply is in wrong format or timout. Users can modify timeout value (in seconds) by passing parameter timeout. Users may want to set a larger timeout when making a large query. Parameters ---------- msg : `str` Any message need to be sent out in XML format. The message can be in wrong format if it is a negative test case. Because ncclient tracks same message-id in both rpc and rpc-reply, missing message-id in your rpc may cause exception when receiving rpc-reply. Most other wrong format rpc's can be sent without exception. timeout : `int`, optional An optional keyed argument to set timeout value in seconds. Its default value is 30 seconds. Returns ------- str The reply from the device in string. If something goes wrong, an exception will be raised. Raises ------ Exception If NetConf is not connected, or there is a timeout when receiving reply. Code Example:: >>> from pyats.topology import loader >>> testbed = loader.load('/users/xxx/xxx/asr_20_22.yaml') >>> device = testbed.devices['asr22'] >>> device.connect(alias='nc', via='netconf') >>> netconf_request = """ ... <rpc message-id="101" ... xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"> ... <get> ... <filter> ... <native xmlns="http://cisco.com/ns/yang/ned/ios"> ... <version> ... </version> ... </native> ... </filter> ... </get> ... </rpc> ... """ >>> reply = device.nc.request(netconf_request) >>> Expected Results:: >>> print(reply) <?xml version="1.0" encoding="UTF-8"?> <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="101"><data> <native xmlns="http://cisco.com/ns/yang/ned/ios"> <version>16.3</version></native></data></rpc-reply> >>> ''' rpc = RawRPC(session = self.session, device_handler = self._device_handler, timeout = timeout, raise_mode = operations.rpc.RaiseMode.NONE) # identify message-id m = re.search(r'message-id="([A-Za-z0-9_\-:# ]*)"', msg) if m: rpc._id = m.group(1) rpc._listener.register(rpc._id, rpc) logger.debug('Found message-id="%s" in your rpc, which is good.', rpc._id) else: logger.warning('Cannot find message-id in your rpc. You may ' 'expect an exception when receiving rpc-reply ' 'due to missing message-id.') return rpc._request(msg).xml
python
def request(self, msg, timeout=30): '''request High-level api: sends message through NetConf session and returns with a reply. Exception is thrown out either the reply is in wrong format or timout. Users can modify timeout value (in seconds) by passing parameter timeout. Users may want to set a larger timeout when making a large query. Parameters ---------- msg : `str` Any message need to be sent out in XML format. The message can be in wrong format if it is a negative test case. Because ncclient tracks same message-id in both rpc and rpc-reply, missing message-id in your rpc may cause exception when receiving rpc-reply. Most other wrong format rpc's can be sent without exception. timeout : `int`, optional An optional keyed argument to set timeout value in seconds. Its default value is 30 seconds. Returns ------- str The reply from the device in string. If something goes wrong, an exception will be raised. Raises ------ Exception If NetConf is not connected, or there is a timeout when receiving reply. Code Example:: >>> from pyats.topology import loader >>> testbed = loader.load('/users/xxx/xxx/asr_20_22.yaml') >>> device = testbed.devices['asr22'] >>> device.connect(alias='nc', via='netconf') >>> netconf_request = """ ... <rpc message-id="101" ... xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"> ... <get> ... <filter> ... <native xmlns="http://cisco.com/ns/yang/ned/ios"> ... <version> ... </version> ... </native> ... </filter> ... </get> ... </rpc> ... """ >>> reply = device.nc.request(netconf_request) >>> Expected Results:: >>> print(reply) <?xml version="1.0" encoding="UTF-8"?> <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="101"><data> <native xmlns="http://cisco.com/ns/yang/ned/ios"> <version>16.3</version></native></data></rpc-reply> >>> ''' rpc = RawRPC(session = self.session, device_handler = self._device_handler, timeout = timeout, raise_mode = operations.rpc.RaiseMode.NONE) # identify message-id m = re.search(r'message-id="([A-Za-z0-9_\-:# ]*)"', msg) if m: rpc._id = m.group(1) rpc._listener.register(rpc._id, rpc) logger.debug('Found message-id="%s" in your rpc, which is good.', rpc._id) else: logger.warning('Cannot find message-id in your rpc. You may ' 'expect an exception when receiving rpc-reply ' 'due to missing message-id.') return rpc._request(msg).xml
['def', 'request', '(', 'self', ',', 'msg', ',', 'timeout', '=', '30', ')', ':', 'rpc', '=', 'RawRPC', '(', 'session', '=', 'self', '.', 'session', ',', 'device_handler', '=', 'self', '.', '_device_handler', ',', 'timeout', '=', 'timeout', ',', 'raise_mode', '=', 'operations', '.', 'rpc', '.', 'RaiseMode', '.', 'NONE', ')', '# identify message-id', 'm', '=', 're', '.', 'search', '(', 'r\'message-id="([A-Za-z0-9_\\-:# ]*)"\'', ',', 'msg', ')', 'if', 'm', ':', 'rpc', '.', '_id', '=', 'm', '.', 'group', '(', '1', ')', 'rpc', '.', '_listener', '.', 'register', '(', 'rpc', '.', '_id', ',', 'rpc', ')', 'logger', '.', 'debug', '(', '\'Found message-id="%s" in your rpc, which is good.\'', ',', 'rpc', '.', '_id', ')', 'else', ':', 'logger', '.', 'warning', '(', "'Cannot find message-id in your rpc. You may '", "'expect an exception when receiving rpc-reply '", "'due to missing message-id.'", ')', 'return', 'rpc', '.', '_request', '(', 'msg', ')', '.', 'xml']
request High-level api: sends a message through the NetConf session and returns with a reply. An exception is raised if either the reply is in the wrong format or a timeout occurs. Users can modify the timeout value (in seconds) by passing the parameter timeout. Users may want to set a larger timeout when making a large query. Parameters ---------- msg : `str` Any message that needs to be sent out in XML format. The message can be in the wrong format if it is a negative test case. Because ncclient tracks the same message-id in both rpc and rpc-reply, a missing message-id in your rpc may cause an exception when receiving the rpc-reply. Most other wrongly formatted rpcs can be sent without exception. timeout : `int`, optional An optional keyed argument to set the timeout value in seconds. Its default value is 30 seconds. Returns ------- str The reply from the device as a string. If something goes wrong, an exception will be raised. Raises ------ Exception If NetConf is not connected, or there is a timeout when receiving the reply. Code Example:: >>> from pyats.topology import loader >>> testbed = loader.load('/users/xxx/xxx/asr_20_22.yaml') >>> device = testbed.devices['asr22'] >>> device.connect(alias='nc', via='netconf') >>> netconf_request = """ ... <rpc message-id="101" ... xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"> ... <get> ... <filter> ... <native xmlns="http://cisco.com/ns/yang/ned/ios"> ... <version> ... </version> ... </native> ... </filter> ... </get> ... </rpc> ... """ >>> reply = device.nc.request(netconf_request) >>> Expected Results:: >>> print(reply) <?xml version="1.0" encoding="UTF-8"?> <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="101"><data> <native xmlns="http://cisco.com/ns/yang/ned/ios"> <version>16.3</version></native></data></rpc-reply> >>>
['request']
train
https://github.com/CiscoTestAutomation/yang/blob/c70ec5ac5a91f276c4060009203770ece92e76b4/connector/src/yang/connector/__init__.py#L397-L485
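The message-id extraction used in the record above, runnable on its own; the rpc string is a shortened version of the docstring example.

import re

msg = '<rpc message-id="101" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"/>'
m = re.search(r'message-id="([A-Za-z0-9_\-:# ]*)"', msg)
print(m.group(1))  # -> 101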
6,227
UDST/urbansim
urbansim/models/util.py
columns_in_formula
def columns_in_formula(formula): """ Returns the names of all the columns used in a patsy formula. Parameters ---------- formula : str, iterable, or dict Any formula construction supported by ``str_model_expression``. Returns ------- columns : list of str """ if formula is None: return [] formula = str_model_expression(formula, add_constant=False) columns = [] tokens = map( lambda x: x.extra, tz.remove( lambda x: x.extra is None, _tokens_from_patsy(patsy.parse_formula.parse_formula(formula)))) for tok in tokens: # if there are parentheses in the expression we # want to drop them and everything outside # and start again from the top if '(' in tok: start = tok.find('(') + 1 fin = tok.rfind(')') columns.extend(columns_in_formula(tok[start:fin])) else: for toknum, tokval, _, _, _ in generate_tokens( StringIO(tok).readline): if toknum == NAME: columns.append(tokval) return list(tz.unique(columns))
python
def columns_in_formula(formula): """ Returns the names of all the columns used in a patsy formula. Parameters ---------- formula : str, iterable, or dict Any formula construction supported by ``str_model_expression``. Returns ------- columns : list of str """ if formula is None: return [] formula = str_model_expression(formula, add_constant=False) columns = [] tokens = map( lambda x: x.extra, tz.remove( lambda x: x.extra is None, _tokens_from_patsy(patsy.parse_formula.parse_formula(formula)))) for tok in tokens: # if there are parentheses in the expression we # want to drop them and everything outside # and start again from the top if '(' in tok: start = tok.find('(') + 1 fin = tok.rfind(')') columns.extend(columns_in_formula(tok[start:fin])) else: for toknum, tokval, _, _, _ in generate_tokens( StringIO(tok).readline): if toknum == NAME: columns.append(tokval) return list(tz.unique(columns))
['def', 'columns_in_formula', '(', 'formula', ')', ':', 'if', 'formula', 'is', 'None', ':', 'return', '[', ']', 'formula', '=', 'str_model_expression', '(', 'formula', ',', 'add_constant', '=', 'False', ')', 'columns', '=', '[', ']', 'tokens', '=', 'map', '(', 'lambda', 'x', ':', 'x', '.', 'extra', ',', 'tz', '.', 'remove', '(', 'lambda', 'x', ':', 'x', '.', 'extra', 'is', 'None', ',', '_tokens_from_patsy', '(', 'patsy', '.', 'parse_formula', '.', 'parse_formula', '(', 'formula', ')', ')', ')', ')', 'for', 'tok', 'in', 'tokens', ':', '# if there are parentheses in the expression we', '# want to drop them and everything outside', '# and start again from the top', 'if', "'('", 'in', 'tok', ':', 'start', '=', 'tok', '.', 'find', '(', "'('", ')', '+', '1', 'fin', '=', 'tok', '.', 'rfind', '(', "')'", ')', 'columns', '.', 'extend', '(', 'columns_in_formula', '(', 'tok', '[', 'start', ':', 'fin', ']', ')', ')', 'else', ':', 'for', 'toknum', ',', 'tokval', ',', '_', ',', '_', ',', '_', 'in', 'generate_tokens', '(', 'StringIO', '(', 'tok', ')', '.', 'readline', ')', ':', 'if', 'toknum', '==', 'NAME', ':', 'columns', '.', 'append', '(', 'tokval', ')', 'return', 'list', '(', 'tz', '.', 'unique', '(', 'columns', ')', ')']
Returns the names of all the columns used in a patsy formula. Parameters ---------- formula : str, iterable, or dict Any formula construction supported by ``str_model_expression``. Returns ------- columns : list of str
['Returns', 'the', 'names', 'of', 'all', 'the', 'columns', 'used', 'in', 'a', 'patsy', 'formula', '.']
train
https://github.com/UDST/urbansim/blob/79f815a6503e109f50be270cee92d0f4a34f49ef/urbansim/models/util.py#L307-L347
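Expected behaviour of columns_in_formula on a small patsy-style formula; the import path comes from the record, but the result shown is indicative because the exact token stream depends on patsy.

from urbansim.models.util import columns_in_formula

cols = columns_in_formula('np.log(sqft) + I(beds > 2) + rent')
# likely ['sqft', 'beds', 'rent']: parenthesised sub-expressions are recursed
# into, and everything outside the parentheses (np.log, I) is dropped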
6,228
rr-/docstring_parser
docstring_parser/parser/google.py
_build_meta
def _build_meta(text: str, title: str) -> DocstringMeta: """Build docstring element. :param text: docstring element text :param title: title of section containing element :return: """ meta = _sections[title] if meta == "returns" and ":" not in text.split()[0]: return DocstringMeta([meta], description=text) # Split spec and description before, desc = text.split(":", 1) if desc: desc = desc[1:] if desc[0] == " " else desc if "\n" in desc: first_line, rest = desc.split("\n", 1) desc = first_line + "\n" + inspect.cleandoc(rest) desc = desc.strip("\n") # Build Meta args m = re.match(r"(\S+) \((\S+)\)$", before) if meta == "param" and m: arg_name, type_name = m.group(1, 2) args = [meta, type_name, arg_name] else: args = [meta, before] return DocstringMeta(args, description=desc)
python
def _build_meta(text: str, title: str) -> DocstringMeta: """Build docstring element. :param text: docstring element text :param title: title of section containing element :return: """ meta = _sections[title] if meta == "returns" and ":" not in text.split()[0]: return DocstringMeta([meta], description=text) # Split spec and description before, desc = text.split(":", 1) if desc: desc = desc[1:] if desc[0] == " " else desc if "\n" in desc: first_line, rest = desc.split("\n", 1) desc = first_line + "\n" + inspect.cleandoc(rest) desc = desc.strip("\n") # Build Meta args m = re.match(r"(\S+) \((\S+)\)$", before) if meta == "param" and m: arg_name, type_name = m.group(1, 2) args = [meta, type_name, arg_name] else: args = [meta, before] return DocstringMeta(args, description=desc)
['def', '_build_meta', '(', 'text', ':', 'str', ',', 'title', ':', 'str', ')', '->', 'DocstringMeta', ':', 'meta', '=', '_sections', '[', 'title', ']', 'if', 'meta', '==', '"returns"', 'and', '":"', 'not', 'in', 'text', '.', 'split', '(', ')', '[', '0', ']', ':', 'return', 'DocstringMeta', '(', '[', 'meta', ']', ',', 'description', '=', 'text', ')', '# Split spec and description', 'before', ',', 'desc', '=', 'text', '.', 'split', '(', '":"', ',', '1', ')', 'if', 'desc', ':', 'desc', '=', 'desc', '[', '1', ':', ']', 'if', 'desc', '[', '0', ']', '==', '" "', 'else', 'desc', 'if', '"\\n"', 'in', 'desc', ':', 'first_line', ',', 'rest', '=', 'desc', '.', 'split', '(', '"\\n"', ',', '1', ')', 'desc', '=', 'first_line', '+', '"\\n"', '+', 'inspect', '.', 'cleandoc', '(', 'rest', ')', 'desc', '=', 'desc', '.', 'strip', '(', '"\\n"', ')', '# Build Meta args', 'm', '=', 're', '.', 'match', '(', 'r"(\\S+) \\((\\S+)\\)$"', ',', 'before', ')', 'if', 'meta', '==', '"param"', 'and', 'm', ':', 'arg_name', ',', 'type_name', '=', 'm', '.', 'group', '(', '1', ',', '2', ')', 'args', '=', '[', 'meta', ',', 'type_name', ',', 'arg_name', ']', 'else', ':', 'args', '=', '[', 'meta', ',', 'before', ']', 'return', 'DocstringMeta', '(', 'args', ',', 'description', '=', 'desc', ')']
Build docstring element. :param text: docstring element text :param title: title of section containing element :return: parsed DocstringMeta element
['Build', 'docstring', 'element', '.']
train
https://github.com/rr-/docstring_parser/blob/389773f6790a84d33b10160589ce8591122e12bb/docstring_parser/parser/google.py#L28-L57
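Illustrative input and output for the parser in the record above, assuming the module-level _sections mapping sends the "Args" title to "param"; run with _build_meta in scope.

meta = _build_meta("speed (int): how fast to go", "Args")
# the regex r"(\S+) \((\S+)\)$" splits "speed (int)" into name and type, so
# meta.args == ["param", "int", "speed"] and meta.description == "how fast to go"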
6,229
Parisson/TimeSide
timeside/core/tools/package.py
check_aubio
def check_aubio(): "Check Aubio availability" try: import aubio except ImportError: warnings.warn('Aubio librairy is not available', ImportWarning, stacklevel=2) _WITH_AUBIO = False else: _WITH_AUBIO = True del aubio return _WITH_AUBIO
python
def check_aubio(): "Check Aubio availability" try: import aubio except ImportError: warnings.warn('Aubio librairy is not available', ImportWarning, stacklevel=2) _WITH_AUBIO = False else: _WITH_AUBIO = True del aubio return _WITH_AUBIO
['def', 'check_aubio', '(', ')', ':', 'try', ':', 'import', 'aubio', 'except', 'ImportError', ':', 'warnings', '.', 'warn', '(', "'Aubio librairy is not available'", ',', 'ImportWarning', ',', 'stacklevel', '=', '2', ')', '_WITH_AUBIO', '=', 'False', 'else', ':', '_WITH_AUBIO', '=', 'True', 'del', 'aubio', 'return', '_WITH_AUBIO']
Check Aubio availability
['Check', 'Aubio', 'availability']
train
https://github.com/Parisson/TimeSide/blob/0618d75cd2f16021afcfd3d5b77f692adad76ea5/timeside/core/tools/package.py#L86-L98
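A typical call site gating optional aubio features on the check above; the import path is taken from the record, the surrounding logic is illustrative.

from timeside.core.tools.package import check_aubio

WITH_AUBIO = check_aubio()  # emits an ImportWarning when aubio is missing
if WITH_AUBIO:
    import aubio  # safe: the library was importable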
6,230
econ-ark/HARK
HARK/interpolation.py
HARKinterpolator4D.derivativeW
def derivativeW(self,w,x,y,z): ''' Evaluates the partial derivative with respect to w (the first argument) of the interpolated function at the given input. Parameters ---------- w : np.array or float Real values to be evaluated in the interpolated function. x : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. y : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. z : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. Returns ------- dfdw : np.array or float The derivative with respect to w of the interpolated function eval- uated at w,x,y,z: dfdw = f_w(w,x,y,z), with the same shape as inputs. ''' wa = np.asarray(w) xa = np.asarray(x) ya = np.asarray(y) za = np.asarray(z) return (self._derW(wa.flatten(),xa.flatten(),ya.flatten(),za.flatten())).reshape(wa.shape)
python
def derivativeW(self,w,x,y,z): ''' Evaluates the partial derivative with respect to w (the first argument) of the interpolated function at the given input. Parameters ---------- w : np.array or float Real values to be evaluated in the interpolated function. x : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. y : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. z : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. Returns ------- dfdw : np.array or float The derivative with respect to w of the interpolated function eval- uated at w,x,y,z: dfdw = f_w(w,x,y,z), with the same shape as inputs. ''' wa = np.asarray(w) xa = np.asarray(x) ya = np.asarray(y) za = np.asarray(z) return (self._derW(wa.flatten(),xa.flatten(),ya.flatten(),za.flatten())).reshape(wa.shape)
['def', 'derivativeW', '(', 'self', ',', 'w', ',', 'x', ',', 'y', ',', 'z', ')', ':', 'wa', '=', 'np', '.', 'asarray', '(', 'w', ')', 'xa', '=', 'np', '.', 'asarray', '(', 'x', ')', 'ya', '=', 'np', '.', 'asarray', '(', 'y', ')', 'za', '=', 'np', '.', 'asarray', '(', 'z', ')', 'return', '(', 'self', '.', '_derW', '(', 'wa', '.', 'flatten', '(', ')', ',', 'xa', '.', 'flatten', '(', ')', ',', 'ya', '.', 'flatten', '(', ')', ',', 'za', '.', 'flatten', '(', ')', ')', ')', '.', 'reshape', '(', 'wa', '.', 'shape', ')']
Evaluates the partial derivative with respect to w (the first argument) of the interpolated function at the given input. Parameters ---------- w : np.array or float Real values to be evaluated in the interpolated function. x : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. y : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. z : np.array or float Real values to be evaluated in the interpolated function; must be the same size as w. Returns ------- dfdw : np.array or float The derivative with respect to w of the interpolated function evaluated at w,x,y,z: dfdw = f_w(w,x,y,z), with the same shape as inputs.
['Evaluates', 'the', 'partial', 'derivative', 'with', 'respect', 'to', 'w', '(', 'the', 'first', 'argument', ')', 'of', 'the', 'interpolated', 'function', 'at', 'the', 'given', 'input', '.']
train
https://github.com/econ-ark/HARK/blob/3d184153a189e618a87c9540df1cd12044039cc5/HARK/interpolation.py#L383-L412
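The shape contract of derivativeW, sketched rather than executed because HARKinterpolator4D only delegates to a subclass's _derW; `interp` stands for any concrete 4D interpolant.

import numpy as np

w = x = y = z = np.linspace(0.0, 1.0, 7)
# dfdw = interp.derivativeW(w, x, y, z)  # interp: any HARKinterpolator4D subclass
# dfdw.shape == w.shape == (7,)          # inputs and output share a single shape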
6,231
sdss/sdss_access
python/sdss_access/path/path.py
BasePath.location
def location(self, filetype, base_dir=None, **kwargs): """Return the location of the relative sas path of a given type of file. Parameters ---------- filetype : str File type parameter. Returns ------- full : str The relative sas path to the file. """ full = kwargs.get('full', None) if not full: full = self.full(filetype, **kwargs) self.set_base_dir(base_dir=base_dir) location = full[len(self.base_dir):] if full and full.startswith(self.base_dir) else None if location and '//' in location: location = location.replace('//', '/') return location
python
def location(self, filetype, base_dir=None, **kwargs): """Return the location of the relative sas path of a given type of file. Parameters ---------- filetype : str File type parameter. Returns ------- location : str The relative sas path to the file. """ full = kwargs.get('full', None) if not full: full = self.full(filetype, **kwargs) self.set_base_dir(base_dir=base_dir) location = full[len(self.base_dir):] if full and full.startswith(self.base_dir) else None if location and '//' in location: location = location.replace('//', '/') return location
['def', 'location', '(', 'self', ',', 'filetype', ',', 'base_dir', '=', 'None', ',', '*', '*', 'kwargs', ')', ':', 'full', '=', 'kwargs', '.', 'get', '(', "'full'", ',', 'None', ')', 'if', 'not', 'full', ':', 'full', '=', 'self', '.', 'full', '(', 'filetype', ',', '*', '*', 'kwargs', ')', 'self', '.', 'set_base_dir', '(', 'base_dir', '=', 'base_dir', ')', 'location', '=', 'full', '[', 'len', '(', 'self', '.', 'base_dir', ')', ':', ']', 'if', 'full', 'and', 'full', '.', 'startswith', '(', 'self', '.', 'base_dir', ')', 'else', 'None', 'if', 'location', 'and', "'//'", 'in', 'location', ':', 'location', '=', 'location', '.', 'replace', '(', "'//'", ',', "'/'", ')', 'return', 'location']
Return the location of the relative sas path of a given type of file. Parameters ---------- filetype : str File type parameter. Returns ------- location : str The relative sas path to the file.
['Return', 'the', 'location', 'of', 'the', 'relative', 'sas', 'path', 'of', 'a', 'given', 'type', 'of', 'file', '.']
train
https://github.com/sdss/sdss_access/blob/76375bbf37d39d2e4ccbed90bdfa9a4298784470/python/sdss_access/path/path.py#L584-L608
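The core of location() is plain prefix stripping plus separator cleanup. The same logic in isolation, with made-up paths (the real method first resolves the full path from the filetype and keyword arguments):

def relative_location(full, base_dir):
    # Strip the base directory prefix; None when the path lives elsewhere.
    if not (full and full.startswith(base_dir)):
        return None
    location = full[len(base_dir):]
    if '//' in location:
        location = location.replace('//', '/')
    return location

print(relative_location('/sas/dr15/spectro//file.fits', '/sas/dr15/'))  # spectro/file.fits
print(relative_location('/elsewhere/file.fits', '/sas/dr15/'))          # None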
6,232
mitsei/dlkit
dlkit/handcar/relationship/managers.py
RelationshipProxyManager.get_relationship_query_session_for_family
def get_relationship_query_session_for_family(self, family_id=None, proxy=None): """Gets the ``OsidSession`` associated with the relationship query service for the given family. arg: family_id (osid.id.Id): the ``Id`` of the family arg: proxy (osid.proxy.Proxy): a proxy return: (osid.relationship.RelationshipQuerySession) - a ``RelationshipQuerySession`` raise: NotFound - no ``Family`` found by the given ``Id`` raise: NullArgument - ``family_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_relationship_query()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_relationship_query()`` and ``supports_visible_federation()`` are ``true``* """ if not family_id: raise NullArgument if not self.supports_relationship_query(): raise Unimplemented() ## # Need to include check to see if the familyId is found otherwise raise NotFound ## try: from . import sessions except ImportError: raise OperationFailed() proxy = self._convert_proxy(proxy) try: session = sessions.RelationshipQuerySession(family_id, proxy=proxy, runtime=self._runtime) except AttributeError: raise OperationFailed() return session
python
def get_relationship_query_session_for_family(self, family_id=None, proxy=None): """Gets the ``OsidSession`` associated with the relationship query service for the given family. arg: family_id (osid.id.Id): the ``Id`` of the family arg: proxy (osid.proxy.Proxy): a proxy return: (osid.relationship.RelationshipQuerySession) - a ``RelationshipQuerySession`` raise: NotFound - no ``Family`` found by the given ``Id`` raise: NullArgument - ``family_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_relationship_query()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_relationship_query()`` and ``supports_visible_federation()`` are ``true``* """ if not family_id: raise NullArgument if not self.supports_relationship_query(): raise Unimplemented() ## # Need to include check to see if the familyId is found otherwise raise NotFound ## try: from . import sessions except ImportError: raise OperationFailed() proxy = self._convert_proxy(proxy) try: session = sessions.RelationshipQuerySession(family_id, proxy=proxy, runtime=self._runtime) except AttributeError: raise OperationFailed() return session
['def', 'get_relationship_query_session_for_family', '(', 'self', ',', 'family_id', '=', 'None', ',', 'proxy', '=', 'None', ')', ':', 'if', 'not', 'family_id', ':', 'raise', 'NullArgument', 'if', 'not', 'self', '.', 'supports_relationship_query', '(', ')', ':', 'raise', 'Unimplemented', '(', ')', '##', '# Need to include check to see if the familyId is found otherwise raise NotFound', '##', 'try', ':', 'from', '.', 'import', 'sessions', 'except', 'ImportError', ':', 'raise', 'OperationFailed', '(', ')', 'proxy', '=', 'self', '.', '_convert_proxy', '(', 'proxy', ')', 'try', ':', 'session', '=', 'sessions', '.', 'RelationshipQuerySession', '(', 'family_id', ',', 'proxy', '=', 'proxy', ',', 'runtime', '=', 'self', '.', '_runtime', ')', 'except', 'AttributeError', ':', 'raise', 'OperationFailed', '(', ')', 'return', 'session']
Gets the ``OsidSession`` associated with the relationship query service for the given family. arg: family_id (osid.id.Id): the ``Id`` of the family arg: proxy (osid.proxy.Proxy): a proxy return: (osid.relationship.RelationshipQuerySession) - a ``RelationshipQuerySession`` raise: NotFound - no ``Family`` found by the given ``Id`` raise: NullArgument - ``family_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_relationship_query()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_relationship_query()`` and ``supports_visible_federation()`` are ``true``*
['Gets', 'the', 'OsidSession', 'associated', 'with', 'the', 'relationship', 'query', 'service', 'for', 'the', 'given', 'family', '.']
train
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/handcar/relationship/managers.py#L985-L1017
6,233
michal-stuglik/django-blastplus
blastplus/features/record.py
Alignment.get_id
def get_id(self): """Returns unique id of an alignment. """ return hash(str(self.title) + str(self.best_score()) + str(self.hit_def))
python
def get_id(self): """Returns unique id of an alignment. """ return hash(str(self.title) + str(self.best_score()) + str(self.hit_def))
['def', 'get_id', '(', 'self', ')', ':', 'return', 'hash', '(', 'str', '(', 'self', '.', 'title', ')', '+', 'str', '(', 'self', '.', 'best_score', '(', ')', ')', '+', 'str', '(', 'self', '.', 'hit_def', ')', ')']
Returns unique id of an alignment.
['Returns', 'unique', 'id', 'of', 'an', 'alignment', '.']
train
https://github.com/michal-stuglik/django-blastplus/blob/4f5e15fb9f8069c3bed5f8fd941c4b9891daad4b/blastplus/features/record.py#L119-L121
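get_id() derives an identifier by hashing concatenated fields. One caveat worth noting: Python's built-in hash() for strings is randomized per interpreter run, so ids produced this way are only stable within one process. A sketch of the same idea with a stable digest (a variation for illustration, not what django-blastplus does):

import hashlib

def stable_id(title, best_score, hit_def):
    # Concatenate the same fields, but digest them reproducibly.
    key = f"{title}{best_score}{hit_def}".encode("utf-8")
    return hashlib.sha1(key).hexdigest()

print(stable_id("seq1", 42.0, "hit description"))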
6,234
twisted/axiom
axiom/userbase.py
insertUserStore
def insertUserStore(siteStore, userStorePath): """ Move the SubStore at the indicated location into the given site store's directory and then hook it up to the site store's authentication database. @type siteStore: C{Store} @type userStorePath: C{FilePath} """ # The following may, but does not need to be in a transaction, because it # is merely an attempt to guess a reasonable filesystem name to use for # this avatar. The user store being operated on is expected to be used # exclusively by this process. ls = siteStore.findUnique(LoginSystem) unattachedSubStore = Store(userStorePath) for lm in unattachedSubStore.query(LoginMethod, LoginMethod.account == unattachedSubStore.findUnique(LoginAccount), sort=LoginMethod.internal.descending): if ls.accountByAddress(lm.localpart, lm.domain) is None: localpart, domain = lm.localpart, lm.domain break else: raise AllNamesConflict() unattachedSubStore.close() insertLocation = siteStore.newFilePath('account', domain, localpart + '.axiom') insertParentLoc = insertLocation.parent() if not insertParentLoc.exists(): insertParentLoc.makedirs() if insertLocation.exists(): raise DatabaseDirectoryConflict() userStorePath.moveTo(insertLocation) ss = SubStore(store=siteStore, storepath=insertLocation) attachedStore = ss.open() # migrateUp() manages its own transactions because it interacts with two # different stores. attachedStore.findUnique(LoginAccount).migrateUp()
python
def insertUserStore(siteStore, userStorePath): """ Move the SubStore at the indicated location into the given site store's directory and then hook it up to the site store's authentication database. @type siteStore: C{Store} @type userStorePath: C{FilePath} """ # The following may, but does not need to be in a transaction, because it # is merely an attempt to guess a reasonable filesystem name to use for # this avatar. The user store being operated on is expected to be used # exclusively by this process. ls = siteStore.findUnique(LoginSystem) unattachedSubStore = Store(userStorePath) for lm in unattachedSubStore.query(LoginMethod, LoginMethod.account == unattachedSubStore.findUnique(LoginAccount), sort=LoginMethod.internal.descending): if ls.accountByAddress(lm.localpart, lm.domain) is None: localpart, domain = lm.localpart, lm.domain break else: raise AllNamesConflict() unattachedSubStore.close() insertLocation = siteStore.newFilePath('account', domain, localpart + '.axiom') insertParentLoc = insertLocation.parent() if not insertParentLoc.exists(): insertParentLoc.makedirs() if insertLocation.exists(): raise DatabaseDirectoryConflict() userStorePath.moveTo(insertLocation) ss = SubStore(store=siteStore, storepath=insertLocation) attachedStore = ss.open() # migrateUp() manages its own transactions because it interacts with two # different stores. attachedStore.findUnique(LoginAccount).migrateUp()
['def', 'insertUserStore', '(', 'siteStore', ',', 'userStorePath', ')', ':', '# The following may, but does not need to be in a transaction, because it', '# is merely an attempt to guess a reasonable filesystem name to use for', '# this avatar. The user store being operated on is expected to be used', '# exclusively by this process.', 'ls', '=', 'siteStore', '.', 'findUnique', '(', 'LoginSystem', ')', 'unattachedSubStore', '=', 'Store', '(', 'userStorePath', ')', 'for', 'lm', 'in', 'unattachedSubStore', '.', 'query', '(', 'LoginMethod', ',', 'LoginMethod', '.', 'account', '==', 'unattachedSubStore', '.', 'findUnique', '(', 'LoginAccount', ')', ',', 'sort', '=', 'LoginMethod', '.', 'internal', '.', 'descending', ')', ':', 'if', 'ls', '.', 'accountByAddress', '(', 'lm', '.', 'localpart', ',', 'lm', '.', 'domain', ')', 'is', 'None', ':', 'localpart', ',', 'domain', '=', 'lm', '.', 'localpart', ',', 'lm', '.', 'domain', 'break', 'else', ':', 'raise', 'AllNamesConflict', '(', ')', 'unattachedSubStore', '.', 'close', '(', ')', 'insertLocation', '=', 'siteStore', '.', 'newFilePath', '(', "'account'", ',', 'domain', ',', 'localpart', '+', "'.axiom'", ')', 'insertParentLoc', '=', 'insertLocation', '.', 'parent', '(', ')', 'if', 'not', 'insertParentLoc', '.', 'exists', '(', ')', ':', 'insertParentLoc', '.', 'makedirs', '(', ')', 'if', 'insertLocation', '.', 'exists', '(', ')', ':', 'raise', 'DatabaseDirectoryConflict', '(', ')', 'userStorePath', '.', 'moveTo', '(', 'insertLocation', ')', 'ss', '=', 'SubStore', '(', 'store', '=', 'siteStore', ',', 'storepath', '=', 'insertLocation', ')', 'attachedStore', '=', 'ss', '.', 'open', '(', ')', '# migrateUp() manages its own transactions because it interacts with two', '# different stores.', 'attachedStore', '.', 'findUnique', '(', 'LoginAccount', ')', '.', 'migrateUp', '(', ')']
Move the SubStore at the indicated location into the given site store's directory and then hook it up to the site store's authentication database. @type siteStore: C{Store} @type userStorePath: C{FilePath}
['Move', 'the', 'SubStore', 'at', 'the', 'indicated', 'location', 'into', 'the', 'given', 'site', 'store', 's', 'directory', 'and', 'then', 'hook', 'it', 'up', 'to', 'the', 'site', 'store', 's', 'authentication', 'database', '.']
train
https://github.com/twisted/axiom/blob/7de70bc8fe1bb81f9c2339fba8daec9eb2e92b68/axiom/userbase.py#L306-L342
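insertUserStore leans on Python's for/else: the else clause runs only if the loop never hit break, which is how AllNamesConflict gets raised when every candidate address is taken. The control flow in isolation:

def first_free(candidates, taken):
    for name in candidates:
        if name not in taken:
            break            # found a usable name
    else:
        # Runs only when the loop exhausted candidates without break,
        # mirroring the AllNamesConflict() branch above.
        raise RuntimeError("all names conflict")
    return name

print(first_free(["alice", "bob"], {"alice"}))  # bob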
6,235
hannes-brt/hebel
hebel/pycuda_ops/cublas.py
cublasCtpsv
def cublasCtpsv(handle, uplo, trans, diag, n, AP, x, incx): """ Solve complex triangular-packed system with one right-hand side. """ status = _libcublas.cublasCtpsv_v2(handle, _CUBLAS_FILL_MODE[uplo], _CUBLAS_OP[trans], _CUBLAS_DIAG[diag], n, int(AP), int(x), incx) cublasCheckStatus(status)
python
def cublasCtpsv(handle, uplo, trans, diag, n, AP, x, incx): """ Solve complex triangular-packed system with one right-hand side. """ status = _libcublas.cublasCtpsv_v2(handle, _CUBLAS_FILL_MODE[uplo], _CUBLAS_OP[trans], _CUBLAS_DIAG[diag], n, int(AP), int(x), incx) cublasCheckStatus(status)
['def', 'cublasCtpsv', '(', 'handle', ',', 'uplo', ',', 'trans', ',', 'diag', ',', 'n', ',', 'AP', ',', 'x', ',', 'incx', ')', ':', 'status', '=', '_libcublas', '.', 'cublasCtpsv_v2', '(', 'handle', ',', '_CUBLAS_FILL_MODE', '[', 'uplo', ']', ',', '_CUBLAS_OP', '[', 'trans', ']', ',', '_CUBLAS_DIAG', '[', 'diag', ']', ',', 'n', ',', 'int', '(', 'AP', ')', ',', 'int', '(', 'x', ')', ',', 'incx', ')', 'cublasCheckStatus', '(', 'status', ')']
Solve complex triangular-packed system with one right-hand side.
['Solve', 'complex', 'triangular', '-', 'packed', 'system', 'with', 'one', 'right', '-', 'hand', 'side', '.']
train
https://github.com/hannes-brt/hebel/blob/1e2c3a9309c2646103901b26a55be4e312dd5005/hebel/pycuda_ops/cublas.py#L3328-L3339
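cublasCtpsv solves op(A) x = b on the GPU for a complex triangular matrix A held in packed storage. As a CPU reference for what that computes, here is a sketch that unpacks a column-major, lower-triangular packed array and solves with SciPy; the packing convention is assumed, and this is an illustration, not part of hebel's API:

import numpy as np
from scipy.linalg import solve_triangular

def tpsv_lower(ap, b):
    n = len(b)
    A = np.zeros((n, n), dtype=ap.dtype)
    idx = 0
    for j in range(n):            # column-major packed lower triangle
        for i in range(j, n):
            A[i, j] = ap[idx]
            idx += 1
    return solve_triangular(A, b, lower=True)

ap = np.array([2, 1, 3, 4, 5, 6], dtype=np.complex64)  # packs a 3x3 lower A
b = np.array([2, 5, 20], dtype=np.complex64)
print(tpsv_lower(ap, b))   # [1.+0.j 1.+0.j 2.+0.j]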
6,236
scivision/msise00
msise00/base.py
run
def run(time: datetime, altkm: float, glat: Union[float, np.ndarray], glon: Union[float, np.ndarray], *, f107a: float = None, f107: float = None, Ap: int = None) -> xarray.Dataset: """ loops the rungtd1d function below. Figure it's easier to troubleshoot in Python than Fortran. """ glat = np.atleast_2d(glat) glon = np.atleast_2d(glon) # has to be here # %% altitude 1-D if glat.size == 1 and glon.size == 1 and isinstance(time, (str, date, datetime, np.datetime64)): atmos = rungtd1d(time, altkm, glat.squeeze()[()], glon.squeeze()[()], f107a=f107a, f107=f107, Ap=Ap) # %% lat/lon grid at 1 altitude else: atmos = loopalt_gtd(time, glat, glon, altkm, f107a=f107a, f107=f107, Ap=Ap) return atmos
python
def run(time: datetime, altkm: float, glat: Union[float, np.ndarray], glon: Union[float, np.ndarray], *, f107a: float = None, f107: float = None, Ap: int = None) -> xarray.Dataset: """ loops the rungtd1d function below. Figure it's easier to troubleshoot in Python than Fortran. """ glat = np.atleast_2d(glat) glon = np.atleast_2d(glon) # has to be here # %% altitude 1-D if glat.size == 1 and glon.size == 1 and isinstance(time, (str, date, datetime, np.datetime64)): atmos = rungtd1d(time, altkm, glat.squeeze()[()], glon.squeeze()[()], f107a=f107a, f107=f107, Ap=Ap) # %% lat/lon grid at 1 altitude else: atmos = loopalt_gtd(time, glat, glon, altkm, f107a=f107a, f107=f107, Ap=Ap) return atmos
['def', 'run', '(', 'time', ':', 'datetime', ',', 'altkm', ':', 'float', ',', 'glat', ':', 'Union', '[', 'float', ',', 'np', '.', 'ndarray', ']', ',', 'glon', ':', 'Union', '[', 'float', ',', 'np', '.', 'ndarray', ']', ',', '*', ',', 'f107a', ':', 'float', '=', 'None', ',', 'f107', ':', 'float', '=', 'None', ',', 'Ap', ':', 'int', '=', 'None', ')', '->', 'xarray', '.', 'Dataset', ':', 'glat', '=', 'np', '.', 'atleast_2d', '(', 'glat', ')', 'glon', '=', 'np', '.', 'atleast_2d', '(', 'glon', ')', '# has to be here', '# %% altitude 1-D', 'if', 'glat', '.', 'size', '==', '1', 'and', 'glon', '.', 'size', '==', '1', 'and', 'isinstance', '(', 'time', ',', '(', 'str', ',', 'date', ',', 'datetime', ',', 'np', '.', 'datetime64', ')', ')', ':', 'atmos', '=', 'rungtd1d', '(', 'time', ',', 'altkm', ',', 'glat', '.', 'squeeze', '(', ')', '[', '(', ')', ']', ',', 'glon', '.', 'squeeze', '(', ')', '[', '(', ')', ']', ',', 'f107a', '=', 'f107a', ',', 'f107', '=', 'f107', ',', 'Ap', '=', 'Ap', ')', '# %% lat/lon grid at 1 altitude', 'else', ':', 'atmos', '=', 'loopalt_gtd', '(', 'time', ',', 'glat', ',', 'glon', ',', 'altkm', ',', 'f107a', '=', 'f107a', ',', 'f107', '=', 'f107', ',', 'Ap', '=', 'Ap', ')', 'return', 'atmos']
loops the rungtd1d function below. Figure it's easier to troubleshoot in Python than Fortran.
['loops', 'the', 'rungtd1d', 'function', 'below', '.', 'Figure', 'it', 's', 'easier', 'to', 'troubleshoot', 'in', 'Python', 'than', 'Fortran', '.']
train
https://github.com/scivision/msise00/blob/13a283ec02679ab74672f284ba68a7a8f896dc6f/msise00/base.py#L34-L51
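The wrapper's only real logic is shape-based dispatch: scalar coordinates take the single-site altitude-profile path, arrays take the lat/lon-grid path. That test in isolation, with stand-in branches in place of the real workers:

import numpy as np

def run_sketch(glat, glon):
    glat = np.atleast_2d(glat)
    glon = np.atleast_2d(glon)
    if glat.size == 1 and glon.size == 1:
        return "1-D altitude profile at a single site"
    return "one altitude over a lat/lon grid"

print(run_sketch(65.0, -148.0))             # single site
print(run_sketch([60, 65], [-150, -148]))   # grid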
6,237
MrYsLab/pymata-aio
pymata_aio/pymata3.py
PyMata3.set_analog_latch
def set_analog_latch(self, pin, threshold_type, threshold_value, cb=None, cb_type=None): """ This method "arms" an analog pin for its data to be latched and saved in the latching table. If a callback method is provided, when latching criteria is achieved, the callback function is called with latching data notification. :param pin: Analog pin number (value following an 'A' designator, i.e. A5 = 5) :param threshold_type: Constants.LATCH_GT | Constants.LATCH_LT | Constants.LATCH_GTE | Constants.LATCH_LTE :param threshold_value: numerical value - between 0 and 1023 :param cb: callback method :param cb_type: Constants.CB_TYPE_DIRECT = direct call or Constants.CB_TYPE_ASYNCIO = asyncio coroutine :returns: True if successful, False if parameter data is invalid """ task = asyncio.ensure_future(self.core.set_analog_latch(pin, threshold_type, threshold_value, cb, cb_type)) result = self.loop.run_until_complete(task) return result
python
def set_analog_latch(self, pin, threshold_type, threshold_value, cb=None, cb_type=None): """ This method "arms" an analog pin for its data to be latched and saved in the latching table. If a callback method is provided, when latching criteria is achieved, the callback function is called with latching data notification. :param pin: Analog pin number (value following an 'A' designator, i.e. A5 = 5) :param threshold_type: Constants.LATCH_GT | Constants.LATCH_LT | Constants.LATCH_GTE | Constants.LATCH_LTE :param threshold_value: numerical value - between 0 and 1023 :param cb: callback method :param cb_type: Constants.CB_TYPE_DIRECT = direct call or Constants.CB_TYPE_ASYNCIO = asyncio coroutine :returns: True if successful, False if parameter data is invalid """ task = asyncio.ensure_future(self.core.set_analog_latch(pin, threshold_type, threshold_value, cb, cb_type)) result = self.loop.run_until_complete(task) return result
['def', 'set_analog_latch', '(', 'self', ',', 'pin', ',', 'threshold_type', ',', 'threshold_value', ',', 'cb', '=', 'None', ',', 'cb_type', '=', 'None', ')', ':', 'task', '=', 'asyncio', '.', 'ensure_future', '(', 'self', '.', 'core', '.', 'set_analog_latch', '(', 'pin', ',', 'threshold_type', ',', 'threshold_value', ',', 'cb', ',', 'cb_type', ')', ')', 'result', '=', 'self', '.', 'loop', '.', 'run_until_complete', '(', 'task', ')', 'return', 'result']
This method "arms" an analog pin for its data to be latched and saved in the latching table. If a callback method is provided, when latching criteria is achieved, the callback function is called with latching data notification. :param pin: Analog pin number (value following an 'A' designator, i.e. A5 = 5 :param threshold_type: Constants.LATCH_GT | Constants.LATCH_LT | Constants.LATCH_GTE | Constants.LATCH_LTE :param threshold_value: numerical value - between 0 and 1023 :param cb: callback method :param cb_type: Constants.CB_TYPE_DIRECT = direct call or Constants.CB_TYPE_ASYNCIO = asyncio coroutine :returns: True if successful, False if parameter data is invalid
['This', 'method', 'arms', 'an', 'analog', 'pin', 'for', 'its', 'data', 'to', 'be', 'latched', 'and', 'saved', 'in', 'the', 'latching', 'table', '.', 'If', 'a', 'callback', 'method', 'is', 'provided', 'when', 'latching', 'criteria', 'is', 'achieved', 'the', 'callback', 'function', 'is', 'called', 'with', 'latching', 'data', 'notification', '.']
train
https://github.com/MrYsLab/pymata-aio/blob/015081a4628b9d47dfe3f8d6c698ff903f107810/pymata_aio/pymata3.py#L524-L550
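Every PyMata3 method follows the same bridge pattern: wrap the asynchronous core coroutine in a task, then block on the event loop until it completes. The pattern reduced to a self-contained toy, where the coroutine is a stand-in for the Firmata I/O:

import asyncio

async def core_operation(value):
    await asyncio.sleep(0)    # stands in for the hardware round-trip
    return value * 2

loop = asyncio.new_event_loop()
task = asyncio.ensure_future(core_operation(21), loop=loop)
print(loop.run_until_complete(task))   # 42
loop.close()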
6,238
ihmeuw/vivarium
src/vivarium/config_tree.py
ConfigNode.drop_layer
def drop_layer(self, layer): """Removes the named layer and the value associated with it from the node. Parameters ---------- layer : str Name of the layer to drop. Raises ------ TypeError If the node is frozen KeyError If the named layer does not exist """ if self._frozen: raise TypeError('Frozen ConfigNode does not support modification') self.reset_layer(layer) self._layers.remove(layer)
python
def drop_layer(self, layer): """Removes the named layer and the value associated with it from the node. Parameters ---------- layer : str Name of the layer to drop. Raises ------ TypeError If the node is frozen KeyError If the named layer does not exist """ if self._frozen: raise TypeError('Frozen ConfigNode does not support modification') self.reset_layer(layer) self._layers.remove(layer)
['def', 'drop_layer', '(', 'self', ',', 'layer', ')', ':', 'if', 'self', '.', '_frozen', ':', 'raise', 'TypeError', '(', "'Frozen ConfigNode does not support modification'", ')', 'self', '.', 'reset_layer', '(', 'layer', ')', 'self', '.', '_layers', '.', 'remove', '(', 'layer', ')']
Removes the named layer and the value associated with it from the node. Parameters ---------- layer : str Name of the layer to drop. Raises ------ TypeError If the node is frozen KeyError If the named layer does not exist
['Removes', 'the', 'named', 'layer', 'and', 'the', 'value', 'associated', 'with', 'it', 'from', 'the', 'node', '.']
train
https://github.com/ihmeuw/vivarium/blob/c5f5d50f775c8bf337d3aae1ff7c57c025a8e258/src/vivarium/config_tree.py#L148-L166
6,239
wrongwaycn/ssdb-py
ssdb/connection.py
PythonParser.on_connect
def on_connect(self, connection): """ Called when the socket connects """ self._sock = connection._sock self._buffer = SocketBuffer(self._sock, self.socket_read_size) if connection.decode_responses: self.encoding = connection.encoding
python
def on_connect(self, connection): """ Called when the socket connects """ self._sock = connection._sock self._buffer = SocketBuffer(self._sock, self.socket_read_size) if connection.decode_responses: self.encoding = connection.encoding
['def', 'on_connect', '(', 'self', ',', 'connection', ')', ':', 'self', '.', '_sock', '=', 'connection', '.', '_sock', 'self', '.', '_buffer', '=', 'SocketBuffer', '(', 'self', '.', '_sock', ',', 'self', '.', 'socket_read_size', ')', 'if', 'connection', '.', 'decode_responses', ':', 'self', '.', 'encoding', '=', 'connection', '.', 'encoding']
Called when the socket connects
['Called', 'when', 'the', 'socket', 'connects']
train
https://github.com/wrongwaycn/ssdb-py/blob/ce7b1542f0faa06fe71a60c667fe15992af0f621/ssdb/connection.py#L171-L178
6,240
a10networks/acos-client
acos_client/v30/device_context.py
DeviceContext.switch
def switch(self, device_id, obj_slot_id): """Switching of device-context""" payload = { "device-context": self._build_payload(device_id, obj_slot_id) } return self._post(self.url_prefix, payload)
python
def switch(self, device_id, obj_slot_id): """Switching of device-context""" payload = { "device-context": self._build_payload(device_id, obj_slot_id) } return self._post(self.url_prefix, payload)
['def', 'switch', '(', 'self', ',', 'device_id', ',', 'obj_slot_id', ')', ':', 'payload', '=', '{', '"device-context"', ':', 'self', '.', '_build_payload', '(', 'device_id', ',', 'obj_slot_id', ')', '}', 'return', 'self', '.', '_post', '(', 'self', '.', 'url_prefix', ',', 'payload', ')']
Switching of device-context
['Switching', 'of', 'device', '-', 'context']
train
https://github.com/a10networks/acos-client/blob/14d1fff589650650c9a65047d54c6c8c1d6b75f2/acos_client/v30/device_context.py#L24-L30
6,241
EconForge/dolo
dolo/algos/value_iteration.py
evaluate_policy
def evaluate_policy(model, mdr, tol=1e-8, maxit=2000, grid={}, verbose=True, initial_guess=None, hook=None, integration_orders=None, details=False, interp_type='cubic'): """Compute value function corresponding to policy ``dr`` Parameters: ----------- model: "dtcscc" model. Must contain a 'value' function. mdr: decision rule to evaluate Returns: -------- decision rule: value function (a function of the space similar to a decision rule object) """ process = model.exogenous dprocess = process.discretize() n_ms = dprocess.n_nodes() # number of exogenous states n_mv = dprocess.n_inodes( 0) # this assume number of integration nodes is constant x0 = model.calibration['controls'] v0 = model.calibration['values'] parms = model.calibration['parameters'] n_x = len(x0) n_v = len(v0) n_s = len(model.symbols['states']) endo_grid = model.get_grid(**grid) exo_grid = dprocess.grid if initial_guess is not None: mdrv = initial_guess else: mdrv = DecisionRule(exo_grid, endo_grid, interp_type=interp_type) grid = mdrv.endo_grid.nodes() N = grid.shape[0] if isinstance(mdr, np.ndarray): controls = mdr else: controls = np.zeros((n_ms, N, n_x)) for i_m in range(n_ms): controls[i_m, :, :] = mdr.eval_is(i_m, grid) values_0 = np.zeros((n_ms, N, n_v)) if initial_guess is None: for i_m in range(n_ms): values_0[i_m, :, :] = v0[None, :] else: for i_m in range(n_ms): values_0[i_m, :, :] = initial_guess.eval_is(i_m, grid) val = model.functions['value'] g = model.functions['transition'] sh_v = values_0.shape err = 10 inner_maxit = 50 it = 0 if verbose: headline = '|{0:^4} | {1:10} | {2:8} | {3:8} |'.format( 'N', ' Error', 'Gain', 'Time') stars = '-' * len(headline) print(stars) print(headline) print(stars) t1 = time.time() err_0 = np.nan verbit = (verbose == 'full') while err > tol and it < maxit: it += 1 t_start = time.time() mdrv.set_values(values_0.reshape(sh_v)) values = update_value(val, g, grid, controls, values_0, mdr, mdrv, dprocess, parms).reshape((-1, n_v)) err = abs(values.reshape(sh_v) - values_0).max() err_SA = err / err_0 err_0 = err values_0 = values.reshape(sh_v) t_finish = time.time() elapsed = t_finish - t_start if verbose: print('|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |'.format( it, err, err_SA, elapsed)) # values_0 = values.reshape(sh_v) t2 = time.time() if verbose: print(stars) print("Elapsed: {} seconds.".format(t2 - t1)) print(stars) if not details: return mdrv else: return EvaluationResult(mdrv, it, tol, err)
python
def evaluate_policy(model, mdr, tol=1e-8, maxit=2000, grid={}, verbose=True, initial_guess=None, hook=None, integration_orders=None, details=False, interp_type='cubic'): """Compute value function corresponding to policy ``dr`` Parameters: ----------- model: "dtcscc" model. Must contain a 'value' function. mdr: decision rule to evaluate Returns: -------- decision rule: value function (a function of the space similar to a decision rule object) """ process = model.exogenous dprocess = process.discretize() n_ms = dprocess.n_nodes() # number of exogenous states n_mv = dprocess.n_inodes( 0) # this assume number of integration nodes is constant x0 = model.calibration['controls'] v0 = model.calibration['values'] parms = model.calibration['parameters'] n_x = len(x0) n_v = len(v0) n_s = len(model.symbols['states']) endo_grid = model.get_grid(**grid) exo_grid = dprocess.grid if initial_guess is not None: mdrv = initial_guess else: mdrv = DecisionRule(exo_grid, endo_grid, interp_type=interp_type) grid = mdrv.endo_grid.nodes() N = grid.shape[0] if isinstance(mdr, np.ndarray): controls = mdr else: controls = np.zeros((n_ms, N, n_x)) for i_m in range(n_ms): controls[i_m, :, :] = mdr.eval_is(i_m, grid) values_0 = np.zeros((n_ms, N, n_v)) if initial_guess is None: for i_m in range(n_ms): values_0[i_m, :, :] = v0[None, :] else: for i_m in range(n_ms): values_0[i_m, :, :] = initial_guess.eval_is(i_m, grid) val = model.functions['value'] g = model.functions['transition'] sh_v = values_0.shape err = 10 inner_maxit = 50 it = 0 if verbose: headline = '|{0:^4} | {1:10} | {2:8} | {3:8} |'.format( 'N', ' Error', 'Gain', 'Time') stars = '-' * len(headline) print(stars) print(headline) print(stars) t1 = time.time() err_0 = np.nan verbit = (verbose == 'full') while err > tol and it < maxit: it += 1 t_start = time.time() mdrv.set_values(values_0.reshape(sh_v)) values = update_value(val, g, grid, controls, values_0, mdr, mdrv, dprocess, parms).reshape((-1, n_v)) err = abs(values.reshape(sh_v) - values_0).max() err_SA = err / err_0 err_0 = err values_0 = values.reshape(sh_v) t_finish = time.time() elapsed = t_finish - t_start if verbose: print('|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |'.format( it, err, err_SA, elapsed)) # values_0 = values.reshape(sh_v) t2 = time.time() if verbose: print(stars) print("Elapsed: {} seconds.".format(t2 - t1)) print(stars) if not details: return mdrv else: return EvaluationResult(mdrv, it, tol, err)
['def', 'evaluate_policy', '(', 'model', ',', 'mdr', ',', 'tol', '=', '1e-8', ',', 'maxit', '=', '2000', ',', 'grid', '=', '{', '}', ',', 'verbose', '=', 'True', ',', 'initial_guess', '=', 'None', ',', 'hook', '=', 'None', ',', 'integration_orders', '=', 'None', ',', 'details', '=', 'False', ',', 'interp_type', '=', "'cubic'", ')', ':', 'process', '=', 'model', '.', 'exogenous', 'dprocess', '=', 'process', '.', 'discretize', '(', ')', 'n_ms', '=', 'dprocess', '.', 'n_nodes', '(', ')', '# number of exogenous states', 'n_mv', '=', 'dprocess', '.', 'n_inodes', '(', '0', ')', '# this assume number of integration nodes is constant', 'x0', '=', 'model', '.', 'calibration', '[', "'controls'", ']', 'v0', '=', 'model', '.', 'calibration', '[', "'values'", ']', 'parms', '=', 'model', '.', 'calibration', '[', "'parameters'", ']', 'n_x', '=', 'len', '(', 'x0', ')', 'n_v', '=', 'len', '(', 'v0', ')', 'n_s', '=', 'len', '(', 'model', '.', 'symbols', '[', "'states'", ']', ')', 'endo_grid', '=', 'model', '.', 'get_grid', '(', '*', '*', 'grid', ')', 'exo_grid', '=', 'dprocess', '.', 'grid', 'if', 'initial_guess', 'is', 'not', 'None', ':', 'mdrv', '=', 'initial_guess', 'else', ':', 'mdrv', '=', 'DecisionRule', '(', 'exo_grid', ',', 'endo_grid', ',', 'interp_type', '=', 'interp_type', ')', 'grid', '=', 'mdrv', '.', 'endo_grid', '.', 'nodes', '(', ')', 'N', '=', 'grid', '.', 'shape', '[', '0', ']', 'if', 'isinstance', '(', 'mdr', ',', 'np', '.', 'ndarray', ')', ':', 'controls', '=', 'mdr', 'else', ':', 'controls', '=', 'np', '.', 'zeros', '(', '(', 'n_ms', ',', 'N', ',', 'n_x', ')', ')', 'for', 'i_m', 'in', 'range', '(', 'n_ms', ')', ':', 'controls', '[', 'i_m', ',', ':', ',', ':', ']', '=', 'mdr', '.', 'eval_is', '(', 'i_m', ',', 'grid', ')', 'values_0', '=', 'np', '.', 'zeros', '(', '(', 'n_ms', ',', 'N', ',', 'n_v', ')', ')', 'if', 'initial_guess', 'is', 'None', ':', 'for', 'i_m', 'in', 'range', '(', 'n_ms', ')', ':', 'values_0', '[', 'i_m', ',', ':', ',', ':', ']', '=', 'v0', '[', 'None', ',', ':', ']', 'else', ':', 'for', 'i_m', 'in', 'range', '(', 'n_ms', ')', ':', 'values_0', '[', 'i_m', ',', ':', ',', ':', ']', '=', 'initial_guess', '.', 'eval_is', '(', 'i_m', ',', 'grid', ')', 'val', '=', 'model', '.', 'functions', '[', "'value'", ']', 'g', '=', 'model', '.', 'functions', '[', "'transition'", ']', 'sh_v', '=', 'values_0', '.', 'shape', 'err', '=', '10', 'inner_maxit', '=', '50', 'it', '=', '0', 'if', 'verbose', ':', 'headline', '=', "'|{0:^4} | {1:10} | {2:8} | {3:8} |'", '.', 'format', '(', "'N'", ',', "' Error'", ',', "'Gain'", ',', "'Time'", ')', 'stars', '=', "'-'", '*', 'len', '(', 'headline', ')', 'print', '(', 'stars', ')', 'print', '(', 'headline', ')', 'print', '(', 'stars', ')', 't1', '=', 'time', '.', 'time', '(', ')', 'err_0', '=', 'np', '.', 'nan', 'verbit', '=', '(', 'verbose', '==', "'full'", ')', 'while', 'err', '>', 'tol', 'and', 'it', '<', 'maxit', ':', 'it', '+=', '1', 't_start', '=', 'time', '.', 'time', '(', ')', 'mdrv', '.', 'set_values', '(', 'values_0', '.', 'reshape', '(', 'sh_v', ')', ')', 'values', '=', 'update_value', '(', 'val', ',', 'g', ',', 'grid', ',', 'controls', ',', 'values_0', ',', 'mdr', ',', 'mdrv', ',', 'dprocess', ',', 'parms', ')', '.', 'reshape', '(', '(', '-', '1', ',', 'n_v', ')', ')', 'err', '=', 'abs', '(', 'values', '.', 'reshape', '(', 'sh_v', ')', '-', 'values_0', ')', '.', 'max', '(', ')', 'err_SA', '=', 'err', '/', 'err_0', 'err_0', '=', 'err', 'values_0', '=', 'values', '.', 'reshape', '(', 'sh_v', ')', 't_finish', '=', 'time', '.', 'time', '(', ')', 'elapsed', '=', 't_finish', '-', 't_start', 'if', 'verbose', ':', 'print', '(', "'|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |'", '.', 'format', '(', 'it', ',', 'err', ',', 'err_SA', ',', 'elapsed', ')', ')', '# values_0 = values.reshape(sh_v)', 't2', '=', 'time', '.', 'time', '(', ')', 'if', 'verbose', ':', 'print', '(', 'stars', ')', 'print', '(', '"Elapsed: {} seconds."', '.', 'format', '(', 't2', '-', 't1', ')', ')', 'print', '(', 'stars', ')', 'if', 'not', 'details', ':', 'return', 'mdrv', 'else', ':', 'return', 'EvaluationResult', '(', 'mdrv', ',', 'it', ',', 'tol', ',', 'err', ')']
Compute value function corresponding to policy ``dr`` Parameters: ----------- model: "dtcscc" model. Must contain a 'value' function. mdr: decision rule to evaluate Returns: -------- decision rule: value function (a function of the space similar to a decision rule object)
['Compute', 'value', 'function', 'corresponding', 'to', 'policy', 'dr']
train
https://github.com/EconForge/dolo/blob/d91ddf148b009bf79852d9aec70f3a1877e0f79a/dolo/algos/value_iteration.py#L210-L339
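Stripped of the model plumbing, evaluate_policy is successive approximation: apply the update, take the sup-norm of the change, track the gain err/err_0 (an estimate of the contraction modulus), and stop at tol or maxit. The skeleton with a toy contraction v -> 0.5 v + 1, whose fixed point is 2:

import numpy as np

def fixed_point(update, v0, tol=1e-8, maxit=2000):
    err, err_prev, it = 10.0, np.nan, 0
    v = v0
    while err > tol and it < maxit:
        it += 1
        v_new = update(v)
        err = np.abs(v_new - v).max()   # sup-norm of the change
        gain = err / err_prev           # ratio ~ contraction modulus (nan on pass 1, as above)
        err_prev = err
        v = v_new
    return v, it, gain

v, it, gain = fixed_point(lambda v: 0.5 * v + 1.0, np.zeros(4))
print(v, it, round(gain, 3))   # converges to 2.0 with gain ~0.5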
6,242
neovim/pynvim
pynvim/api/nvim.py
Nvim.async_call
def async_call(self, fn, *args, **kwargs): """Schedule `fn` to be called by the event loop soon. This function is thread-safe, and is the only way code not on the main thread could interact with nvim api objects. This function can also be called in a synchronous event handler, just before it returns, to defer execution that shouldn't block neovim. """ call_point = ''.join(format_stack(None, 5)[:-1]) def handler(): try: fn(*args, **kwargs) except Exception as err: msg = ("error caught while executing async callback:\n" "{!r}\n{}\n \nthe call was requested at\n{}" .format(err, format_exc_skip(1), call_point)) self._err_cb(msg) raise self._session.threadsafe_call(handler)
python
def async_call(self, fn, *args, **kwargs): """Schedule `fn` to be called by the event loop soon. This function is thread-safe, and is the only way code not on the main thread could interact with nvim api objects. This function can also be called in a synchronous event handler, just before it returns, to defer execution that shouldn't block neovim. """ call_point = ''.join(format_stack(None, 5)[:-1]) def handler(): try: fn(*args, **kwargs) except Exception as err: msg = ("error caught while executing async callback:\n" "{!r}\n{}\n \nthe call was requested at\n{}" .format(err, format_exc_skip(1), call_point)) self._err_cb(msg) raise self._session.threadsafe_call(handler)
['def', 'async_call', '(', 'self', ',', 'fn', ',', '*', 'args', ',', '*', '*', 'kwargs', ')', ':', 'call_point', '=', "''", '.', 'join', '(', 'format_stack', '(', 'None', ',', '5', ')', '[', ':', '-', '1', ']', ')', 'def', 'handler', '(', ')', ':', 'try', ':', 'fn', '(', '*', 'args', ',', '*', '*', 'kwargs', ')', 'except', 'Exception', 'as', 'err', ':', 'msg', '=', '(', '"error caught while executing async callback:\\n"', '"{!r}\\n{}\\n \\nthe call was requested at\\n{}"', '.', 'format', '(', 'err', ',', 'format_exc_skip', '(', '1', ')', ',', 'call_point', ')', ')', 'self', '.', '_err_cb', '(', 'msg', ')', 'raise', 'self', '.', '_session', '.', 'threadsafe_call', '(', 'handler', ')']
Schedule `fn` to be called by the event loop soon. This function is thread-safe, and is the only way code not on the main thread could interact with nvim api objects. This function can also be called in a synchronous event handler, just before it returns, to defer execution that shouldn't block neovim.
['Schedule', 'fn', 'to', 'be', 'called', 'by', 'the', 'event', 'loop', 'soon', '.']
train
https://github.com/neovim/pynvim/blob/5e577188e6d7133f597ad0ce60dc6a4b1314064a/pynvim/api/nvim.py#L433-L454
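The notable trick in async_call is capturing the requester's stack up front, so an exception raised later on the event-loop thread can still report where the call was scheduled. The capture on its own, with the handler invoked directly instead of via a loop:

import traceback

def schedule(fn, *args):
    call_point = ''.join(traceback.format_stack()[:-1])  # stack minus this frame
    def handler():
        try:
            fn(*args)
        except Exception as err:
            # The saved stack points back at the scheduling site.
            print(f"error in async callback: {err!r}\nrequested at:\n{call_point}")
    return handler

schedule(lambda: 1 / 0)()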
6,243
mass-project/mass_api_client
mass_api_client/resources/sample.py
DomainSample.create
def create(cls, domain, tlp_level=0, tags=[]): """ Create a new :class:`DomainSample` on the server. :param domain: The domain as a string. :param tlp_level: The TLP-Level :param tags: Tags to add to the sample. :return: The created sample. """ return cls._create(domain=domain, tlp_level=tlp_level, tags=tags)
python
def create(cls, domain, tlp_level=0, tags=[]): """ Create a new :class:`DomainSample` on the server. :param domain: The domain as a string. :param tlp_level: The TLP-Level :param tags: Tags to add to the sample. :return: The created sample. """ return cls._create(domain=domain, tlp_level=tlp_level, tags=tags)
['def', 'create', '(', 'cls', ',', 'domain', ',', 'tlp_level', '=', '0', ',', 'tags', '=', '[', ']', ')', ':', 'return', 'cls', '.', '_create', '(', 'domain', '=', 'domain', ',', 'tlp_level', '=', 'tlp_level', ',', 'tags', '=', 'tags', ')']
Create a new :class:`DomainSample` on the server. :param domain: The domain as a string. :param tlp_level: The TLP-Level :param tags: Tags to add to the sample. :return: The created sample.
['Create', 'a', 'new', ':', 'class', ':', 'DomainSample', 'on', 'the', 'server', '.']
train
https://github.com/mass-project/mass_api_client/blob/b200c32c93608bf3b2707fbf0e83a2228702e2c8/mass_api_client/resources/sample.py#L68-L77
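A small caveat in the signature above: tags=[] is a mutable default argument. It is harmless here because the list is only passed through, but the conventional defensive spelling is shown below (a sketch, not mass_api_client's code):

def create(domain, tlp_level=0, tags=None):
    # None-as-sentinel avoids sharing one list across calls.
    tags = [] if tags is None else tags
    return {"domain": domain, "tlp_level": tlp_level, "tags": tags}

print(create("example.com", tags=["phishing"]))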
6,244
MacHu-GWU/crawlib-project
crawlib/pipeline/mongodb/query_builder.py
unfinished
def unfinished(finished_status, update_interval, status_key, edit_at_key): """ Create dict query for pymongo that gets all unfinished tasks. :param finished_status: int, status codes less than this are considered unfinished. :param update_interval: int, the record will be updated every x seconds. :param status_key: status code field key, supports dot notation. :param edit_at_key: edit_at time field key, supports dot notation. :return: dict, a pymongo filter. **Note (translated from Chinese)** The status code is below a given value, or the time since the last update already exceeds the threshold. """ return { "$or": [ {status_key: {"$lt": finished_status}}, {edit_at_key: {"$lt": x_seconds_before_now(update_interval)}}, ] }
python
def unfinished(finished_status, update_interval, status_key, edit_at_key): """ Create dict query for pymongo that gets all unfinished tasks. :param finished_status: int, status codes less than this are considered unfinished. :param update_interval: int, the record will be updated every x seconds. :param status_key: status code field key, supports dot notation. :param edit_at_key: edit_at time field key, supports dot notation. :return: dict, a pymongo filter. **Note (translated from Chinese)** The status code is below a given value, or the time since the last update already exceeds the threshold. """ return { "$or": [ {status_key: {"$lt": finished_status}}, {edit_at_key: {"$lt": x_seconds_before_now(update_interval)}}, ] }
['def', 'unfinished', '(', 'finished_status', ',', 'update_interval', ',', 'status_key', ',', 'edit_at_key', ')', ':', 'return', '{', '"$or"', ':', '[', '{', 'status_key', ':', '{', '"$lt"', ':', 'finished_status', '}', '}', ',', '{', 'edit_at_key', ':', '{', '"$lt"', ':', 'x_seconds_before_now', '(', 'update_interval', ')', '}', '}', ',', ']', '}']
Create dict query for pymongo that gets all unfinished tasks. :param finished_status: int, status codes less than this are considered unfinished. :param update_interval: int, the record will be updated every x seconds. :param status_key: status code field key, supports dot notation. :param edit_at_key: edit_at time field key, supports dot notation. :return: dict, a pymongo filter. **Note (translated from Chinese)** The status code is below a given value, or the time since the last update already exceeds the threshold.
['Create', 'dict', 'query', 'for', 'pymongo', 'that', 'gets', 'all', 'unfinished', 'tasks', '.']
train
https://github.com/MacHu-GWU/crawlib-project/blob/241516f2a7a0a32c692f7af35a1f44064e8ce1ab/crawlib/pipeline/mongodb/query_builder.py#L52-L77
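The returned dict is a standard MongoDB $or filter meant for pymongo's find(). A concrete instance, with a stand-in for the x_seconds_before_now helper and illustrative field names:

from datetime import datetime, timedelta

def x_seconds_before_now(seconds):   # stand-in for the helper used above
    return datetime.utcnow() - timedelta(seconds=seconds)

flt = {
    "$or": [
        {"status": {"$lt": 50}},                               # not yet finished
        {"edit_at": {"$lt": x_seconds_before_now(3600)}},      # stale: due for refresh
    ]
}
print(flt)   # e.g. db.tasks.find(flt) with a real pymongo collection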
6,245
saltstack/salt
salt/runners/queue.py
insert_runner
def insert_runner(fun, args=None, kwargs=None, queue=None, backend=None): ''' Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-separated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}' ''' if args is None: args = [] elif isinstance(args, six.string_types): args = args.split(',') if kwargs is None: kwargs = {} queue_kwargs = __get_queue_opts(queue=queue, backend=backend) data = {'fun': fun, 'args': args, 'kwargs': kwargs} return insert(items=data, **queue_kwargs)
python
def insert_runner(fun, args=None, kwargs=None, queue=None, backend=None): ''' Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-separated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}' ''' if args is None: args = [] elif isinstance(args, six.string_types): args = args.split(',') if kwargs is None: kwargs = {} queue_kwargs = __get_queue_opts(queue=queue, backend=backend) data = {'fun': fun, 'args': args, 'kwargs': kwargs} return insert(items=data, **queue_kwargs)
['def', 'insert_runner', '(', 'fun', ',', 'args', '=', 'None', ',', 'kwargs', '=', 'None', ',', 'queue', '=', 'None', ',', 'backend', '=', 'None', ')', ':', 'if', 'args', 'is', 'None', ':', 'args', '=', '[', ']', 'elif', 'isinstance', '(', 'args', ',', 'six', '.', 'string_types', ')', ':', 'args', '=', 'args', '.', 'split', '(', "','", ')', 'if', 'kwargs', 'is', 'None', ':', 'kwargs', '=', '{', '}', 'queue_kwargs', '=', '__get_queue_opts', '(', 'queue', '=', 'queue', ',', 'backend', '=', 'backend', ')', 'data', '=', '{', "'fun'", ':', 'fun', ',', "'args'", ':', 'args', ',', "'kwargs'", ':', 'kwargs', '}', 'return', 'insert', '(', 'items', '=', 'data', ',', '*', '*', 'queue_kwargs', ')']
Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-separated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}'
['Insert', 'a', 'reference', 'to', 'a', 'runner', 'into', 'the', 'queue', 'so', 'that', 'it', 'can', 'be', 'run', 'later', '.']
train
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/runners/queue.py#L242-L277
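Before queueing, insert_runner normalizes its args argument: None becomes an empty list and a comma-separated string becomes a list (the record uses six.string_types for Python 2 compatibility; plain str suffices on Python 3). That normalization on its own:

def normalize_args(args):
    if args is None:
        return []
    if isinstance(args, str):
        return args.split(',')
    return args

print(normalize_args(None))       # []
print(normalize_args('a,b,c'))    # ['a', 'b', 'c']
print(normalize_args(['x']))      # ['x']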
6,246
brianhie/scanorama
bin/unsupervised.py
silhouette_score
def silhouette_score(X, labels, metric='euclidean', sample_size=None, random_state=None, **kwds): """Compute the mean Silhouette Coefficient of all samples. The Silhouette Coefficient is calculated using the mean intra-cluster distance (``a``) and the mean nearest-cluster distance (``b``) for each sample. The Silhouette Coefficient for a sample is ``(b - a) / max(a, b)``. To clarify, ``b`` is the distance between a sample and the nearest cluster that the sample is not a part of. Note that Silhouette Coefficient is only defined if number of labels is 2 <= n_labels <= n_samples - 1. This function returns the mean Silhouette Coefficient over all samples. To obtain the values for each sample, use :func:`silhouette_samples`. The best value is 1 and the worst value is -1. Values near 0 indicate overlapping clusters. Negative values generally indicate that a sample has been assigned to the wrong cluster, as a different cluster is more similar. Read more in the :ref:`User Guide <silhouette_coefficient>`. Parameters ---------- X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \ [n_samples_a, n_features] otherwise Array of pairwise distances between samples, or a feature array. labels : array, shape = [n_samples] Predicted labels for each sample. metric : string, or callable The metric to use when calculating distance between instances in a feature array. If metric is a string, it must be one of the options allowed by :func:`metrics.pairwise.pairwise_distances <sklearn.metrics.pairwise.pairwise_distances>`. If X is the distance array itself, use ``metric="precomputed"``. sample_size : int or None The size of the sample to use when computing the Silhouette Coefficient on a random subset of the data. If ``sample_size is None``, no sampling is used. random_state : int, RandomState instance or None, optional (default=None) The generator used to randomly select a subset of samples. If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Used when ``sample_size is not None``. **kwds : optional keyword parameters Any further parameters are passed directly to the distance function. If using a scipy.spatial.distance metric, the parameters are still metric dependent. See the scipy docs for usage examples. Returns ------- silhouette : float Mean Silhouette Coefficient for all samples. References ---------- .. [1] `Peter J. Rousseeuw (1987). "Silhouettes: a Graphical Aid to the Interpretation and Validation of Cluster Analysis". Computational and Applied Mathematics 20: 53-65. <http://www.sciencedirect.com/science/article/pii/0377042787901257>`_ .. [2] `Wikipedia entry on the Silhouette Coefficient <https://en.wikipedia.org/wiki/Silhouette_(clustering)>`_ """ if sample_size is not None: X, labels = check_X_y(X, labels, accept_sparse=['csc', 'csr']) random_state = check_random_state(random_state) indices = random_state.permutation(X.shape[0])[:sample_size] if metric == "precomputed": X, labels = X[indices].T[indices].T, labels[indices] else: X, labels = X[indices], labels[indices] return np.mean(silhouette_samples(X, labels, metric=metric, **kwds))
python
def silhouette_score(X, labels, metric='euclidean', sample_size=None, random_state=None, **kwds): """Compute the mean Silhouette Coefficient of all samples. The Silhouette Coefficient is calculated using the mean intra-cluster distance (``a``) and the mean nearest-cluster distance (``b``) for each sample. The Silhouette Coefficient for a sample is ``(b - a) / max(a, b)``. To clarify, ``b`` is the distance between a sample and the nearest cluster that the sample is not a part of. Note that Silhouette Coefficient is only defined if number of labels is 2 <= n_labels <= n_samples - 1. This function returns the mean Silhouette Coefficient over all samples. To obtain the values for each sample, use :func:`silhouette_samples`. The best value is 1 and the worst value is -1. Values near 0 indicate overlapping clusters. Negative values generally indicate that a sample has been assigned to the wrong cluster, as a different cluster is more similar. Read more in the :ref:`User Guide <silhouette_coefficient>`. Parameters ---------- X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \ [n_samples_a, n_features] otherwise Array of pairwise distances between samples, or a feature array. labels : array, shape = [n_samples] Predicted labels for each sample. metric : string, or callable The metric to use when calculating distance between instances in a feature array. If metric is a string, it must be one of the options allowed by :func:`metrics.pairwise.pairwise_distances <sklearn.metrics.pairwise.pairwise_distances>`. If X is the distance array itself, use ``metric="precomputed"``. sample_size : int or None The size of the sample to use when computing the Silhouette Coefficient on a random subset of the data. If ``sample_size is None``, no sampling is used. random_state : int, RandomState instance or None, optional (default=None) The generator used to randomly select a subset of samples. If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Used when ``sample_size is not None``. **kwds : optional keyword parameters Any further parameters are passed directly to the distance function. If using a scipy.spatial.distance metric, the parameters are still metric dependent. See the scipy docs for usage examples. Returns ------- silhouette : float Mean Silhouette Coefficient for all samples. References ---------- .. [1] `Peter J. Rousseeuw (1987). "Silhouettes: a Graphical Aid to the Interpretation and Validation of Cluster Analysis". Computational and Applied Mathematics 20: 53-65. <http://www.sciencedirect.com/science/article/pii/0377042787901257>`_ .. [2] `Wikipedia entry on the Silhouette Coefficient <https://en.wikipedia.org/wiki/Silhouette_(clustering)>`_ """ if sample_size is not None: X, labels = check_X_y(X, labels, accept_sparse=['csc', 'csr']) random_state = check_random_state(random_state) indices = random_state.permutation(X.shape[0])[:sample_size] if metric == "precomputed": X, labels = X[indices].T[indices].T, labels[indices] else: X, labels = X[indices], labels[indices] return np.mean(silhouette_samples(X, labels, metric=metric, **kwds))
['def', 'silhouette_score', '(', 'X', ',', 'labels', ',', 'metric', '=', "'euclidean'", ',', 'sample_size', '=', 'None', ',', 'random_state', '=', 'None', ',', '*', '*', 'kwds', ')', ':', 'if', 'sample_size', 'is', 'not', 'None', ':', 'X', ',', 'labels', '=', 'check_X_y', '(', 'X', ',', 'labels', ',', 'accept_sparse', '=', '[', "'csc'", ',', "'csr'", ']', ')', 'random_state', '=', 'check_random_state', '(', 'random_state', ')', 'indices', '=', 'random_state', '.', 'permutation', '(', 'X', '.', 'shape', '[', '0', ']', ')', '[', ':', 'sample_size', ']', 'if', 'metric', '==', '"precomputed"', ':', 'X', ',', 'labels', '=', 'X', '[', 'indices', ']', '.', 'T', '[', 'indices', ']', '.', 'T', ',', 'labels', '[', 'indices', ']', 'else', ':', 'X', ',', 'labels', '=', 'X', '[', 'indices', ']', ',', 'labels', '[', 'indices', ']', 'return', 'np', '.', 'mean', '(', 'silhouette_samples', '(', 'X', ',', 'labels', ',', 'metric', '=', 'metric', ',', '*', '*', 'kwds', ')', ')']
Compute the mean Silhouette Coefficient of all samples. The Silhouette Coefficient is calculated using the mean intra-cluster distance (``a``) and the mean nearest-cluster distance (``b``) for each sample. The Silhouette Coefficient for a sample is ``(b - a) / max(a, b)``. To clarify, ``b`` is the distance between a sample and the nearest cluster that the sample is not a part of. Note that Silhouette Coefficient is only defined if number of labels is 2 <= n_labels <= n_samples - 1. This function returns the mean Silhouette Coefficient over all samples. To obtain the values for each sample, use :func:`silhouette_samples`. The best value is 1 and the worst value is -1. Values near 0 indicate overlapping clusters. Negative values generally indicate that a sample has been assigned to the wrong cluster, as a different cluster is more similar. Read more in the :ref:`User Guide <silhouette_coefficient>`. Parameters ---------- X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \ [n_samples_a, n_features] otherwise Array of pairwise distances between samples, or a feature array. labels : array, shape = [n_samples] Predicted labels for each sample. metric : string, or callable The metric to use when calculating distance between instances in a feature array. If metric is a string, it must be one of the options allowed by :func:`metrics.pairwise.pairwise_distances <sklearn.metrics.pairwise.pairwise_distances>`. If X is the distance array itself, use ``metric="precomputed"``. sample_size : int or None The size of the sample to use when computing the Silhouette Coefficient on a random subset of the data. If ``sample_size is None``, no sampling is used. random_state : int, RandomState instance or None, optional (default=None) The generator used to randomly select a subset of samples. If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Used when ``sample_size is not None``. **kwds : optional keyword parameters Any further parameters are passed directly to the distance function. If using a scipy.spatial.distance metric, the parameters are still metric dependent. See the scipy docs for usage examples. Returns ------- silhouette : float Mean Silhouette Coefficient for all samples. References ---------- .. [1] `Peter J. Rousseeuw (1987). "Silhouettes: a Graphical Aid to the Interpretation and Validation of Cluster Analysis". Computational and Applied Mathematics 20: 53-65. <http://www.sciencedirect.com/science/article/pii/0377042787901257>`_ .. [2] `Wikipedia entry on the Silhouette Coefficient <https://en.wikipedia.org/wiki/Silhouette_(clustering)>`_
['Compute', 'the', 'mean', 'Silhouette', 'Coefficient', 'of', 'all', 'samples', '.']
train
https://github.com/brianhie/scanorama/blob/57aafac87d07a8d682f57450165dd07f066ebb3c/bin/unsupervised.py#L27-L106
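This function is a vendored copy of scikit-learn's silhouette_score, so typical usage looks the same as with sklearn's public API. An example against k-means labels (requires scikit-learn; the two-blob data is synthetic):

import numpy as np
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score

# Two well-separated Gaussian blobs.
X = np.vstack([np.random.randn(50, 2), np.random.randn(50, 2) + 5])
labels = KMeans(n_clusters=2, n_init=10).fit_predict(X)
print(silhouette_score(X, labels))   # close to 1.0 for well-separated blobs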
6,247
buildbot/buildbot
master/buildbot/reporters/gerrit.py
GerritStatusPush._gerritCmd
def _gerritCmd(self, *args): '''Construct a command as a list of strings suitable for :func:`subprocess.call`. ''' if self.gerrit_identity_file is not None: options = ['-i', self.gerrit_identity_file] else: options = [] return ['ssh'] + options + [ '@'.join((self.gerrit_username, self.gerrit_server)), '-p', str(self.gerrit_port), 'gerrit' ] + list(args)
python
def _gerritCmd(self, *args): '''Construct a command as a list of strings suitable for :func:`subprocess.call`. ''' if self.gerrit_identity_file is not None: options = ['-i', self.gerrit_identity_file] else: options = [] return ['ssh'] + options + [ '@'.join((self.gerrit_username, self.gerrit_server)), '-p', str(self.gerrit_port), 'gerrit' ] + list(args)
['def', '_gerritCmd', '(', 'self', ',', '*', 'args', ')', ':', 'if', 'self', '.', 'gerrit_identity_file', 'is', 'not', 'None', ':', 'options', '=', '[', "'-i'", ',', 'self', '.', 'gerrit_identity_file', ']', 'else', ':', 'options', '=', '[', ']', 'return', '[', "'ssh'", ']', '+', 'options', '+', '[', "'@'", '.', 'join', '(', '(', 'self', '.', 'gerrit_username', ',', 'self', '.', 'gerrit_server', ')', ')', ',', "'-p'", ',', 'str', '(', 'self', '.', 'gerrit_port', ')', ',', "'gerrit'", ']', '+', 'list', '(', 'args', ')']
Construct a command as a list of strings suitable for :func:`subprocess.call`.
['Construct', 'a', 'command', 'as', 'a', 'list', 'of', 'strings', 'suitable', 'for', ':', 'func', ':', 'subprocess', '.', 'call', '.']
train
https://github.com/buildbot/buildbot/blob/5df3cfae6d760557d99156633c32b1822a1e130c/master/buildbot/reporters/gerrit.py#L185-L197
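Building the command as a list of argv strings, rather than a shell string, is what makes it safe for subprocess-style execution with no quoting. A standalone rendering of the same construction with dummy values:

def gerrit_cmd(user, server, port, identity_file, *args):
    # Optional -i flag, then user@host, port, and the gerrit subcommand.
    options = ['-i', identity_file] if identity_file else []
    return ['ssh'] + options + [f'{user}@{server}', '-p', str(port), 'gerrit'] + list(args)

print(gerrit_cmd('bot', 'gerrit.example.org', 29418, None,
                 'review', '--message', 'Build started.'))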
6,248
JnyJny/Geometry
Geometry/point.py
Point.cross
def cross(self, other): ''' :other: Point or point equivalent :return: float Vector cross product of points U (self) and V (other), computed: U x V = (u1*i + u2*j + u3*k) x (v1*i + v2*j + v3*k) s1 = u2v3 - u3v2 s2 = u3v1 - u1v3 s3 = u1v2 - u2v1 U x V = s1 + s2 + s3 Returns a float. ''' b = self.__class__._convert(other) return sum([(self.y * b.z) - (self.z * b.y), (self.z * b.x) - (self.x * b.z), (self.x * b.y) - (self.y * b.x)])
python
def cross(self, other): ''' :other: Point or point equivalent :return: float Vector cross product of points U (self) and V (other), computed: U x V = (u1*i + u2*j + u3*k) x (v1*i + v2*j + v3*k) s1 = u2v3 - u3v2 s2 = u3v1 - u1v3 s3 = u1v2 - u2v1 U x V = s1 + s2 + s3 Returns a float. ''' b = self.__class__._convert(other) return sum([(self.y * b.z) - (self.z * b.y), (self.z * b.x) - (self.x * b.z), (self.x * b.y) - (self.y * b.x)])
['def', 'cross', '(', 'self', ',', 'other', ')', ':', 'b', '=', 'self', '.', '__class__', '.', '_convert', '(', 'other', ')', 'return', 'sum', '(', '[', '(', 'self', '.', 'y', '*', 'b', '.', 'z', ')', '-', '(', 'self', '.', 'z', '*', 'b', '.', 'y', ')', ',', '(', 'self', '.', 'z', '*', 'b', '.', 'x', ')', '-', '(', 'self', '.', 'x', '*', 'b', '.', 'z', ')', ',', '(', 'self', '.', 'x', '*', 'b', '.', 'y', ')', '-', '(', 'self', '.', 'y', '*', 'b', '.', 'x', ')', ']', ')']
:other: Point or point equivalent :return: float Vector cross product of points U (self) and V (other), computed: U x V = (u1*i + u2*j + u3*k) x (v1*i + v2*j + v3*k) s1 = u2v3 - u3v2 s2 = u3v1 - u1v3 s3 = u1v2 - u2v1 U x V = s1 + s2 + s3 Returns a float.
[':', 'other', ':', 'Point', 'or', 'point', 'equivalent', ':', 'return', ':', 'float']
train
https://github.com/JnyJny/Geometry/blob/3500f815fa56c535b36d1b6fd0afe69ce5d055be/Geometry/point.py#L1260-L1281
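The docstring's component formulas can be verified with plain tuples; a standalone re-creation of the arithmetic (not the Geometry package itself):

def cross_sum(u, v):
    # s1 = u2*v3 - u3*v2, s2 = u3*v1 - u1*v3, s3 = u1*v2 - u2*v1, then summed.
    (x1, y1, z1), (x2, y2, z2) = u, v
    return (y1 * z2 - z1 * y2) + (z1 * x2 - x1 * z2) + (x1 * y2 - y1 * x2)

print(cross_sum((1, 0, 0), (0, 1, 0)))  # -> 1 (only the k component is non-zero)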
6,249
bamthomas/aioimaplib
aioimaplib/aioimaplib.py
quoted
def quoted(arg): """ Given a string, return a quoted string as per RFC 3501, section 9. Implementation copied from https://github.com/mjs/imapclient (imapclient/imapclient.py), 3-clause BSD license """ if isinstance(arg, str): arg = arg.replace('\\', '\\\\') arg = arg.replace('"', '\\"') q = '"' else: arg = arg.replace(b'\\', b'\\\\') arg = arg.replace(b'"', b'\\"') q = b'"' return q + arg + q
python
def quoted(arg): """ Given a string, return a quoted string as per RFC 3501, section 9. Implementation copied from https://github.com/mjs/imapclient (imapclient/imapclient.py), 3-clause BSD license """ if isinstance(arg, str): arg = arg.replace('\\', '\\\\') arg = arg.replace('"', '\\"') q = '"' else: arg = arg.replace(b'\\', b'\\\\') arg = arg.replace(b'"', b'\\"') q = b'"' return q + arg + q
['def', 'quoted', '(', 'arg', ')', ':', 'if', 'isinstance', '(', 'arg', ',', 'str', ')', ':', 'arg', '=', 'arg', '.', 'replace', '(', "'\\\\'", ',', "'\\\\\\\\'", ')', 'arg', '=', 'arg', '.', 'replace', '(', '\'"\'', ',', '\'\\\\"\'', ')', 'q', '=', '\'"\'', 'else', ':', 'arg', '=', 'arg', '.', 'replace', '(', "b'\\\\'", ',', "b'\\\\\\\\'", ')', 'arg', '=', 'arg', '.', 'replace', '(', 'b\'"\'', ',', 'b\'\\\\"\'', ')', 'q', '=', 'b\'"\'', 'return', 'q', '+', 'arg', '+', 'q']
Given a string, return a quoted string as per RFC 3501, section 9. Implementation copied from https://github.com/mjs/imapclient (imapclient/imapclient.py), 3-clause BSD license
['Given', 'a', 'string', 'return', 'a', 'quoted', 'string', 'as', 'per', 'RFC', '3501', 'section', '9', '.']
train
https://github.com/bamthomas/aioimaplib/blob/9670d43950cafc4d41aab7a36824b8051fa89899/aioimaplib/aioimaplib.py#L100-L114
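The escaping rules are self-contained, so the function can be exercised verbatim outside the library:

def quoted(arg):
    # Escape backslashes and double quotes, then wrap in quotes (RFC 3501, sec. 9).
    if isinstance(arg, str):
        return '"' + arg.replace('\\', '\\\\').replace('"', '\\"') + '"'
    return b'"' + arg.replace(b'\\', b'\\\\').replace(b'"', b'\\"') + b'"'

print(quoted('say "hi"'))  # -> "say \"hi\""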
6,250
ipazc/mtcnn
mtcnn/layer_factory.py
LayerFactory.new_conv
def new_conv(self, name: str, kernel_size: tuple, channels_output: int, stride_size: tuple, padding: str='SAME', group: int=1, biased: bool=True, relu: bool=True, input_layer_name: str=None): """ Creates a convolution layer for the network. :param name: name for the layer :param kernel_size: tuple containing the size of the kernel (Width, Height) :param channels_output: ¿? Perhaps number of channels in the output? it is used as the bias size. :param stride_size: tuple containing the size of the stride (Width, Height) :param padding: Type of padding. Available values are: ('SAME', 'VALID') :param group: groups for the kernel operation. More info required. :param biased: boolean flag to set if biased or not. :param relu: boolean flag to set if ReLu should be applied at the end of the layer or not. :param input_layer_name: name of the input layer for this layer. If None, it will take the last added layer of the network. """ # Verify that the padding is acceptable self.__validate_padding(padding) input_layer = self.__network.get_layer(input_layer_name) # Get the number of channels in the input channels_input = int(input_layer.get_shape()[-1]) # Verify that the grouping parameter is valid self.__validate_grouping(channels_input, channels_output, group) # Convolution for a given input and kernel convolve = lambda input_val, kernel: tf.nn.conv2d(input_val, kernel, [1, stride_size[1], stride_size[0], 1], padding=padding) with tf.variable_scope(name) as scope: kernel = self.__make_var('weights', shape=[kernel_size[1], kernel_size[0], channels_input // group, channels_output]) output = convolve(input_layer, kernel) # Add the biases, if required if biased: biases = self.__make_var('biases', [channels_output]) output = tf.nn.bias_add(output, biases) # Apply ReLU non-linearity, if required if relu: output = tf.nn.relu(output, name=scope.name) self.__network.add_layer(name, layer_output=output)
python
def new_conv(self, name: str, kernel_size: tuple, channels_output: int, stride_size: tuple, padding: str='SAME', group: int=1, biased: bool=True, relu: bool=True, input_layer_name: str=None): """ Creates a convolution layer for the network. :param name: name for the layer :param kernel_size: tuple containing the size of the kernel (Width, Height) :param channels_output: ¿? Perhaps number of channels in the output? it is used as the bias size. :param stride_size: tuple containing the size of the stride (Width, Height) :param padding: Type of padding. Available values are: ('SAME', 'VALID') :param group: groups for the kernel operation. More info required. :param biased: boolean flag to set if biased or not. :param relu: boolean flag to set if ReLu should be applied at the end of the layer or not. :param input_layer_name: name of the input layer for this layer. If None, it will take the last added layer of the network. """ # Verify that the padding is acceptable self.__validate_padding(padding) input_layer = self.__network.get_layer(input_layer_name) # Get the number of channels in the input channels_input = int(input_layer.get_shape()[-1]) # Verify that the grouping parameter is valid self.__validate_grouping(channels_input, channels_output, group) # Convolution for a given input and kernel convolve = lambda input_val, kernel: tf.nn.conv2d(input_val, kernel, [1, stride_size[1], stride_size[0], 1], padding=padding) with tf.variable_scope(name) as scope: kernel = self.__make_var('weights', shape=[kernel_size[1], kernel_size[0], channels_input // group, channels_output]) output = convolve(input_layer, kernel) # Add the biases, if required if biased: biases = self.__make_var('biases', [channels_output]) output = tf.nn.bias_add(output, biases) # Apply ReLU non-linearity, if required if relu: output = tf.nn.relu(output, name=scope.name) self.__network.add_layer(name, layer_output=output)
['def', 'new_conv', '(', 'self', ',', 'name', ':', 'str', ',', 'kernel_size', ':', 'tuple', ',', 'channels_output', ':', 'int', ',', 'stride_size', ':', 'tuple', ',', 'padding', ':', 'str', '=', "'SAME'", ',', 'group', ':', 'int', '=', '1', ',', 'biased', ':', 'bool', '=', 'True', ',', 'relu', ':', 'bool', '=', 'True', ',', 'input_layer_name', ':', 'str', '=', 'None', ')', ':', '# Verify that the padding is acceptable', 'self', '.', '__validate_padding', '(', 'padding', ')', 'input_layer', '=', 'self', '.', '__network', '.', 'get_layer', '(', 'input_layer_name', ')', '# Get the number of channels in the input', 'channels_input', '=', 'int', '(', 'input_layer', '.', 'get_shape', '(', ')', '[', '-', '1', ']', ')', '# Verify that the grouping parameter is valid', 'self', '.', '__validate_grouping', '(', 'channels_input', ',', 'channels_output', ',', 'group', ')', '# Convolution for a given input and kernel', 'convolve', '=', 'lambda', 'input_val', ',', 'kernel', ':', 'tf', '.', 'nn', '.', 'conv2d', '(', 'input_val', ',', 'kernel', ',', '[', '1', ',', 'stride_size', '[', '1', ']', ',', 'stride_size', '[', '0', ']', ',', '1', ']', ',', 'padding', '=', 'padding', ')', 'with', 'tf', '.', 'variable_scope', '(', 'name', ')', 'as', 'scope', ':', 'kernel', '=', 'self', '.', '__make_var', '(', "'weights'", ',', 'shape', '=', '[', 'kernel_size', '[', '1', ']', ',', 'kernel_size', '[', '0', ']', ',', 'channels_input', '//', 'group', ',', 'channels_output', ']', ')', 'output', '=', 'convolve', '(', 'input_layer', ',', 'kernel', ')', '# Add the biases, if required', 'if', 'biased', ':', 'biases', '=', 'self', '.', '__make_var', '(', "'biases'", ',', '[', 'channels_output', ']', ')', 'output', '=', 'tf', '.', 'nn', '.', 'bias_add', '(', 'output', ',', 'biases', ')', '# Apply ReLU non-linearity, if required', 'if', 'relu', ':', 'output', '=', 'tf', '.', 'nn', '.', 'relu', '(', 'output', ',', 'name', '=', 'scope', '.', 'name', ')', 'self', '.', '__network', '.', 'add_layer', '(', 'name', ',', 'layer_output', '=', 'output', ')']
Creates a convolution layer for the network. :param name: name for the layer :param kernel_size: tuple containing the size of the kernel (Width, Height) :param channels_output: ¿? Perhaps number of channels in the output? it is used as the bias size. :param stride_size: tuple containing the size of the stride (Width, Height) :param padding: Type of padding. Available values are: ('SAME', 'VALID') :param group: groups for the kernel operation. More info required. :param biased: boolean flag to set if biased or not. :param relu: boolean flag to set if ReLu should be applied at the end of the layer or not. :param input_layer_name: name of the input layer for this layer. If None, it will take the last added layer of the network.
['Creates', 'a', 'convolution', 'layer', 'for', 'the', 'network', '.', ':', 'param', 'name', ':', 'name', 'for', 'the', 'layer', ':', 'param', 'kernel_size', ':', 'tuple', 'containing', 'the', 'size', 'of', 'the', 'kernel', '(', 'Width', 'Height', ')', ':', 'param', 'channels_output', ':', '¿?', 'Perhaps', 'number', 'of', 'channels', 'in', 'the', 'output?', 'it', 'is', 'used', 'as', 'the', 'bias', 'size', '.', ':', 'param', 'stride_size', ':', 'tuple', 'containing', 'the', 'size', 'of', 'the', 'stride', '(', 'Width', 'Height', ')', ':', 'param', 'padding', ':', 'Type', 'of', 'padding', '.', 'Available', 'values', 'are', ':', '(', 'SAME', 'VALID', ')', ':', 'param', 'group', ':', 'groups', 'for', 'the', 'kernel', 'operation', '.', 'More', 'info', 'required', '.', ':', 'param', 'biased', ':', 'boolean', 'flag', 'to', 'set', 'if', 'biased', 'or', 'not', '.', ':', 'param', 'relu', ':', 'boolean', 'flag', 'to', 'set', 'if', 'ReLu', 'should', 'be', 'applied', 'at', 'the', 'end', 'of', 'the', 'layer', 'or', 'not', '.', ':', 'param', 'input_layer_name', ':', 'name', 'of', 'the', 'input', 'layer', 'for', 'this', 'layer', '.', 'If', 'None', 'it', 'will', 'take', 'the', 'last', 'added', 'layer', 'of', 'the', 'network', '.']
train
https://github.com/ipazc/mtcnn/blob/17029fe453a435f50c472ae2fd1c493341b5ede3/mtcnn/layer_factory.py#L91-L138
6,251
BlueBrain/NeuroM
examples/section_ids.py
get_segment
def get_segment(neuron, section_id, segment_id): '''Get a segment given a section and segment id Returns: array of two [x, y, z, r] points defining segment ''' sec = neuron.sections[section_id] return sec.points[segment_id:segment_id + 2][:, COLS.XYZR]
python
def get_segment(neuron, section_id, segment_id): '''Get a segment given a section and segment id Returns: array of two [x, y, z, r] points defining segment ''' sec = neuron.sections[section_id] return sec.points[segment_id:segment_id + 2][:, COLS.XYZR]
['def', 'get_segment', '(', 'neuron', ',', 'section_id', ',', 'segment_id', ')', ':', 'sec', '=', 'neuron', '.', 'sections', '[', 'section_id', ']', 'return', 'sec', '.', 'points', '[', 'segment_id', ':', 'segment_id', '+', '2', ']', '[', ':', ',', 'COLS', '.', 'XYZR', ']']
Get a segment given a section and segment id Returns: array of two [x, y, z, r] points defining segment
['Get', 'a', 'segment', 'given', 'a', 'section', 'and', 'segment', 'id']
train
https://github.com/BlueBrain/NeuroM/blob/254bb73535b20053d175bc4725bade662177d12b/examples/section_ids.py#L37-L44
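The two-point slice is ordinary NumPy indexing; a standalone sketch with a made-up points array (the COLS.XYZR column selection is omitted here):

import numpy as np

points = np.array([[0.0, 0.0, 0.0, 1.0],   # rows of [x, y, z, r]
                   [1.0, 0.0, 0.0, 0.9],
                   [2.0, 1.0, 0.0, 0.8]])

segment_id = 1
segment = points[segment_id:segment_id + 2]  # two consecutive points
print(segment.shape)  # -> (2, 4)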
6,252
kronenthaler/mod-pbxproj
pbxproj/pbxextensions/ProjectFlags.py
ProjectFlags.add_flags
def add_flags(self, flag_name, flags, target_name=None, configuration_name=None): """ Adds the given flags to the flag_name section of the target on the configurations :param flag_name: name of the flag to add the values to :param flags: A string or array of strings :param target_name: Target name or list of target names to add the flag to or None for every target :param configuration_name: Configuration name to add the flag to or None for every configuration :return: void """ for configuration in self.objects.get_configurations_on_targets(target_name, configuration_name): configuration.add_flags(flag_name, flags)
python
def add_flags(self, flag_name, flags, target_name=None, configuration_name=None): """ Adds the given flags to the flag_name section of the target on the configurations :param flag_name: name of the flag to add the values to :param flags: A string or array of strings :param target_name: Target name or list of target names to add the flag to or None for every target :param configuration_name: Configuration name to add the flag to or None for every configuration :return: void """ for configuration in self.objects.get_configurations_on_targets(target_name, configuration_name): configuration.add_flags(flag_name, flags)
['def', 'add_flags', '(', 'self', ',', 'flag_name', ',', 'flags', ',', 'target_name', '=', 'None', ',', 'configuration_name', '=', 'None', ')', ':', 'for', 'configuration', 'in', 'self', '.', 'objects', '.', 'get_configurations_on_targets', '(', 'target_name', ',', 'configuration_name', ')', ':', 'configuration', '.', 'add_flags', '(', 'flag_name', ',', 'flags', ')']
Adds the given flags to the flag_name section of the target on the configurations :param flag_name: name of the flag to add the values to :param flags: A string or array of strings :param target_name: Target name or list of target names to add the flag to or None for every target :param configuration_name: Configuration name to add the flag to or None for every configuration :return: void
['Adds', 'the', 'given', 'flags', 'to', 'the', 'flag_name', 'section', 'of', 'the', 'target', 'on', 'the', 'configurations', ':', 'param', 'flag_name', ':', 'name', 'of', 'the', 'flag', 'to', 'add', 'the', 'values', 'to', ':', 'param', 'flags', ':', 'A', 'string', 'or', 'array', 'of', 'strings', ':', 'param', 'target_name', ':', 'Target', 'name', 'or', 'list', 'of', 'target', 'names', 'to', 'add', 'the', 'flag', 'to', 'or', 'None', 'for', 'every', 'target', ':', 'param', 'configuration_name', ':', 'Configuration', 'name', 'to', 'add', 'the', 'flag', 'to', 'or', 'None', 'for', 'every', 'configuration', ':', 'return', ':', 'void']
train
https://github.com/kronenthaler/mod-pbxproj/blob/8de3cbdd3210480ddbb1fa0f50a4f4ea87de6e71/pbxproj/pbxextensions/ProjectFlags.py#L13-L23
6,253
eleme/ruskit
ruskit/distribute.py
MaxFlowSolver.from_nodes
def from_nodes(cls, nodes, new_nodes, max_slaves_limit=None): '''When this is used only for peeking result `new_nodes` can be any type with `host` and `port` attributes ''' param = gen_distribution(nodes, new_nodes) param['max_slaves_limit'] = max_slaves_limit return cls(**param)
python
def from_nodes(cls, nodes, new_nodes, max_slaves_limit=None): '''When this is used only for peeking result `new_nodes` can be any type with `host` and `port` attributes ''' param = gen_distribution(nodes, new_nodes) param['max_slaves_limit'] = max_slaves_limit return cls(**param)
['def', 'from_nodes', '(', 'cls', ',', 'nodes', ',', 'new_nodes', ',', 'max_slaves_limit', '=', 'None', ')', ':', 'param', '=', 'gen_distribution', '(', 'nodes', ',', 'new_nodes', ')', 'param', '[', "'max_slaves_limit'", ']', '=', 'max_slaves_limit', 'return', 'cls', '(', '*', '*', 'param', ')']
When this is used only for peeking result `new_nodes` can be any type with `host` and `port` attributes
['When', 'this', 'is', 'used', 'only', 'for', 'peeking', 'result', 'new_nodes', 'can', 'be', 'any', 'type', 'with', 'host', 'and', 'port', 'attributes']
train
https://github.com/eleme/ruskit/blob/2e8c5a3f6a65b8aeb07012b4e2c8ba324d887c3b/ruskit/distribute.py#L83-L89
6,254
merll/docker-fabric
dockerfabric/apiclient.py
DockerFabricClient.close
def close(self): """ Closes the connection and any tunnels created for it. """ try: super(DockerFabricClient, self).close() finally: if self._tunnel is not None: self._tunnel.close()
python
def close(self): """ Closes the connection and any tunnels created for it. """ try: super(DockerFabricClient, self).close() finally: if self._tunnel is not None: self._tunnel.close()
['def', 'close', '(', 'self', ')', ':', 'try', ':', 'super', '(', 'DockerFabricClient', ',', 'self', ')', '.', 'close', '(', ')', 'finally', ':', 'if', 'self', '.', '_tunnel', 'is', 'not', 'None', ':', 'self', '.', '_tunnel', '.', 'close', '(', ')']
Closes the connection and any tunnels created for it.
['Closes', 'the', 'connection', 'and', 'any', 'tunnels', 'created', 'for', 'it', '.']
train
https://github.com/merll/docker-fabric/blob/785d84e40e17265b667d8b11a6e30d8e6b2bf8d4/dockerfabric/apiclient.py#L137-L145
6,255
rwl/pylon
contrib/public/services/jsonrpc/__init__.py
SimpleServiceHandler.handleNotification
def handleNotification(self, req): """handles a notification request by calling the appropriate method the service exposes""" name = req["method"] params = req["params"] try: #to get a callable obj obj = getMethodByName(self.service, name) rslt = obj(*params) except: pass
python
def handleNotification(self, req): """handles a notification request by calling the appropriate method the service exposes""" name = req["method"] params = req["params"] try: #to get a callable obj obj = getMethodByName(self.service, name) rslt = obj(*params) except: pass
['def', 'handleNotification', '(', 'self', ',', 'req', ')', ':', 'name', '=', 'req', '[', '"method"', ']', 'params', '=', 'req', '[', '"params"', ']', 'try', ':', '#to get a callable obj ', 'obj', '=', 'getMethodByName', '(', 'self', '.', 'service', ',', 'name', ')', 'rslt', '=', 'obj', '(', '*', 'params', ')', 'except', ':', 'pass']
handles a notification request by calling the appropriate method the service exposes
['handles', 'a', 'notification', 'request', 'by', 'calling', 'the', 'appropriate', 'method', 'the', 'service', 'exposes']
train
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/public/services/jsonrpc/__init__.py#L229-L237
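getMethodByName is the repository's own helper; the same dispatch idea can be sketched with getattr (the class and request below are hypothetical):

class EchoService:
    def notify(self, text):
        print('got:', text)

def handle_notification(service, req):
    # Look up the named method and call it with the params; notifications
    # carry no response, so failures are deliberately swallowed.
    try:
        getattr(service, req['method'])(*req['params'])
    except Exception:
        pass

handle_notification(EchoService(), {'method': 'notify', 'params': ['hello']})  # got: hello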
6,256
ibis-project/ibis
ibis/expr/analytics.py
histogram
def histogram( arg, nbins=None, binwidth=None, base=None, closed='left', aux_hash=None ): """ Compute a histogram with fixed width bins Parameters ---------- arg : numeric array expression nbins : int, default None If supplied, will be used to compute the binwidth binwidth : number, default None If not supplied, computed from the data (actual max and min values) base : number, default None closed : {'left', 'right'}, default 'left' Which side of each interval is closed Returns ------- histogrammed : coded value expression """ op = Histogram( arg, nbins, binwidth, base, closed=closed, aux_hash=aux_hash ) return op.to_expr()
python
def histogram( arg, nbins=None, binwidth=None, base=None, closed='left', aux_hash=None ): """ Compute a histogram with fixed width bins Parameters ---------- arg : numeric array expression nbins : int, default None If supplied, will be used to compute the binwidth binwidth : number, default None If not supplied, computed from the data (actual max and min values) base : number, default None closed : {'left', 'right'}, default 'left' Which side of each interval is closed Returns ------- histogrammed : coded value expression """ op = Histogram( arg, nbins, binwidth, base, closed=closed, aux_hash=aux_hash ) return op.to_expr()
['def', 'histogram', '(', 'arg', ',', 'nbins', '=', 'None', ',', 'binwidth', '=', 'None', ',', 'base', '=', 'None', ',', 'closed', '=', "'left'", ',', 'aux_hash', '=', 'None', ')', ':', 'op', '=', 'Histogram', '(', 'arg', ',', 'nbins', ',', 'binwidth', ',', 'base', ',', 'closed', '=', 'closed', ',', 'aux_hash', '=', 'aux_hash', ')', 'return', 'op', '.', 'to_expr', '(', ')']
Compute a histogram with fixed width bins Parameters ---------- arg : numeric array expression nbins : int, default None If supplied, will be used to compute the binwidth binwidth : number, default None If not supplied, computed from the data (actual max and min values) base : number, default None closed : {'left', 'right'}, default 'left' Which side of each interval is closed Returns ------- histogrammed : coded value expression
['Compute', 'a', 'histogram', 'with', 'fixed', 'width', 'bins']
train
https://github.com/ibis-project/ibis/blob/1e39a5fd9ef088b45c155e8a5f541767ee8ef2e7/ibis/expr/analytics.py#L127-L151
6,257
ssato/python-anytemplate
anytemplate/engines/base.py
fallback_render
def fallback_render(template, context, at_paths=None, at_encoding=anytemplate.compat.ENCODING, **kwargs): """ Render from given template and context. This is a basic implementation that does no actual rendering and just returns the content of the given template file `template`. :param template: Template file path :param context: A dict or dict-like object to instantiate given template file :param at_paths: Template search paths :param at_encoding: Template encoding :param kwargs: Keyword arguments passed to the template engine to render templates with specific features enabled. :return: Rendered result string """ tmpl = anytemplate.utils.find_template_from_path(template, at_paths) if tmpl is None: raise TemplateNotFound("template: %s" % template) try: return anytemplate.compat.copen(tmpl, encoding=at_encoding).read() except UnicodeDecodeError: return open(tmpl).read()
python
def fallback_render(template, context, at_paths=None, at_encoding=anytemplate.compat.ENCODING, **kwargs): """ Render from given template and context. This is a basic implementation that does no actual rendering and just returns the content of the given template file `template`. :param template: Template file path :param context: A dict or dict-like object to instantiate given template file :param at_paths: Template search paths :param at_encoding: Template encoding :param kwargs: Keyword arguments passed to the template engine to render templates with specific features enabled. :return: Rendered result string """ tmpl = anytemplate.utils.find_template_from_path(template, at_paths) if tmpl is None: raise TemplateNotFound("template: %s" % template) try: return anytemplate.compat.copen(tmpl, encoding=at_encoding).read() except UnicodeDecodeError: return open(tmpl).read()
['def', 'fallback_render', '(', 'template', ',', 'context', ',', 'at_paths', '=', 'None', ',', 'at_encoding', '=', 'anytemplate', '.', 'compat', '.', 'ENCODING', ',', '*', '*', 'kwargs', ')', ':', 'tmpl', '=', 'anytemplate', '.', 'utils', '.', 'find_template_from_path', '(', 'template', ',', 'at_paths', ')', 'if', 'tmpl', 'is', 'None', ':', 'raise', 'TemplateNotFound', '(', '"template: %s"', '%', 'template', ')', 'try', ':', 'return', 'anytemplate', '.', 'compat', '.', 'copen', '(', 'tmpl', ',', 'encoding', '=', 'at_encoding', ')', '.', 'read', '(', ')', 'except', 'UnicodeDecodeError', ':', 'return', 'open', '(', 'tmpl', ')', '.', 'read', '(', ')']
Render from given template and context. This is a basic implementation that does no actual rendering and just returns the content of the given template file `template`. :param template: Template file path :param context: A dict or dict-like object to instantiate given template file :param at_paths: Template search paths :param at_encoding: Template encoding :param kwargs: Keyword arguments passed to the template engine to render templates with specific features enabled. :return: Rendered result string
['Render', 'from', 'given', 'template', 'and', 'context', '.']
train
https://github.com/ssato/python-anytemplate/blob/3e56baa914bd47f044083b20e33100f836443596/anytemplate/engines/base.py#L66-L92
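The read-with-encoding-fallback pattern can be sketched standalone, with io.open standing in for the package's copen helper:

import io

def read_template(path, encoding='utf-8'):
    # Try the requested encoding first, then fall back to the platform default.
    try:
        return io.open(path, encoding=encoding).read()
    except UnicodeDecodeError:
        return open(path).read()

# content = read_template('/path/to/template.txt')  # hypothetical path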
6,258
NASA-AMMOS/AIT-Core
ait/core/dtype.py
ArrayType._assertIndex
def _assertIndex(self, index): """Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array, respectively. """ if type(index) is not int: raise TypeError('list indices must be integers') if index < 0 or index >= self.nelems: raise IndexError('list index out of range')
python
def _assertIndex(self, index): """Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array, respectively. """ if type(index) is not int: raise TypeError('list indices must be integers') if index < 0 or index >= self.nelems: raise IndexError('list index out of range')
['def', '_assertIndex', '(', 'self', ',', 'index', ')', ':', 'if', 'type', '(', 'index', ')', 'is', 'not', 'int', ':', 'raise', 'TypeError', '(', "'list indices must be integers'", ')', 'if', 'index', '<', '0', 'or', 'index', '>=', 'self', '.', 'nelems', ':', 'raise', 'IndexError', '(', "'list index out of range'", ')']
Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array, respectively.
['Raise', 'TypeError', 'or', 'IndexError', 'if', 'index', 'is', 'not', 'an', 'integer', 'or', 'out', 'of', 'range', 'for', 'the', 'number', 'of', 'elements', 'in', 'this', 'array', 'respectively', '.']
train
https://github.com/NASA-AMMOS/AIT-Core/blob/9d85bd9c738e7a6a6fbdff672bea708238b02a3a/ait/core/dtype.py#L326-L333
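The checks map one-to-one onto built-in list semantics; a standalone re-creation:

def assert_index(index, nelems):
    # Same validation as ArrayType._assertIndex, without the class plumbing.
    if type(index) is not int:
        raise TypeError('list indices must be integers')
    if index < 0 or index >= nelems:
        raise IndexError('list index out of range')

try:
    assert_index(3, 3)
except IndexError as exc:
    print(exc)  # -> list index out of range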
6,259
OpenHydrology/floodestimation
floodestimation/entities.py
Catchment.distance_to
def distance_to(self, other_catchment): """ Returns the distance between the centroids of two catchments in kilometers. :param other_catchment: Catchment to calculate distance to :type other_catchment: :class:`.Catchment` :return: Distance between the catchments in km. :rtype: float """ try: if self.country == other_catchment.country: try: return 0.001 * hypot(self.descriptors.centroid_ngr.x - other_catchment.descriptors.centroid_ngr.x, self.descriptors.centroid_ngr.y - other_catchment.descriptors.centroid_ngr.y) except TypeError: # In case no centroid available, just return infinity which is helpful in most cases return float('+inf') else: # If the catchments are in a different country (e.g. `ni` versus `gb`) then set distance to infinity. return float('+inf') except (TypeError, KeyError): raise InsufficientDataError("Catchment `descriptors` attribute must be set first.")
python
def distance_to(self, other_catchment): """ Returns the distance between the centroids of two catchments in kilometers. :param other_catchment: Catchment to calculate distance to :type other_catchment: :class:`.Catchment` :return: Distance between the catchments in km. :rtype: float """ try: if self.country == other_catchment.country: try: return 0.001 * hypot(self.descriptors.centroid_ngr.x - other_catchment.descriptors.centroid_ngr.x, self.descriptors.centroid_ngr.y - other_catchment.descriptors.centroid_ngr.y) except TypeError: # In case no centroid available, just return infinity which is helpful in most cases return float('+inf') else: # If the catchments are in a different country (e.g. `ni` versus `gb`) then set distance to infinity. return float('+inf') except (TypeError, KeyError): raise InsufficientDataError("Catchment `descriptors` attribute must be set first.")
['def', 'distance_to', '(', 'self', ',', 'other_catchment', ')', ':', 'try', ':', 'if', 'self', '.', 'country', '==', 'other_catchment', '.', 'country', ':', 'try', ':', 'return', '0.001', '*', 'hypot', '(', 'self', '.', 'descriptors', '.', 'centroid_ngr', '.', 'x', '-', 'other_catchment', '.', 'descriptors', '.', 'centroid_ngr', '.', 'x', ',', 'self', '.', 'descriptors', '.', 'centroid_ngr', '.', 'y', '-', 'other_catchment', '.', 'descriptors', '.', 'centroid_ngr', '.', 'y', ')', 'except', 'TypeError', ':', '# In case no centroid available, just return infinity which is helpful in most cases', 'return', 'float', '(', "'+inf'", ')', 'else', ':', '# If the catchments are in a different country (e.g. `ni` versus `gb`) then set distance to infinity.', 'return', 'float', '(', "'+inf'", ')', 'except', '(', 'TypeError', ',', 'KeyError', ')', ':', 'raise', 'InsufficientDataError', '(', '"Catchment `descriptors` attribute must be set first."', ')']
Returns the distance between the centroids of two catchments in kilometers. :param other_catchment: Catchment to calculate distance to :type other_catchment: :class:`.Catchment` :return: Distance between the catchments in km. :rtype: float
['Returns', 'the', 'distance', 'between', 'the', 'centroids', 'of', 'two', 'catchments', 'in', 'kilometers', '.']
train
https://github.com/OpenHydrology/floodestimation/blob/782da7c5abd1348923129efe89fb70003ebb088c/floodestimation/entities.py#L137-L158
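The core arithmetic is just math.hypot on the centroid offsets, scaled from metres to kilometres; a sketch with hypothetical coordinates:

from math import hypot

x1, y1 = 361000.0, 432500.0  # made-up national-grid centroids, in metres
x2, y2 = 358000.0, 436500.0

km = 0.001 * hypot(x1 - x2, y1 - y2)
print(km)  # -> 5.0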
6,260
astraw/stdeb
stdeb/util.py
expand_tarball
def expand_tarball(tarball_fname,cwd=None): "expand a tarball" if tarball_fname.endswith('.gz'): opts = 'xzf' elif tarball_fname.endswith('.bz2'): opts = 'xjf' else: opts = 'xf' args = ['/bin/tar',opts,tarball_fname] process_command(args, cwd=cwd)
python
def expand_tarball(tarball_fname,cwd=None): "expand a tarball" if tarball_fname.endswith('.gz'): opts = 'xzf' elif tarball_fname.endswith('.bz2'): opts = 'xjf' else: opts = 'xf' args = ['/bin/tar',opts,tarball_fname] process_command(args, cwd=cwd)
['def', 'expand_tarball', '(', 'tarball_fname', ',', 'cwd', '=', 'None', ')', ':', 'if', 'tarball_fname', '.', 'endswith', '(', "'.gz'", ')', ':', 'opts', '=', "'xzf'", 'elif', 'tarball_fname', '.', 'endswith', '(', "'.bz2'", ')', ':', 'opts', '=', "'xjf'", 'else', ':', 'opts', '=', "'xf'", 'args', '=', '[', "'/bin/tar'", ',', 'opts', ',', 'tarball_fname', ']', 'process_command', '(', 'args', ',', 'cwd', '=', 'cwd', ')']
expand a tarball
['expand', 'a', 'tarball']
train
https://github.com/astraw/stdeb/blob/493ab88e8a60be053b1baef81fb39b45e17ceef5/stdeb/util.py#L466-L472
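The suffix-to-flags mapping can be checked without shelling out (the stdlib tarfile module would auto-detect compression, but this mirrors the function's logic):

def tar_opts(fname):
    if fname.endswith('.gz'):
        return 'xzf'
    if fname.endswith('.bz2'):
        return 'xjf'
    return 'xf'

for name in ('pkg.tar.gz', 'pkg.tar.bz2', 'pkg.tar'):
    print(['/bin/tar', tar_opts(name), name])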
6,261
gitpython-developers/GitPython
git/config.py
GitConfigParser.read
def read(self): """Reads the data stored in the files we have been initialized with. It will ignore files that cannot be read, possibly leaving an empty configuration :return: Nothing :raise IOError: if a file cannot be handled""" if self._is_initialized: return self._is_initialized = True if not isinstance(self._file_or_files, (tuple, list)): files_to_read = [self._file_or_files] else: files_to_read = list(self._file_or_files) # end assure we have a copy of the paths to handle seen = set(files_to_read) num_read_include_files = 0 while files_to_read: file_path = files_to_read.pop(0) fp = file_path file_ok = False if hasattr(fp, "seek"): self._read(fp, fp.name) else: # assume a path if it is not a file-object try: with open(file_path, 'rb') as fp: file_ok = True self._read(fp, fp.name) except IOError: continue # Read includes and append those that we didn't handle yet # We expect all paths to be normalized and absolute (and will assure that is the case) if self._has_includes(): for _, include_path in self.items('include'): if include_path.startswith('~'): include_path = osp.expanduser(include_path) if not osp.isabs(include_path): if not file_ok: continue # end ignore relative paths if we don't know the configuration file path assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work" include_path = osp.join(osp.dirname(file_path), include_path) # end make include path absolute include_path = osp.normpath(include_path) if include_path in seen or not os.access(include_path, os.R_OK): continue seen.add(include_path) # insert included file to the top to be considered first files_to_read.insert(0, include_path) num_read_include_files += 1 # each include path in configuration file # end handle includes # END for each file object to read # If there was no file included, we can safely write back (potentially) the configuration file # without altering it's meaning if num_read_include_files == 0: self._merge_includes = False
python
def read(self): """Reads the data stored in the files we have been initialized with. It will ignore files that cannot be read, possibly leaving an empty configuration :return: Nothing :raise IOError: if a file cannot be handled""" if self._is_initialized: return self._is_initialized = True if not isinstance(self._file_or_files, (tuple, list)): files_to_read = [self._file_or_files] else: files_to_read = list(self._file_or_files) # end assure we have a copy of the paths to handle seen = set(files_to_read) num_read_include_files = 0 while files_to_read: file_path = files_to_read.pop(0) fp = file_path file_ok = False if hasattr(fp, "seek"): self._read(fp, fp.name) else: # assume a path if it is not a file-object try: with open(file_path, 'rb') as fp: file_ok = True self._read(fp, fp.name) except IOError: continue # Read includes and append those that we didn't handle yet # We expect all paths to be normalized and absolute (and will assure that is the case) if self._has_includes(): for _, include_path in self.items('include'): if include_path.startswith('~'): include_path = osp.expanduser(include_path) if not osp.isabs(include_path): if not file_ok: continue # end ignore relative paths if we don't know the configuration file path assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work" include_path = osp.join(osp.dirname(file_path), include_path) # end make include path absolute include_path = osp.normpath(include_path) if include_path in seen or not os.access(include_path, os.R_OK): continue seen.add(include_path) # insert included file to the top to be considered first files_to_read.insert(0, include_path) num_read_include_files += 1 # each include path in configuration file # end handle includes # END for each file object to read # If there was no file included, we can safely write back (potentially) the configuration file # without altering it's meaning if num_read_include_files == 0: self._merge_includes = False
['def', 'read', '(', 'self', ')', ':', 'if', 'self', '.', '_is_initialized', ':', 'return', 'self', '.', '_is_initialized', '=', 'True', 'if', 'not', 'isinstance', '(', 'self', '.', '_file_or_files', ',', '(', 'tuple', ',', 'list', ')', ')', ':', 'files_to_read', '=', '[', 'self', '.', '_file_or_files', ']', 'else', ':', 'files_to_read', '=', 'list', '(', 'self', '.', '_file_or_files', ')', '# end assure we have a copy of the paths to handle', 'seen', '=', 'set', '(', 'files_to_read', ')', 'num_read_include_files', '=', '0', 'while', 'files_to_read', ':', 'file_path', '=', 'files_to_read', '.', 'pop', '(', '0', ')', 'fp', '=', 'file_path', 'file_ok', '=', 'False', 'if', 'hasattr', '(', 'fp', ',', '"seek"', ')', ':', 'self', '.', '_read', '(', 'fp', ',', 'fp', '.', 'name', ')', 'else', ':', '# assume a path if it is not a file-object', 'try', ':', 'with', 'open', '(', 'file_path', ',', "'rb'", ')', 'as', 'fp', ':', 'file_ok', '=', 'True', 'self', '.', '_read', '(', 'fp', ',', 'fp', '.', 'name', ')', 'except', 'IOError', ':', 'continue', "# Read includes and append those that we didn't handle yet", '# We expect all paths to be normalized and absolute (and will assure that is the case)', 'if', 'self', '.', '_has_includes', '(', ')', ':', 'for', '_', ',', 'include_path', 'in', 'self', '.', 'items', '(', "'include'", ')', ':', 'if', 'include_path', '.', 'startswith', '(', "'~'", ')', ':', 'include_path', '=', 'osp', '.', 'expanduser', '(', 'include_path', ')', 'if', 'not', 'osp', '.', 'isabs', '(', 'include_path', ')', ':', 'if', 'not', 'file_ok', ':', 'continue', "# end ignore relative paths if we don't know the configuration file path", 'assert', 'osp', '.', 'isabs', '(', 'file_path', ')', ',', '"Need absolute paths to be sure our cycle checks will work"', 'include_path', '=', 'osp', '.', 'join', '(', 'osp', '.', 'dirname', '(', 'file_path', ')', ',', 'include_path', ')', '# end make include path absolute', 'include_path', '=', 'osp', '.', 'normpath', '(', 'include_path', ')', 'if', 'include_path', 'in', 'seen', 'or', 'not', 'os', '.', 'access', '(', 'include_path', ',', 'os', '.', 'R_OK', ')', ':', 'continue', 'seen', '.', 'add', '(', 'include_path', ')', '# insert included file to the top to be considered first', 'files_to_read', '.', 'insert', '(', '0', ',', 'include_path', ')', 'num_read_include_files', '+=', '1', '# each include path in configuration file', '# end handle includes', '# END for each file object to read', '# If there was no file included, we can safely write back (potentially) the configuration file', "# without altering it's meaning", 'if', 'num_read_include_files', '==', '0', ':', 'self', '.', '_merge_includes', '=', 'False']
Reads the data stored in the files we have been initialized with. It will ignore files that cannot be read, possibly leaving an empty configuration :return: Nothing :raise IOError: if a file cannot be handled
['Reads', 'the', 'data', 'stored', 'in', 'the', 'files', 'we', 'have', 'been', 'initialized', 'with', '.', 'It', 'will', 'ignore', 'files', 'that', 'cannot', 'be', 'read', 'possibly', 'leaving', 'an', 'empty', 'configuration']
train
https://github.com/gitpython-developers/GitPython/blob/1f66e25c25cde2423917ee18c4704fff83b837d1/git/config.py#L376-L437
6,262
kislyuk/aegea
aegea/packages/github3/orgs.py
Organization.is_public_member
def is_public_member(self, login): """Check if the user with login ``login`` is a public member. :returns: bool """ url = self._build_url('public_members', login, base_url=self._api) return self._boolean(self._get(url), 204, 404)
python
def is_public_member(self, login): """Check if the user with login ``login`` is a public member. :returns: bool """ url = self._build_url('public_members', login, base_url=self._api) return self._boolean(self._get(url), 204, 404)
['def', 'is_public_member', '(', 'self', ',', 'login', ')', ':', 'url', '=', 'self', '.', '_build_url', '(', "'public_members'", ',', 'login', ',', 'base_url', '=', 'self', '.', '_api', ')', 'return', 'self', '.', '_boolean', '(', 'self', '.', '_get', '(', 'url', ')', ',', '204', ',', '404', ')']
Check if the user with login ``login`` is a public member. :returns: bool
['Check', 'if', 'the', 'user', 'with', 'login', 'login', 'is', 'a', 'public', 'member', '.']
train
https://github.com/kislyuk/aegea/blob/94957e9dba036eae3052e2662c208b259c08399a/aegea/packages/github3/orgs.py#L424-L430
6,263
danilobellini/dose
dose/watcher.py
to_unicode
def to_unicode(path, errors="replace"): """Given a bytestring/unicode path, return it as unicode.""" if isinstance(path, UNICODE): return path return path.decode(sys.getfilesystemencoding(), errors)
python
def to_unicode(path, errors="replace"): """Given a bytestring/unicode path, return it as unicode.""" if isinstance(path, UNICODE): return path return path.decode(sys.getfilesystemencoding(), errors)
['def', 'to_unicode', '(', 'path', ',', 'errors', '=', '"replace"', ')', ':', 'if', 'isinstance', '(', 'path', ',', 'UNICODE', ')', ':', 'return', 'path', 'return', 'path', '.', 'decode', '(', 'sys', '.', 'getfilesystemencoding', '(', ')', ',', 'errors', ')']
Given a bytestring/unicode path, return it as unicode.
['Given', 'a', 'bytestring', '/', 'unicode', 'path', 'return', 'it', 'as', 'unicode', '.']
train
https://github.com/danilobellini/dose/blob/141f48322f7812b7d32e3d5f065d4473a11102a4/dose/watcher.py#L8-L12
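A quick standalone check of the decode behaviour (the byte string is illustrative; the output depends on the filesystem encoding):

import sys

def to_unicode(path, errors='replace'):
    if isinstance(path, str):  # str stands in for the library's UNICODE alias
        return path
    return path.decode(sys.getfilesystemencoding(), errors)

print(to_unicode(b'/tmp/caf\xc3\xa9'))  # -> /tmp/café on UTF-8 systems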
6,264
openpaperwork/paperwork-backend
paperwork_backend/docsearch.py
DocIndexUpdater.commit
def commit(self, index_update=True, label_guesser_update=True): """ Apply the changes to the index """ logger.info("Index: Committing changes") self.docsearch.index.commit(index_update=index_update, label_guesser_update=label_guesser_update)
python
def commit(self, index_update=True, label_guesser_update=True): """ Apply the changes to the index """ logger.info("Index: Committing changes") self.docsearch.index.commit(index_update=index_update, label_guesser_update=label_guesser_update)
['def', 'commit', '(', 'self', ',', 'index_update', '=', 'True', ',', 'label_guesser_update', '=', 'True', ')', ':', 'logger', '.', 'info', '(', '"Index: Committing changes"', ')', 'self', '.', 'docsearch', '.', 'index', '.', 'commit', '(', 'index_update', '=', 'index_update', ',', 'label_guesser_update', '=', 'label_guesser_update', ')']
Apply the changes to the index
['Apply', 'the', 'changes', 'to', 'the', 'index']
train
https://github.com/openpaperwork/paperwork-backend/blob/114b831e94e039e68b339751fd18250877abad76/paperwork_backend/docsearch.py#L224-L230
6,265
noahbenson/neuropythy
neuropythy/geometry/mesh.py
path_trace
def path_trace(map_projection, pts, closed=False, meta_data=None): ''' path_trace(proj, points) yields a path-trace object that represents the given path of points on the given map projection proj. The following options may be given: * closed (default: False) specifies whether the points form a closed loop. If they do form such a loop, the points should be given in the same ordering (counter-clockwise or clockwise) that mesh vertices are given in; usually counter-clockwise. * meta_data (default: None) specifies an optional additional meta-data map to append to the object. ''' return PathTrace(map_projection, pts, closed=closed, meta_data=meta_data)
python
def path_trace(map_projection, pts, closed=False, meta_data=None): ''' path_trace(proj, points) yields a path-trace object that represents the given path of points on the given map projection proj. The following options may be given: * closed (default: False) specifies whether the points form a closed loop. If they do form such a loop, the points should be given in the same ordering (counter-clockwise or clockwise) that mesh vertices are given in; usually counter-clockwise. * meta_data (default: None) specifies an optional additional meta-data map to append to the object. ''' return PathTrace(map_projection, pts, closed=closed, meta_data=meta_data)
['def', 'path_trace', '(', 'map_projection', ',', 'pts', ',', 'closed', '=', 'False', ',', 'meta_data', '=', 'None', ')', ':', 'return', 'PathTrace', '(', 'map_projection', ',', 'pts', ',', 'closed', '=', 'closed', ',', 'meta_data', '=', 'meta_data', ')']
path_trace(proj, points) yields a path-trace object that represents the given path of points on the given map projection proj. The following options may be given: * closed (default: False) specifies whether the points form a closed loop. If they do form such a loop, the points should be given in the same ordering (counter-clockwise or clockwise) that mesh vertices are given in; usually counter-clockwise. * meta_data (default: None) specifies an optional additional meta-data map to append to the object.
['path_trace', '(', 'proj', 'points', ')', 'yields', 'a', 'path', '-', 'trace', 'object', 'that', 'represents', 'the', 'given', 'path', 'of', 'points', 'on', 'the', 'given', 'map', 'projection', 'proj', '.', 'The', 'following', 'options', 'may', 'be', 'given', ':', '*', 'closed', '(', 'default', ':', 'False', ')', 'specifies', 'whether', 'the', 'points', 'form', 'a', 'closed', 'loop', '.', 'If', 'they', 'do', 'form', 'such', 'a', 'loop', 'the', 'points', 'should', 'be', 'given', 'in', 'the', 'same', 'ordering', '(', 'counter', '-', 'clockwise', 'or', 'clockwise', ')', 'that', 'mesh', 'vertices', 'are', 'given', 'in', ';', 'usually', 'counter', '-', 'clockwise', '.', '*', 'meta_data', '(', 'default', ':', 'None', ')', 'specifies', 'an', 'optional', 'additional', 'meta', '-', 'data', 'map', 'to', 'append', 'to', 'the', 'object', '.']
train
https://github.com/noahbenson/neuropythy/blob/b588889f6db36ddb9602ae4a72c1c0d3f41586b2/neuropythy/geometry/mesh.py#L3899-L3911
6,266
kislyuk/aegea
aegea/packages/github3/repos/repo.py
Repository.hook
def hook(self, id_num): """Get a single hook. :param int id_num: (required), id of the hook :returns: :class:`Hook <github3.repos.hook.Hook>` if successful, otherwise None """ json = None if int(id_num) > 0: url = self._build_url('hooks', str(id_num), base_url=self._api) json = self._json(self._get(url), 200) return Hook(json, self) if json else None
python
def hook(self, id_num): """Get a single hook. :param int id_num: (required), id of the hook :returns: :class:`Hook <github3.repos.hook.Hook>` if successful, otherwise None """ json = None if int(id_num) > 0: url = self._build_url('hooks', str(id_num), base_url=self._api) json = self._json(self._get(url), 200) return Hook(json, self) if json else None
['def', 'hook', '(', 'self', ',', 'id_num', ')', ':', 'json', '=', 'None', 'if', 'int', '(', 'id_num', ')', '>', '0', ':', 'url', '=', 'self', '.', '_build_url', '(', "'hooks'", ',', 'str', '(', 'id_num', ')', ',', 'base_url', '=', 'self', '.', '_api', ')', 'json', '=', 'self', '.', '_json', '(', 'self', '.', '_get', '(', 'url', ')', ',', '200', ')', 'return', 'Hook', '(', 'json', ',', 'self', ')', 'if', 'json', 'else', 'None']
Get a single hook. :param int id_num: (required), id of the hook :returns: :class:`Hook <github3.repos.hook.Hook>` if successful, otherwise None
['Get', 'a', 'single', 'hook', '.']
train
https://github.com/kislyuk/aegea/blob/94957e9dba036eae3052e2662c208b259c08399a/aegea/packages/github3/repos/repo.py#L996-L1007
6,267
maxalbert/tohu
tohu/v2/custom_generator.py
add_new_next_method
def add_new_next_method(obj): """ Attach a __next__ method to obj which draws one value from each field generator and returns them wrapped in obj.item_cls. """ def new_next(self): field_values = [next(g) for g in self.field_gens.values()] return self.item_cls(*field_values) obj.__next__ = new_next
python
def add_new_next_method(obj): """ Attach a __next__ method to obj which draws one value from each field generator and returns them wrapped in obj.item_cls. """ def new_next(self): field_values = [next(g) for g in self.field_gens.values()] return self.item_cls(*field_values) obj.__next__ = new_next
['def', 'add_new_next_method', '(', 'obj', ')', ':', 'def', 'new_next', '(', 'self', ')', ':', 'field_values', '=', '[', 'next', '(', 'g', ')', 'for', 'g', 'in', 'self', '.', 'field_gens', '.', 'values', '(', ')', ']', 'return', 'self', '.', 'item_cls', '(', '*', 'field_values', ')', 'obj', '.', '__next__', '=', 'new_next']
Attach a __next__ method to obj which draws one value from each field generator and returns them wrapped in obj.item_cls.
['Attach', 'a', '__next__', 'method', 'to', 'obj', 'which', 'draws', 'one', 'value', 'from', 'each', 'field', 'generator', 'and', 'returns', 'them', 'wrapped', 'in', 'obj', '.', 'item_cls', '.']
train
https://github.com/maxalbert/tohu/blob/43380162fadec99cdd5c5c3152dd6b7d3a9d39a8/tohu/v2/custom_generator.py#L228-L237
6,268
serkanyersen/underscore.py
src/underscore.py
underscore.functions
def functions(self): """ Return a sorted list of the function names available on the object. """ names = [] for i, k in enumerate(self.obj): if _(self.obj[k]).isCallable(): names.append(k) return self._wrap(sorted(names))
python
def functions(self): """ Return a sorted list of the function names available on the object. """ names = [] for i, k in enumerate(self.obj): if _(self.obj[k]).isCallable(): names.append(k) return self._wrap(sorted(names))
['def', 'functions', '(', 'self', ')', ':', 'names', '=', '[', ']', 'for', 'i', ',', 'k', 'in', 'enumerate', '(', 'self', '.', 'obj', ')', ':', 'if', '_', '(', 'self', '.', 'obj', '[', 'k', ']', ')', '.', 'isCallable', '(', ')', ':', 'names', '.', 'append', '(', 'k', ')', 'return', 'self', '.', '_wrap', '(', 'sorted', '(', 'names', ')', ')']
Return a sorted list of the function names available on the object.
['Return', 'a', 'sorted', 'list', 'of', 'the', 'function', 'names', 'available', 'on', 'the', 'object', '.']
train
https://github.com/serkanyersen/underscore.py/blob/07c25c3f0f789536e4ad47aa315faccc0da9602f/src/underscore.py#L973-L982
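The same idea without the underscore wrapper, using dir(), getattr() and callable() on a plain object (sketch only):

def function_names(obj):
    # Sorted names of callable attributes, skipping dunder machinery.
    return sorted(n for n in dir(obj)
                  if not n.startswith('__') and callable(getattr(obj, n)))

class Greeter:
    lang = 'en'
    def hello(self):
        return 'hi'

print(function_names(Greeter()))  # -> ['hello']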
6,269
sorgerlab/indra
indra/literature/elsevier_client.py
download_article
def download_article(id_val, id_type='doi', on_retry=False): """Low level function to get an XML article for a particular id. Parameters ---------- id_val : str The value of the id. id_type : str The type of id, such as pmid (a.k.a. pubmed_id), doi, or eid. on_retry : bool This function has a recursive retry feature, and this is the only time this parameter should be used. Returns ------- content : str or None If found, the content string is returned, otherwise, None is returned. """ if id_type == 'pmid': id_type = 'pubmed_id' url = '%s/%s' % (elsevier_article_url_fmt % id_type, id_val) params = {'httpAccept': 'text/xml'} res = requests.get(url, params, headers=ELSEVIER_KEYS) if res.status_code == 404: logger.info("Resource for %s not available on elsevier." % url) return None elif res.status_code == 429: if not on_retry: logger.warning("Broke the speed limit. Waiting half a second then " "trying again...") sleep(0.5) return download_article(id_val, id_type, True) else: logger.error("Still breaking speed limit after waiting.") logger.error("Elsevier response: %s" % res.text) return None elif res.status_code != 200: logger.error('Could not download article %s: status code %d' % (url, res.status_code)) logger.error('Elsevier response: %s' % res.text) return None else: content_str = res.content.decode('utf-8') if content_str.startswith('<service-error>'): logger.error('Got a service error with 200 status: %s' % content_str) return None # Return the XML content as a unicode string, assuming UTF-8 encoding return content_str
python
def download_article(id_val, id_type='doi', on_retry=False): """Low level function to get an XML article for a particular id. Parameters ---------- id_val : str The value of the id. id_type : str The type of id, such as pmid (a.k.a. pubmed_id), doi, or eid. on_retry : bool This function has a recursive retry feature, and this is the only time this parameter should be used. Returns ------- content : str or None If found, the content string is returned, otherwise, None is returned. """ if id_type == 'pmid': id_type = 'pubmed_id' url = '%s/%s' % (elsevier_article_url_fmt % id_type, id_val) params = {'httpAccept': 'text/xml'} res = requests.get(url, params, headers=ELSEVIER_KEYS) if res.status_code == 404: logger.info("Resource for %s not available on elsevier." % url) return None elif res.status_code == 429: if not on_retry: logger.warning("Broke the speed limit. Waiting half a second then " "trying again...") sleep(0.5) return download_article(id_val, id_type, True) else: logger.error("Still breaking speed limit after waiting.") logger.error("Elsevier response: %s" % res.text) return None elif res.status_code != 200: logger.error('Could not download article %s: status code %d' % (url, res.status_code)) logger.error('Elsevier response: %s' % res.text) return None else: content_str = res.content.decode('utf-8') if content_str.startswith('<service-error>'): logger.error('Got a service error with 200 status: %s' % content_str) return None # Return the XML content as a unicode string, assuming UTF-8 encoding return content_str
['def', 'download_article', '(', 'id_val', ',', 'id_type', '=', "'doi'", ',', 'on_retry', '=', 'False', ')', ':', 'if', 'id_type', '==', "'pmid'", ':', 'id_type', '=', "'pubmed_id'", 'url', '=', "'%s/%s'", '%', '(', 'elsevier_article_url_fmt', '%', 'id_type', ',', 'id_val', ')', 'params', '=', '{', "'httpAccept'", ':', "'text/xml'", '}', 'res', '=', 'requests', '.', 'get', '(', 'url', ',', 'params', ',', 'headers', '=', 'ELSEVIER_KEYS', ')', 'if', 'res', '.', 'status_code', '==', '404', ':', 'logger', '.', 'info', '(', '"Resource for %s not available on elsevier."', '%', 'url', ')', 'return', 'None', 'elif', 'res', '.', 'status_code', '==', '429', ':', 'if', 'not', 'on_retry', ':', 'logger', '.', 'warning', '(', '"Broke the speed limit. Waiting half a second then "', '"trying again..."', ')', 'sleep', '(', '0.5', ')', 'return', 'download_article', '(', 'id_val', ',', 'id_type', ',', 'True', ')', 'else', ':', 'logger', '.', 'error', '(', '"Still breaking speed limit after waiting."', ')', 'logger', '.', 'error', '(', '"Elsevier response: %s"', '%', 'res', '.', 'text', ')', 'return', 'None', 'elif', 'res', '.', 'status_code', '!=', '200', ':', 'logger', '.', 'error', '(', "'Could not download article %s: status code %d'", '%', '(', 'url', ',', 'res', '.', 'status_code', ')', ')', 'logger', '.', 'error', '(', "'Elsevier response: %s'", '%', 'res', '.', 'text', ')', 'return', 'None', 'else', ':', 'content_str', '=', 'res', '.', 'content', '.', 'decode', '(', "'utf-8'", ')', 'if', 'content_str', '.', 'startswith', '(', "'<service-error>'", ')', ':', 'logger', '.', 'error', '(', "'Got a service error with 200 status: %s'", '%', 'content_str', ')', 'return', 'None', '# Return the XML content as a unicode string, assuming UTF-8 encoding', 'return', 'content_str']
Low level function to get an XML article for a particular id. Parameters ---------- id_val : str The value of the id. id_type : str The type of id, such as pmid (a.k.a. pubmed_id), doi, or eid. on_retry : bool This function has a recursive retry feature, and this is the only time this parameter should be used. Returns ------- content : str or None If found, the content string is returned, otherwise, None is returned.
['Low', 'level', 'function', 'to', 'get', 'an', 'XML', 'article', 'for', 'a', 'particular', 'id', '.']
train
https://github.com/sorgerlab/indra/blob/79a70415832c5702d7a820c7c9ccc8e25010124b/indra/literature/elsevier_client.py#L110-L158
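The single-retry-on-429 pattern generalises beyond Elsevier; a hedged sketch with requests against a placeholder URL:

import time
import requests

def get_with_retry(url, params=None, headers=None, retried=False):
    # One polite retry when the server signals rate limiting (HTTP 429).
    res = requests.get(url, params=params, headers=headers)
    if res.status_code == 429 and not retried:
        time.sleep(0.5)
        return get_with_retry(url, params, headers, retried=True)
    return res

# res = get_with_retry('https://api.example.com/article', {'httpAccept': 'text/xml'})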
6,270
GNS3/gns3-server
gns3server/compute/vmware/__init__.py
VMware.find_vmrun
def find_vmrun(self): """ Searches for vmrun. :returns: path to vmrun """ # look for vmrun vmrun_path = self.config.get_section_config("VMware").get("vmrun_path") if not vmrun_path: if sys.platform.startswith("win"): vmrun_path = shutil.which("vmrun") if vmrun_path is None: # look for vmrun.exe using the VMware Workstation directory listed in the registry vmrun_path = self._find_vmrun_registry(r"SOFTWARE\Wow6432Node\VMware, Inc.\VMware Workstation") if vmrun_path is None: # look for vmrun.exe using the VIX directory listed in the registry vmrun_path = self._find_vmrun_registry(r"SOFTWARE\Wow6432Node\VMware, Inc.\VMware VIX") elif sys.platform.startswith("darwin"): vmrun_path = "/Applications/VMware Fusion.app/Contents/Library/vmrun" else: vmrun_path = "vmrun" if vmrun_path and not os.path.isabs(vmrun_path): vmrun_path = shutil.which(vmrun_path) if not vmrun_path: raise VMwareError("Could not find VMware vmrun, please make sure it is installed") if not os.path.isfile(vmrun_path): raise VMwareError("vmrun {} is not accessible".format(vmrun_path)) if not os.access(vmrun_path, os.X_OK): raise VMwareError("vmrun is not executable") if os.path.basename(vmrun_path).lower() not in ["vmrun", "vmrun.exe"]: raise VMwareError("Invalid vmrun executable name {}".format(os.path.basename(vmrun_path))) self._vmrun_path = vmrun_path return vmrun_path
python
def find_vmrun(self): """ Searches for vmrun. :returns: path to vmrun """ # look for vmrun vmrun_path = self.config.get_section_config("VMware").get("vmrun_path") if not vmrun_path: if sys.platform.startswith("win"): vmrun_path = shutil.which("vmrun") if vmrun_path is None: # look for vmrun.exe using the VMware Workstation directory listed in the registry vmrun_path = self._find_vmrun_registry(r"SOFTWARE\Wow6432Node\VMware, Inc.\VMware Workstation") if vmrun_path is None: # look for vmrun.exe using the VIX directory listed in the registry vmrun_path = self._find_vmrun_registry(r"SOFTWARE\Wow6432Node\VMware, Inc.\VMware VIX") elif sys.platform.startswith("darwin"): vmrun_path = "/Applications/VMware Fusion.app/Contents/Library/vmrun" else: vmrun_path = "vmrun" if vmrun_path and not os.path.isabs(vmrun_path): vmrun_path = shutil.which(vmrun_path) if not vmrun_path: raise VMwareError("Could not find VMware vmrun, please make sure it is installed") if not os.path.isfile(vmrun_path): raise VMwareError("vmrun {} is not accessible".format(vmrun_path)) if not os.access(vmrun_path, os.X_OK): raise VMwareError("vmrun is not executable") if os.path.basename(vmrun_path).lower() not in ["vmrun", "vmrun.exe"]: raise VMwareError("Invalid vmrun executable name {}".format(os.path.basename(vmrun_path))) self._vmrun_path = vmrun_path return vmrun_path
['def', 'find_vmrun', '(', 'self', ')', ':', '# look for vmrun', 'vmrun_path', '=', 'self', '.', 'config', '.', 'get_section_config', '(', '"VMware"', ')', '.', 'get', '(', '"vmrun_path"', ')', 'if', 'not', 'vmrun_path', ':', 'if', 'sys', '.', 'platform', '.', 'startswith', '(', '"win"', ')', ':', 'vmrun_path', '=', 'shutil', '.', 'which', '(', '"vmrun"', ')', 'if', 'vmrun_path', 'is', 'None', ':', '# look for vmrun.exe using the VMware Workstation directory listed in the registry', 'vmrun_path', '=', 'self', '.', '_find_vmrun_registry', '(', 'r"SOFTWARE\\Wow6432Node\\VMware, Inc.\\VMware Workstation"', ')', 'if', 'vmrun_path', 'is', 'None', ':', '# look for vmrun.exe using the VIX directory listed in the registry', 'vmrun_path', '=', 'self', '.', '_find_vmrun_registry', '(', 'r"SOFTWARE\\Wow6432Node\\VMware, Inc.\\VMware VIX"', ')', 'elif', 'sys', '.', 'platform', '.', 'startswith', '(', '"darwin"', ')', ':', 'vmrun_path', '=', '"/Applications/VMware Fusion.app/Contents/Library/vmrun"', 'else', ':', 'vmrun_path', '=', '"vmrun"', 'if', 'vmrun_path', 'and', 'not', 'os', '.', 'path', '.', 'isabs', '(', 'vmrun_path', ')', ':', 'vmrun_path', '=', 'shutil', '.', 'which', '(', 'vmrun_path', ')', 'if', 'not', 'vmrun_path', ':', 'raise', 'VMwareError', '(', '"Could not find VMware vmrun, please make sure it is installed"', ')', 'if', 'not', 'os', '.', 'path', '.', 'isfile', '(', 'vmrun_path', ')', ':', 'raise', 'VMwareError', '(', '"vmrun {} is not accessible"', '.', 'format', '(', 'vmrun_path', ')', ')', 'if', 'not', 'os', '.', 'access', '(', 'vmrun_path', ',', 'os', '.', 'X_OK', ')', ':', 'raise', 'VMwareError', '(', '"vmrun is not executable"', ')', 'if', 'os', '.', 'path', '.', 'basename', '(', 'vmrun_path', ')', '.', 'lower', '(', ')', 'not', 'in', '[', '"vmrun"', ',', '"vmrun.exe"', ']', ':', 'raise', 'VMwareError', '(', '"Invalid vmrun executable name {}"', '.', 'format', '(', 'os', '.', 'path', '.', 'basename', '(', 'vmrun_path', ')', ')', ')', 'self', '.', '_vmrun_path', '=', 'vmrun_path', 'return', 'vmrun_path']
Searches for vmrun. :returns: path to vmrun
['Searches', 'for', 'vmrun', '.']
train
https://github.com/GNS3/gns3-server/blob/a221678448fb5d24e977ef562f81d56aacc89ab1/gns3server/compute/vmware/__init__.py#L87-L123
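For context, a minimal standalone sketch of the same lookup-and-validate pattern used by find_vmrun above, outside the GNS3 VMwareManager class. The plain RuntimeError stands in for VMwareError, and the Windows registry fallback is omitted; both are simplifications of this sketch, not part of the record.

```python
import os
import shutil
import sys

def find_vmrun(configured_path=None):
    # Prefer an explicitly configured path, then platform defaults.
    path = configured_path
    if not path:
        if sys.platform.startswith("darwin"):
            path = "/Applications/VMware Fusion.app/Contents/Library/vmrun"
        else:
            path = "vmrun"  # resolve via PATH; registry lookup omitted here
    if path and not os.path.isabs(path):
        path = shutil.which(path)
    if not path:
        raise RuntimeError("Could not find VMware vmrun")
    if not os.path.isfile(path) or not os.access(path, os.X_OK):
        raise RuntimeError("vmrun {} is not an executable file".format(path))
    if os.path.basename(path).lower() not in ("vmrun", "vmrun.exe"):
        raise RuntimeError("Invalid vmrun executable name")
    return path
```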
6,271
Kozea/pygal
pygal/colors.py
unparse_color
def unparse_color(r, g, b, a, type): """ Take the r, g, b, a color values and give back a type css color string. This is the inverse function of parse_color """ if type == '#rgb': # Don't lose precision on rgb shortcut if r % 17 == 0 and g % 17 == 0 and b % 17 == 0: return '#%x%x%x' % (int(r / 17), int(g / 17), int(b / 17)) type = '#rrggbb' if type == '#rgba': if r % 17 == 0 and g % 17 == 0 and b % 17 == 0: return '#%x%x%x%x' % ( int(r / 17), int(g / 17), int(b / 17), int(a * 15) ) type = '#rrggbbaa' if type == '#rrggbb': return '#%02x%02x%02x' % (r, g, b) if type == '#rrggbbaa': return '#%02x%02x%02x%02x' % (r, g, b, int(a * 255)) if type == 'rgb': return 'rgb(%d, %d, %d)' % (r, g, b) if type == 'rgba': return 'rgba(%d, %d, %d, %g)' % (r, g, b, a)
python
def unparse_color(r, g, b, a, type): """ Take the r, g, b, a color values and give back a type css color string. This is the inverse function of parse_color """ if type == '#rgb': # Don't lose precision on rgb shortcut if r % 17 == 0 and g % 17 == 0 and b % 17 == 0: return '#%x%x%x' % (int(r / 17), int(g / 17), int(b / 17)) type = '#rrggbb' if type == '#rgba': if r % 17 == 0 and g % 17 == 0 and b % 17 == 0: return '#%x%x%x%x' % ( int(r / 17), int(g / 17), int(b / 17), int(a * 15) ) type = '#rrggbbaa' if type == '#rrggbb': return '#%02x%02x%02x' % (r, g, b) if type == '#rrggbbaa': return '#%02x%02x%02x%02x' % (r, g, b, int(a * 255)) if type == 'rgb': return 'rgb(%d, %d, %d)' % (r, g, b) if type == 'rgba': return 'rgba(%d, %d, %d, %g)' % (r, g, b, a)
['def', 'unparse_color', '(', 'r', ',', 'g', ',', 'b', ',', 'a', ',', 'type', ')', ':', 'if', 'type', '==', "'#rgb'", ':', "# Don't lose precision on rgb shortcut", 'if', 'r', '%', '17', '==', '0', 'and', 'g', '%', '17', '==', '0', 'and', 'b', '%', '17', '==', '0', ':', 'return', "'#%x%x%x'", '%', '(', 'int', '(', 'r', '/', '17', ')', ',', 'int', '(', 'g', '/', '17', ')', ',', 'int', '(', 'b', '/', '17', ')', ')', 'type', '=', "'#rrggbb'", 'if', 'type', '==', "'#rgba'", ':', 'if', 'r', '%', '17', '==', '0', 'and', 'g', '%', '17', '==', '0', 'and', 'b', '%', '17', '==', '0', ':', 'return', "'#%x%x%x%x'", '%', '(', 'int', '(', 'r', '/', '17', ')', ',', 'int', '(', 'g', '/', '17', ')', ',', 'int', '(', 'b', '/', '17', ')', ',', 'int', '(', 'a', '*', '15', ')', ')', 'type', '=', "'#rrggbbaa'", 'if', 'type', '==', "'#rrggbb'", ':', 'return', "'#%02x%02x%02x'", '%', '(', 'r', ',', 'g', ',', 'b', ')', 'if', 'type', '==', "'#rrggbbaa'", ':', 'return', "'#%02x%02x%02x%02x'", '%', '(', 'r', ',', 'g', ',', 'b', ',', 'int', '(', 'a', '*', '255', ')', ')', 'if', 'type', '==', "'rgb'", ':', 'return', "'rgb(%d, %d, %d)'", '%', '(', 'r', ',', 'g', ',', 'b', ')', 'if', 'type', '==', "'rgba'", ':', 'return', "'rgba(%d, %d, %d, %g)'", '%', '(', 'r', ',', 'g', ',', 'b', ',', 'a', ')']
Take the r, g, b, a color values and give back a type css color string. This is the inverse function of parse_color
['Take', 'the', 'r', 'g', 'b', 'a', 'color', 'values', 'and', 'give', 'back', 'a', 'type', 'css', 'color', 'string', '.', 'This', 'is', 'the', 'inverse', 'function', 'of', 'parse_color']
train
https://github.com/Kozea/pygal/blob/5e25c98a59a0642eecd9fcc5dbfeeb2190fbb5e7/pygal/colors.py#L127-L155
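A quick sanity check of unparse_color's branches, assuming pygal is installed so the function imports from pygal.colors as the URL above indicates.

```python
from pygal.colors import unparse_color

# Shortcut form only when each channel is a multiple of 17 (0x11).
assert unparse_color(255, 0, 0, 1.0, '#rgb') == '#f00'
# Otherwise the '#rgb' request falls through to '#rrggbb'.
assert unparse_color(250, 0, 0, 1.0, '#rgb') == '#fa0000'
assert unparse_color(255, 0, 0, 1.0, '#rrggbbaa') == '#ff0000ff'
assert unparse_color(255, 0, 0, 0.5, 'rgba') == 'rgba(255, 0, 0, 0.5)'
```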
6,272
Anaconda-Platform/anaconda-client
binstar_client/inspect_package/pypi.py
parse_specification
def parse_specification(spec): """ Parse a requirement from a python distribution metadata and return a tuple with name, extras, constraints, marker and url components. This method does not enforce strict specifications but extracts the information which is assumed to be *correct*. As such no errors are raised. Example ------- spec = 'requests[security, tests] >=3.3.0 ; foo >= 2.7 or bar == 1' ('requests', ['security', 'pyfoo'], '>=3.3.0', 'foo >= 2.7 or bar == 1', '') """ name, extras, const = spec, [], '' # Remove excess whitespace spec = ' '.join(p for p in spec.split(' ') if p).strip() # Extract marker (Assumes that there can only be one ';' inside the spec) spec, marker = split_spec(spec, ';') # Extract url (Assumes that there can only be one '@' inside the spec) spec, url = split_spec(spec, '@') # Find name, extras and constraints r = PARTIAL_PYPI_SPEC_PATTERN.match(spec) if r: # Normalize name name = r.group('name') # Clean extras extras = r.group('extras') extras = [e.strip() for e in extras.split(',') if e] if extras else [] # Clean constraints const = r.group('constraints') const = ''.join(c for c in const.split(' ') if c).strip() if const.startswith('(') and const.endswith(')'): # Remove parens const = const[1:-1] return name, extras, const, marker, url
python
def parse_specification(spec): """ Parse a requirement from a python distribution metadata and return a tuple with name, extras, constraints, marker and url components. This method does not enforce strict specifications but extracts the information which is assumed to be *correct*. As such no errors are raised. Example ------- spec = 'requests[security, tests] >=3.3.0 ; foo >= 2.7 or bar == 1' ('requests', ['security', 'pyfoo'], '>=3.3.0', 'foo >= 2.7 or bar == 1', '') """ name, extras, const = spec, [], '' # Remove excess whitespace spec = ' '.join(p for p in spec.split(' ') if p).strip() # Extract marker (Assumes that there can only be one ';' inside the spec) spec, marker = split_spec(spec, ';') # Extract url (Assumes that there can only be one '@' inside the spec) spec, url = split_spec(spec, '@') # Find name, extras and constraints r = PARTIAL_PYPI_SPEC_PATTERN.match(spec) if r: # Normalize name name = r.group('name') # Clean extras extras = r.group('extras') extras = [e.strip() for e in extras.split(',') if e] if extras else [] # Clean constraints const = r.group('constraints') const = ''.join(c for c in const.split(' ') if c).strip() if const.startswith('(') and const.endswith(')'): # Remove parens const = const[1:-1] return name, extras, const, marker, url
['def', 'parse_specification', '(', 'spec', ')', ':', 'name', ',', 'extras', ',', 'const', '=', 'spec', ',', '[', ']', ',', "''", '# Remove excess whitespace', 'spec', '=', "' '", '.', 'join', '(', 'p', 'for', 'p', 'in', 'spec', '.', 'split', '(', "' '", ')', 'if', 'p', ')', '.', 'strip', '(', ')', "# Extract marker (Assumes that there can only be one ';' inside the spec)", 'spec', ',', 'marker', '=', 'split_spec', '(', 'spec', ',', "';'", ')', "# Extract url (Assumes that there can only be one '@' inside the spec)", 'spec', ',', 'url', '=', 'split_spec', '(', 'spec', ',', "'@'", ')', '# Find name, extras and constraints', 'r', '=', 'PARTIAL_PYPI_SPEC_PATTERN', '.', 'match', '(', 'spec', ')', 'if', 'r', ':', '# Normalize name', 'name', '=', 'r', '.', 'group', '(', "'name'", ')', '# Clean extras', 'extras', '=', 'r', '.', 'group', '(', "'extras'", ')', 'extras', '=', '[', 'e', '.', 'strip', '(', ')', 'for', 'e', 'in', 'extras', '.', 'split', '(', "','", ')', 'if', 'e', ']', 'if', 'extras', 'else', '[', ']', '# Clean constraints', 'const', '=', 'r', '.', 'group', '(', "'constraints'", ')', 'const', '=', "''", '.', 'join', '(', 'c', 'for', 'c', 'in', 'const', '.', 'split', '(', "' '", ')', 'if', 'c', ')', '.', 'strip', '(', ')', 'if', 'const', '.', 'startswith', '(', "'('", ')', 'and', 'const', '.', 'endswith', '(', "')'", ')', ':', '# Remove parens', 'const', '=', 'const', '[', '1', ':', '-', '1', ']', 'return', 'name', ',', 'extras', ',', 'const', ',', 'marker', ',', 'url']
Parse a requirement from a python distribution metadata and return a tuple with name, extras, constraints, marker and url components. This method does not enforce strict specifications but extracts the information which is assumed to be *correct*. As such no errors are raised. Example ------- spec = 'requests[security, tests] >=3.3.0 ; foo >= 2.7 or bar == 1' ('requests', ['security', 'pyfoo'], '>=3.3.0', 'foo >= 2.7 or bar == 1', '')
['Parse', 'a', 'requirement', 'from', 'a', 'python', 'distribution', 'metadata', 'and', 'return', 'a', 'tuple', 'with', 'name', 'extras', 'constraints', 'marker', 'and', 'url', 'components', '.']
train
https://github.com/Anaconda-Platform/anaconda-client/blob/b276f0572744c73c184a8b43a897cfa7fc1dc523/binstar_client/inspect_package/pypi.py#L66-L108
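A usage sketch, assuming anaconda-client is installed so the helper imports from binstar_client.inspect_package.pypi; the commented values are the expected results per the docstring's (name, extras, constraints, marker, url) layout.

```python
from binstar_client.inspect_package.pypi import parse_specification

name, extras, const, marker, url = parse_specification(
    'requests[security, tests] >=3.3.0 ; python_version >= "2.7"')
print(name)    # 'requests'
print(extras)  # ['security', 'tests']
print(const)   # '>=3.3.0'
print(marker)  # 'python_version >= "2.7"'
print(url)     # '' (no '@ <url>' component in this spec)
```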
6,273
HazyResearch/fonduer
src/fonduer/utils/data_model_utils/visual.py
same_page
def same_page(c): """Return true if all the components of c are on the same page of the document. Page numbers are based on the PDF rendering of the document. If a PDF file is provided, it is used. Otherwise, if only a HTML/XML document is provided, a PDF is created and then used to determine the page number of a Mention. :param c: The candidate to evaluate :rtype: boolean """ return all( [ _to_span(c[i]).sentence.is_visual() and bbox_from_span(_to_span(c[i])).page == bbox_from_span(_to_span(c[0])).page for i in range(len(c)) ] )
python
def same_page(c): """Return true if all the components of c are on the same page of the document. Page numbers are based on the PDF rendering of the document. If a PDF file is provided, it is used. Otherwise, if only a HTML/XML document is provided, a PDF is created and then used to determine the page number of a Mention. :param c: The candidate to evaluate :rtype: boolean """ return all( [ _to_span(c[i]).sentence.is_visual() and bbox_from_span(_to_span(c[i])).page == bbox_from_span(_to_span(c[0])).page for i in range(len(c)) ] )
['def', 'same_page', '(', 'c', ')', ':', 'return', 'all', '(', '[', '_to_span', '(', 'c', '[', 'i', ']', ')', '.', 'sentence', '.', 'is_visual', '(', ')', 'and', 'bbox_from_span', '(', '_to_span', '(', 'c', '[', 'i', ']', ')', ')', '.', 'page', '==', 'bbox_from_span', '(', '_to_span', '(', 'c', '[', '0', ']', ')', ')', '.', 'page', 'for', 'i', 'in', 'range', '(', 'len', '(', 'c', ')', ')', ']', ')']
Return true if all the components of c are on the same page of the document. Page numbers are based on the PDF rendering of the document. If a PDF file is provided, it is used. Otherwise, if only a HTML/XML document is provided, a PDF is created and then used to determine the page number of a Mention. :param c: The candidate to evaluate :rtype: boolean
['Return', 'true', 'if', 'all', 'the', 'components', 'of', 'c', 'are', 'on', 'the', 'same', 'page', 'of', 'the', 'document', '.']
train
https://github.com/HazyResearch/fonduer/blob/4520f86a716f03dcca458a9f4bddac75b4e7068f/src/fonduer/utils/data_model_utils/visual.py#L148-L165
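The check above boils down to "every span is visual and shares the first span's page". A generic standalone sketch of that agreement pattern (the names here are illustrative, not fonduer API):

```python
def all_on_same_page(pages):
    # Mirrors the all(...) shape above: vacuously True for empty input.
    pages = list(pages)
    return all(p == pages[0] for p in pages)

print(all_on_same_page([1, 1, 1]))  # True
print(all_on_same_page([1, 2]))     # False
print(all_on_same_page([]))         # True
```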
6,274
KeplerGO/K2fov
K2fov/fov.py
KeplerFov.colRowIsOnSciencePixel
def colRowIsOnSciencePixel(self, col, row, padding=DEFAULT_PADDING): """Is col row on a science pixel? Ranges taken from Fig 25 or Instrument Handbook (p50) Padding allows for the fact that distortion means the results from getColRowWithinChannel can be off by a bit. Setting padding > 0 means that objects that are computed to lie a small amount off silicon will return True. To be conservative, set padding to negative """ if col < 12. - padding or col > 1111 + padding: return False if row < 20 - padding or row > 1043 + padding: return False return True
python
def colRowIsOnSciencePixel(self, col, row, padding=DEFAULT_PADDING): """Is col row on a science pixel? Ranges taken from Fig 25 or Instrument Handbook (p50) Padding allows for the fact that distortion means the results from getColRowWithinChannel can be off by a bit. Setting padding > 0 means that objects that are computed to lie a small amount off silicon will return True. To be conservative, set padding to negative """ if col < 12. - padding or col > 1111 + padding: return False if row < 20 - padding or row > 1043 + padding: return False return True
['def', 'colRowIsOnSciencePixel', '(', 'self', ',', 'col', ',', 'row', ',', 'padding', '=', 'DEFAULT_PADDING', ')', ':', 'if', 'col', '<', '12.', '-', 'padding', 'or', 'col', '>', '1111', '+', 'padding', ':', 'return', 'False', 'if', 'row', '<', '20', '-', 'padding', 'or', 'row', '>', '1043', '+', 'padding', ':', 'return', 'False', 'return', 'True']
Is col row on a science pixel? Ranges taken from Fig 25 or Instrument Handbook (p50) Padding allows for the fact that distortion means the results from getColRowWithinChannel can be off by a bit. Setting padding > 0 means that objects that are computed to lie a small amount off silicon will return True. To be conservative, set padding to negative
['Is', 'col', 'row', 'on', 'a', 'science', 'pixel?']
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/fov.py#L399-L416
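The bounds check is self-contained, so it can be exercised directly; DEFAULT_PADDING is assumed to be 0 here for illustration.

```python
def col_row_is_on_science_pixel(col, row, padding=0):
    # Science pixels span columns 12..1111 and rows 20..1043 (Fig 25).
    if col < 12.0 - padding or col > 1111 + padding:
        return False
    if row < 20 - padding or row > 1043 + padding:
        return False
    return True

print(col_row_is_on_science_pixel(500, 500))            # True
print(col_row_is_on_science_pixel(5, 500))              # False: collateral columns
print(col_row_is_on_science_pixel(11, 500, padding=2))  # True: padding absorbs distortion
```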
6,275
limodou/uliweb
uliweb/contrib/generic/__init__.py
MultiView._query_view
def _query_view(self, model, **kwargs): """ :param model: :return: (query, condition) Default use QueryForm """ QueryForm = functions.get_form('QueryForm') if 'form_cls' not in kwargs: kwargs['form_cls'] = QueryForm query = functions.QueryView(model, **kwargs) return query
python
def _query_view(self, model, **kwargs): """ :param model: :return: (query, condition) Default use QueryForm """ QueryForm = functions.get_form('QueryForm') if 'form_cls' not in kwargs: kwargs['form_cls'] = QueryForm query = functions.QueryView(model, **kwargs) return query
['def', '_query_view', '(', 'self', ',', 'model', ',', '*', '*', 'kwargs', ')', ':', 'QueryForm', '=', 'functions', '.', 'get_form', '(', "'QueryForm'", ')', 'if', "'form_cls'", 'not', 'in', 'kwargs', ':', 'kwargs', '[', "'form_cls'", ']', '=', 'QueryForm', 'query', '=', 'functions', '.', 'QueryView', '(', 'model', ',', '*', '*', 'kwargs', ')', 'return', 'query']
:param model: :return: (query, condition) Default use QueryForm
[':', 'param', 'model', ':', ':', 'return', ':', '(', 'query', 'condition', ')']
train
https://github.com/limodou/uliweb/blob/34472f25e4bc0b954a35346672f94e84ef18b076/uliweb/contrib/generic/__init__.py#L115-L127
6,276
thautwarm/Redy
Redy/Magic/Classic.py
singleton_init_by
def singleton_init_by(init_fn=None): """ >>> from Redy.Magic.Classic import singleton >>> @singleton >>> class S: >>> pass >>> assert isinstance(S, S) """ if not init_fn: def wrap_init(origin_init): return origin_init else: def wrap_init(origin_init): def __init__(self): origin_init(self) init_fn(self) return __init__ def inner(cls_def: type): if not hasattr(cls_def, '__instancecheck__') or isinstance(cls_def.__instancecheck__, (types.BuiltinMethodType, _slot_wrapper)): def __instancecheck__(self, instance): return instance is self cls_def.__instancecheck__ = __instancecheck__ _origin_init = cls_def.__init__ cls_def.__init__ = wrap_init(_origin_init) return cls_def() return inner
python
def singleton_init_by(init_fn=None): """ >>> from Redy.Magic.Classic import singleton >>> @singleton >>> class S: >>> pass >>> assert isinstance(S, S) """ if not init_fn: def wrap_init(origin_init): return origin_init else: def wrap_init(origin_init): def __init__(self): origin_init(self) init_fn(self) return __init__ def inner(cls_def: type): if not hasattr(cls_def, '__instancecheck__') or isinstance(cls_def.__instancecheck__, (types.BuiltinMethodType, _slot_wrapper)): def __instancecheck__(self, instance): return instance is self cls_def.__instancecheck__ = __instancecheck__ _origin_init = cls_def.__init__ cls_def.__init__ = wrap_init(_origin_init) return cls_def() return inner
['def', 'singleton_init_by', '(', 'init_fn', '=', 'None', ')', ':', 'if', 'not', 'init_fn', ':', 'def', 'wrap_init', '(', 'origin_init', ')', ':', 'return', 'origin_init', 'else', ':', 'def', 'wrap_init', '(', 'origin_init', ')', ':', 'def', '__init__', '(', 'self', ')', ':', 'origin_init', '(', 'self', ')', 'init_fn', '(', 'self', ')', 'return', '__init__', 'def', 'inner', '(', 'cls_def', ':', 'type', ')', ':', 'if', 'not', 'hasattr', '(', 'cls_def', ',', "'__instancecheck__'", ')', 'or', 'isinstance', '(', 'cls_def', '.', '__instancecheck__', ',', '(', 'types', '.', 'BuiltinMethodType', ',', '_slot_wrapper', ')', ')', ':', 'def', '__instancecheck__', '(', 'self', ',', 'instance', ')', ':', 'return', 'instance', 'is', 'self', 'cls_def', '.', '__instancecheck__', '=', '__instancecheck__', '_origin_init', '=', 'cls_def', '.', '__init__', 'cls_def', '.', '__init__', '=', 'wrap_init', '(', '_origin_init', ')', 'return', 'cls_def', '(', ')', 'return', 'inner']
>>> from Redy.Magic.Classic import singleton >>> @singleton >>> class S: >>> pass >>> assert isinstance(S, S)
['>>>', 'from', 'Redy', '.', 'Magic', '.', 'Classic', 'import', 'singleton', '>>>']
train
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Magic/Classic.py#L18-L51
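Usage sketch for singleton_init_by, assuming Redy is installed: the class name is rebound to its single instance, and the custom __instancecheck__ makes isinstance(S, S) hold, as the docstring shows for the plain singleton.

```python
from Redy.Magic.Classic import singleton_init_by

@singleton_init_by(lambda self: setattr(self, 'ready', True))
class S:
    def __init__(self):
        self.ready = False

assert isinstance(S, S)   # S is now the instance; the check is "instance is self"
assert S.ready is True    # init_fn ran after the original __init__
```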
6,277
Feneric/doxypypy
doxypypy/doxypypy.py
AstWalker._checkIfCode
def _checkIfCode(self, inCodeBlockObj): """Checks whether or not a given line appears to be Python code.""" while True: line, lines, lineNum = (yield) testLineNum = 1 currentLineNum = 0 testLine = line.strip() lineOfCode = None while lineOfCode is None: match = AstWalker.__errorLineRE.match(testLine) if not testLine or testLine == '...' or match: # These are ambiguous. line, lines, lineNum = (yield) testLine = line.strip() #testLineNum = 1 elif testLine.startswith('>>>'): # This is definitely code. lineOfCode = True else: try: compLine = compile_command(testLine) if compLine and lines[currentLineNum].strip().startswith('#'): lineOfCode = True else: line, lines, lineNum = (yield) line = line.strip() if line.startswith('>>>'): # Definitely code, don't compile further. lineOfCode = True else: testLine += linesep + line testLine = testLine.strip() testLineNum += 1 except (SyntaxError, RuntimeError): # This is definitely not code. lineOfCode = False except Exception: # Other errors are ambiguous. line, lines, lineNum = (yield) testLine = line.strip() #testLineNum = 1 currentLineNum = lineNum - testLineNum if not inCodeBlockObj[0] and lineOfCode: inCodeBlockObj[0] = True lines[currentLineNum] = '{0}{1}# @code{1}'.format( lines[currentLineNum], linesep ) elif inCodeBlockObj[0] and lineOfCode is False: # None is ambiguous, so strict checking # against False is necessary. inCodeBlockObj[0] = False lines[currentLineNum] = '{0}{1}# @endcode{1}'.format( lines[currentLineNum], linesep )
python
def _checkIfCode(self, inCodeBlockObj): """Checks whether or not a given line appears to be Python code.""" while True: line, lines, lineNum = (yield) testLineNum = 1 currentLineNum = 0 testLine = line.strip() lineOfCode = None while lineOfCode is None: match = AstWalker.__errorLineRE.match(testLine) if not testLine or testLine == '...' or match: # These are ambiguous. line, lines, lineNum = (yield) testLine = line.strip() #testLineNum = 1 elif testLine.startswith('>>>'): # This is definitely code. lineOfCode = True else: try: compLine = compile_command(testLine) if compLine and lines[currentLineNum].strip().startswith('#'): lineOfCode = True else: line, lines, lineNum = (yield) line = line.strip() if line.startswith('>>>'): # Definitely code, don't compile further. lineOfCode = True else: testLine += linesep + line testLine = testLine.strip() testLineNum += 1 except (SyntaxError, RuntimeError): # This is definitely not code. lineOfCode = False except Exception: # Other errors are ambiguous. line, lines, lineNum = (yield) testLine = line.strip() #testLineNum = 1 currentLineNum = lineNum - testLineNum if not inCodeBlockObj[0] and lineOfCode: inCodeBlockObj[0] = True lines[currentLineNum] = '{0}{1}# @code{1}'.format( lines[currentLineNum], linesep ) elif inCodeBlockObj[0] and lineOfCode is False: # None is ambiguous, so strict checking # against False is necessary. inCodeBlockObj[0] = False lines[currentLineNum] = '{0}{1}# @endcode{1}'.format( lines[currentLineNum], linesep )
['def', '_checkIfCode', '(', 'self', ',', 'inCodeBlockObj', ')', ':', 'while', 'True', ':', 'line', ',', 'lines', ',', 'lineNum', '=', '(', 'yield', ')', 'testLineNum', '=', '1', 'currentLineNum', '=', '0', 'testLine', '=', 'line', '.', 'strip', '(', ')', 'lineOfCode', '=', 'None', 'while', 'lineOfCode', 'is', 'None', ':', 'match', '=', 'AstWalker', '.', '__errorLineRE', '.', 'match', '(', 'testLine', ')', 'if', 'not', 'testLine', 'or', 'testLine', '==', "'...'", 'or', 'match', ':', '# These are ambiguous.', 'line', ',', 'lines', ',', 'lineNum', '=', '(', 'yield', ')', 'testLine', '=', 'line', '.', 'strip', '(', ')', '#testLineNum = 1', 'elif', 'testLine', '.', 'startswith', '(', "'>>>'", ')', ':', '# This is definitely code.', 'lineOfCode', '=', 'True', 'else', ':', 'try', ':', 'compLine', '=', 'compile_command', '(', 'testLine', ')', 'if', 'compLine', 'and', 'lines', '[', 'currentLineNum', ']', '.', 'strip', '(', ')', '.', 'startswith', '(', "'#'", ')', ':', 'lineOfCode', '=', 'True', 'else', ':', 'line', ',', 'lines', ',', 'lineNum', '=', '(', 'yield', ')', 'line', '=', 'line', '.', 'strip', '(', ')', 'if', 'line', '.', 'startswith', '(', "'>>>'", ')', ':', "# Definitely code, don't compile further.", 'lineOfCode', '=', 'True', 'else', ':', 'testLine', '+=', 'linesep', '+', 'line', 'testLine', '=', 'testLine', '.', 'strip', '(', ')', 'testLineNum', '+=', '1', 'except', '(', 'SyntaxError', ',', 'RuntimeError', ')', ':', '# This is definitely not code.', 'lineOfCode', '=', 'False', 'except', 'Exception', ':', '# Other errors are ambiguous.', 'line', ',', 'lines', ',', 'lineNum', '=', '(', 'yield', ')', 'testLine', '=', 'line', '.', 'strip', '(', ')', '#testLineNum = 1', 'currentLineNum', '=', 'lineNum', '-', 'testLineNum', 'if', 'not', 'inCodeBlockObj', '[', '0', ']', 'and', 'lineOfCode', ':', 'inCodeBlockObj', '[', '0', ']', '=', 'True', 'lines', '[', 'currentLineNum', ']', '=', "'{0}{1}# @code{1}'", '.', 'format', '(', 'lines', '[', 'currentLineNum', ']', ',', 'linesep', ')', 'elif', 'inCodeBlockObj', '[', '0', ']', 'and', 'lineOfCode', 'is', 'False', ':', '# None is ambiguous, so strict checking', '# against False is necessary.', 'inCodeBlockObj', '[', '0', ']', '=', 'False', 'lines', '[', 'currentLineNum', ']', '=', "'{0}{1}# @endcode{1}'", '.', 'format', '(', 'lines', '[', 'currentLineNum', ']', ',', 'linesep', ')']
Checks whether or not a given line appears to be Python code.
['Checks', 'whether', 'or', 'not', 'a', 'given', 'line', 'appears', 'to', 'be', 'Python', 'code', '.']
train
https://github.com/Feneric/doxypypy/blob/a8555b15fa2a758ea8392372de31c0f635cc0d93/doxypypy/doxypypy.py#L121-L176
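The coroutine hinges on codeop.compile_command to decide whether a docstring line parses as Python. A stripped-down, standalone version of that test follows; the helper name and its treatment of incomplete statements are choices of this sketch.

```python
from codeop import compile_command

def looks_like_code(line):
    text = line.strip()
    if text.startswith('>>>'):
        return True  # doctest prompt: definitely code
    try:
        # compile_command returns a code object for a complete statement
        # and None for an incomplete one; None is treated as prose here.
        return compile_command(text) is not None
    except (SyntaxError, ValueError, OverflowError):
        return False  # definitely not code

print(looks_like_code('>>> x = 1'))               # True
print(looks_like_code('total = sum(values)'))     # True
print(looks_like_code('Plain prose, not code.'))  # False
```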
6,278
openstack/networking-cisco
networking_cisco/plugins/cisco/db/l3/ha_db.py
HA_db_mixin._update_redundancy_routers
def _update_redundancy_routers(self, context, updated_router, update_specification, requested_ha_settings, updated_router_db, gateway_changed): """To be called in update_router() AFTER router has been updated in DB. """ router_requested = update_specification['router'] ha_settings_db = updated_router_db.ha_settings ha_enabled_requested = requested_ha_settings.get(ha.ENABLED, False) if not (updated_router[ha.ENABLED] or ha_enabled_requested): # No HA currently enabled and no HA requested so we're done return # The redundancy routers need interfaces on the same networks as the # user visible router. ports = self._get_router_interfaces(updated_router_db) e_context = context.elevated() if not updated_router[ha.ENABLED] and ha_enabled_requested: # No HA currently enabled but HA requested router_requested.update(requested_ha_settings) router_requested[EXTERNAL_GW_INFO] = ( updated_router[EXTERNAL_GW_INFO]) requested_ha_settings = self._ensure_create_ha_compliant( router_requested, updated_router_db.hosting_info.router_type) self._create_redundancy_routers( e_context, updated_router, requested_ha_settings, updated_router_db, ports, expire_db=True) return rr_ids = self._get_redundancy_router_ids(context, updated_router['id']) ha_details_update_spec = requested_ha_settings.get(ha.DETAILS) if (updated_router[ha.ENABLED] and not requested_ha_settings.get( ha.ENABLED, updated_router[ha.ENABLED])): # HA currently enabled but HA disable requested # delete ha settings and extra port for gateway (VIP) port self._delete_ha_group(e_context, updated_router_db.gw_port_id) self._remove_redundancy_routers(e_context, rr_ids, ports, True) with context.session.begin(subtransactions=True): context.session.delete(ha_settings_db) elif ha_details_update_spec: # HA currently enabled and HA setting update (other than # disable HA) requested old_redundancy_level = ha_settings_db.redundancy_level ha_settings_db.update(ha_details_update_spec) diff = (ha_details_update_spec.get(ha.REDUNDANCY_LEVEL, old_redundancy_level) - old_redundancy_level) with context.session.begin(subtransactions=True): context.session.add(ha_settings_db) if diff < 0: # Remove -diff redundancy routers #TODO(bobmel): Ensure currently active router is excluded to_remove = rr_ids[len(rr_ids) + diff:] rr_ids = rr_ids[:len(rr_ids) + diff] self._remove_redundancy_routers(e_context, to_remove, ports) elif diff > 0: # Add diff redundancy routers start = old_redundancy_level + 1 stop = start + diff self._add_redundancy_routers(e_context, start, stop, updated_router, ports, ha_settings_db, False) if gateway_changed is True: self._change_ha_for_gateway(e_context, updated_router, updated_router_db, ha_settings_db, router_requested, expire=True) else: # Notify redundancy routers about changes self.notify_routers_updated(e_context, rr_ids) elif gateway_changed is True: # HA currently enabled (and to remain so) nor any HA setting update # and gateway has changed self._change_ha_for_gateway(e_context, updated_router, updated_router_db, ha_settings_db, router_requested) # pick up updates to other attributes where it makes sense # and push - right now it is only admin_state_up. other_updates_spec = {'router': {}} if 'admin_state_up' in update_specification['router']: other_updates_spec['router']['admin_state_up'] = ( update_specification['router']['admin_state_up']) if 'name' in update_specification['router']: other_updates_spec['router']['name'] = ( update_specification['router']['name']) if (other_updates_spec['router'] or 'routes' in update_specification['router']): self._process_other_router_updates(e_context, updated_router_db, other_updates_spec) # Ensure we get latest state from DB context.session.expire(updated_router_db) self._extend_router_dict_ha(updated_router, updated_router_db)
python
def _update_redundancy_routers(self, context, updated_router, update_specification, requested_ha_settings, updated_router_db, gateway_changed): """To be called in update_router() AFTER router has been updated in DB. """ router_requested = update_specification['router'] ha_settings_db = updated_router_db.ha_settings ha_enabled_requested = requested_ha_settings.get(ha.ENABLED, False) if not (updated_router[ha.ENABLED] or ha_enabled_requested): # No HA currently enabled and no HA requested so we're done return # The redundancy routers need interfaces on the same networks as the # user visible router. ports = self._get_router_interfaces(updated_router_db) e_context = context.elevated() if not updated_router[ha.ENABLED] and ha_enabled_requested: # No HA currently enabled but HA requested router_requested.update(requested_ha_settings) router_requested[EXTERNAL_GW_INFO] = ( updated_router[EXTERNAL_GW_INFO]) requested_ha_settings = self._ensure_create_ha_compliant( router_requested, updated_router_db.hosting_info.router_type) self._create_redundancy_routers( e_context, updated_router, requested_ha_settings, updated_router_db, ports, expire_db=True) return rr_ids = self._get_redundancy_router_ids(context, updated_router['id']) ha_details_update_spec = requested_ha_settings.get(ha.DETAILS) if (updated_router[ha.ENABLED] and not requested_ha_settings.get( ha.ENABLED, updated_router[ha.ENABLED])): # HA currently enabled but HA disable requested # delete ha settings and extra port for gateway (VIP) port self._delete_ha_group(e_context, updated_router_db.gw_port_id) self._remove_redundancy_routers(e_context, rr_ids, ports, True) with context.session.begin(subtransactions=True): context.session.delete(ha_settings_db) elif ha_details_update_spec: # HA currently enabled and HA setting update (other than # disable HA) requested old_redundancy_level = ha_settings_db.redundancy_level ha_settings_db.update(ha_details_update_spec) diff = (ha_details_update_spec.get(ha.REDUNDANCY_LEVEL, old_redundancy_level) - old_redundancy_level) with context.session.begin(subtransactions=True): context.session.add(ha_settings_db) if diff < 0: # Remove -diff redundancy routers #TODO(bobmel): Ensure currently active router is excluded to_remove = rr_ids[len(rr_ids) + diff:] rr_ids = rr_ids[:len(rr_ids) + diff] self._remove_redundancy_routers(e_context, to_remove, ports) elif diff > 0: # Add diff redundancy routers start = old_redundancy_level + 1 stop = start + diff self._add_redundancy_routers(e_context, start, stop, updated_router, ports, ha_settings_db, False) if gateway_changed is True: self._change_ha_for_gateway(e_context, updated_router, updated_router_db, ha_settings_db, router_requested, expire=True) else: # Notify redundancy routers about changes self.notify_routers_updated(e_context, rr_ids) elif gateway_changed is True: # HA currently enabled (and to remain so) nor any HA setting update # and gateway has changed self._change_ha_for_gateway(e_context, updated_router, updated_router_db, ha_settings_db, router_requested) # pick up updates to other attributes where it makes sense # and push - right now it is only admin_state_up. other_updates_spec = {'router': {}} if 'admin_state_up' in update_specification['router']: other_updates_spec['router']['admin_state_up'] = ( update_specification['router']['admin_state_up']) if 'name' in update_specification['router']: other_updates_spec['router']['name'] = ( update_specification['router']['name']) if (other_updates_spec['router'] or 'routes' in update_specification['router']): self._process_other_router_updates(e_context, updated_router_db, other_updates_spec) # Ensure we get latest state from DB context.session.expire(updated_router_db) self._extend_router_dict_ha(updated_router, updated_router_db)
['def', '_update_redundancy_routers', '(', 'self', ',', 'context', ',', 'updated_router', ',', 'update_specification', ',', 'requested_ha_settings', ',', 'updated_router_db', ',', 'gateway_changed', ')', ':', 'router_requested', '=', 'update_specification', '[', "'router'", ']', 'ha_settings_db', '=', 'updated_router_db', '.', 'ha_settings', 'ha_enabled_requested', '=', 'requested_ha_settings', '.', 'get', '(', 'ha', '.', 'ENABLED', ',', 'False', ')', 'if', 'not', '(', 'updated_router', '[', 'ha', '.', 'ENABLED', ']', 'or', 'ha_enabled_requested', ')', ':', "# No HA currently enabled and no HA requested so we're done", 'return', '# The redundancy routers need interfaces on the same networks as the', '# user visible router.', 'ports', '=', 'self', '.', '_get_router_interfaces', '(', 'updated_router_db', ')', 'e_context', '=', 'context', '.', 'elevated', '(', ')', 'if', 'not', 'updated_router', '[', 'ha', '.', 'ENABLED', ']', 'and', 'ha_enabled_requested', ':', '# No HA currently enabled but HA requested', 'router_requested', '.', 'update', '(', 'requested_ha_settings', ')', 'router_requested', '[', 'EXTERNAL_GW_INFO', ']', '=', '(', 'updated_router', '[', 'EXTERNAL_GW_INFO', ']', ')', 'requested_ha_settings', '=', 'self', '.', '_ensure_create_ha_compliant', '(', 'router_requested', ',', 'updated_router_db', '.', 'hosting_info', '.', 'router_type', ')', 'self', '.', '_create_redundancy_routers', '(', 'e_context', ',', 'updated_router', ',', 'requested_ha_settings', ',', 'updated_router_db', ',', 'ports', ',', 'expire_db', '=', 'True', ')', 'return', 'rr_ids', '=', 'self', '.', '_get_redundancy_router_ids', '(', 'context', ',', 'updated_router', '[', "'id'", ']', ')', 'ha_details_update_spec', '=', 'requested_ha_settings', '.', 'get', '(', 'ha', '.', 'DETAILS', ')', 'if', '(', 'updated_router', '[', 'ha', '.', 'ENABLED', ']', 'and', 'not', 'requested_ha_settings', '.', 'get', '(', 'ha', '.', 'ENABLED', ',', 'updated_router', '[', 'ha', '.', 'ENABLED', ']', ')', ')', ':', '# HA currently enabled but HA disable requested', '# delete ha settings and extra port for gateway (VIP) port', 'self', '.', '_delete_ha_group', '(', 'e_context', ',', 'updated_router_db', '.', 'gw_port_id', ')', 'self', '.', '_remove_redundancy_routers', '(', 'e_context', ',', 'rr_ids', ',', 'ports', ',', 'True', ')', 'with', 'context', '.', 'session', '.', 'begin', '(', 'subtransactions', '=', 'True', ')', ':', 'context', '.', 'session', '.', 'delete', '(', 'ha_settings_db', ')', 'elif', 'ha_details_update_spec', ':', '# HA currently enabled and HA setting update (other than', '# disable HA) requested', 'old_redundancy_level', '=', 'ha_settings_db', '.', 'redundancy_level', 'ha_settings_db', '.', 'update', '(', 'ha_details_update_spec', ')', 'diff', '=', '(', 'ha_details_update_spec', '.', 'get', '(', 'ha', '.', 'REDUNDANCY_LEVEL', ',', 'old_redundancy_level', ')', '-', 'old_redundancy_level', ')', 'with', 'context', '.', 'session', '.', 'begin', '(', 'subtransactions', '=', 'True', ')', ':', 'context', '.', 'session', '.', 'add', '(', 'ha_settings_db', ')', 'if', 'diff', '<', '0', ':', '# Remove -diff redundancy routers', '#TODO(bobmel): Ensure currently active router is excluded', 'to_remove', '=', 'rr_ids', '[', 'len', '(', 'rr_ids', ')', '+', 'diff', ':', ']', 'rr_ids', '=', 'rr_ids', '[', ':', 'len', '(', 'rr_ids', ')', '+', 'diff', ']', 'self', '.', '_remove_redundancy_routers', '(', 'e_context', ',', 'to_remove', ',', 'ports', ')', 'elif', 'diff', '>', '0', ':', '# Add diff redundancy routers', 'start', '=', 'old_redundancy_level', '+', '1', 'stop', '=', 'start', '+', 'diff', 'self', '.', '_add_redundancy_routers', '(', 'e_context', ',', 'start', ',', 'stop', ',', 'updated_router', ',', 'ports', ',', 'ha_settings_db', ',', 'False', ')', 'if', 'gateway_changed', 'is', 'True', ':', 'self', '.', '_change_ha_for_gateway', '(', 'e_context', ',', 'updated_router', ',', 'updated_router_db', ',', 'ha_settings_db', ',', 'router_requested', ',', 'expire', '=', 'True', ')', 'else', ':', '# Notify redundancy routers about changes', 'self', '.', 'notify_routers_updated', '(', 'e_context', ',', 'rr_ids', ')', 'elif', 'gateway_changed', 'is', 'True', ':', '# HA currently enabled (and to remain so) nor any HA setting update', '# and gateway has changed', 'self', '.', '_change_ha_for_gateway', '(', 'e_context', ',', 'updated_router', ',', 'updated_router_db', ',', 'ha_settings_db', ',', 'router_requested', ')', '# pick up updates to other attributes where it makes sense', '# and push - right now it is only admin_state_up.', 'other_updates_spec', '=', '{', "'router'", ':', '{', '}', '}', 'if', "'admin_state_up'", 'in', 'update_specification', '[', "'router'", ']', ':', 'other_updates_spec', '[', "'router'", ']', '[', "'admin_state_up'", ']', '=', '(', 'update_specification', '[', "'router'", ']', '[', "'admin_state_up'", ']', ')', 'if', "'name'", 'in', 'update_specification', '[', "'router'", ']', ':', 'other_updates_spec', '[', "'router'", ']', '[', "'name'", ']', '=', '(', 'update_specification', '[', "'router'", ']', '[', "'name'", ']', ')', 'if', '(', 'other_updates_spec', '[', "'router'", ']', 'or', "'routes'", 'in', 'update_specification', '[', "'router'", ']', ')', ':', 'self', '.', '_process_other_router_updates', '(', 'e_context', ',', 'updated_router_db', ',', 'other_updates_spec', ')', '# Ensure we get latest state from DB', 'context', '.', 'session', '.', 'expire', '(', 'updated_router_db', ')', 'self', '.', '_extend_router_dict_ha', '(', 'updated_router', ',', 'updated_router_db', ')']
To be called in update_router() AFTER router has been updated in DB.
['To', 'be', 'called', 'in', 'update_router', '()', 'AFTER', 'router', 'has', 'been', 'updated', 'in', 'DB', '.']
train
https://github.com/openstack/networking-cisco/blob/aa58a30aec25b86f9aa5952b0863045975debfa9/networking_cisco/plugins/cisco/db/l3/ha_db.py#L336-L425
6,279
HewlettPackard/python-hpOneView
hpOneView/resources/networking/ethernet_networks.py
EthernetNetworks.get_associated_uplink_groups
def get_associated_uplink_groups(self): """ Gets the uplink sets which are using an Ethernet network. Returns: list: URIs of the associated uplink sets. """ uri = "{}/associatedUplinkGroups".format(self.data['uri']) return self._helper.do_get(uri)
python
def get_associated_uplink_groups(self): """ Gets the uplink sets which are using an Ethernet network. Returns: list: URIs of the associated uplink sets. """ uri = "{}/associatedUplinkGroups".format(self.data['uri']) return self._helper.do_get(uri)
['def', 'get_associated_uplink_groups', '(', 'self', ')', ':', 'uri', '=', '"{}/associatedUplinkGroups"', '.', 'format', '(', 'self', '.', 'data', '[', "'uri'", ']', ')', 'return', 'self', '.', '_helper', '.', 'do_get', '(', 'uri', ')']
Gets the uplink sets which are using an Ethernet network. Returns: list: URIs of the associated uplink sets.
['Gets', 'the', 'uplink', 'sets', 'which', 'are', 'using', 'an', 'Ethernet', 'network', '.']
train
https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/ethernet_networks.py#L169-L178
6,280
boriel/zxbasic
arch/zx48k/optimizer.py
BasicBlock.get_first_non_label_instruction
def get_first_non_label_instruction(self): """ Returns the memcell of the given block, which is not a LABEL. """ for i in range(len(self)): if not self.mem[i].is_label: return self.mem[i] return None
python
def get_first_non_label_instruction(self): """ Returns the memcell of the given block, which is not a LABEL. """ for i in range(len(self)): if not self.mem[i].is_label: return self.mem[i] return None
['def', 'get_first_non_label_instruction', '(', 'self', ')', ':', 'for', 'i', 'in', 'range', '(', 'len', '(', 'self', ')', ')', ':', 'if', 'not', 'self', '.', 'mem', '[', 'i', ']', '.', 'is_label', ':', 'return', 'self', '.', 'mem', '[', 'i', ']', 'return', 'None']
Returns the memcell of the given block, which is not a LABEL.
['Returns', 'the', 'memcell', 'of', 'the', 'given', 'block', 'which', 'is', 'not', 'a', 'LABEL', '.']
train
https://github.com/boriel/zxbasic/blob/23b28db10e41117805bdb3c0f78543590853b132/arch/zx48k/optimizer.py#L1664-L1672
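A standalone sketch of the same scan, substituting a plain list of stand-in cells for zxbasic's BasicBlock memory:

```python
class Cell:
    def __init__(self, text, is_label):
        self.text = text
        self.is_label = is_label

def first_non_label(cells):
    # Return the first cell that is not a label, else None.
    for cell in cells:
        if not cell.is_label:
            return cell
    return None

block = [Cell('loop:', True), Cell('ld a, 1', False), Cell('ret', False)]
print(first_non_label(block).text)  # ld a, 1
```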
6,281
log2timeline/dfvfs
dfvfs/vfs/os_file_entry.py
OSDirectory._EntriesGenerator
def _EntriesGenerator(self): """Retrieves directory entries. Since a directory can contain a vast number of entries using a generator is more memory efficient. Yields: OSPathSpec: a path specification. Raises: AccessError: if the access to list the directory was denied. BackEndError: if the directory could not be listed. """ location = getattr(self.path_spec, 'location', None) if location is not None: # Windows will raise WindowsError, which can be caught by OSError, # if the process has not access to list the directory. The os.access() # function cannot be used since it will return true even when os.listdir() # fails. try: for directory_entry in os.listdir(location): directory_entry_location = self._file_system.JoinPath([ location, directory_entry]) yield os_path_spec.OSPathSpec(location=directory_entry_location) except OSError as exception: if exception.errno == errno.EACCES: exception_string = str(exception) if not isinstance(exception_string, py2to3.UNICODE_TYPE): exception_string = py2to3.UNICODE_TYPE( exception_string, errors='replace') raise errors.AccessError( 'Access to directory denied with error: {0!s}'.format( exception_string)) else: raise errors.BackEndError( 'Unable to list directory: {0:s} with error: {1!s}'.format( location, exception))
python
def _EntriesGenerator(self): """Retrieves directory entries. Since a directory can contain a vast number of entries using a generator is more memory efficient. Yields: OSPathSpec: a path specification. Raises: AccessError: if the access to list the directory was denied. BackEndError: if the directory could not be listed. """ location = getattr(self.path_spec, 'location', None) if location is not None: # Windows will raise WindowsError, which can be caught by OSError, # if the process has not access to list the directory. The os.access() # function cannot be used since it will return true even when os.listdir() # fails. try: for directory_entry in os.listdir(location): directory_entry_location = self._file_system.JoinPath([ location, directory_entry]) yield os_path_spec.OSPathSpec(location=directory_entry_location) except OSError as exception: if exception.errno == errno.EACCES: exception_string = str(exception) if not isinstance(exception_string, py2to3.UNICODE_TYPE): exception_string = py2to3.UNICODE_TYPE( exception_string, errors='replace') raise errors.AccessError( 'Access to directory denied with error: {0!s}'.format( exception_string)) else: raise errors.BackEndError( 'Unable to list directory: {0:s} with error: {1!s}'.format( location, exception))
['def', '_EntriesGenerator', '(', 'self', ')', ':', 'location', '=', 'getattr', '(', 'self', '.', 'path_spec', ',', "'location'", ',', 'None', ')', 'if', 'location', 'is', 'not', 'None', ':', '# Windows will raise WindowsError, which can be caught by OSError,', '# if the process has not access to list the directory. The os.access()', '# function cannot be used since it will return true even when os.listdir()', '# fails.', 'try', ':', 'for', 'directory_entry', 'in', 'os', '.', 'listdir', '(', 'location', ')', ':', 'directory_entry_location', '=', 'self', '.', '_file_system', '.', 'JoinPath', '(', '[', 'location', ',', 'directory_entry', ']', ')', 'yield', 'os_path_spec', '.', 'OSPathSpec', '(', 'location', '=', 'directory_entry_location', ')', 'except', 'OSError', 'as', 'exception', ':', 'if', 'exception', '.', 'errno', '==', 'errno', '.', 'EACCES', ':', 'exception_string', '=', 'str', '(', 'exception', ')', 'if', 'not', 'isinstance', '(', 'exception_string', ',', 'py2to3', '.', 'UNICODE_TYPE', ')', ':', 'exception_string', '=', 'py2to3', '.', 'UNICODE_TYPE', '(', 'exception_string', ',', 'errors', '=', "'replace'", ')', 'raise', 'errors', '.', 'AccessError', '(', "'Access to directory denied with error: {0!s}'", '.', 'format', '(', 'exception_string', ')', ')', 'else', ':', 'raise', 'errors', '.', 'BackEndError', '(', "'Unable to list directory: {0:s} with error: {1!s}'", '.', 'format', '(', 'location', ',', 'exception', ')', ')']
Retrieves directory entries. Since a directory can contain a vast number of entries using a generator is more memory efficient. Yields: OSPathSpec: a path specification. Raises: AccessError: if the access to list the directory was denied. BackEndError: if the directory could not be listed.
['Retrieves', 'directory', 'entries', '.']
train
https://github.com/log2timeline/dfvfs/blob/2b3ccd115f9901d89f383397d4a1376a873c83c4/dfvfs/vfs/os_file_entry.py#L25-L63
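A standalone sketch of the listing pattern above: generate entries lazily and translate EACCES into a clearer error rather than letting the raw OSError escape. The built-in PermissionError stands in for dfvfs's AccessError.

```python
import errno
import os

def iter_directory(location):
    try:
        for name in os.listdir(location):
            yield os.path.join(location, name)
    except OSError as exc:
        if exc.errno == errno.EACCES:
            raise PermissionError(
                'Access to directory denied: {0:s}'.format(location))
        raise

for path in iter_directory('.'):
    print(path)
```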
6,282
SuperCowPowers/workbench
workbench/workers/meta.py
MetaData.execute
def execute(self, input_data): ''' This worker computes meta data for any file type. ''' raw_bytes = input_data['sample']['raw_bytes'] self.meta['md5'] = hashlib.md5(raw_bytes).hexdigest() self.meta['tags'] = input_data['tags']['tags'] self.meta['type_tag'] = input_data['sample']['type_tag'] with magic.Magic() as mag: self.meta['file_type'] = mag.id_buffer(raw_bytes[:1024]) with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as mag: self.meta['mime_type'] = mag.id_buffer(raw_bytes[:1024]) with magic.Magic(flags=magic.MAGIC_MIME_ENCODING) as mag: try: self.meta['encoding'] = mag.id_buffer(raw_bytes[:1024]) except magic.MagicError: self.meta['encoding'] = 'unknown' self.meta['file_size'] = len(raw_bytes) self.meta['filename'] = input_data['sample']['filename'] self.meta['import_time'] = input_data['sample']['import_time'] self.meta['customer'] = input_data['sample']['customer'] self.meta['length'] = input_data['sample']['length'] return self.meta
python
def execute(self, input_data): ''' This worker computes meta data for any file type. ''' raw_bytes = input_data['sample']['raw_bytes'] self.meta['md5'] = hashlib.md5(raw_bytes).hexdigest() self.meta['tags'] = input_data['tags']['tags'] self.meta['type_tag'] = input_data['sample']['type_tag'] with magic.Magic() as mag: self.meta['file_type'] = mag.id_buffer(raw_bytes[:1024]) with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as mag: self.meta['mime_type'] = mag.id_buffer(raw_bytes[:1024]) with magic.Magic(flags=magic.MAGIC_MIME_ENCODING) as mag: try: self.meta['encoding'] = mag.id_buffer(raw_bytes[:1024]) except magic.MagicError: self.meta['encoding'] = 'unknown' self.meta['file_size'] = len(raw_bytes) self.meta['filename'] = input_data['sample']['filename'] self.meta['import_time'] = input_data['sample']['import_time'] self.meta['customer'] = input_data['sample']['customer'] self.meta['length'] = input_data['sample']['length'] return self.meta
['def', 'execute', '(', 'self', ',', 'input_data', ')', ':', 'raw_bytes', '=', 'input_data', '[', "'sample'", ']', '[', "'raw_bytes'", ']', 'self', '.', 'meta', '[', "'md5'", ']', '=', 'hashlib', '.', 'md5', '(', 'raw_bytes', ')', '.', 'hexdigest', '(', ')', 'self', '.', 'meta', '[', "'tags'", ']', '=', 'input_data', '[', "'tags'", ']', '[', "'tags'", ']', 'self', '.', 'meta', '[', "'type_tag'", ']', '=', 'input_data', '[', "'sample'", ']', '[', "'type_tag'", ']', 'with', 'magic', '.', 'Magic', '(', ')', 'as', 'mag', ':', 'self', '.', 'meta', '[', "'file_type'", ']', '=', 'mag', '.', 'id_buffer', '(', 'raw_bytes', '[', ':', '1024', ']', ')', 'with', 'magic', '.', 'Magic', '(', 'flags', '=', 'magic', '.', 'MAGIC_MIME_TYPE', ')', 'as', 'mag', ':', 'self', '.', 'meta', '[', "'mime_type'", ']', '=', 'mag', '.', 'id_buffer', '(', 'raw_bytes', '[', ':', '1024', ']', ')', 'with', 'magic', '.', 'Magic', '(', 'flags', '=', 'magic', '.', 'MAGIC_MIME_ENCODING', ')', 'as', 'mag', ':', 'try', ':', 'self', '.', 'meta', '[', "'encoding'", ']', '=', 'mag', '.', 'id_buffer', '(', 'raw_bytes', '[', ':', '1024', ']', ')', 'except', 'magic', '.', 'MagicError', ':', 'self', '.', 'meta', '[', "'encoding'", ']', '=', "'unknown'", 'self', '.', 'meta', '[', "'file_size'", ']', '=', 'len', '(', 'raw_bytes', ')', 'self', '.', 'meta', '[', "'filename'", ']', '=', 'input_data', '[', "'sample'", ']', '[', "'filename'", ']', 'self', '.', 'meta', '[', "'import_time'", ']', '=', 'input_data', '[', "'sample'", ']', '[', "'import_time'", ']', 'self', '.', 'meta', '[', "'customer'", ']', '=', 'input_data', '[', "'sample'", ']', '[', "'customer'", ']', 'self', '.', 'meta', '[', "'length'", ']', '=', 'input_data', '[', "'sample'", ']', '[', "'length'", ']', 'return', 'self', '.', 'meta']
This worker computes meta data for any file type.
['This', 'worker', 'computes', 'meta', 'data', 'for', 'any', 'file', 'type', '.']
train
https://github.com/SuperCowPowers/workbench/blob/710232756dd717f734253315e3d0b33c9628dafb/workbench/workers/meta.py#L15-L36
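A condensed sketch of the same metadata pass, keeping the filemagic-style API (magic.Magic as a context manager with id_buffer) that the worker uses; only the buffer-derived fields are computed here, and the function name is this sketch's own.

```python
import hashlib

import magic  # the 'filemagic' package, matching the API used above

def basic_meta(raw_bytes):
    meta = {
        'md5': hashlib.md5(raw_bytes).hexdigest(),
        'file_size': len(raw_bytes),
    }
    with magic.Magic() as mag:
        meta['file_type'] = mag.id_buffer(raw_bytes[:1024])
    with magic.Magic(flags=magic.MAGIC_MIME_TYPE) as mag:
        meta['mime_type'] = mag.id_buffer(raw_bytes[:1024])
    return meta

print(basic_meta(b'%PDF-1.4 minimal buffer'))
```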
6,283
geophysics-ubonn/reda
lib/reda/eis/plots.py
sip_response.plot
def plot(self, filename, title=None, reciprocal=None, limits=None, dtype='rho', return_fig=False, **kwargs): """Standard plot of spectrum Parameters ---------- filename: string Output filename. Include the ending to specify the filetype (usually .pdf or .png) title: string, optional Title for the plot reciprocal: :class:`reda.eis.plots.sip_response`, optional If another :class:`reda.eis.plots.sip_response` object is provided here, use this as the reciprocal spectrum. limits: dict, optional A dictionary which contains plot limits. See code example below. dtype: string, optional Determines if the data plotted included geometric factors ('rho') or not ('r'). Default: 'rho' return_fig: bool, optional If True, then do not delete the figure object after saving to file and return the figure object. Default: False **kwargs: dict kwargs is piped through to the _plot function Returns ------- fig: :class:`matplotlib.Figure` The figure object. Only returned if return_fig is set to True Examples -------- >>> from reda.eis.plots import sip_response >>> import numpy as np >>> frequencies = np.array([ ... 1.00000000e-03, 1.77827941e-03, 3.16227766e-03, 5.62341325e-03, ... 1.00000000e-02, 1.77827941e-02, 3.16227766e-02, 5.62341325e-02, ... 1.00000000e-01, 1.77827941e-01, 3.16227766e-01, 5.62341325e-01, ... 1.00000000e+00, 1.77827941e+00, 3.16227766e+00, 5.62341325e+00, ... 1.00000000e+01, 1.77827941e+01, 3.16227766e+01, 5.62341325e+01, ... 1.00000000e+02, 1.77827941e+02, 3.16227766e+02, 5.62341325e+02, ... 1.00000000e+03]) >>> rcomplex = np.array([ ... 49.34369772-0.51828971j, 49.11781581-0.59248806j, ... 48.85819872-0.6331137j , 48.58762806-0.62835135j, ... 48.33331113-0.57965851j, 48.11599009-0.50083533j, ... 47.94405036-0.41005275j, 47.81528917-0.32210768j, ... 47.72215469-0.24543425j, 47.65607773-0.18297794j, ... 47.60962191-0.13433101j, 47.57706229-0.09755774j, ... 47.55424286-0.07031682j, 47.53822912-0.05041399j, ... 47.52697253-0.03601005j, 47.51904718-0.02565412j, ... 47.51345965-0.01824266j, 47.50951606-0.01295546j, ... 47.50673042-0.00919217j, 47.50476152-0.0065178j , ... 47.50336925-0.00461938j, 47.50238442-0.00327285j, ... 47.50168762-0.00231829j, 47.50119454-0.00164187j, ... 47.50084556-0.00116268j]) >>> spectrum = sip_response(frequencies=frequencies, rcomplex=rcomplex) >>> fig = spectrum.plot('spectrum.pdf', return_fig=True) """ fig, axes = self._plot( reciprocal=reciprocal, limits=limits, title=title, dtype=dtype, **kwargs ) fig.savefig(filename, dpi=300) if return_fig: return fig else: plt.close(fig)
python
def plot(self, filename, title=None, reciprocal=None, limits=None, dtype='rho', return_fig=False, **kwargs): """Standard plot of spectrum Parameters ---------- filename: string Output filename. Include the ending to specify the filetype (usually .pdf or .png) title: string, optional Title for the plot reciprocal: :class:`reda.eis.plots.sip_response`, optional If another :class:`reda.eis.plots.sip_response` object is provided here, use this as the reciprocal spectrum. limits: dict, optional A dictionary which contains plot limits. See code example below. dtype: string, optional Determines if the data plotted included geometric factors ('rho') or not ('r'). Default: 'rho' return_fig: bool, optional If True, then do not delete the figure object after saving to file and return the figure object. Default: False **kwargs: dict kwargs is piped through to the _plot function Returns ------- fig: :class:`matplotlib.Figure` The figure object. Only returned if return_fig is set to True Examples -------- >>> from reda.eis.plots import sip_response >>> import numpy as np >>> frequencies = np.array([ ... 1.00000000e-03, 1.77827941e-03, 3.16227766e-03, 5.62341325e-03, ... 1.00000000e-02, 1.77827941e-02, 3.16227766e-02, 5.62341325e-02, ... 1.00000000e-01, 1.77827941e-01, 3.16227766e-01, 5.62341325e-01, ... 1.00000000e+00, 1.77827941e+00, 3.16227766e+00, 5.62341325e+00, ... 1.00000000e+01, 1.77827941e+01, 3.16227766e+01, 5.62341325e+01, ... 1.00000000e+02, 1.77827941e+02, 3.16227766e+02, 5.62341325e+02, ... 1.00000000e+03]) >>> rcomplex = np.array([ ... 49.34369772-0.51828971j, 49.11781581-0.59248806j, ... 48.85819872-0.6331137j , 48.58762806-0.62835135j, ... 48.33331113-0.57965851j, 48.11599009-0.50083533j, ... 47.94405036-0.41005275j, 47.81528917-0.32210768j, ... 47.72215469-0.24543425j, 47.65607773-0.18297794j, ... 47.60962191-0.13433101j, 47.57706229-0.09755774j, ... 47.55424286-0.07031682j, 47.53822912-0.05041399j, ... 47.52697253-0.03601005j, 47.51904718-0.02565412j, ... 47.51345965-0.01824266j, 47.50951606-0.01295546j, ... 47.50673042-0.00919217j, 47.50476152-0.0065178j , ... 47.50336925-0.00461938j, 47.50238442-0.00327285j, ... 47.50168762-0.00231829j, 47.50119454-0.00164187j, ... 47.50084556-0.00116268j]) >>> spectrum = sip_response(frequencies=frequencies, rcomplex=rcomplex) >>> fig = spectrum.plot('spectrum.pdf', return_fig=True) """ fig, axes = self._plot( reciprocal=reciprocal, limits=limits, title=title, dtype=dtype, **kwargs ) fig.savefig(filename, dpi=300) if return_fig: return fig else: plt.close(fig)
['def', 'plot', '(', 'self', ',', 'filename', ',', 'title', '=', 'None', ',', 'reciprocal', '=', 'None', ',', 'limits', '=', 'None', ',', 'dtype', '=', "'rho'", ',', 'return_fig', '=', 'False', ',', '*', '*', 'kwargs', ')', ':', 'fig', ',', 'axes', '=', 'self', '.', '_plot', '(', 'reciprocal', '=', 'reciprocal', ',', 'limits', '=', 'limits', ',', 'title', '=', 'title', ',', 'dtype', '=', 'dtype', ',', '*', '*', 'kwargs', ')', 'fig', '.', 'savefig', '(', 'filename', ',', 'dpi', '=', '300', ')', 'if', 'return_fig', ':', 'return', 'fig', 'else', ':', 'plt', '.', 'close', '(', 'fig', ')']
Standard plot of spectrum Parameters ---------- filename: string Output filename. Include the ending to specify the filetype (usually .pdf or .png) title: string, optional Title for the plot reciprocal: :class:`reda.eis.plots.sip_response`, optional If another :class:`reda.eis.plots.sip_response` object is provided here, use this as the reciprocal spectrum. limits: dict, optional A dictionary which contains plot limits. See code example below. dtype: string, optional Determines if the data plotted included geometric factors ('rho') or not ('r'). Default: 'rho' return_fig: bool, optional If True, then do not delete the figure object after saving to file and return the figure object. Default: False **kwargs: dict kwargs is piped through to the _plot function Returns ------- fig: :class:`matplotlib.Figure` The figure object. Only returned if return_fig is set to True Examples -------- >>> from reda.eis.plots import sip_response >>> import numpy as np >>> frequencies = np.array([ ... 1.00000000e-03, 1.77827941e-03, 3.16227766e-03, 5.62341325e-03, ... 1.00000000e-02, 1.77827941e-02, 3.16227766e-02, 5.62341325e-02, ... 1.00000000e-01, 1.77827941e-01, 3.16227766e-01, 5.62341325e-01, ... 1.00000000e+00, 1.77827941e+00, 3.16227766e+00, 5.62341325e+00, ... 1.00000000e+01, 1.77827941e+01, 3.16227766e+01, 5.62341325e+01, ... 1.00000000e+02, 1.77827941e+02, 3.16227766e+02, 5.62341325e+02, ... 1.00000000e+03]) >>> rcomplex = np.array([ ... 49.34369772-0.51828971j, 49.11781581-0.59248806j, ... 48.85819872-0.6331137j , 48.58762806-0.62835135j, ... 48.33331113-0.57965851j, 48.11599009-0.50083533j, ... 47.94405036-0.41005275j, 47.81528917-0.32210768j, ... 47.72215469-0.24543425j, 47.65607773-0.18297794j, ... 47.60962191-0.13433101j, 47.57706229-0.09755774j, ... 47.55424286-0.07031682j, 47.53822912-0.05041399j, ... 47.52697253-0.03601005j, 47.51904718-0.02565412j, ... 47.51345965-0.01824266j, 47.50951606-0.01295546j, ... 47.50673042-0.00919217j, 47.50476152-0.0065178j , ... 47.50336925-0.00461938j, 47.50238442-0.00327285j, ... 47.50168762-0.00231829j, 47.50119454-0.00164187j, ... 47.50084556-0.00116268j]) >>> spectrum = sip_response(frequencies=frequencies, rcomplex=rcomplex) >>> fig = spectrum.plot('spectrum.pdf', return_fig=True)
['Standard', 'plot', 'of', 'spectrum']
train
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/eis/plots.py#L250-L321
6,284
theislab/anndata
anndata/base.py
_gen_keys_from_multicol_key
def _gen_keys_from_multicol_key(key_multicol, n_keys): """Generates single-column keys from multicolumn key.""" keys = [('{}{:03}of{:03}') .format(key_multicol, i+1, n_keys) for i in range(n_keys)] return keys
python
def _gen_keys_from_multicol_key(key_multicol, n_keys): """Generates single-column keys from multicolumn key.""" keys = [('{}{:03}of{:03}') .format(key_multicol, i+1, n_keys) for i in range(n_keys)] return keys
['def', '_gen_keys_from_multicol_key', '(', 'key_multicol', ',', 'n_keys', ')', ':', 'keys', '=', '[', '(', "'{}{:03}of{:03}'", ')', '.', 'format', '(', 'key_multicol', ',', 'i', '+', '1', ',', 'n_keys', ')', 'for', 'i', 'in', 'range', '(', 'n_keys', ')', ']', 'return', 'keys']
Generates single-column keys from multicolumn key.
['Generates', 'single', '-', 'column', 'keys', 'from', 'multicolumn', 'key', '.']
train
https://github.com/theislab/anndata/blob/34f4eb63710628fbc15e7050e5efcac1d7806062/anndata/base.py#L186-L190
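The expansion is plain string formatting, for example (the private helper is assumed importable from anndata.base, per the URL above):

```python
from anndata.base import _gen_keys_from_multicol_key

print(_gen_keys_from_multicol_key('score', 3))
# ['score001of003', 'score002of003', 'score003of003']
```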
6,285
singularityhub/singularity-cli
spython/main/execute.py
execute
def execute(self, image = None, command = None, app = None, writable = False, contain = False, bind = None, stream = False, nv = False, return_result=False): ''' execute: send a command to a container Parameters ========== image: full path to singularity image command: command to send to container app: if not None, execute a command in context of an app writable: This option makes the file system accessible as read/write contain: This option disables the automatic sharing of writable filesystems on your host bind: list or single string of bind paths. This option allows you to map directories on your host system to directories within your container using bind mounts nv: if True, load Nvidia Drivers in runtime (default False) return_result: if True, return entire json object with return code and message result (default is False) ''' from spython.utils import check_install check_install() cmd = self._init_command('exec') # nv option leverages any GPU cards if nv is True: cmd += ['--nv'] # If the image is given as a list, it's probably the command if isinstance(image, list): command = image image = None if command is not None: # No image provided, default to use the client's loaded image if image is None: image = self._get_uri() self.quiet = True # If an instance is provided, grab it's name if isinstance(image, self.instance): image = image.get_uri() # Does the user want to use bind paths option? if bind is not None: cmd += self._generate_bind_list(bind) # Does the user want to run an app? if app is not None: cmd = cmd + ['--app', app] sudo = False if writable is True: sudo = True if not isinstance(command, list): command = command.split(' ') cmd = cmd + [image] + command if stream is False: return self._run_command(cmd, sudo=sudo, return_result=return_result) return stream_command(cmd, sudo=sudo) bot.error('Please include a command (list) to execute.')
python
['def', 'execute', '(', 'self', ',', 'image', '=', 'None', ',', 'command', '=', 'None', ',', 'app', '=', 'None', ',', 'writable', '=', 'False', ',', 'contain', '=', 'False', ',', 'bind', '=', 'None', ',', 'stream', '=', 'False', ',', 'nv', '=', 'False', ',', 'return_result', '=', 'False', ')', ':', 'from', 'spython', '.', 'utils', 'import', 'check_install', 'check_install', '(', ')', 'cmd', '=', 'self', '.', '_init_command', '(', "'exec'", ')', '# nv option leverages any GPU cards', 'if', 'nv', 'is', 'True', ':', 'cmd', '+=', '[', "'--nv'", ']', "# If the image is given as a list, it's probably the command", 'if', 'isinstance', '(', 'image', ',', 'list', ')', ':', 'command', '=', 'image', 'image', '=', 'None', 'if', 'command', 'is', 'not', 'None', ':', "# No image provided, default to use the client's loaded image", 'if', 'image', 'is', 'None', ':', 'image', '=', 'self', '.', '_get_uri', '(', ')', 'self', '.', 'quiet', '=', 'True', "# If an instance is provided, grab it's name", 'if', 'isinstance', '(', 'image', ',', 'self', '.', 'instance', ')', ':', 'image', '=', 'image', '.', 'get_uri', '(', ')', '# Does the user want to use bind paths option?', 'if', 'bind', 'is', 'not', 'None', ':', 'cmd', '+=', 'self', '.', '_generate_bind_list', '(', 'bind', ')', '# Does the user want to run an app?', 'if', 'app', 'is', 'not', 'None', ':', 'cmd', '=', 'cmd', '+', '[', "'--app'", ',', 'app', ']', 'sudo', '=', 'False', 'if', 'writable', 'is', 'True', ':', 'sudo', '=', 'True', 'if', 'not', 'isinstance', '(', 'command', ',', 'list', ')', ':', 'command', '=', 'command', '.', 'split', '(', "' '", ')', 'cmd', '=', 'cmd', '+', '[', 'image', ']', '+', 'command', 'if', 'stream', 'is', 'False', ':', 'return', 'self', '.', '_run_command', '(', 'cmd', ',', 'sudo', '=', 'sudo', ',', 'return_result', '=', 'return_result', ')', 'return', 'stream_command', '(', 'cmd', ',', 'sudo', '=', 'sudo', ')', 'bot', '.', 'error', '(', "'Please include a command (list) to execute.'", ')']
execute: send a command to a container Parameters ========== image: full path to singularity image command: command to send to container app: if not None, execute a command in context of an app writable: This option makes the file system accessible as read/write contain: This option disables the automatic sharing of writable filesystems on your host bind: list or single string of bind paths. This option allows you to map directories on your host system to directories within your container using bind mounts nv: if True, load Nvidia Drivers in runtime (default False) return_result: if True, return entire json object with return code and message result (default is False)
['execute', ':', 'send', 'a', 'command', 'to', 'a', 'container', 'Parameters', '==========']
train
https://github.com/singularityhub/singularity-cli/blob/cb36b4504812ca87e29c6a40b222a545d1865799/spython/main/execute.py#L15-L92
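A minimal usage sketch for execute(), assuming spython and a Singularity install are available; the image URI is an example:

from spython.main import Client

# Send a simple command into a container; return_result=True asks for
# the full result (return code plus message) instead of raw output.
result = Client.execute('docker://busybox', ['echo', 'hello'], return_result=True)
print(result)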
6,286
mapbox/mapbox-sdk-py
mapbox/services/matrix.py
DirectionsMatrix.matrix
def matrix(self, coordinates, profile='mapbox/driving',
           sources=None, destinations=None, annotations=None):
    """Request a directions matrix for trips between coordinates

    In the default case, the matrix returns a symmetric matrix, using
    all input coordinates as sources and destinations. You may also
    generate an asymmetric matrix, with only some coordinates as
    sources or destinations:

    Parameters
    ----------
    coordinates : sequence
        A sequence of coordinates, which may be represented as
        GeoJSON features, GeoJSON geometries, or (longitude, latitude)
        pairs.
    profile : str
        The trip travel mode. Valid modes are listed in the class's
        valid_profiles attribute.
    annotations : list
        Used to specify the resulting matrices. Possible values are
        listed in the class's valid_annotations attribute.
    sources : list
        Indices of source coordinates to include in the matrix.
        Default is all coordinates.
    destinations : list
        Indices of destination coordinates to include in the matrix.
        Default is all coordinates.

    Returns
    -------
    requests.Response
        Note: the directions matrix itself is obtained by calling the
        response's json() method. The resulting mapping has a code,
        the destinations and the sources, and depending on the
        annotations specified, it can also contain a durations matrix,
        a distances matrix or both of them (by default, only the
        durations matrix is provided).

        code : str
            Status of the response
        sources : list
            Results of snapping selected coordinates to the nearest
            addresses.
        destinations : list
            Results of snapping selected coordinates to the nearest
            addresses.
        durations : list
            An array of arrays representing the matrix in row-major
            order. durations[i][j] gives the travel time from the i-th
            source to the j-th destination. All values are in seconds.
            The duration between the same coordinate is always 0. If
            a duration can not be found, the result is null.
        distances : list
            An array of arrays representing the matrix in row-major
            order. distances[i][j] gives the distance from the i-th
            source to the j-th destination. All values are in meters.
            The distance between the same coordinate is always 0. If
            a distance can not be found, the result is null.
    """
    annotations = self._validate_annotations(annotations)
    profile = self._validate_profile(profile)
    coords = encode_waypoints(coordinates)

    params = self._make_query(sources, destinations)

    if annotations is not None:
        params.update({'annotations': ','.join(annotations)})

    uri = '{0}/{1}/{2}'.format(self.baseuri, profile, coords)

    res = self.session.get(uri, params=params)
    self.handle_http_error(res)
    return res
python
['def', 'matrix', '(', 'self', ',', 'coordinates', ',', 'profile', '=', "'mapbox/driving'", ',', 'sources', '=', 'None', ',', 'destinations', '=', 'None', ',', 'annotations', '=', 'None', ')', ':', 'annotations', '=', 'self', '.', '_validate_annotations', '(', 'annotations', ')', 'profile', '=', 'self', '.', '_validate_profile', '(', 'profile', ')', 'coords', '=', 'encode_waypoints', '(', 'coordinates', ')', 'params', '=', 'self', '.', '_make_query', '(', 'sources', ',', 'destinations', ')', 'if', 'annotations', 'is', 'not', 'None', ':', 'params', '.', 'update', '(', '{', "'annotations'", ':', "','", '.', 'join', '(', 'annotations', ')', '}', ')', 'uri', '=', "'{0}/{1}/{2}'", '.', 'format', '(', 'self', '.', 'baseuri', ',', 'profile', ',', 'coords', ')', 'res', '=', 'self', '.', 'session', '.', 'get', '(', 'uri', ',', 'params', '=', 'params', ')', 'self', '.', 'handle_http_error', '(', 'res', ')', 'return', 'res']
Request a directions matrix for trips between coordinates In the default case, the matrix returns a symmetric matrix, using all input coordinates as sources and destinations. You may also generate an asymmetric matrix, with only some coordinates as sources or destinations: Parameters ---------- coordinates : sequence A sequence of coordinates, which may be represented as GeoJSON features, GeoJSON geometries, or (longitude, latitude) pairs. profile : str The trip travel mode. Valid modes are listed in the class's valid_profiles attribute. annotations : list Used to specify the resulting matrices. Possible values are listed in the class's valid_annotations attribute. sources : list Indices of source coordinates to include in the matrix. Default is all coordinates. destinations : list Indices of destination coordinates to include in the matrix. Default is all coordinates. Returns ------- requests.Response Note: the directions matrix itself is obtained by calling the response's json() method. The resulting mapping has a code, the destinations and the sources, and depending on the annotations specified, it can also contain a durations matrix, a distances matrix or both of them (by default, only the durations matrix is provided). code : str Status of the response sources : list Results of snapping selected coordinates to the nearest addresses. destinations : list Results of snapping selected coordinates to the nearest addresses. durations : list An array of arrays representing the matrix in row-major order. durations[i][j] gives the travel time from the i-th source to the j-th destination. All values are in seconds. The duration between the same coordinate is always 0. If a duration can not be found, the result is null. distances : list An array of arrays representing the matrix in row-major order. distances[i][j] gives the distance from the i-th source to the j-th destination. All values are in meters. The distance between the same coordinate is always 0. If a distance can not be found, the result is null.
['Request', 'a', 'directions', 'matrix', 'for', 'trips', 'between', 'coordinates']
train
https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/matrix.py#L65-L138
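A usage sketch for the matrix endpoint, assuming a valid Mapbox access token; the token and coordinates below are placeholders:

from mapbox import DirectionsMatrix

service = DirectionsMatrix(access_token='pk.placeholder')
coords = [(-122.42, 37.78), (-122.45, 37.91), (-122.48, 37.73)]
response = service.matrix(coords, profile='mapbox/driving')
durations = response.json()['durations']  # row-major travel times in seconds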
6,287
Kopachris/seshet
seshet/config.py
build_bot
def build_bot(config_file=None):
    """Parse a config and return a SeshetBot instance.
    After, the bot can be run simply by calling .connect() and then
    .start()

    Optional arguments:
        config_file - valid file path or ConfigParser instance
            If config_file is None, will read default config defined
            in this module.
    """
    from . import bot

    config = ConfigParser(interpolation=None)
    if config_file is None:
        config.read_string(default_config)
    elif isinstance(config_file, ConfigParser):
        config = config_file
    else:
        config.read(config_file)

    # shorter names
    db_conf = config['database']
    conn_conf = config['connection']
    client_conf = config['client']
    log_conf = config['logging']
    verbosity = config['debug']['verbosity'].lower() or 'notset'
    debug_file = config['debug']['file'] or None
    # add more as they're used

    if db_conf.getboolean('use_db'):
        db = DAL(db_conf['db_string'])
        build_db_tables(db)
        log_file = None
        log_fmts = {}
    else:
        db = None
        log_file = log_conf.pop('file')
        log_fmts = dict(log_conf)

    # debug logging
    debug_lvls = {'notset': 0,
                  'debug': 10,
                  'info': 20,
                  'warning': 30,
                  'error': 40,
                  'critical': 50,
                  }
    lvl = int(debug_lvls[verbosity])

    seshetbot = bot.SeshetBot(client_conf['nickname'], db, debug_file, lvl)

    # connection info for connect()
    seshetbot.default_host = conn_conf['server']
    seshetbot.default_port = int(conn_conf['port'])
    seshetbot.default_channel = conn_conf['channels'].split(',')
    seshetbot.default_use_ssl = conn_conf.getboolean('ssl')

    # client info
    seshetbot.user = client_conf['user']
    seshetbot.real_name = client_conf['realname']

    # logging info
    seshetbot.log_file = log_file
    seshetbot.log_formats = log_fmts
    seshetbot.locale = dict(config['locale'])

    return seshetbot
python
['def', 'build_bot', '(', 'config_file', '=', 'None', ')', ':', 'from', '.', 'import', 'bot', 'config', '=', 'ConfigParser', '(', 'interpolation', '=', 'None', ')', 'if', 'config_file', 'is', 'None', ':', 'config', '.', 'read_string', '(', 'default_config', ')', 'elif', 'isinstance', '(', 'config_file', ',', 'ConfigParser', ')', ':', 'config', '=', 'config_file', 'else', ':', 'config', '.', 'read', '(', 'config_file', ')', '# shorter names', 'db_conf', '=', 'config', '[', "'database'", ']', 'conn_conf', '=', 'config', '[', "'connection'", ']', 'client_conf', '=', 'config', '[', "'client'", ']', 'log_conf', '=', 'config', '[', "'logging'", ']', 'verbosity', '=', 'config', '[', "'debug'", ']', '[', "'verbosity'", ']', '.', 'lower', '(', ')', 'or', "'notset'", 'debug_file', '=', 'config', '[', "'debug'", ']', '[', "'file'", ']', 'or', 'None', "# add more as they're used", 'if', 'db_conf', '.', 'getboolean', '(', "'use_db'", ')', ':', 'db', '=', 'DAL', '(', 'db_conf', '[', "'db_string'", ']', ')', 'build_db_tables', '(', 'db', ')', 'log_file', '=', 'None', 'log_fmts', '=', '{', '}', 'else', ':', 'db', '=', 'None', 'log_file', '=', 'log_conf', '.', 'pop', '(', "'file'", ')', 'log_fmts', '=', 'dict', '(', 'log_conf', ')', '# debug logging', 'debug_lvls', '=', '{', "'notset'", ':', '0', ',', "'debug'", ':', '10', ',', "'info'", ':', '20', ',', "'warning'", ':', '30', ',', "'error'", ':', '40', ',', "'critical'", ':', '50', ',', '}', 'lvl', '=', 'int', '(', 'debug_lvls', '[', 'verbosity', ']', ')', 'seshetbot', '=', 'bot', '.', 'SeshetBot', '(', 'client_conf', '[', "'nickname'", ']', ',', 'db', ',', 'debug_file', ',', 'lvl', ')', '# connection info for connect()', 'seshetbot', '.', 'default_host', '=', 'conn_conf', '[', "'server'", ']', 'seshetbot', '.', 'default_port', '=', 'int', '(', 'conn_conf', '[', "'port'", ']', ')', 'seshetbot', '.', 'default_channel', '=', 'conn_conf', '[', "'channels'", ']', '.', 'split', '(', "','", ')', 'seshetbot', '.', 'default_use_ssl', '=', 'conn_conf', '.', 'getboolean', '(', "'ssl'", ')', '# client info', 'seshetbot', '.', 'user', '=', 'client_conf', '[', "'user'", ']', 'seshetbot', '.', 'real_name', '=', 'client_conf', '[', "'realname'", ']', '# logging info', 'seshetbot', '.', 'log_file', '=', 'log_file', 'seshetbot', '.', 'log_formats', '=', 'log_fmts', 'seshetbot', '.', 'locale', '=', 'dict', '(', 'config', '[', "'locale'", ']', ')', 'return', 'seshetbot']
Parse a config and return a SeshetBot instance. After, the bot can be run simply by calling .connect() and then .start() Optional arguments: config_file - valid file path or ConfigParser instance If config_file is None, will read default config defined in this module.
['Parse', 'a', 'config', 'and', 'return', 'a', 'SeshetBot', 'instance', '.', 'After', 'the', 'bot', 'can', 'be', 'run', 'simply', 'by', 'calling', '.', 'connect', '()', 'and', 'then', '.', 'start', '()', 'Optional', 'arguments', ':', 'config_file', '-', 'valid', 'file', 'path', 'or', 'ConfigParser', 'instance', 'If', 'config_file', 'is', 'None', 'will', 'read', 'default', 'config', 'defined', 'in', 'this', 'module', '.']
train
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/config.py#L156-L222
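Per the docstring, a built bot only needs connect() and start(); a sketch assuming the import path seshet.config (per the record's file path) and an example config filename:

from seshet.config import build_bot

seshetbot = build_bot('seshet.config')  # or build_bot() for the default config
seshetbot.connect()
seshetbot.start()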
6,288
Erotemic/utool
utool/util_list.py
scalar_input_map
def scalar_input_map(func, input_):
    """ Map like function

    Args:
        func: function to apply
        input_ : either an iterable or scalar value

    Returns:
        If ``input_`` is iterable this function behaves like map
        otherwise applies func to ``input_``
    """
    if util_iter.isiterable(input_):
        return list(map(func, input_))
    else:
        return func(input_)
python
['def', 'scalar_input_map', '(', 'func', ',', 'input_', ')', ':', 'if', 'util_iter', '.', 'isiterable', '(', 'input_', ')', ':', 'return', 'list', '(', 'map', '(', 'func', ',', 'input_', ')', ')', 'else', ':', 'return', 'func', '(', 'input_', ')']
Map like function Args: func: function to apply input_ : either an iterable or scalar value Returns: If ``input_`` is iterable this function behaves like map otherwise applies func to ``input_``
['Map', 'like', 'function']
train
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_list.py#L2061-L2076
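The scalar-versus-iterable dispatch is easiest to see side by side; the import path is assumed from the record's file path:

from utool.util_list import scalar_input_map

print(scalar_input_map(abs, -3))        # 3: scalar, func applied directly
print(scalar_input_map(abs, [-1, -2]))  # [1, 2]: iterable, behaves like map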
6,289
GNS3/gns3-server
gns3server/compute/port_manager.py
PortManager._check_port
def _check_port(host, port, socket_type):
    """
    Check if a port is available and raise an OSError if port is not available

    :returns: boolean
    """
    if socket_type == "UDP":
        socket_type = socket.SOCK_DGRAM
    else:
        socket_type = socket.SOCK_STREAM

    for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket_type, 0, socket.AI_PASSIVE):
        af, socktype, proto, _, sa = res
        with socket.socket(af, socktype, proto) as s:
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            s.bind(sa)  # the port is available if bind is a success
    return True
python
['def', '_check_port', '(', 'host', ',', 'port', ',', 'socket_type', ')', ':', 'if', 'socket_type', '==', '"UDP"', ':', 'socket_type', '=', 'socket', '.', 'SOCK_DGRAM', 'else', ':', 'socket_type', '=', 'socket', '.', 'SOCK_STREAM', 'for', 'res', 'in', 'socket', '.', 'getaddrinfo', '(', 'host', ',', 'port', ',', 'socket', '.', 'AF_UNSPEC', ',', 'socket_type', ',', '0', ',', 'socket', '.', 'AI_PASSIVE', ')', ':', 'af', ',', 'socktype', ',', 'proto', ',', '_', ',', 'sa', '=', 'res', 'with', 'socket', '.', 'socket', '(', 'af', ',', 'socktype', ',', 'proto', ')', 'as', 's', ':', 's', '.', 'setsockopt', '(', 'socket', '.', 'SOL_SOCKET', ',', 'socket', '.', 'SO_REUSEADDR', ',', '1', ')', 's', '.', 'bind', '(', 'sa', ')', '# the port is available if bind is a success', 'return', 'True']
Check if a port is available and raise an OSError if port is not available :returns: boolean
['Check', 'if', 'an', 'a', 'port', 'is', 'available', 'and', 'raise', 'an', 'OSError', 'if', 'port', 'is', 'not', 'available']
train
https://github.com/GNS3/gns3-server/blob/a221678448fb5d24e977ef562f81d56aacc89ab1/gns3server/compute/port_manager.py#L168-L184
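A standalone sketch of the same bind-probe idea, independent of GNS3; TCP only, and the port below is an arbitrary example:

import socket

def port_is_free(host, port):
    """Return True if a TCP bind on (host, port) succeeds, else False."""
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            s.bind((host, port))
        return True
    except OSError:
        return False

print(port_is_free('127.0.0.1', 8000))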
6,290
oauthlib/oauthlib
oauthlib/oauth1/rfc5849/__init__.py
Client.get_oauth_params
def get_oauth_params(self, request):
    """Get the basic OAuth parameters to be used in generating a
    signature.
    """
    nonce = (generate_nonce()
             if self.nonce is None else self.nonce)
    timestamp = (generate_timestamp()
                 if self.timestamp is None else self.timestamp)
    params = [
        ('oauth_nonce', nonce),
        ('oauth_timestamp', timestamp),
        ('oauth_version', '1.0'),
        ('oauth_signature_method', self.signature_method),
        ('oauth_consumer_key', self.client_key),
    ]
    if self.resource_owner_key:
        params.append(('oauth_token', self.resource_owner_key))
    if self.callback_uri:
        params.append(('oauth_callback', self.callback_uri))
    if self.verifier:
        params.append(('oauth_verifier', self.verifier))

    # providing body hash for requests other than x-www-form-urlencoded
    # as described in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-4.1.1
    # 4.1.1. When to include the body hash
    #    * [...] MUST NOT include an oauth_body_hash parameter on requests with form-encoded request bodies
    #    * [...] SHOULD include the oauth_body_hash parameter on all other requests.
    # Note that SHA-1 is vulnerable. The spec acknowledges that in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-6.2
    # At this time, no further effort has been made to replace SHA-1 for the OAuth Request Body Hash extension.
    content_type = request.headers.get('Content-Type', None)
    content_type_eligible = content_type and content_type.find('application/x-www-form-urlencoded') < 0
    if request.body is not None and content_type_eligible:
        params.append(('oauth_body_hash',
                       base64.b64encode(hashlib.sha1(request.body.encode('utf-8')).digest()).decode('utf-8')))

    return params
python
['def', 'get_oauth_params', '(', 'self', ',', 'request', ')', ':', 'nonce', '=', '(', 'generate_nonce', '(', ')', 'if', 'self', '.', 'nonce', 'is', 'None', 'else', 'self', '.', 'nonce', ')', 'timestamp', '=', '(', 'generate_timestamp', '(', ')', 'if', 'self', '.', 'timestamp', 'is', 'None', 'else', 'self', '.', 'timestamp', ')', 'params', '=', '[', '(', "'oauth_nonce'", ',', 'nonce', ')', ',', '(', "'oauth_timestamp'", ',', 'timestamp', ')', ',', '(', "'oauth_version'", ',', "'1.0'", ')', ',', '(', "'oauth_signature_method'", ',', 'self', '.', 'signature_method', ')', ',', '(', "'oauth_consumer_key'", ',', 'self', '.', 'client_key', ')', ',', ']', 'if', 'self', '.', 'resource_owner_key', ':', 'params', '.', 'append', '(', '(', "'oauth_token'", ',', 'self', '.', 'resource_owner_key', ')', ')', 'if', 'self', '.', 'callback_uri', ':', 'params', '.', 'append', '(', '(', "'oauth_callback'", ',', 'self', '.', 'callback_uri', ')', ')', 'if', 'self', '.', 'verifier', ':', 'params', '.', 'append', '(', '(', "'oauth_verifier'", ',', 'self', '.', 'verifier', ')', ')', '# providing body hash for requests other than x-www-form-urlencoded', '# as described in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-4.1.1', '# 4.1.1. When to include the body hash', '# * [...] MUST NOT include an oauth_body_hash parameter on requests with form-encoded request bodies', '# * [...] SHOULD include the oauth_body_hash parameter on all other requests.', '# Note that SHA-1 is vulnerable. The spec acknowledges that in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-6.2', '# At this time, no further effort has been made to replace SHA-1 for the OAuth Request Body Hash extension.', 'content_type', '=', 'request', '.', 'headers', '.', 'get', '(', "'Content-Type'", ',', 'None', ')', 'content_type_eligible', '=', 'content_type', 'and', 'content_type', '.', 'find', '(', "'application/x-www-form-urlencoded'", ')', '<', '0', 'if', 'request', '.', 'body', 'is', 'not', 'None', 'and', 'content_type_eligible', ':', 'params', '.', 'append', '(', '(', "'oauth_body_hash'", ',', 'base64', '.', 'b64encode', '(', 'hashlib', '.', 'sha1', '(', 'request', '.', 'body', '.', 'encode', '(', "'utf-8'", ')', ')', '.', 'digest', '(', ')', ')', '.', 'decode', '(', "'utf-8'", ')', ')', ')', 'return', 'params']
Get the basic OAuth parameters to be used in generating a signature.
['Get', 'the', 'basic', 'OAuth', 'parameters', 'to', 'be', 'used', 'in', 'generating', 'a', 'signature', '.']
train
https://github.com/oauthlib/oauthlib/blob/30321dd3c0ca784d3508a1970cf90d9f76835c79/oauthlib/oauth1/rfc5849/__init__.py#L153-L186
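The oauth_body_hash computation at the end of the method reduces to three standard-library calls; a standalone check with an example body:

import base64
import hashlib

body = '{"hello": "world"}'  # example non-form-encoded request body
body_hash = base64.b64encode(hashlib.sha1(body.encode('utf-8')).digest()).decode('utf-8')
print(body_hash)  # the value that would be appended as oauth_body_hash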
6,291
guaix-ucm/pyemir
emirdrp/processing/bardetect.py
position_half_h
def position_half_h(pslit, cpix, backw=4):
    """Find the position where the value is half of the peak"""

    # Find the first peak to the right of cpix
    next_peak = simple_prot(pslit, cpix)

    if next_peak is None:
        raise ValueError

    dis_peak = next_peak - cpix

    wpos2 = cpix - dis_peak
    wpos1 = wpos2 - backw

    # Compute background in a window of width backw
    # in a position symmetrical to the peak
    # around cpix
    left_background = pslit[wpos1:wpos2].min()

    # height of the peak
    height = pslit[next_peak] - left_background

    half_height = left_background + 0.5 * height

    # Position at half peak, linear interpolation
    vv = pslit[wpos1:next_peak+1] - half_height

    res1, = numpy.nonzero(numpy.diff(vv > 0))
    i1 = res1[0]

    xint = wpos1 + i1 + (0 - vv[i1]) / (vv[i1+1] - vv[i1])

    return xint, next_peak, wpos1, wpos2, left_background, half_height
python
['def', 'position_half_h', '(', 'pslit', ',', 'cpix', ',', 'backw', '=', '4', ')', ':', '# Find the first peak to the right of cpix', 'next_peak', '=', 'simple_prot', '(', 'pslit', ',', 'cpix', ')', 'if', 'next_peak', 'is', 'None', ':', 'raise', 'ValueError', 'dis_peak', '=', 'next_peak', '-', 'cpix', 'wpos2', '=', 'cpix', '-', 'dis_peak', 'wpos1', '=', 'wpos2', '-', 'backw', '# Compute background in a window of width backw', '# in a position simetrical to the peak', '# around cpix', 'left_background', '=', 'pslit', '[', 'wpos1', ':', 'wpos2', ']', '.', 'min', '(', ')', '# height of the peak', 'height', '=', 'pslit', '[', 'next_peak', ']', '-', 'left_background', 'half_height', '=', 'left_background', '+', '0.5', '*', 'height', '# Position at halg peak, linear interpolation', 'vv', '=', 'pslit', '[', 'wpos1', ':', 'next_peak', '+', '1', ']', '-', 'half_height', 'res1', ',', '=', 'numpy', '.', 'nonzero', '(', 'numpy', '.', 'diff', '(', 'vv', '>', '0', ')', ')', 'i1', '=', 'res1', '[', '0', ']', 'xint', '=', 'wpos1', '+', 'i1', '+', '(', '0', '-', 'vv', '[', 'i1', ']', ')', '/', '(', 'vv', '[', 'i1', '+', '1', ']', '-', 'vv', '[', 'i1', ']', ')', 'return', 'xint', ',', 'next_peak', ',', 'wpos1', ',', 'wpos2', ',', 'left_background', ',', 'half_height']
Find the position where the value is half of the peak
['Find', 'the', 'position', 'where', 'the', 'value', 'is', 'half', 'of', 'the', 'peak']
train
https://github.com/guaix-ucm/pyemir/blob/fef6bbabcb13f80123cafd1800a0f508a3c21702/emirdrp/processing/bardetect.py#L123-L155
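The closing step is plain linear interpolation across the half-height crossing; a self-contained numpy sketch on synthetic data:

import numpy

vv = numpy.array([-3.0, -1.0, 1.0, 4.0])   # signal minus half_height
res1, = numpy.nonzero(numpy.diff(vv > 0))  # sample index just before the sign change
i1 = res1[0]
xint = i1 + (0 - vv[i1]) / (vv[i1 + 1] - vv[i1])
print(xint)  # 1.5: the crossing sits halfway between samples 1 and 2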
6,292
melizalab/arf
arf.py
convert_timestamp
def convert_timestamp(obj):
    """Make an ARF timestamp from an object.

    Argument can be a datetime.datetime object, a time.struct_time, an
    integer, a float, or a tuple of integers. The returned value is a
    numpy array with the integer number of seconds since the Epoch and
    any additional microseconds.

    Note that because floating point values are approximate, the
    conversion between float and integer tuple may not be reversible.
    """
    import numbers
    from datetime import datetime
    from time import mktime, struct_time
    from numpy import zeros

    out = zeros(2, dtype='int64')
    if isinstance(obj, datetime):
        out[0] = mktime(obj.timetuple())
        out[1] = obj.microsecond
    elif isinstance(obj, struct_time):
        out[0] = mktime(obj)
    elif isinstance(obj, numbers.Integral):
        out[0] = obj
    elif isinstance(obj, numbers.Real):
        out[0] = obj
        out[1] = (obj - out[0]) * 1e6
    else:
        try:
            out[:2] = obj[:2]
        except:
            raise TypeError("unable to convert %s to timestamp" % obj)
    return out
python
['def', 'convert_timestamp', '(', 'obj', ')', ':', 'import', 'numbers', 'from', 'datetime', 'import', 'datetime', 'from', 'time', 'import', 'mktime', ',', 'struct_time', 'from', 'numpy', 'import', 'zeros', 'out', '=', 'zeros', '(', '2', ',', 'dtype', '=', "'int64'", ')', 'if', 'isinstance', '(', 'obj', ',', 'datetime', ')', ':', 'out', '[', '0', ']', '=', 'mktime', '(', 'obj', '.', 'timetuple', '(', ')', ')', 'out', '[', '1', ']', '=', 'obj', '.', 'microsecond', 'elif', 'isinstance', '(', 'obj', ',', 'struct_time', ')', ':', 'out', '[', '0', ']', '=', 'mktime', '(', 'obj', ')', 'elif', 'isinstance', '(', 'obj', ',', 'numbers', '.', 'Integral', ')', ':', 'out', '[', '0', ']', '=', 'obj', 'elif', 'isinstance', '(', 'obj', ',', 'numbers', '.', 'Real', ')', ':', 'out', '[', '0', ']', '=', 'obj', 'out', '[', '1', ']', '=', '(', 'obj', '-', 'out', '[', '0', ']', ')', '*', '1e6', 'else', ':', 'try', ':', 'out', '[', ':', '2', ']', '=', 'obj', '[', ':', '2', ']', 'except', ':', 'raise', 'TypeError', '(', '"unable to convert %s to timestamp"', '%', 'obj', ')', 'return', 'out']
Make an ARF timestamp from an object. Argument can be a datetime.datetime object, a time.struct_time, an integer, a float, or a tuple of integers. The returned value is a numpy array with the integer number of seconds since the Epoch and any additional microseconds. Note that because floating point values are approximate, the conversion between float and integer tuple may not be reversible.
['Make', 'an', 'ARF', 'timestamp', 'from', 'an', 'object', '.']
train
https://github.com/melizalab/arf/blob/71746d9edbe7993a783d4acaf84b9631f3230283/arf.py#L280-L313
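The float and tuple branches are the easiest to check because they do not depend on the local timezone; the import assumes the arf module is on the path:

from arf import convert_timestamp

print(convert_timestamp(1.5))      # array([1, 500000]): whole seconds plus microseconds
print(convert_timestamp((42, 7)))  # array([42, 7]): integer tuple passes through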
6,293
O365/python-o365
O365/utils/token.py
BaseTokenBackend.token
def token(self, value):
    """ Setter to convert any token dict into Token instance """
    if value and not isinstance(value, Token):
        value = Token(value)
    self._token = value
python
['def', 'token', '(', 'self', ',', 'value', ')', ':', 'if', 'value', 'and', 'not', 'isinstance', '(', 'value', ',', 'Token', ')', ':', 'value', '=', 'Token', '(', 'value', ')', 'self', '.', '_token', '=', 'value']
Setter to convert any token dict into Token instance
['Setter', 'to', 'convert', 'any', 'token', 'dict', 'into', 'Token', 'instance']
train
https://github.com/O365/python-o365/blob/02a71cf3775cc6a3c042e003365d6a07c8c75a73/O365/utils/token.py#L63-L67
6,294
ladybug-tools/ladybug
ladybug/psychrometrics.py
db_temp_from_wb_rh
def db_temp_from_wb_rh(wet_bulb, rel_humid, b_press=101325):
    """Dry Bulb Temperature (C) and humidity_ratio at wet_bulb (C),
    rel_humid (%) and Pressure b_press (Pa).

    Formula is only valid for rel_humid == 0 or rel_humid == 100.
    """
    assert rel_humid == 0 or rel_humid == 100, 'formula is only valid for' \
        ' rel_humid == 0 or rel_humid == 100'
    humidity_ratio = humid_ratio_from_db_rh(wet_bulb, rel_humid, b_press)
    hr_saturation = humid_ratio_from_db_rh(wet_bulb, 100, b_press)
    db_temp = wet_bulb + (((hr_saturation - humidity_ratio) * 2260000) / 1005)
    return db_temp, humidity_ratio
python
['def', 'db_temp_from_wb_rh', '(', 'wet_bulb', ',', 'rel_humid', ',', 'b_press', '=', '101325', ')', ':', 'assert', 'rel_humid', '==', '0', 'or', 'rel_humid', '==', '100', ',', "'formula is only valid for'", "' rel_humid == 0 or rel_humid == 100'", 'humidity_ratio', '=', 'humid_ratio_from_db_rh', '(', 'wet_bulb', ',', 'rel_humid', ',', 'b_press', ')', 'hr_saturation', '=', 'humid_ratio_from_db_rh', '(', 'wet_bulb', ',', '100', ',', 'b_press', ')', 'db_temp', '=', 'wet_bulb', '+', '(', '(', '(', 'hr_saturation', '-', 'humidity_ratio', ')', '*', '2260000', ')', '/', '(', '1005', ')', ')', 'return', 'db_temp', ',', 'humidity_ratio']
Dry Bulb Temperature (C) and humidity_ratio at wet_bulb (C), rel_humid (%) and Pressure b_press (Pa). Formula is only valid for rel_humid == 0 or rel_humid == 100.
['Dry', 'Bulb', 'Temperature', '(', 'C', ')', 'and', 'humidity_ratio', 'at', 'at', 'wet_bulb', '(', 'C', ')', 'rel_humid', '(', '%', ')', 'and', 'Pressure', 'b_press', '(', 'Pa', ')', '.']
train
https://github.com/ladybug-tools/ladybug/blob/c08b7308077a48d5612f644943f92d5b5dade583/ladybug/psychrometrics.py#L212-L223
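One consequence worth spelling out: at rel_humid == 100 the two humidity ratios coincide, so the returned dry bulb equals the wet bulb. A sketch assuming ladybug is installed:

from ladybug.psychrometrics import db_temp_from_wb_rh

db_temp, hr = db_temp_from_wb_rh(20, 100)  # saturated air at 20 C
print(db_temp)  # 20.0: hr_saturation - humidity_ratio vanishes at saturation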
6,295
thumbor/thumbor
thumbor/metrics/statsd_metrics.py
Metrics.client
def client(cls, config):
    """
    Cache statsd client so it doesn't do a DNS lookup over and over
    """
    if not hasattr(cls, "_client"):
        cls._client = statsd.StatsClient(config.STATSD_HOST,
                                         config.STATSD_PORT,
                                         config.STATSD_PREFIX)
    return cls._client
python
['def', 'client', '(', 'cls', ',', 'config', ')', ':', 'if', 'not', 'hasattr', '(', 'cls', ',', '"_client"', ')', ':', 'cls', '.', '_client', '=', 'statsd', '.', 'StatsClient', '(', 'config', '.', 'STATSD_HOST', ',', 'config', '.', 'STATSD_PORT', ',', 'config', '.', 'STATSD_PREFIX', ')', 'return', 'cls', '.', '_client']
Cache statsd client so it doesn't do a DNS lookup over and over
['Cache', 'statsd', 'client', 'so', 'it', 'doesn', 't', 'do', 'a', 'DNS', 'lookup', 'over', 'and', 'over']
train
https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/metrics/statsd_metrics.py#L18-L25
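The memoization means repeated calls reuse one UDP StatsClient (statsd makes no connection at construction time); a sketch with a stand-in config object, import path assumed from the record above:

from types import SimpleNamespace
from thumbor.metrics.statsd_metrics import Metrics

config = SimpleNamespace(STATSD_HOST='localhost', STATSD_PORT=8125, STATSD_PREFIX='thumbor')
a = Metrics.client(config)
b = Metrics.client(config)
print(a is b)  # True: the client is created once and cached on the class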
6,296
biolink/ontobio
ontobio/sim/api/owlsim2.py
OwlSim2Api._simsearch_to_simresult
def _simsearch_to_simresult(self, sim_resp: Dict,
                            method: SimAlgorithm) -> SimResult:
    """
    Convert owlsim json to SimResult object

    :param sim_resp: owlsim response from search_by_attribute_set()
    :param method: SimAlgorithm
    :return: SimResult object
    """
    sim_ids = get_nodes_from_ids(sim_resp['query_IRIs'])
    sim_resp['results'] = OwlSim2Api._rank_results(sim_resp['results'], method)

    # get id type map:
    ids = [result['j']['id'] for result in sim_resp['results']]
    id_type_map = get_id_type_map(ids)

    matches = []

    for result in sim_resp['results']:
        matches.append(
            SimMatch(
                id=result['j']['id'],
                label=result['j']['label'],
                rank=result['rank'],
                score=result[OwlSim2Api.method2key[method]],
                type=id_type_map[result['j']['id']][0],
                taxon=get_taxon(result['j']['id']),
                significance="NaN",
                pairwise_match=OwlSim2Api._make_pairwise_matches(result)
            )
        )

    return SimResult(
        query=SimQuery(
            ids=sim_ids,
            unresolved_ids=sim_resp['unresolved'],
            target_ids=[[]]
        ),
        matches=matches,
        metadata=SimMetadata(
            max_max_ic=self.statistics.max_max_ic
        )
    )
python
['def', '_simsearch_to_simresult', '(', 'self', ',', 'sim_resp', ':', 'Dict', ',', 'method', ':', 'SimAlgorithm', ')', '->', 'SimResult', ':', 'sim_ids', '=', 'get_nodes_from_ids', '(', 'sim_resp', '[', "'query_IRIs'", ']', ')', 'sim_resp', '[', "'results'", ']', '=', 'OwlSim2Api', '.', '_rank_results', '(', 'sim_resp', '[', "'results'", ']', ',', 'method', ')', '# get id type map:', 'ids', '=', '[', 'result', '[', "'j'", ']', '[', "'id'", ']', 'for', 'result', 'in', 'sim_resp', '[', "'results'", ']', ']', 'id_type_map', '=', 'get_id_type_map', '(', 'ids', ')', 'matches', '=', '[', ']', 'for', 'result', 'in', 'sim_resp', '[', "'results'", ']', ':', 'matches', '.', 'append', '(', 'SimMatch', '(', 'id', '=', 'result', '[', "'j'", ']', '[', "'id'", ']', ',', 'label', '=', 'result', '[', "'j'", ']', '[', "'label'", ']', ',', 'rank', '=', 'result', '[', "'rank'", ']', ',', 'score', '=', 'result', '[', 'OwlSim2Api', '.', 'method2key', '[', 'method', ']', ']', ',', 'type', '=', 'id_type_map', '[', 'result', '[', "'j'", ']', '[', "'id'", ']', ']', '[', '0', ']', ',', 'taxon', '=', 'get_taxon', '(', 'result', '[', "'j'", ']', '[', "'id'", ']', ')', ',', 'significance', '=', '"NaN"', ',', 'pairwise_match', '=', 'OwlSim2Api', '.', '_make_pairwise_matches', '(', 'result', ')', ')', ')', 'return', 'SimResult', '(', 'query', '=', 'SimQuery', '(', 'ids', '=', 'sim_ids', ',', 'unresolved_ids', '=', 'sim_resp', '[', "'unresolved'", ']', ',', 'target_ids', '=', '[', '[', ']', ']', ')', ',', 'matches', '=', 'matches', ',', 'metadata', '=', 'SimMetadata', '(', 'max_max_ic', '=', 'self', '.', 'statistics', '.', 'max_max_ic', ')', ')']
Convert owlsim json to SimResult object :param sim_resp: owlsim response from search_by_attribute_set() :param method: SimAlgorithm :return: SimResult object
['Convert', 'owlsim', 'json', 'to', 'SimResult', 'object']
train
https://github.com/biolink/ontobio/blob/4e512a7831cfe6bc1b32f2c3be2ba41bc5cf7345/ontobio/sim/api/owlsim2.py#L311-L353
6,297
Azure/azure-cli-extensions
src/alias/azext_alias/alias.py
AliasManager.load_collided_alias
def load_collided_alias(self):
    """
    Load (create, if not exist) the collided alias file.
    """
    # w+ creates the alias config file if it does not exist
    open_mode = 'r+' if os.path.exists(GLOBAL_COLLIDED_ALIAS_PATH) else 'w+'
    with open(GLOBAL_COLLIDED_ALIAS_PATH, open_mode) as collided_alias_file:
        collided_alias_str = collided_alias_file.read()
        try:
            self.collided_alias = json.loads(collided_alias_str if collided_alias_str else '{}')
        except Exception:  # pylint: disable=broad-except
            self.collided_alias = {}
python
['def', 'load_collided_alias', '(', 'self', ')', ':', '# w+ creates the alias config file if it does not exist', 'open_mode', '=', "'r+'", 'if', 'os', '.', 'path', '.', 'exists', '(', 'GLOBAL_COLLIDED_ALIAS_PATH', ')', 'else', "'w+'", 'with', 'open', '(', 'GLOBAL_COLLIDED_ALIAS_PATH', ',', 'open_mode', ')', 'as', 'collided_alias_file', ':', 'collided_alias_str', '=', 'collided_alias_file', '.', 'read', '(', ')', 'try', ':', 'self', '.', 'collided_alias', '=', 'json', '.', 'loads', '(', 'collided_alias_str', 'if', 'collided_alias_str', 'else', "'{}'", ')', 'except', 'Exception', ':', '# pylint: disable=broad-except', 'self', '.', 'collided_alias', '=', '{', '}']
Load (create, if not exist) the collided alias file.
['Load', '(', 'create', 'if', 'not', 'exist', ')', 'the', 'collided', 'alias', 'file', '.']
train
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/alias/azext_alias/alias.py#L79-L90
6,298
hosford42/xcs
xcs/algorithms/xcs.py
XCSAlgorithm._action_set_subsumption
def _action_set_subsumption(self, action_set):
    """Perform action set subsumption."""
    # Select a condition with maximum bit count among those having
    # sufficient experience and sufficiently low error.
    selected_rule = None
    selected_bit_count = None
    for rule in action_set:
        if not (rule.experience > self.subsumption_threshold and
                rule.error < self.error_threshold):
            continue
        bit_count = rule.condition.count()
        if (selected_rule is None or
                bit_count > selected_bit_count or
                (bit_count == selected_bit_count and
                 random.randrange(2))):
            selected_rule = rule
            selected_bit_count = bit_count

    # If no rule was found satisfying the requirements, return
    # early.
    if selected_rule is None:
        return

    # Subsume each rule which the selected rule generalizes. When a
    # rule is subsumed, all instances of the subsumed rule are replaced
    # with instances of the more general one in the population.
    to_remove = []
    for rule in action_set:
        if (selected_rule is not rule and
                selected_rule.condition(rule.condition)):
            selected_rule.numerosity += rule.numerosity
            action_set.model.discard(rule, rule.numerosity)
            to_remove.append(rule)
    for rule in to_remove:
        action_set.remove(rule)
python
['def', '_action_set_subsumption', '(', 'self', ',', 'action_set', ')', ':', '# Select a condition with maximum bit count among those having', '# sufficient experience and sufficiently low error.', 'selected_rule', '=', 'None', 'selected_bit_count', '=', 'None', 'for', 'rule', 'in', 'action_set', ':', 'if', 'not', '(', 'rule', '.', 'experience', '>', 'self', '.', 'subsumption_threshold', 'and', 'rule', '.', 'error', '<', 'self', '.', 'error_threshold', ')', ':', 'continue', 'bit_count', '=', 'rule', '.', 'condition', '.', 'count', '(', ')', 'if', '(', 'selected_rule', 'is', 'None', 'or', 'bit_count', '>', 'selected_bit_count', 'or', '(', 'bit_count', '==', 'selected_bit_count', 'and', 'random', '.', 'randrange', '(', '2', ')', ')', ')', ':', 'selected_rule', '=', 'rule', 'selected_bit_count', '=', 'bit_count', '# If no rule was found satisfying the requirements, return', '# early.', 'if', 'selected_rule', 'is', 'None', ':', 'return', '# Subsume each rule which the selected rule generalizes. When a', '# rule is subsumed, all instances of the subsumed rule are replaced', '# with instances of the more general one in the population.', 'to_remove', '=', '[', ']', 'for', 'rule', 'in', 'action_set', ':', 'if', '(', 'selected_rule', 'is', 'not', 'rule', 'and', 'selected_rule', '.', 'condition', '(', 'rule', '.', 'condition', ')', ')', ':', 'selected_rule', '.', 'numerosity', '+=', 'rule', '.', 'numerosity', 'action_set', '.', 'model', '.', 'discard', '(', 'rule', ',', 'rule', '.', 'numerosity', ')', 'to_remove', '.', 'append', '(', 'rule', ')', 'for', 'rule', 'in', 'to_remove', ':', 'action_set', '.', 'remove', '(', 'rule', ')']
Perform action set subsumption.
['Perform', 'action', 'set', 'subsumption', '.']
train
https://github.com/hosford42/xcs/blob/183bdd0dd339e19ded3be202f86e1b38bdb9f1e5/xcs/algorithms/xcs.py#L779-L813
6,299
marcomusy/vtkplotter
vtkplotter/actors.py
Actor.subdivide
def subdivide(self, N=1, method=0):
    """Increase the number of vertices of a surface mesh.

    :param int N: number of subdivisions.
    :param int method: Loop(0), Linear(1), Adaptive(2), Butterfly(3)

    .. hint:: |tutorial_subdivide| |tutorial.py|_
    """
    triangles = vtk.vtkTriangleFilter()
    triangles.SetInputData(self.polydata())
    triangles.Update()
    originalMesh = triangles.GetOutput()
    if method == 0:
        sdf = vtk.vtkLoopSubdivisionFilter()
    elif method == 1:
        sdf = vtk.vtkLinearSubdivisionFilter()
    elif method == 2:
        sdf = vtk.vtkAdaptiveSubdivisionFilter()
    elif method == 3:
        sdf = vtk.vtkButterflySubdivisionFilter()
    else:
        colors.printc("~times Error in subdivide: unknown method.", c="r")
        exit()
    if method != 2:
        sdf.SetNumberOfSubdivisions(N)
    sdf.SetInputData(originalMesh)
    sdf.Update()
    return self.updateMesh(sdf.GetOutput())
python
['def', 'subdivide', '(', 'self', ',', 'N', '=', '1', ',', 'method', '=', '0', ')', ':', 'triangles', '=', 'vtk', '.', 'vtkTriangleFilter', '(', ')', 'triangles', '.', 'SetInputData', '(', 'self', '.', 'polydata', '(', ')', ')', 'triangles', '.', 'Update', '(', ')', 'originalMesh', '=', 'triangles', '.', 'GetOutput', '(', ')', 'if', 'method', '==', '0', ':', 'sdf', '=', 'vtk', '.', 'vtkLoopSubdivisionFilter', '(', ')', 'elif', 'method', '==', '1', ':', 'sdf', '=', 'vtk', '.', 'vtkLinearSubdivisionFilter', '(', ')', 'elif', 'method', '==', '2', ':', 'sdf', '=', 'vtk', '.', 'vtkAdaptiveSubdivisionFilter', '(', ')', 'elif', 'method', '==', '3', ':', 'sdf', '=', 'vtk', '.', 'vtkButterflySubdivisionFilter', '(', ')', 'else', ':', 'colors', '.', 'printc', '(', '"~times Error in subdivide: unknown method."', ',', 'c', '=', '"r"', ')', 'exit', '(', ')', 'if', 'method', '!=', '2', ':', 'sdf', '.', 'SetNumberOfSubdivisions', '(', 'N', ')', 'sdf', '.', 'SetInputData', '(', 'originalMesh', ')', 'sdf', '.', 'Update', '(', ')', 'return', 'self', '.', 'updateMesh', '(', 'sdf', '.', 'GetOutput', '(', ')', ')']
Increase the number of vertices of a surface mesh. :param int N: number of subdivisions. :param int method: Loop(0), Linear(1), Adaptive(2), Butterfly(3) .. hint:: |tutorial_subdivide| |tutorial.py|_
['Increase', 'the', 'number', 'of', 'vertices', 'of', 'a', 'surface', 'mesh', '.']
train
https://github.com/marcomusy/vtkplotter/blob/692c3396782722ec525bc1346a26999868c650c6/vtkplotter/actors.py#L1858-L1885
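A usage sketch with one of vtkplotter's built-in shapes; the mesh choice is an example, and N() is assumed to report the vertex count:

from vtkplotter import Sphere

s = Sphere()
print(s.N())                 # vertex count before subdivision
s.subdivide(N=1, method=0)   # Loop subdivision; roughly quadruples the triangles
print(s.N())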