code (stringlengths 75–104k) | code_sememe (stringlengths 47–309k) | token_type (stringlengths 215–214k) | code_dependency (stringlengths 75–155k)
---|---|---|---
def timeline(self, timeline="home", max_id=None, min_id=None, since_id=None, limit=None):
"""
Fetch statuses, most recent ones first. `timeline` can be 'home', 'local', 'public',
    'tag/hashtag' or 'list/id'. See the following functions' documentation for what those do.
Local hashtag timelines are supported via the `timeline_hashtag()`_ function.
The default timeline is the "home" timeline.
Media only queries are supported via the `timeline_public()`_ and `timeline_hashtag()`_ functions.
Returns a list of `toot dicts`_.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params_initial = locals()
if timeline == "local":
timeline = "public"
params_initial['local'] = True
params = self.__generate_params(params_initial, ['timeline'])
url = '/api/v1/timelines/{0}'.format(timeline)
return self.__api_request('GET', url, params) | def function[timeline, parameter[self, timeline, max_id, min_id, since_id, limit]]:
constant[
Fetch statuses, most recent ones first. `timeline` can be 'home', 'local', 'public',
    'tag/hashtag' or 'list/id'. See the following functions' documentation for what those do.
Local hashtag timelines are supported via the `timeline_hashtag()`_ function.
The default timeline is the "home" timeline.
Media only queries are supported via the `timeline_public()`_ and `timeline_hashtag()`_ functions.
Returns a list of `toot dicts`_.
]
if compare[name[max_id] not_equal[!=] constant[None]] begin[:]
variable[max_id] assign[=] call[name[self].__unpack_id, parameter[name[max_id]]]
if compare[name[min_id] not_equal[!=] constant[None]] begin[:]
variable[min_id] assign[=] call[name[self].__unpack_id, parameter[name[min_id]]]
if compare[name[since_id] not_equal[!=] constant[None]] begin[:]
variable[since_id] assign[=] call[name[self].__unpack_id, parameter[name[since_id]]]
variable[params_initial] assign[=] call[name[locals], parameter[]]
if compare[name[timeline] equal[==] constant[local]] begin[:]
variable[timeline] assign[=] constant[public]
call[name[params_initial]][constant[local]] assign[=] constant[True]
variable[params] assign[=] call[name[self].__generate_params, parameter[name[params_initial], list[[<ast.Constant object at 0x7da20c7c8df0>]]]]
variable[url] assign[=] call[constant[/api/v1/timelines/{0}].format, parameter[name[timeline]]]
return[call[name[self].__api_request, parameter[constant[GET], name[url], name[params]]]] | keyword[def] identifier[timeline] ( identifier[self] , identifier[timeline] = literal[string] , identifier[max_id] = keyword[None] , identifier[min_id] = keyword[None] , identifier[since_id] = keyword[None] , identifier[limit] = keyword[None] ):
literal[string]
keyword[if] identifier[max_id] != keyword[None] :
identifier[max_id] = identifier[self] . identifier[__unpack_id] ( identifier[max_id] )
keyword[if] identifier[min_id] != keyword[None] :
identifier[min_id] = identifier[self] . identifier[__unpack_id] ( identifier[min_id] )
keyword[if] identifier[since_id] != keyword[None] :
identifier[since_id] = identifier[self] . identifier[__unpack_id] ( identifier[since_id] )
identifier[params_initial] = identifier[locals] ()
keyword[if] identifier[timeline] == literal[string] :
identifier[timeline] = literal[string]
identifier[params_initial] [ literal[string] ]= keyword[True]
identifier[params] = identifier[self] . identifier[__generate_params] ( identifier[params_initial] ,[ literal[string] ])
identifier[url] = literal[string] . identifier[format] ( identifier[timeline] )
keyword[return] identifier[self] . identifier[__api_request] ( literal[string] , identifier[url] , identifier[params] ) | def timeline(self, timeline='home', max_id=None, min_id=None, since_id=None, limit=None):
"""
Fetch statuses, most recent ones first. `timeline` can be 'home', 'local', 'public',
    'tag/hashtag' or 'list/id'. See the following functions' documentation for what those do.
Local hashtag timelines are supported via the `timeline_hashtag()`_ function.
The default timeline is the "home" timeline.
Media only queries are supported via the `timeline_public()`_ and `timeline_hashtag()`_ functions.
Returns a list of `toot dicts`_.
"""
if max_id != None:
max_id = self.__unpack_id(max_id) # depends on [control=['if'], data=['max_id']]
if min_id != None:
min_id = self.__unpack_id(min_id) # depends on [control=['if'], data=['min_id']]
if since_id != None:
since_id = self.__unpack_id(since_id) # depends on [control=['if'], data=['since_id']]
params_initial = locals()
if timeline == 'local':
timeline = 'public'
params_initial['local'] = True # depends on [control=['if'], data=['timeline']]
params = self.__generate_params(params_initial, ['timeline'])
url = '/api/v1/timelines/{0}'.format(timeline)
return self.__api_request('GET', url, params) |
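A hedged usage sketch for the `timeline` method in the row above, which rewrites the pseudo-timeline 'local' onto the public endpoint with `local=True`. The client construction is an assumption (the row only shows the method body; the API matches Mastodon.py's `Mastodon` class):

from mastodon import Mastodon  # assumed package providing this method

api = Mastodon(access_token="usercred.secret", api_base_url="https://example.social")
# 'local' is rewritten internally to GET /api/v1/timelines/public?local=true
statuses = api.timeline("local", limit=20)
for status in statuses:
    print(status["id"])  # each entry is a "toot dict" per the docstring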
def clone(self, _, scene):
"""
Create a clone of this Frame into a new Screen.
:param _: ignored.
:param scene: The new Scene object to clone into.
"""
# Assume that the application creates a new set of Frames and so we need to match up the
# data from the old object to the new (using the name).
if self._name is not None:
for effect in scene.effects:
if isinstance(effect, Frame):
if effect._name == self._name:
effect.data = self.data
for layout in self._layouts:
layout.update_widgets(new_frame=effect) | def function[clone, parameter[self, _, scene]]:
constant[
Create a clone of this Frame into a new Screen.
:param _: ignored.
:param scene: The new Scene object to clone into.
]
if compare[name[self]._name is_not constant[None]] begin[:]
for taget[name[effect]] in starred[name[scene].effects] begin[:]
if call[name[isinstance], parameter[name[effect], name[Frame]]] begin[:]
if compare[name[effect]._name equal[==] name[self]._name] begin[:]
name[effect].data assign[=] name[self].data
for taget[name[layout]] in starred[name[self]._layouts] begin[:]
call[name[layout].update_widgets, parameter[]] | keyword[def] identifier[clone] ( identifier[self] , identifier[_] , identifier[scene] ):
literal[string]
keyword[if] identifier[self] . identifier[_name] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[effect] keyword[in] identifier[scene] . identifier[effects] :
keyword[if] identifier[isinstance] ( identifier[effect] , identifier[Frame] ):
keyword[if] identifier[effect] . identifier[_name] == identifier[self] . identifier[_name] :
identifier[effect] . identifier[data] = identifier[self] . identifier[data]
keyword[for] identifier[layout] keyword[in] identifier[self] . identifier[_layouts] :
identifier[layout] . identifier[update_widgets] ( identifier[new_frame] = identifier[effect] ) | def clone(self, _, scene):
"""
Create a clone of this Frame into a new Screen.
:param _: ignored.
:param scene: The new Scene object to clone into.
"""
# Assume that the application creates a new set of Frames and so we need to match up the
# data from the old object to the new (using the name).
if self._name is not None:
for effect in scene.effects:
if isinstance(effect, Frame):
if effect._name == self._name:
effect.data = self.data
for layout in self._layouts:
layout.update_widgets(new_frame=effect) # depends on [control=['for'], data=['layout']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['effect']] # depends on [control=['if'], data=[]] |
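The `clone` method above carries widget data across a screen re-creation by matching frames on `_name`. A minimal self-contained sketch of that match-by-name pattern, using stand-in classes instead of the real Scene/Frame types (assumed context):

class StubFrame:
    def __init__(self, name, data=None):
        self._name = name
        self.data = data if data is not None else {}

old = StubFrame("login", {"user": "alice"})
new_scene = [StubFrame("login"), StubFrame("help")]
for frame in new_scene:           # same loop shape as clone() above
    if frame._name == old._name:
        frame.data = old.data     # state survives the re-creation
assert new_scene[0].data == {"user": "alice"}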
def _makeStoreOwnerPerson(self):
"""
Make a L{Person} representing the owner of the store that this
L{Organizer} is installed in.
@rtype: L{Person}
"""
if self.store is None:
return None
userInfo = self.store.findFirst(signup.UserInfo)
name = u''
if userInfo is not None:
name = userInfo.realName
account = self.store.findUnique(LoginAccount,
LoginAccount.avatars == self.store, None)
ownerPerson = self.createPerson(name)
if account is not None:
for method in (self.store.query(
LoginMethod,
attributes.AND(LoginMethod.account == account,
LoginMethod.internal == False))):
self.createContactItem(
EmailContactType(self.store),
ownerPerson, dict(
email=method.localpart + u'@' + method.domain))
return ownerPerson | def function[_makeStoreOwnerPerson, parameter[self]]:
constant[
Make a L{Person} representing the owner of the store that this
L{Organizer} is installed in.
@rtype: L{Person}
]
if compare[name[self].store is constant[None]] begin[:]
return[constant[None]]
variable[userInfo] assign[=] call[name[self].store.findFirst, parameter[name[signup].UserInfo]]
variable[name] assign[=] constant[]
if compare[name[userInfo] is_not constant[None]] begin[:]
variable[name] assign[=] name[userInfo].realName
variable[account] assign[=] call[name[self].store.findUnique, parameter[name[LoginAccount], compare[name[LoginAccount].avatars equal[==] name[self].store], constant[None]]]
variable[ownerPerson] assign[=] call[name[self].createPerson, parameter[name[name]]]
if compare[name[account] is_not constant[None]] begin[:]
for taget[name[method]] in starred[call[name[self].store.query, parameter[name[LoginMethod], call[name[attributes].AND, parameter[compare[name[LoginMethod].account equal[==] name[account]], compare[name[LoginMethod].internal equal[==] constant[False]]]]]]] begin[:]
call[name[self].createContactItem, parameter[call[name[EmailContactType], parameter[name[self].store]], name[ownerPerson], call[name[dict], parameter[]]]]
return[name[ownerPerson]] | keyword[def] identifier[_makeStoreOwnerPerson] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[store] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[userInfo] = identifier[self] . identifier[store] . identifier[findFirst] ( identifier[signup] . identifier[UserInfo] )
identifier[name] = literal[string]
keyword[if] identifier[userInfo] keyword[is] keyword[not] keyword[None] :
identifier[name] = identifier[userInfo] . identifier[realName]
identifier[account] = identifier[self] . identifier[store] . identifier[findUnique] ( identifier[LoginAccount] ,
identifier[LoginAccount] . identifier[avatars] == identifier[self] . identifier[store] , keyword[None] )
identifier[ownerPerson] = identifier[self] . identifier[createPerson] ( identifier[name] )
keyword[if] identifier[account] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[method] keyword[in] ( identifier[self] . identifier[store] . identifier[query] (
identifier[LoginMethod] ,
identifier[attributes] . identifier[AND] ( identifier[LoginMethod] . identifier[account] == identifier[account] ,
identifier[LoginMethod] . identifier[internal] == keyword[False] ))):
identifier[self] . identifier[createContactItem] (
identifier[EmailContactType] ( identifier[self] . identifier[store] ),
identifier[ownerPerson] , identifier[dict] (
identifier[email] = identifier[method] . identifier[localpart] + literal[string] + identifier[method] . identifier[domain] ))
keyword[return] identifier[ownerPerson] | def _makeStoreOwnerPerson(self):
"""
Make a L{Person} representing the owner of the store that this
L{Organizer} is installed in.
@rtype: L{Person}
"""
if self.store is None:
return None # depends on [control=['if'], data=[]]
userInfo = self.store.findFirst(signup.UserInfo)
name = u''
if userInfo is not None:
name = userInfo.realName # depends on [control=['if'], data=['userInfo']]
account = self.store.findUnique(LoginAccount, LoginAccount.avatars == self.store, None)
ownerPerson = self.createPerson(name)
if account is not None:
for method in self.store.query(LoginMethod, attributes.AND(LoginMethod.account == account, LoginMethod.internal == False)):
self.createContactItem(EmailContactType(self.store), ownerPerson, dict(email=method.localpart + u'@' + method.domain)) # depends on [control=['for'], data=['method']] # depends on [control=['if'], data=['account']]
return ownerPerson |
def paste(self):
"""
Pastes text from the clipboard into this edit.
"""
html = QApplication.clipboard().text()
if not self.isRichTextEditEnabled():
self.insertPlainText(projex.text.toAscii(html))
else:
super(XTextEdit, self).paste() | def function[paste, parameter[self]]:
constant[
Pastes text from the clipboard into this edit.
]
variable[html] assign[=] call[call[name[QApplication].clipboard, parameter[]].text, parameter[]]
if <ast.UnaryOp object at 0x7da18eb558a0> begin[:]
call[name[self].insertPlainText, parameter[call[name[projex].text.toAscii, parameter[name[html]]]]] | keyword[def] identifier[paste] ( identifier[self] ):
literal[string]
identifier[html] = identifier[QApplication] . identifier[clipboard] (). identifier[text] ()
keyword[if] keyword[not] identifier[self] . identifier[isRichTextEditEnabled] ():
identifier[self] . identifier[insertPlainText] ( identifier[projex] . identifier[text] . identifier[toAscii] ( identifier[html] ))
keyword[else] :
identifier[super] ( identifier[XTextEdit] , identifier[self] ). identifier[paste] () | def paste(self):
"""
Pastes text from the clipboard into this edit.
"""
html = QApplication.clipboard().text()
if not self.isRichTextEditEnabled():
self.insertPlainText(projex.text.toAscii(html)) # depends on [control=['if'], data=[]]
else:
super(XTextEdit, self).paste() |
def format(self, method, data):
''' Calls format on list or detail '''
if data is None:
if method == 'GET':
raise NotFound()
return ''
return self._meta.formatter.format(data) | def function[format, parameter[self, method, data]]:
constant[ Calls format on list or detail ]
if compare[name[data] is constant[None]] begin[:]
if compare[name[method] equal[==] constant[GET]] begin[:]
<ast.Raise object at 0x7da204621e40>
return[constant[]]
return[call[name[self]._meta.formatter.format, parameter[name[data]]]] | keyword[def] identifier[format] ( identifier[self] , identifier[method] , identifier[data] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
keyword[if] identifier[method] == literal[string] :
keyword[raise] identifier[NotFound] ()
keyword[return] literal[string]
keyword[return] identifier[self] . identifier[_meta] . identifier[formatter] . identifier[format] ( identifier[data] ) | def format(self, method, data):
""" Calls format on list or detail """
if data is None:
if method == 'GET':
raise NotFound() # depends on [control=['if'], data=[]]
return '' # depends on [control=['if'], data=[]]
return self._meta.formatter.format(data) |
def camera_event_motion_enum(self, camera_id, **kwargs):
"""Return motion settings matching camera_id."""
api = self._api_info['camera_event']
payload = dict({
'_sid': self._sid,
'api': api['name'],
'method': 'MotionEnum',
'version': api['version'],
'camId': camera_id,
}, **kwargs)
response = self._get_json_with_retry(api['url'], payload)
return MotionSetting(camera_id, response['data']['MDParam']) | def function[camera_event_motion_enum, parameter[self, camera_id]]:
constant[Return motion settings matching camera_id.]
variable[api] assign[=] call[name[self]._api_info][constant[camera_event]]
variable[payload] assign[=] call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da20c6aad70>, <ast.Constant object at 0x7da20c6abc10>, <ast.Constant object at 0x7da20c6a98d0>, <ast.Constant object at 0x7da20c6aae00>, <ast.Constant object at 0x7da20c6a90c0>], [<ast.Attribute object at 0x7da20c6aa800>, <ast.Subscript object at 0x7da20c6abf40>, <ast.Constant object at 0x7da20c6aa110>, <ast.Subscript object at 0x7da20c6ab2e0>, <ast.Name object at 0x7da20c6a8370>]]]]
variable[response] assign[=] call[name[self]._get_json_with_retry, parameter[call[name[api]][constant[url]], name[payload]]]
return[call[name[MotionSetting], parameter[name[camera_id], call[call[name[response]][constant[data]]][constant[MDParam]]]]] | keyword[def] identifier[camera_event_motion_enum] ( identifier[self] , identifier[camera_id] ,** identifier[kwargs] ):
literal[string]
identifier[api] = identifier[self] . identifier[_api_info] [ literal[string] ]
identifier[payload] = identifier[dict] ({
literal[string] : identifier[self] . identifier[_sid] ,
literal[string] : identifier[api] [ literal[string] ],
literal[string] : literal[string] ,
literal[string] : identifier[api] [ literal[string] ],
literal[string] : identifier[camera_id] ,
},** identifier[kwargs] )
identifier[response] = identifier[self] . identifier[_get_json_with_retry] ( identifier[api] [ literal[string] ], identifier[payload] )
keyword[return] identifier[MotionSetting] ( identifier[camera_id] , identifier[response] [ literal[string] ][ literal[string] ]) | def camera_event_motion_enum(self, camera_id, **kwargs):
"""Return motion settings matching camera_id."""
api = self._api_info['camera_event']
payload = dict({'_sid': self._sid, 'api': api['name'], 'method': 'MotionEnum', 'version': api['version'], 'camId': camera_id}, **kwargs)
response = self._get_json_with_retry(api['url'], payload)
return MotionSetting(camera_id, response['data']['MDParam']) |
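The payload above is built with the `dict(base_mapping, **kwargs)` merge idiom, letting caller keyword arguments extend or override the fixed fields. A self-contained illustration (the field names below are invented for the demo):

base = {"method": "MotionEnum", "camId": 7}
payload = dict(base, blSave=True, camId=9)  # kwargs win on key collisions
assert payload == {"method": "MotionEnum", "camId": 9, "blSave": True}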
def jsonschemas(self):
"""Load deposit JSON schemas."""
_jsonschemas = {
k: v['jsonschema']
for k, v in self.app.config['DEPOSIT_RECORDS_UI_ENDPOINTS'].items()
if 'jsonschema' in v
}
return defaultdict(
lambda: self.app.config['DEPOSIT_DEFAULT_JSONSCHEMA'], _jsonschemas
) | def function[jsonschemas, parameter[self]]:
constant[Load deposit JSON schemas.]
variable[_jsonschemas] assign[=] <ast.DictComp object at 0x7da1afe6cf40>
return[call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1afe6e1d0>, name[_jsonschemas]]]] | keyword[def] identifier[jsonschemas] ( identifier[self] ):
literal[string]
identifier[_jsonschemas] ={
identifier[k] : identifier[v] [ literal[string] ]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[app] . identifier[config] [ literal[string] ]. identifier[items] ()
keyword[if] literal[string] keyword[in] identifier[v]
}
keyword[return] identifier[defaultdict] (
keyword[lambda] : identifier[self] . identifier[app] . identifier[config] [ literal[string] ], identifier[_jsonschemas]
) | def jsonschemas(self):
"""Load deposit JSON schemas."""
_jsonschemas = {k: v['jsonschema'] for (k, v) in self.app.config['DEPOSIT_RECORDS_UI_ENDPOINTS'].items() if 'jsonschema' in v}
return defaultdict(lambda : self.app.config['DEPOSIT_DEFAULT_JSONSCHEMA'], _jsonschemas) |
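The property above filters endpoint configs down to their `jsonschema` field, then wraps the result in a `defaultdict` so unknown deposit types fall back to a default schema. A self-contained sketch with made-up config values:

from collections import defaultdict

endpoints = {
    "depid": {"jsonschema": "deposits/deposit-v1.json", "route": "/deposit/<pid>"},
    "other": {"route": "/other"},  # no 'jsonschema' key, so it is filtered out
}
schemas = defaultdict(
    lambda: "deposits/default-v1.json",
    {k: v["jsonschema"] for k, v in endpoints.items() if "jsonschema" in v},
)
assert schemas["depid"] == "deposits/deposit-v1.json"
assert schemas["unknown"] == "deposits/default-v1.json"  # factory supplies the default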
def add_dihedrals(self, indexes, deg=False, cossin=False, periodic=True):
"""
Adds the list of dihedrals to the feature list
Parameters
----------
indexes : np.ndarray, shape=(num_pairs, 4), dtype=int
an array with quadruplets of atom indices
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
        This is useful if you calculate the mean (e.g. TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .angles import DihedralFeature
indexes = self._check_indices(indexes, pair_n=4)
f = DihedralFeature(self.topology, indexes, deg=deg, cossin=cossin,
periodic=periodic)
self.__add_feature(f) | def function[add_dihedrals, parameter[self, indexes, deg, cossin, periodic]]:
constant[
Adds the list of dihedrals to the feature list
Parameters
----------
indexes : np.ndarray, shape=(num_pairs, 4), dtype=int
an array with quadruplets of atom indices
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
        This is useful if you calculate the mean (e.g. TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
]
from relative_module[angles] import module[DihedralFeature]
variable[indexes] assign[=] call[name[self]._check_indices, parameter[name[indexes]]]
variable[f] assign[=] call[name[DihedralFeature], parameter[name[self].topology, name[indexes]]]
call[name[self].__add_feature, parameter[name[f]]] | keyword[def] identifier[add_dihedrals] ( identifier[self] , identifier[indexes] , identifier[deg] = keyword[False] , identifier[cossin] = keyword[False] , identifier[periodic] = keyword[True] ):
literal[string]
keyword[from] . identifier[angles] keyword[import] identifier[DihedralFeature]
identifier[indexes] = identifier[self] . identifier[_check_indices] ( identifier[indexes] , identifier[pair_n] = literal[int] )
identifier[f] = identifier[DihedralFeature] ( identifier[self] . identifier[topology] , identifier[indexes] , identifier[deg] = identifier[deg] , identifier[cossin] = identifier[cossin] ,
identifier[periodic] = identifier[periodic] )
identifier[self] . identifier[__add_feature] ( identifier[f] ) | def add_dihedrals(self, indexes, deg=False, cossin=False, periodic=True):
"""
Adds the list of dihedrals to the feature list
Parameters
----------
indexes : np.ndarray, shape=(num_pairs, 4), dtype=int
an array with quadruplets of atom indices
deg : bool, optional, default = False
If False (default), angles will be computed in radians.
If True, angles will be computed in degrees.
cossin : bool, optional, default = False
If True, each angle will be returned as a pair of (sin(x), cos(x)).
This is useful, if you calculate the mean (e.g TICA/PCA, clustering)
in that space.
periodic : bool, optional, default = True
If `periodic` is True and the trajectory contains unitcell
information, we will treat dihedrals that cross periodic images
using the minimum image convention.
"""
from .angles import DihedralFeature
indexes = self._check_indices(indexes, pair_n=4)
f = DihedralFeature(self.topology, indexes, deg=deg, cossin=cossin, periodic=periodic)
self.__add_feature(f) |
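A hedged call sketch for `add_dihedrals`; the featurizer construction assumes the PyEMMA API this method appears to belong to, and "topology.pdb" is a placeholder file name, not a real asset:

import numpy as np
import pyemma

feat = pyemma.coordinates.featurizer("topology.pdb")  # assumed entry point
quadruplets = np.array([[0, 1, 2, 3],
                        [4, 5, 6, 7]])                # one dihedral per row
feat.add_dihedrals(quadruplets, cossin=True)          # each angle -> (sin x, cos x)
print(feat.dimension())                               # 4 = 2 dihedrals x 2 outputs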
def update_resources(self, cpu, gpu, **kwargs):
"""EXPERIMENTAL: Updates the resource requirements.
Should only be called when the trial is not running.
Raises:
ValueError if trial status is running.
"""
if self.status is Trial.RUNNING:
raise ValueError("Cannot update resources while Trial is running.")
self.resources = Resources(cpu, gpu, **kwargs) | def function[update_resources, parameter[self, cpu, gpu]]:
constant[EXPERIMENTAL: Updates the resource requirements.
Should only be called when the trial is not running.
Raises:
ValueError if trial status is running.
]
if compare[name[self].status is name[Trial].RUNNING] begin[:]
<ast.Raise object at 0x7da1b2346fe0>
name[self].resources assign[=] call[name[Resources], parameter[name[cpu], name[gpu]]] | keyword[def] identifier[update_resources] ( identifier[self] , identifier[cpu] , identifier[gpu] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[status] keyword[is] identifier[Trial] . identifier[RUNNING] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[resources] = identifier[Resources] ( identifier[cpu] , identifier[gpu] ,** identifier[kwargs] ) | def update_resources(self, cpu, gpu, **kwargs):
"""EXPERIMENTAL: Updates the resource requirements.
Should only be called when the trial is not running.
Raises:
ValueError if trial status is running.
"""
if self.status is Trial.RUNNING:
raise ValueError('Cannot update resources while Trial is running.') # depends on [control=['if'], data=[]]
self.resources = Resources(cpu, gpu, **kwargs) |
def get_all_contacts_of_client(self, client_id):
"""
Get all contacts of client
This will iterate over all pages until it gets all elements.
    So if the rate limit is exceeded it will throw an Exception and you will get nothing
:param client_id: The id of the client
:return: list
"""
return self._iterate_through_pages(
get_function=self.get_contacts_of_client_per_page,
resource=CONTACTS,
**{'client_id': client_id}
) | def function[get_all_contacts_of_client, parameter[self, client_id]]:
constant[
Get all contacts of client
This will iterate over all pages until it gets all elements.
    So if the rate limit is exceeded it will throw an Exception and you will get nothing
:param client_id: The id of the client
:return: list
]
return[call[name[self]._iterate_through_pages, parameter[]]] | keyword[def] identifier[get_all_contacts_of_client] ( identifier[self] , identifier[client_id] ):
literal[string]
keyword[return] identifier[self] . identifier[_iterate_through_pages] (
identifier[get_function] = identifier[self] . identifier[get_contacts_of_client_per_page] ,
identifier[resource] = identifier[CONTACTS] ,
**{ literal[string] : identifier[client_id] }
) | def get_all_contacts_of_client(self, client_id):
"""
Get all contacts of client
This will iterate over all pages until it gets all elements.
    So if the rate limit is exceeded it will throw an Exception and you will get nothing
:param client_id: The id of the client
:return: list
"""
return self._iterate_through_pages(get_function=self.get_contacts_of_client_per_page, resource=CONTACTS, **{'client_id': client_id}) |
def first(self):
"""Returns the first item from the query, or None if there are no results"""
if self._results_cache:
return self._results_cache[0]
query = PaginatedResponse(func=self._func, lwrap_type=self._lwrap_type, **self._kwargs)
try:
return next(query)
except StopIteration:
return None | def function[first, parameter[self]]:
constant[Returns the first item from the query, or None if there are no results]
if name[self]._results_cache begin[:]
return[call[name[self]._results_cache][constant[0]]]
variable[query] assign[=] call[name[PaginatedResponse], parameter[]]
<ast.Try object at 0x7da1b0593250> | keyword[def] identifier[first] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_results_cache] :
keyword[return] identifier[self] . identifier[_results_cache] [ literal[int] ]
identifier[query] = identifier[PaginatedResponse] ( identifier[func] = identifier[self] . identifier[_func] , identifier[lwrap_type] = identifier[self] . identifier[_lwrap_type] ,** identifier[self] . identifier[_kwargs] )
keyword[try] :
keyword[return] identifier[next] ( identifier[query] )
keyword[except] identifier[StopIteration] :
keyword[return] keyword[None] | def first(self):
"""Returns the first item from the query, or None if there are no results"""
if self._results_cache:
return self._results_cache[0] # depends on [control=['if'], data=[]]
query = PaginatedResponse(func=self._func, lwrap_type=self._lwrap_type, **self._kwargs)
try:
return next(query) # depends on [control=['try'], data=[]]
except StopIteration:
return None # depends on [control=['except'], data=[]] |
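The `try`/`except StopIteration` in `first` above is the long-hand spelling of `next()`'s two-argument form; both yield `None` for an empty query. A self-contained equivalence check:

def first_or_none(iterable):
    return next(iter(iterable), None)  # default replaces the except branch

assert first_or_none([3, 1, 2]) == 3
assert first_or_none([]) is None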
def com_google_fonts_check_metadata_parses(family_directory):
""" Check METADATA.pb parse correctly. """
from google.protobuf import text_format
from fontbakery.utils import get_FamilyProto_Message
try:
pb_file = os.path.join(family_directory, "METADATA.pb")
get_FamilyProto_Message(pb_file)
yield PASS, "METADATA.pb parsed successfuly."
except text_format.ParseError as e:
yield FAIL, (f"Family metadata at {family_directory} failed to parse.\n"
f"TRACEBACK:\n{e}")
except FileNotFoundError:
yield SKIP, f"Font family at '{family_directory}' lacks a METADATA.pb file." | def function[com_google_fonts_check_metadata_parses, parameter[family_directory]]:
constant[ Check METADATA.pb parse correctly. ]
from relative_module[google.protobuf] import module[text_format]
from relative_module[fontbakery.utils] import module[get_FamilyProto_Message]
<ast.Try object at 0x7da1b1253fa0> | keyword[def] identifier[com_google_fonts_check_metadata_parses] ( identifier[family_directory] ):
literal[string]
keyword[from] identifier[google] . identifier[protobuf] keyword[import] identifier[text_format]
keyword[from] identifier[fontbakery] . identifier[utils] keyword[import] identifier[get_FamilyProto_Message]
keyword[try] :
identifier[pb_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[family_directory] , literal[string] )
identifier[get_FamilyProto_Message] ( identifier[pb_file] )
keyword[yield] identifier[PASS] , literal[string]
keyword[except] identifier[text_format] . identifier[ParseError] keyword[as] identifier[e] :
keyword[yield] identifier[FAIL] ,( literal[string]
literal[string] )
keyword[except] identifier[FileNotFoundError] :
keyword[yield] identifier[SKIP] , literal[string] | def com_google_fonts_check_metadata_parses(family_directory):
""" Check METADATA.pb parse correctly. """
from google.protobuf import text_format
from fontbakery.utils import get_FamilyProto_Message
try:
pb_file = os.path.join(family_directory, 'METADATA.pb')
get_FamilyProto_Message(pb_file)
        yield (PASS, 'METADATA.pb parsed successfully.') # depends on [control=['try'], data=[]]
except text_format.ParseError as e:
yield (FAIL, f'Family metadata at {family_directory} failed to parse.\nTRACEBACK:\n{e}') # depends on [control=['except'], data=['e']]
except FileNotFoundError:
yield (SKIP, f"Font family at '{family_directory}' lacks a METADATA.pb file.") # depends on [control=['except'], data=[]] |
def toFormMarkup(self,
action_url,
form_tag_attrs=None,
submit_text="Continue"):
"""Generate HTML form markup that contains the values in this
message, to be HTTP POSTed as x-www-form-urlencoded UTF-8.
@param action_url: The URL to which the form will be POSTed
@type action_url: str
@param form_tag_attrs: Dictionary of attributes to be added to
the form tag. 'accept-charset' and 'enctype' have defaults
that can be overridden. If a value is supplied for
'action' or 'method', it will be replaced.
@type form_tag_attrs: {unicode: unicode}
@param submit_text: The text that will appear on the submit
button for this form.
@type submit_text: unicode
@returns: A string containing (X)HTML markup for a form that
encodes the values in this Message object.
@rtype: str
"""
if ElementTree is None:
raise RuntimeError('This function requires ElementTree.')
assert action_url is not None
form = ElementTree.Element('form')
if form_tag_attrs:
for name, attr in form_tag_attrs.items():
form.attrib[name] = attr
form.attrib['action'] = oidutil.toUnicode(action_url)
form.attrib['method'] = 'post'
form.attrib['accept-charset'] = 'UTF-8'
form.attrib['enctype'] = 'application/x-www-form-urlencoded'
for name, value in self.toPostArgs().items():
attrs = {
'type': 'hidden',
'name': oidutil.toUnicode(name),
'value': oidutil.toUnicode(value)
}
form.append(ElementTree.Element('input', attrs))
submit = ElementTree.Element(
'input',
{'type': 'submit',
'value': oidutil.toUnicode(submit_text)})
form.append(submit)
return str(ElementTree.tostring(form, encoding='utf-8'),
encoding="utf-8") | def function[toFormMarkup, parameter[self, action_url, form_tag_attrs, submit_text]]:
constant[Generate HTML form markup that contains the values in this
message, to be HTTP POSTed as x-www-form-urlencoded UTF-8.
@param action_url: The URL to which the form will be POSTed
@type action_url: str
@param form_tag_attrs: Dictionary of attributes to be added to
the form tag. 'accept-charset' and 'enctype' have defaults
that can be overridden. If a value is supplied for
'action' or 'method', it will be replaced.
@type form_tag_attrs: {unicode: unicode}
@param submit_text: The text that will appear on the submit
button for this form.
@type submit_text: unicode
@returns: A string containing (X)HTML markup for a form that
encodes the values in this Message object.
@rtype: str
]
if compare[name[ElementTree] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0627a60>
assert[compare[name[action_url] is_not constant[None]]]
variable[form] assign[=] call[name[ElementTree].Element, parameter[constant[form]]]
if name[form_tag_attrs] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0625ed0>, <ast.Name object at 0x7da1b06276a0>]]] in starred[call[name[form_tag_attrs].items, parameter[]]] begin[:]
call[name[form].attrib][name[name]] assign[=] name[attr]
call[name[form].attrib][constant[action]] assign[=] call[name[oidutil].toUnicode, parameter[name[action_url]]]
call[name[form].attrib][constant[method]] assign[=] constant[post]
call[name[form].attrib][constant[accept-charset]] assign[=] constant[UTF-8]
call[name[form].attrib][constant[enctype]] assign[=] constant[application/x-www-form-urlencoded]
for taget[tuple[[<ast.Name object at 0x7da1b0627340>, <ast.Name object at 0x7da1b0627eb0>]]] in starred[call[call[name[self].toPostArgs, parameter[]].items, parameter[]]] begin[:]
variable[attrs] assign[=] dictionary[[<ast.Constant object at 0x7da1b0626830>, <ast.Constant object at 0x7da1b0626680>, <ast.Constant object at 0x7da1b0627550>], [<ast.Constant object at 0x7da1b0626590>, <ast.Call object at 0x7da1b0627880>, <ast.Call object at 0x7da1b0625810>]]
call[name[form].append, parameter[call[name[ElementTree].Element, parameter[constant[input], name[attrs]]]]]
variable[submit] assign[=] call[name[ElementTree].Element, parameter[constant[input], dictionary[[<ast.Constant object at 0x7da1b061a380>, <ast.Constant object at 0x7da1b0618310>], [<ast.Constant object at 0x7da1b0619d50>, <ast.Call object at 0x7da1b061b790>]]]]
call[name[form].append, parameter[name[submit]]]
return[call[name[str], parameter[call[name[ElementTree].tostring, parameter[name[form]]]]]] | keyword[def] identifier[toFormMarkup] ( identifier[self] ,
identifier[action_url] ,
identifier[form_tag_attrs] = keyword[None] ,
identifier[submit_text] = literal[string] ):
literal[string]
keyword[if] identifier[ElementTree] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[assert] identifier[action_url] keyword[is] keyword[not] keyword[None]
identifier[form] = identifier[ElementTree] . identifier[Element] ( literal[string] )
keyword[if] identifier[form_tag_attrs] :
keyword[for] identifier[name] , identifier[attr] keyword[in] identifier[form_tag_attrs] . identifier[items] ():
identifier[form] . identifier[attrib] [ identifier[name] ]= identifier[attr]
identifier[form] . identifier[attrib] [ literal[string] ]= identifier[oidutil] . identifier[toUnicode] ( identifier[action_url] )
identifier[form] . identifier[attrib] [ literal[string] ]= literal[string]
identifier[form] . identifier[attrib] [ literal[string] ]= literal[string]
identifier[form] . identifier[attrib] [ literal[string] ]= literal[string]
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[self] . identifier[toPostArgs] (). identifier[items] ():
identifier[attrs] ={
literal[string] : literal[string] ,
literal[string] : identifier[oidutil] . identifier[toUnicode] ( identifier[name] ),
literal[string] : identifier[oidutil] . identifier[toUnicode] ( identifier[value] )
}
identifier[form] . identifier[append] ( identifier[ElementTree] . identifier[Element] ( literal[string] , identifier[attrs] ))
identifier[submit] = identifier[ElementTree] . identifier[Element] (
literal[string] ,
{ literal[string] : literal[string] ,
literal[string] : identifier[oidutil] . identifier[toUnicode] ( identifier[submit_text] )})
identifier[form] . identifier[append] ( identifier[submit] )
keyword[return] identifier[str] ( identifier[ElementTree] . identifier[tostring] ( identifier[form] , identifier[encoding] = literal[string] ),
identifier[encoding] = literal[string] ) | def toFormMarkup(self, action_url, form_tag_attrs=None, submit_text='Continue'):
"""Generate HTML form markup that contains the values in this
message, to be HTTP POSTed as x-www-form-urlencoded UTF-8.
@param action_url: The URL to which the form will be POSTed
@type action_url: str
@param form_tag_attrs: Dictionary of attributes to be added to
the form tag. 'accept-charset' and 'enctype' have defaults
that can be overridden. If a value is supplied for
'action' or 'method', it will be replaced.
@type form_tag_attrs: {unicode: unicode}
@param submit_text: The text that will appear on the submit
button for this form.
@type submit_text: unicode
@returns: A string containing (X)HTML markup for a form that
encodes the values in this Message object.
@rtype: str
"""
if ElementTree is None:
raise RuntimeError('This function requires ElementTree.') # depends on [control=['if'], data=[]]
assert action_url is not None
form = ElementTree.Element('form')
if form_tag_attrs:
for (name, attr) in form_tag_attrs.items():
form.attrib[name] = attr # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
form.attrib['action'] = oidutil.toUnicode(action_url)
form.attrib['method'] = 'post'
form.attrib['accept-charset'] = 'UTF-8'
form.attrib['enctype'] = 'application/x-www-form-urlencoded'
for (name, value) in self.toPostArgs().items():
attrs = {'type': 'hidden', 'name': oidutil.toUnicode(name), 'value': oidutil.toUnicode(value)}
form.append(ElementTree.Element('input', attrs)) # depends on [control=['for'], data=[]]
submit = ElementTree.Element('input', {'type': 'submit', 'value': oidutil.toUnicode(submit_text)})
form.append(submit)
return str(ElementTree.tostring(form, encoding='utf-8'), encoding='utf-8') |
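The same ElementTree idiom as `toFormMarkup`, reduced to a self-contained script: build a POST form of hidden inputs plus a submit button and serialize it (the field name and value below are invented for the demo):

import xml.etree.ElementTree as ElementTree

form = ElementTree.Element("form", {
    "action": "https://example.com/endpoint",
    "method": "post",
    "accept-charset": "UTF-8",
    "enctype": "application/x-www-form-urlencoded",
})
for name, value in {"openid.mode": "checkid_setup"}.items():
    form.append(ElementTree.Element(
        "input", {"type": "hidden", "name": name, "value": value}))
form.append(ElementTree.Element("input", {"type": "submit", "value": "Continue"}))
print(str(ElementTree.tostring(form, encoding="utf-8"), encoding="utf-8"))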
def giving_up(self, message):
"""
Called when a message has been received where ``msg.attempts > max_tries``
This is useful to subclass and override to perform a task (such as writing to disk, etc.)
:param message: the :class:`nsq.Message` received
"""
logger.warning('[%s] giving up on message %s after %d tries (max:%d) %r',
self.name, message.id, message.attempts, self.max_tries, message.body) | def function[giving_up, parameter[self, message]]:
constant[
Called when a message has been received where ``msg.attempts > max_tries``
This is useful to subclass and override to perform a task (such as writing to disk, etc.)
:param message: the :class:`nsq.Message` received
]
call[name[logger].warning, parameter[constant[[%s] giving up on message %s after %d tries (max:%d) %r], name[self].name, name[message].id, name[message].attempts, name[self].max_tries, name[message].body]] | keyword[def] identifier[giving_up] ( identifier[self] , identifier[message] ):
literal[string]
identifier[logger] . identifier[warning] ( literal[string] ,
identifier[self] . identifier[name] , identifier[message] . identifier[id] , identifier[message] . identifier[attempts] , identifier[self] . identifier[max_tries] , identifier[message] . identifier[body] ) | def giving_up(self, message):
"""
Called when a message has been received where ``msg.attempts > max_tries``
This is useful to subclass and override to perform a task (such as writing to disk, etc.)
:param message: the :class:`nsq.Message` received
"""
logger.warning('[%s] giving up on message %s after %d tries (max:%d) %r', self.name, message.id, message.attempts, self.max_tries, message.body) |
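`giving_up` passes format arguments to the logger instead of pre-formatting the string, so the interpolation work is skipped whenever WARNING is filtered out. A runnable illustration of that lazy %-style logging idiom (the argument values are made up):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("reader")
logger.warning("[%s] giving up on message %s after %d tries (max:%d) %r",
               "worker-1", "0a2f44", 5, 5, b"payload")  # formatted only if emitted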
def transform_to(ext):
"""
Decorator to create an output filename from an output filename with
the specified extension. Changes the extension, in_file is transformed
to a new type.
Takes functions like this to decorate:
f(in_file, out_dir=None, out_file=None) or,
f(in_file=in_file, out_dir=None, out_file=None)
examples:
@transform(".bam")
f("the/input/path/file.sam") ->
f("the/input/path/file.sam", out_file="the/input/path/file.bam")
@transform(".bam")
f("the/input/path/file.sam", out_dir="results") ->
f("the/input/path/file.sam", out_file="results/file.bam")
"""
def decor(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
out_file = kwargs.get("out_file", None)
if not out_file:
in_path = kwargs.get("in_file", args[0])
out_dir = kwargs.get("out_dir", os.path.dirname(in_path))
safe_mkdir(out_dir)
out_name = replace_suffix(os.path.basename(in_path), ext)
out_file = os.path.join(out_dir, out_name)
kwargs["out_file"] = out_file
if not file_exists(out_file):
out_file = f(*args, **kwargs)
return out_file
return wrapper
return decor | def function[transform_to, parameter[ext]]:
constant[
Decorator to create an output filename from an output filename with
the specified extension. Changes the extension, in_file is transformed
to a new type.
Takes functions like this to decorate:
f(in_file, out_dir=None, out_file=None) or,
f(in_file=in_file, out_dir=None, out_file=None)
examples:
@transform(".bam")
f("the/input/path/file.sam") ->
f("the/input/path/file.sam", out_file="the/input/path/file.bam")
@transform(".bam")
f("the/input/path/file.sam", out_dir="results") ->
f("the/input/path/file.sam", out_file="results/file.bam")
]
def function[decor, parameter[f]]:
def function[wrapper, parameter[]]:
variable[out_file] assign[=] call[name[kwargs].get, parameter[constant[out_file], constant[None]]]
if <ast.UnaryOp object at 0x7da20e9b1a50> begin[:]
variable[in_path] assign[=] call[name[kwargs].get, parameter[constant[in_file], call[name[args]][constant[0]]]]
variable[out_dir] assign[=] call[name[kwargs].get, parameter[constant[out_dir], call[name[os].path.dirname, parameter[name[in_path]]]]]
call[name[safe_mkdir], parameter[name[out_dir]]]
variable[out_name] assign[=] call[name[replace_suffix], parameter[call[name[os].path.basename, parameter[name[in_path]]], name[ext]]]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], name[out_name]]]
call[name[kwargs]][constant[out_file]] assign[=] name[out_file]
if <ast.UnaryOp object at 0x7da204345480> begin[:]
variable[out_file] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da204346320>]]
return[name[out_file]]
return[name[wrapper]]
return[name[decor]] | keyword[def] identifier[transform_to] ( identifier[ext] ):
literal[string]
keyword[def] identifier[decor] ( identifier[f] ):
@ identifier[functools] . identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[out_file] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[out_file] :
identifier[in_path] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[args] [ literal[int] ])
identifier[out_dir] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[in_path] ))
identifier[safe_mkdir] ( identifier[out_dir] )
identifier[out_name] = identifier[replace_suffix] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[in_path] ), identifier[ext] )
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[out_name] )
identifier[kwargs] [ literal[string] ]= identifier[out_file]
keyword[if] keyword[not] identifier[file_exists] ( identifier[out_file] ):
identifier[out_file] = identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[out_file]
keyword[return] identifier[wrapper]
keyword[return] identifier[decor] | def transform_to(ext):
"""
Decorator to create an output filename from an output filename with
the specified extension. Changes the extension, in_file is transformed
to a new type.
Takes functions like this to decorate:
f(in_file, out_dir=None, out_file=None) or,
f(in_file=in_file, out_dir=None, out_file=None)
examples:
@transform(".bam")
f("the/input/path/file.sam") ->
f("the/input/path/file.sam", out_file="the/input/path/file.bam")
@transform(".bam")
f("the/input/path/file.sam", out_dir="results") ->
f("the/input/path/file.sam", out_file="results/file.bam")
"""
def decor(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
out_file = kwargs.get('out_file', None)
if not out_file:
in_path = kwargs.get('in_file', args[0])
out_dir = kwargs.get('out_dir', os.path.dirname(in_path))
safe_mkdir(out_dir)
out_name = replace_suffix(os.path.basename(in_path), ext)
out_file = os.path.join(out_dir, out_name) # depends on [control=['if'], data=[]]
kwargs['out_file'] = out_file
if not file_exists(out_file):
out_file = f(*args, **kwargs) # depends on [control=['if'], data=[]]
return out_file
return wrapper
return decor |
def permissions(self):
"""Instance depends on the API version:
* 2015-07-01: :class:`PermissionsOperations<azure.mgmt.authorization.v2015_07_01.operations.PermissionsOperations>`
* 2018-01-01-preview: :class:`PermissionsOperations<azure.mgmt.authorization.v2018_01_01_preview.operations.PermissionsOperations>`
"""
api_version = self._get_api_version('permissions')
if api_version == '2015-07-01':
from .v2015_07_01.operations import PermissionsOperations as OperationClass
elif api_version == '2018-01-01-preview':
from .v2018_01_01_preview.operations import PermissionsOperations as OperationClass
else:
raise NotImplementedError("APIVersion {} is not available".format(api_version))
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) | def function[permissions, parameter[self]]:
constant[Instance depends on the API version:
* 2015-07-01: :class:`PermissionsOperations<azure.mgmt.authorization.v2015_07_01.operations.PermissionsOperations>`
* 2018-01-01-preview: :class:`PermissionsOperations<azure.mgmt.authorization.v2018_01_01_preview.operations.PermissionsOperations>`
]
variable[api_version] assign[=] call[name[self]._get_api_version, parameter[constant[permissions]]]
if compare[name[api_version] equal[==] constant[2015-07-01]] begin[:]
from relative_module[v2015_07_01.operations] import module[PermissionsOperations]
return[call[name[OperationClass], parameter[name[self]._client, name[self].config, call[name[Serializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]], call[name[Deserializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]]]]] | keyword[def] identifier[permissions] ( identifier[self] ):
literal[string]
identifier[api_version] = identifier[self] . identifier[_get_api_version] ( literal[string] )
keyword[if] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2015_07_01] . identifier[operations] keyword[import] identifier[PermissionsOperations] keyword[as] identifier[OperationClass]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_01_01_preview] . identifier[operations] keyword[import] identifier[PermissionsOperations] keyword[as] identifier[OperationClass]
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[api_version] ))
keyword[return] identifier[OperationClass] ( identifier[self] . identifier[_client] , identifier[self] . identifier[config] , identifier[Serializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] )), identifier[Deserializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] ))) | def permissions(self):
"""Instance depends on the API version:
* 2015-07-01: :class:`PermissionsOperations<azure.mgmt.authorization.v2015_07_01.operations.PermissionsOperations>`
* 2018-01-01-preview: :class:`PermissionsOperations<azure.mgmt.authorization.v2018_01_01_preview.operations.PermissionsOperations>`
"""
api_version = self._get_api_version('permissions')
if api_version == '2015-07-01':
from .v2015_07_01.operations import PermissionsOperations as OperationClass # depends on [control=['if'], data=[]]
elif api_version == '2018-01-01-preview':
from .v2018_01_01_preview.operations import PermissionsOperations as OperationClass # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('APIVersion {} is not available'.format(api_version))
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) |
def connect(self, *args):
"""Registers new sockets and their clients and allocates uuids"""
self.log("Connect ", args, lvl=verbose)
try:
sock = args[0]
ip = args[1]
if sock not in self._sockets:
self.log("New client connected:", ip, lvl=debug)
clientuuid = str(uuid4())
self._sockets[sock] = Socket(ip, clientuuid)
# Key uuid is temporary, until signin, will then be replaced
# with account uuid
self._clients[clientuuid] = Client(
sock=sock,
ip=ip,
clientuuid=clientuuid,
)
self.log("Client connected:", clientuuid, lvl=debug)
else:
self.log("Old IP reconnected!", lvl=warn)
# self.fireEvent(write(sock, "Another client is
# connecting from your IP!"))
# self._sockets[sock] = (ip, uuid.uuid4())
except Exception as e:
self.log("Error during connect: ", e, type(e), lvl=critical) | def function[connect, parameter[self]]:
constant[Registers new sockets and their clients and allocates uuids]
call[name[self].log, parameter[constant[Connect ], name[args]]]
<ast.Try object at 0x7da1b0f3bbb0> | keyword[def] identifier[connect] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[self] . identifier[log] ( literal[string] , identifier[args] , identifier[lvl] = identifier[verbose] )
keyword[try] :
identifier[sock] = identifier[args] [ literal[int] ]
identifier[ip] = identifier[args] [ literal[int] ]
keyword[if] identifier[sock] keyword[not] keyword[in] identifier[self] . identifier[_sockets] :
identifier[self] . identifier[log] ( literal[string] , identifier[ip] , identifier[lvl] = identifier[debug] )
identifier[clientuuid] = identifier[str] ( identifier[uuid4] ())
identifier[self] . identifier[_sockets] [ identifier[sock] ]= identifier[Socket] ( identifier[ip] , identifier[clientuuid] )
identifier[self] . identifier[_clients] [ identifier[clientuuid] ]= identifier[Client] (
identifier[sock] = identifier[sock] ,
identifier[ip] = identifier[ip] ,
identifier[clientuuid] = identifier[clientuuid] ,
)
identifier[self] . identifier[log] ( literal[string] , identifier[clientuuid] , identifier[lvl] = identifier[debug] )
keyword[else] :
identifier[self] . identifier[log] ( literal[string] , identifier[lvl] = identifier[warn] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[log] ( literal[string] , identifier[e] , identifier[type] ( identifier[e] ), identifier[lvl] = identifier[critical] ) | def connect(self, *args):
"""Registers new sockets and their clients and allocates uuids"""
self.log('Connect ', args, lvl=verbose)
try:
sock = args[0]
ip = args[1]
if sock not in self._sockets:
self.log('New client connected:', ip, lvl=debug)
clientuuid = str(uuid4())
self._sockets[sock] = Socket(ip, clientuuid)
# Key uuid is temporary, until signin, will then be replaced
# with account uuid
self._clients[clientuuid] = Client(sock=sock, ip=ip, clientuuid=clientuuid)
self.log('Client connected:', clientuuid, lvl=debug) # depends on [control=['if'], data=['sock']]
else:
self.log('Old IP reconnected!', lvl=warn) # depends on [control=['try'], data=[]]
# self.fireEvent(write(sock, "Another client is
# connecting from your IP!"))
# self._sockets[sock] = (ip, uuid.uuid4())
except Exception as e:
self.log('Error during connect: ', e, type(e), lvl=critical) # depends on [control=['except'], data=['e']] |
def polling(self, none_stop=False, interval=0, timeout=20):
"""
This function creates a new Thread that calls an internal __retrieve_updates function.
This allows the bot to retrieve Updates automagically and notify listeners and message handlers accordingly.
Warning: Do not call this function more than once!
Always get updates.
:param interval:
:param none_stop: Do not stop polling when an ApiException occurs.
:param timeout: Timeout in seconds for long polling.
:return:
"""
if self.threaded:
self.__threaded_polling(none_stop, interval, timeout)
else:
self.__non_threaded_polling(none_stop, interval, timeout) | def function[polling, parameter[self, none_stop, interval, timeout]]:
constant[
This function creates a new Thread that calls an internal __retrieve_updates function.
This allows the bot to retrieve Updates automagically and notify listeners and message handlers accordingly.
Warning: Do not call this function more than once!
Always get updates.
:param interval:
:param none_stop: Do not stop polling when an ApiException occurs.
:param timeout: Timeout in seconds for long polling.
:return:
]
if name[self].threaded begin[:]
call[name[self].__threaded_polling, parameter[name[none_stop], name[interval], name[timeout]]] | keyword[def] identifier[polling] ( identifier[self] , identifier[none_stop] = keyword[False] , identifier[interval] = literal[int] , identifier[timeout] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[threaded] :
identifier[self] . identifier[__threaded_polling] ( identifier[none_stop] , identifier[interval] , identifier[timeout] )
keyword[else] :
identifier[self] . identifier[__non_threaded_polling] ( identifier[none_stop] , identifier[interval] , identifier[timeout] ) | def polling(self, none_stop=False, interval=0, timeout=20):
"""
This function creates a new Thread that calls an internal __retrieve_updates function.
This allows the bot to retrieve Updates automagically and notify listeners and message handlers accordingly.
Warning: Do not call this function more than once!
Always get updates.
:param interval:
:param none_stop: Do not stop polling when an ApiException occurs.
:param timeout: Timeout in seconds for long polling.
:return:
"""
if self.threaded:
self.__threaded_polling(none_stop, interval, timeout) # depends on [control=['if'], data=[]]
else:
self.__non_threaded_polling(none_stop, interval, timeout) |
def after(self, context):
"Invokes all after functions with context passed to them."
self._invoke(self._after, context)
run.after_each.execute(context) | def function[after, parameter[self, context]]:
constant[Invokes all after functions with context passed to them.]
call[name[self]._invoke, parameter[name[self]._after, name[context]]]
call[name[run].after_each.execute, parameter[name[context]]] | keyword[def] identifier[after] ( identifier[self] , identifier[context] ):
literal[string]
identifier[self] . identifier[_invoke] ( identifier[self] . identifier[_after] , identifier[context] )
identifier[run] . identifier[after_each] . identifier[execute] ( identifier[context] ) | def after(self, context):
"""Invokes all after functions with context passed to them."""
self._invoke(self._after, context)
run.after_each.execute(context) |
def _leave_event_hide(self):
""" Hides the tooltip after some time has passed (assuming the cursor is
not over the tooltip).
"""
if (not self._hide_timer.isActive() and
# If Enter events always came after Leave events, we wouldn't need
# this check. But on Mac OS, it sometimes happens the other way
# around when the tooltip is created.
QtGui.qApp.topLevelAt(QtGui.QCursor.pos()) != self):
self._hide_timer.start(300, self) | def function[_leave_event_hide, parameter[self]]:
constant[ Hides the tooltip after some time has passed (assuming the cursor is
not over the tooltip).
]
if <ast.BoolOp object at 0x7da2041db0d0> begin[:]
call[name[self]._hide_timer.start, parameter[constant[300], name[self]]] | keyword[def] identifier[_leave_event_hide] ( identifier[self] ):
literal[string]
keyword[if] ( keyword[not] identifier[self] . identifier[_hide_timer] . identifier[isActive] () keyword[and]
identifier[QtGui] . identifier[qApp] . identifier[topLevelAt] ( identifier[QtGui] . identifier[QCursor] . identifier[pos] ())!= identifier[self] ):
identifier[self] . identifier[_hide_timer] . identifier[start] ( literal[int] , identifier[self] ) | def _leave_event_hide(self):
""" Hides the tooltip after some time has passed (assuming the cursor is
not over the tooltip).
"""
if not self._hide_timer.isActive() and QtGui.qApp.topLevelAt(QtGui.QCursor.pos()) != self:
# If Enter events always came after Leave events, we wouldn't need
# this check. But on Mac OS, it sometimes happens the other way
# around when the tooltip is created.
self._hide_timer.start(300, self) # depends on [control=['if'], data=[]] |
def template_inheritance(obj):
'''
Generator that iterates the template and its ancestors.
The order is from most specialized (furthest descendant) to
most general (furthest ancestor).
obj can be either:
1. Mako Template object
2. Mako `self` object (available within a rendering template)
'''
if isinstance(obj, MakoTemplate):
obj = create_mako_context(obj)['self']
elif isinstance(obj, MakoContext):
obj = obj['self']
while obj is not None:
yield obj.template
obj = obj.inherits | def function[template_inheritance, parameter[obj]]:
constant[
Generator that iterates the template and its ancestors.
The order is from most specialized (furthest descendant) to
most general (furthest ancestor).
obj can be either:
1. Mako Template object
2. Mako `self` object (available within a rendering template)
]
if call[name[isinstance], parameter[name[obj], name[MakoTemplate]]] begin[:]
variable[obj] assign[=] call[call[name[create_mako_context], parameter[name[obj]]]][constant[self]]
while compare[name[obj] is_not constant[None]] begin[:]
<ast.Yield object at 0x7da1b11be110>
variable[obj] assign[=] name[obj].inherits | keyword[def] identifier[template_inheritance] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[MakoTemplate] ):
identifier[obj] = identifier[create_mako_context] ( identifier[obj] )[ literal[string] ]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[MakoContext] ):
identifier[obj] = identifier[obj] [ literal[string] ]
keyword[while] identifier[obj] keyword[is] keyword[not] keyword[None] :
keyword[yield] identifier[obj] . identifier[template]
identifier[obj] = identifier[obj] . identifier[inherits] | def template_inheritance(obj):
"""
Generator that iterates the template and its ancestors.
The order is from most specialized (furthest descendant) to
most general (furthest ancestor).
obj can be either:
1. Mako Template object
2. Mako `self` object (available within a rendering template)
"""
if isinstance(obj, MakoTemplate):
obj = create_mako_context(obj)['self'] # depends on [control=['if'], data=[]]
elif isinstance(obj, MakoContext):
obj = obj['self'] # depends on [control=['if'], data=[]]
while obj is not None:
yield obj.template
obj = obj.inherits # depends on [control=['while'], data=['obj']] |
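
A Mako-free stand-in showing the same traversal order, most-derived template first (the Node class is hypothetical):

class Node(object):
    def __init__(self, template, inherits=None):
        self.template = template
        self.inherits = inherits

def chain(obj):
    # Walk the ancestor chain exactly like template_inheritance does.
    while obj is not None:
        yield obj.template
        obj = obj.inherits

base = Node("base.html")
page = Node("page.html", inherits=Node("layout.html", inherits=base))
print(list(chain(page)))  # ['page.html', 'layout.html', 'base.html']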
def from_frequencies(cls, frequencies, concat=None):
"""
Build Huffman code table from given symbol frequencies
:param frequencies: symbol to frequency mapping
:param concat: function to concatenate symbols
"""
concat = concat or _guess_concat(next(iter(frequencies)))
# Heap consists of tuples: (frequency, [list of tuples: (symbol, (bitsize, value))])
heap = [(f, [(s, (0, 0))]) for s, f in frequencies.items()]
# Add EOF symbol.
# TODO: argument to set frequency of EOF?
heap.append((1, [(_EOF, (0, 0))]))
# Use heapq approach to build the encodings of the huffman tree leaves.
heapify(heap)
while len(heap) > 1:
# Pop the 2 smallest items from heap
a = heappop(heap)
b = heappop(heap)
# Merge nodes (update codes for each symbol appropriately)
merged = (
a[0] + b[0],
[(s, (n + 1, v)) for (s, (n, v)) in a[1]]
+ [(s, (n + 1, (1 << n) + v)) for (s, (n, v)) in b[1]]
)
heappush(heap, merged)
# Code table is dictionary mapping symbol to (bitsize, value)
table = dict(heappop(heap)[1])
return cls(table, concat=concat, check=False) | def function[from_frequencies, parameter[cls, frequencies, concat]]:
constant[
Build Huffman code table from given symbol frequencies
:param frequencies: symbol to frequency mapping
:param concat: function to concatenate symbols
]
variable[concat] assign[=] <ast.BoolOp object at 0x7da1b07143a0>
variable[heap] assign[=] <ast.ListComp object at 0x7da1b0775720>
call[name[heap].append, parameter[tuple[[<ast.Constant object at 0x7da1b07770a0>, <ast.List object at 0x7da1b0774280>]]]]
call[name[heapify], parameter[name[heap]]]
while compare[call[name[len], parameter[name[heap]]] greater[>] constant[1]] begin[:]
variable[a] assign[=] call[name[heappop], parameter[name[heap]]]
variable[b] assign[=] call[name[heappop], parameter[name[heap]]]
variable[merged] assign[=] tuple[[<ast.BinOp object at 0x7da1b07763b0>, <ast.BinOp object at 0x7da1b072e5c0>]]
call[name[heappush], parameter[name[heap], name[merged]]]
variable[table] assign[=] call[name[dict], parameter[call[call[name[heappop], parameter[name[heap]]]][constant[1]]]]
return[call[name[cls], parameter[name[table]]]] | keyword[def] identifier[from_frequencies] ( identifier[cls] , identifier[frequencies] , identifier[concat] = keyword[None] ):
literal[string]
identifier[concat] = identifier[concat] keyword[or] identifier[_guess_concat] ( identifier[next] ( identifier[iter] ( identifier[frequencies] )))
identifier[heap] =[( identifier[f] ,[( identifier[s] ,( literal[int] , literal[int] ))]) keyword[for] identifier[s] , identifier[f] keyword[in] identifier[frequencies] . identifier[items] ()]
identifier[heap] . identifier[append] (( literal[int] ,[( identifier[_EOF] ,( literal[int] , literal[int] ))]))
identifier[heapify] ( identifier[heap] )
keyword[while] identifier[len] ( identifier[heap] )> literal[int] :
identifier[a] = identifier[heappop] ( identifier[heap] )
identifier[b] = identifier[heappop] ( identifier[heap] )
identifier[merged] =(
identifier[a] [ literal[int] ]+ identifier[b] [ literal[int] ],
[( identifier[s] ,( identifier[n] + literal[int] , identifier[v] )) keyword[for] ( identifier[s] ,( identifier[n] , identifier[v] )) keyword[in] identifier[a] [ literal[int] ]]
+[( identifier[s] ,( identifier[n] + literal[int] ,( literal[int] << identifier[n] )+ identifier[v] )) keyword[for] ( identifier[s] ,( identifier[n] , identifier[v] )) keyword[in] identifier[b] [ literal[int] ]]
)
identifier[heappush] ( identifier[heap] , identifier[merged] )
identifier[table] = identifier[dict] ( identifier[heappop] ( identifier[heap] )[ literal[int] ])
keyword[return] identifier[cls] ( identifier[table] , identifier[concat] = identifier[concat] , identifier[check] = keyword[False] ) | def from_frequencies(cls, frequencies, concat=None):
"""
Build Huffman code table from given symbol frequencies
:param frequencies: symbol to frequency mapping
:param concat: function to concatenate symbols
"""
concat = concat or _guess_concat(next(iter(frequencies)))
# Heap consists of tuples: (frequency, [list of tuples: (symbol, (bitsize, value))])
heap = [(f, [(s, (0, 0))]) for (s, f) in frequencies.items()]
# Add EOF symbol.
# TODO: argument to set frequency of EOF?
heap.append((1, [(_EOF, (0, 0))]))
# Use heapq approach to build the encodings of the huffman tree leaves.
heapify(heap)
while len(heap) > 1:
# Pop the 2 smallest items from heap
a = heappop(heap)
b = heappop(heap)
# Merge nodes (update codes for each symbol appropriately)
merged = (a[0] + b[0], [(s, (n + 1, v)) for (s, (n, v)) in a[1]] + [(s, (n + 1, (1 << n) + v)) for (s, (n, v)) in b[1]])
heappush(heap, merged) # depends on [control=['while'], data=[]]
# Code table is dictionary mapping symbol to (bitsize, value)
table = dict(heappop(heap)[1])
return cls(table, concat=concat, check=False) |
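
To see the heap merge at work, here is a trimmed, runnable sketch of the same idea with made-up frequencies: each merge adds one bit to the code of every symbol in the two lightest subtrees, so rarer symbols end up with longer codes.

from heapq import heapify, heappop, heappush

freqs = {"a": 5, "b": 2, "c": 1, "d": 1}
heap = [(f, [(s, (0, 0))]) for s, f in freqs.items()]
heapify(heap)
while len(heap) > 1:
    a = heappop(heap)
    b = heappop(heap)
    # Symbols from subtree a gain a leading 0 bit, those from b a leading 1.
    merged = (a[0] + b[0],
              [(s, (n + 1, v)) for (s, (n, v)) in a[1]]
              + [(s, (n + 1, (1 << n) + v)) for (s, (n, v)) in b[1]])
    heappush(heap, merged)
table = dict(heappop(heap)[1])
for s, (nbits, value) in sorted(table.items()):
    print(s, format(value, "0{}b".format(nbits)))
# "a" receives the shortest code; "c" and "d" the longest.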
def send_notification(self, method, *args):
"""Send a JSON-RPC notification.
The notification *method* is sent with positional arguments *args*.
"""
message = self._version.create_request(method, args, notification=True)
self.send_message(message) | def function[send_notification, parameter[self, method]]:
constant[Send a JSON-RPC notification.
The notification *method* is sent with positional arguments *args*.
]
variable[message] assign[=] call[name[self]._version.create_request, parameter[name[method], name[args]]]
call[name[self].send_message, parameter[name[message]]] | keyword[def] identifier[send_notification] ( identifier[self] , identifier[method] ,* identifier[args] ):
literal[string]
identifier[message] = identifier[self] . identifier[_version] . identifier[create_request] ( identifier[method] , identifier[args] , identifier[notification] = keyword[True] )
identifier[self] . identifier[send_message] ( identifier[message] ) | def send_notification(self, method, *args):
"""Send a JSON-RPC notification.
The notification *method* is sent with positional arguments *args*.
"""
message = self._version.create_request(method, args, notification=True)
self.send_message(message) |
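
For context, a hedged sketch of what the payload typically looks like under JSON-RPC 2.0; the real framing depends on the _version object, which is not shown in this record:

import json

def create_notification(method, args):
    # A notification carries no "id", so the peer never sends a response.
    return {"jsonrpc": "2.0", "method": method, "params": list(args)}

print(json.dumps(create_notification("status_update", ("ready", 42))))
# {"jsonrpc": "2.0", "method": "status_update", "params": ["ready", 42]}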
def ls(path, load_path=None): # pylint: disable=C0103
'''
List the direct children of a node
CLI Example:
.. code-block:: bash
salt '*' augeas.ls /files/etc/passwd
path
The path to list
.. versionadded:: 2016.3.0
load_path
        A colon-separated list of directories that modules should be searched
in. This is in addition to the standard load path and the directories
in AUGEAS_LENS_LIB.
'''
def _match(path):
''' Internal match function '''
try:
matches = aug.match(salt.utils.stringutils.to_str(path))
except RuntimeError:
return {}
ret = {}
for _ma in matches:
ret[_ma] = aug.get(_ma)
return ret
load_path = _check_load_paths(load_path)
aug = _Augeas(loadpath=load_path)
path = path.rstrip('/') + '/'
match_path = path + '*'
matches = _match(match_path)
ret = {}
for key, value in six.iteritems(matches):
name = _lstrip_word(key, path)
if _match(key + '/*'):
ret[name + '/'] = value # has sub nodes, e.g. directory
else:
ret[name] = value
return ret | def function[ls, parameter[path, load_path]]:
constant[
List the direct children of a node
CLI Example:
.. code-block:: bash
salt '*' augeas.ls /files/etc/passwd
path
The path to list
.. versionadded:: 2016.3.0
load_path
        A colon-separated list of directories that modules should be searched
in. This is in addition to the standard load path and the directories
in AUGEAS_LENS_LIB.
]
def function[_match, parameter[path]]:
constant[ Internal match function ]
<ast.Try object at 0x7da1b1f49ab0>
variable[ret] assign[=] dictionary[[], []]
for taget[name[_ma]] in starred[name[matches]] begin[:]
call[name[ret]][name[_ma]] assign[=] call[name[aug].get, parameter[name[_ma]]]
return[name[ret]]
variable[load_path] assign[=] call[name[_check_load_paths], parameter[name[load_path]]]
variable[aug] assign[=] call[name[_Augeas], parameter[]]
variable[path] assign[=] binary_operation[call[name[path].rstrip, parameter[constant[/]]] + constant[/]]
variable[match_path] assign[=] binary_operation[name[path] + constant[*]]
variable[matches] assign[=] call[name[_match], parameter[name[match_path]]]
variable[ret] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1f76ec0>, <ast.Name object at 0x7da1b1f76e60>]]] in starred[call[name[six].iteritems, parameter[name[matches]]]] begin[:]
variable[name] assign[=] call[name[_lstrip_word], parameter[name[key], name[path]]]
if call[name[_match], parameter[binary_operation[name[key] + constant[/*]]]] begin[:]
call[name[ret]][binary_operation[name[name] + constant[/]]] assign[=] name[value]
return[name[ret]] | keyword[def] identifier[ls] ( identifier[path] , identifier[load_path] = keyword[None] ):
literal[string]
keyword[def] identifier[_match] ( identifier[path] ):
literal[string]
keyword[try] :
identifier[matches] = identifier[aug] . identifier[match] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[path] ))
keyword[except] identifier[RuntimeError] :
keyword[return] {}
identifier[ret] ={}
keyword[for] identifier[_ma] keyword[in] identifier[matches] :
identifier[ret] [ identifier[_ma] ]= identifier[aug] . identifier[get] ( identifier[_ma] )
keyword[return] identifier[ret]
identifier[load_path] = identifier[_check_load_paths] ( identifier[load_path] )
identifier[aug] = identifier[_Augeas] ( identifier[loadpath] = identifier[load_path] )
identifier[path] = identifier[path] . identifier[rstrip] ( literal[string] )+ literal[string]
identifier[match_path] = identifier[path] + literal[string]
identifier[matches] = identifier[_match] ( identifier[match_path] )
identifier[ret] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[matches] ):
identifier[name] = identifier[_lstrip_word] ( identifier[key] , identifier[path] )
keyword[if] identifier[_match] ( identifier[key] + literal[string] ):
identifier[ret] [ identifier[name] + literal[string] ]= identifier[value]
keyword[else] :
identifier[ret] [ identifier[name] ]= identifier[value]
keyword[return] identifier[ret] | def ls(path, load_path=None): # pylint: disable=C0103
"\n List the direct children of a node\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' augeas.ls /files/etc/passwd\n\n path\n The path to list\n\n .. versionadded:: 2016.3.0\n\n load_path\n A colon-spearated list of directories that modules should be searched\n in. This is in addition to the standard load path and the directories\n in AUGEAS_LENS_LIB.\n "
def _match(path):
""" Internal match function """
try:
matches = aug.match(salt.utils.stringutils.to_str(path)) # depends on [control=['try'], data=[]]
except RuntimeError:
return {} # depends on [control=['except'], data=[]]
ret = {}
for _ma in matches:
ret[_ma] = aug.get(_ma) # depends on [control=['for'], data=['_ma']]
return ret
load_path = _check_load_paths(load_path)
aug = _Augeas(loadpath=load_path)
path = path.rstrip('/') + '/'
match_path = path + '*'
matches = _match(match_path)
ret = {}
for (key, value) in six.iteritems(matches):
name = _lstrip_word(key, path)
if _match(key + '/*'):
ret[name + '/'] = value # has sub nodes, e.g. directory # depends on [control=['if'], data=[]]
else:
ret[name] = value # depends on [control=['for'], data=[]]
return ret |
def open(self):
""" Search device on USB tree and set is as escpos device """
self.device = usb.core.find(idVendor=self.idVendor, idProduct=self.idProduct)
if self.device is None:
raise NoDeviceError()
try:
if self.device.is_kernel_driver_active(self.interface):
self.device.detach_kernel_driver(self.interface)
self.device.set_configuration()
usb.util.claim_interface(self.device, self.interface)
except usb.core.USBError as e:
raise HandleDeviceError(e) | def function[open, parameter[self]]:
    constant[ Search device on USB tree and set it as escpos device ]
name[self].device assign[=] call[name[usb].core.find, parameter[]]
if compare[name[self].device is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc9a290>
<ast.Try object at 0x7da18dc98160> | keyword[def] identifier[open] ( identifier[self] ):
literal[string]
identifier[self] . identifier[device] = identifier[usb] . identifier[core] . identifier[find] ( identifier[idVendor] = identifier[self] . identifier[idVendor] , identifier[idProduct] = identifier[self] . identifier[idProduct] )
keyword[if] identifier[self] . identifier[device] keyword[is] keyword[None] :
keyword[raise] identifier[NoDeviceError] ()
keyword[try] :
keyword[if] identifier[self] . identifier[device] . identifier[is_kernel_driver_active] ( identifier[self] . identifier[interface] ):
identifier[self] . identifier[device] . identifier[detach_kernel_driver] ( identifier[self] . identifier[interface] )
identifier[self] . identifier[device] . identifier[set_configuration] ()
identifier[usb] . identifier[util] . identifier[claim_interface] ( identifier[self] . identifier[device] , identifier[self] . identifier[interface] )
keyword[except] identifier[usb] . identifier[core] . identifier[USBError] keyword[as] identifier[e] :
keyword[raise] identifier[HandleDeviceError] ( identifier[e] ) | def open(self):
""" Search device on USB tree and set is as escpos device """
self.device = usb.core.find(idVendor=self.idVendor, idProduct=self.idProduct)
if self.device is None:
raise NoDeviceError() # depends on [control=['if'], data=[]]
try:
if self.device.is_kernel_driver_active(self.interface):
self.device.detach_kernel_driver(self.interface) # depends on [control=['if'], data=[]]
self.device.set_configuration()
usb.util.claim_interface(self.device, self.interface) # depends on [control=['try'], data=[]]
except usb.core.USBError as e:
raise HandleDeviceError(e) # depends on [control=['except'], data=['e']] |
def prepend(self, error_message):
"""Add an ErrorMessage to the beginning of the queue.
Tracebacks are not prepended.
:param error_message: An element to add to the message.
:type error_message: ErrorMessage
"""
self.problems = error_message.problems + self.problems
self.details = error_message.details + self.details
self.suggestions = error_message.suggestions + self.suggestions
new_tracebacks = error_message.tracebacks
new_tracebacks.items.extend(self.tracebacks.items)
self.tracebacks = new_tracebacks | def function[prepend, parameter[self, error_message]]:
constant[Add an ErrorMessage to the beginning of the queue.
Tracebacks are not prepended.
:param error_message: An element to add to the message.
:type error_message: ErrorMessage
]
name[self].problems assign[=] binary_operation[name[error_message].problems + name[self].problems]
name[self].details assign[=] binary_operation[name[error_message].details + name[self].details]
name[self].suggestions assign[=] binary_operation[name[error_message].suggestions + name[self].suggestions]
variable[new_tracebacks] assign[=] name[error_message].tracebacks
call[name[new_tracebacks].items.extend, parameter[name[self].tracebacks.items]]
name[self].tracebacks assign[=] name[new_tracebacks] | keyword[def] identifier[prepend] ( identifier[self] , identifier[error_message] ):
literal[string]
identifier[self] . identifier[problems] = identifier[error_message] . identifier[problems] + identifier[self] . identifier[problems]
identifier[self] . identifier[details] = identifier[error_message] . identifier[details] + identifier[self] . identifier[details]
identifier[self] . identifier[suggestions] = identifier[error_message] . identifier[suggestions] + identifier[self] . identifier[suggestions]
identifier[new_tracebacks] = identifier[error_message] . identifier[tracebacks]
identifier[new_tracebacks] . identifier[items] . identifier[extend] ( identifier[self] . identifier[tracebacks] . identifier[items] )
identifier[self] . identifier[tracebacks] = identifier[new_tracebacks] | def prepend(self, error_message):
"""Add an ErrorMessage to the beginning of the queue.
Tracebacks are not prepended.
:param error_message: An element to add to the message.
:type error_message: ErrorMessage
"""
self.problems = error_message.problems + self.problems
self.details = error_message.details + self.details
self.suggestions = error_message.suggestions + self.suggestions
new_tracebacks = error_message.tracebacks
new_tracebacks.items.extend(self.tracebacks.items)
self.tracebacks = new_tracebacks |
def mtime(self, key):
"""Return the last modification time for the cache record with key.
May be useful for cache instances where the stored values can get
'stale', such as caching file or network resource contents."""
if key not in self.__dict:
raise CacheKeyError(key)
else:
node = self.__dict[key]
return node.mtime | def function[mtime, parameter[self, key]]:
constant[Return the last modification time for the cache record with key.
May be useful for cache instances where the stored values can get
'stale', such as caching file or network resource contents.]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self].__dict] begin[:]
<ast.Raise object at 0x7da1b0d0e3b0> | keyword[def] identifier[mtime] ( identifier[self] , identifier[key] ):
literal[string]
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[__dict] :
keyword[raise] identifier[CacheKeyError] ( identifier[key] )
keyword[else] :
identifier[node] = identifier[self] . identifier[__dict] [ identifier[key] ]
keyword[return] identifier[node] . identifier[mtime] | def mtime(self, key):
"""Return the last modification time for the cache record with key.
May be useful for cache instances where the stored values can get
'stale', such as caching file or network resource contents."""
if key not in self.__dict:
raise CacheKeyError(key) # depends on [control=['if'], data=['key']]
else:
node = self.__dict[key]
return node.mtime |
def add (self, ps):
""" Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
"""
assert isinstance(ps, PropertySet)
if ps not in self.added_:
self.added_[ps] = create(self.all_ + ps.all())
return self.added_[ps] | def function[add, parameter[self, ps]]:
constant[ Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
]
assert[call[name[isinstance], parameter[name[ps], name[PropertySet]]]]
if compare[name[ps] <ast.NotIn object at 0x7da2590d7190> name[self].added_] begin[:]
call[name[self].added_][name[ps]] assign[=] call[name[create], parameter[binary_operation[name[self].all_ + call[name[ps].all, parameter[]]]]]
return[call[name[self].added_][name[ps]]] | keyword[def] identifier[add] ( identifier[self] , identifier[ps] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[ps] , identifier[PropertySet] )
keyword[if] identifier[ps] keyword[not] keyword[in] identifier[self] . identifier[added_] :
identifier[self] . identifier[added_] [ identifier[ps] ]= identifier[create] ( identifier[self] . identifier[all_] + identifier[ps] . identifier[all] ())
keyword[return] identifier[self] . identifier[added_] [ identifier[ps] ] | def add(self, ps):
""" Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
"""
assert isinstance(ps, PropertySet)
if ps not in self.added_:
self.added_[ps] = create(self.all_ + ps.all()) # depends on [control=['if'], data=['ps']]
return self.added_[ps] |
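
The method above memoizes each merge keyed by the argument object, so repeated adds of the same property set are free. A generic, hypothetical rendering of that caching pattern:

class MergeCache(object):
    def __init__(self, items):
        self.items = tuple(items)
        self._added = {}

    def add(self, other):
        # The merged object is built at most once per argument, then cached.
        if other not in self._added:
            self._added[other] = MergeCache(self.items + other.items)
        return self._added[other]

a = MergeCache([1, 2])
b = MergeCache([3])
assert a.add(b) is a.add(b)   # second call returns the cached instance
print(a.add(b).items)         # (1, 2, 3)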
def paged_search_ext_s(self, base, scope, filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
serverctrls=None, clientctrls=None, timeout=-1, sizelimit=0):
"""
Behaves exactly like LDAPObject.search_ext_s() but internally uses the
simple paged results control to retrieve search results in chunks.
"""
req_ctrl = SimplePagedResultsControl(True, size=self.conf_LDAP_SYNC_BIND_PAGESIZE, cookie='')
# Send first search request
msgid = self.search_ext(base, ldap.SCOPE_SUBTREE, filterstr, attrlist=attrlist,
serverctrls=(serverctrls or []) + [req_ctrl])
results = []
while True:
rtype, rdata, rmsgid, rctrls = self.result3(msgid)
results.extend(rdata)
# Extract the simple paged results response control
pctrls = [c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType]
if pctrls:
if pctrls[0].cookie:
# Copy cookie from response control to request control
req_ctrl.cookie = pctrls[0].cookie
msgid = self.search_ext(base, ldap.SCOPE_SUBTREE, filterstr, attrlist=attrlist,
serverctrls=(serverctrls or []) + [req_ctrl])
else:
break
return results | def function[paged_search_ext_s, parameter[self, base, scope, filterstr, attrlist, attrsonly, serverctrls, clientctrls, timeout, sizelimit]]:
constant[
Behaves exactly like LDAPObject.search_ext_s() but internally uses the
simple paged results control to retrieve search results in chunks.
]
variable[req_ctrl] assign[=] call[name[SimplePagedResultsControl], parameter[constant[True]]]
variable[msgid] assign[=] call[name[self].search_ext, parameter[name[base], name[ldap].SCOPE_SUBTREE, name[filterstr]]]
variable[results] assign[=] list[[]]
while constant[True] begin[:]
<ast.Tuple object at 0x7da18c4cfcd0> assign[=] call[name[self].result3, parameter[name[msgid]]]
call[name[results].extend, parameter[name[rdata]]]
variable[pctrls] assign[=] <ast.ListComp object at 0x7da18c4ce410>
if name[pctrls] begin[:]
if call[name[pctrls]][constant[0]].cookie begin[:]
name[req_ctrl].cookie assign[=] call[name[pctrls]][constant[0]].cookie
variable[msgid] assign[=] call[name[self].search_ext, parameter[name[base], name[ldap].SCOPE_SUBTREE, name[filterstr]]]
return[name[results]] | keyword[def] identifier[paged_search_ext_s] ( identifier[self] , identifier[base] , identifier[scope] , identifier[filterstr] = literal[string] , identifier[attrlist] = keyword[None] , identifier[attrsonly] = literal[int] ,
identifier[serverctrls] = keyword[None] , identifier[clientctrls] = keyword[None] , identifier[timeout] =- literal[int] , identifier[sizelimit] = literal[int] ):
literal[string]
identifier[req_ctrl] = identifier[SimplePagedResultsControl] ( keyword[True] , identifier[size] = identifier[self] . identifier[conf_LDAP_SYNC_BIND_PAGESIZE] , identifier[cookie] = literal[string] )
identifier[msgid] = identifier[self] . identifier[search_ext] ( identifier[base] , identifier[ldap] . identifier[SCOPE_SUBTREE] , identifier[filterstr] , identifier[attrlist] = identifier[attrlist] ,
identifier[serverctrls] =( identifier[serverctrls] keyword[or] [])+[ identifier[req_ctrl] ])
identifier[results] =[]
keyword[while] keyword[True] :
identifier[rtype] , identifier[rdata] , identifier[rmsgid] , identifier[rctrls] = identifier[self] . identifier[result3] ( identifier[msgid] )
identifier[results] . identifier[extend] ( identifier[rdata] )
identifier[pctrls] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[rctrls] keyword[if] identifier[c] . identifier[controlType] == identifier[SimplePagedResultsControl] . identifier[controlType] ]
keyword[if] identifier[pctrls] :
keyword[if] identifier[pctrls] [ literal[int] ]. identifier[cookie] :
identifier[req_ctrl] . identifier[cookie] = identifier[pctrls] [ literal[int] ]. identifier[cookie]
identifier[msgid] = identifier[self] . identifier[search_ext] ( identifier[base] , identifier[ldap] . identifier[SCOPE_SUBTREE] , identifier[filterstr] , identifier[attrlist] = identifier[attrlist] ,
identifier[serverctrls] =( identifier[serverctrls] keyword[or] [])+[ identifier[req_ctrl] ])
keyword[else] :
keyword[break]
keyword[return] identifier[results] | def paged_search_ext_s(self, base, scope, filterstr='(objectClass=*)', attrlist=None, attrsonly=0, serverctrls=None, clientctrls=None, timeout=-1, sizelimit=0):
"""
Behaves exactly like LDAPObject.search_ext_s() but internally uses the
simple paged results control to retrieve search results in chunks.
"""
req_ctrl = SimplePagedResultsControl(True, size=self.conf_LDAP_SYNC_BIND_PAGESIZE, cookie='')
# Send first search request
msgid = self.search_ext(base, ldap.SCOPE_SUBTREE, filterstr, attrlist=attrlist, serverctrls=(serverctrls or []) + [req_ctrl])
results = []
while True:
(rtype, rdata, rmsgid, rctrls) = self.result3(msgid)
results.extend(rdata)
# Extract the simple paged results response control
pctrls = [c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType]
if pctrls:
if pctrls[0].cookie:
# Copy cookie from response control to request control
req_ctrl.cookie = pctrls[0].cookie
msgid = self.search_ext(base, ldap.SCOPE_SUBTREE, filterstr, attrlist=attrlist, serverctrls=(serverctrls or []) + [req_ctrl]) # depends on [control=['if'], data=[]]
else:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return results |
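
Stripped of the LDAP machinery, the control flow above is plain cookie pagination: keep re-issuing the request with the server's cookie until it comes back empty. A self-contained sketch, with a hypothetical fetch_page() standing in for search_ext/result3:

DATA = list(range(10))

def fetch_page(cookie, size=4):
    page = DATA[cookie:cookie + size]
    next_cookie = cookie + size if cookie + size < len(DATA) else None
    return page, next_cookie

results, cookie = [], 0
while True:
    page, cookie = fetch_page(cookie)
    results.extend(page)
    if cookie is None:  # an exhausted cookie ends the loop
        break
print(results)  # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]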
def to_df(self) -> pd.DataFrame:
"""Convert to pandas dataframe."""
df = pd.DataFrame(index=RangeIndex(0, self.shape[0], name=None))
for key in self.keys():
value = self[key]
for icolumn, column in enumerate(value.T):
df['{}{}'.format(key, icolumn+1)] = column
return df | def function[to_df, parameter[self]]:
constant[Convert to pandas dataframe.]
variable[df] assign[=] call[name[pd].DataFrame, parameter[]]
for taget[name[key]] in starred[call[name[self].keys, parameter[]]] begin[:]
variable[value] assign[=] call[name[self]][name[key]]
for taget[tuple[[<ast.Name object at 0x7da1b2069060>, <ast.Name object at 0x7da1b206b250>]]] in starred[call[name[enumerate], parameter[name[value].T]]] begin[:]
call[name[df]][call[constant[{}{}].format, parameter[name[key], binary_operation[name[icolumn] + constant[1]]]]] assign[=] name[column]
return[name[df]] | keyword[def] identifier[to_df] ( identifier[self] )-> identifier[pd] . identifier[DataFrame] :
literal[string]
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[index] = identifier[RangeIndex] ( literal[int] , identifier[self] . identifier[shape] [ literal[int] ], identifier[name] = keyword[None] ))
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[keys] ():
identifier[value] = identifier[self] [ identifier[key] ]
keyword[for] identifier[icolumn] , identifier[column] keyword[in] identifier[enumerate] ( identifier[value] . identifier[T] ):
identifier[df] [ literal[string] . identifier[format] ( identifier[key] , identifier[icolumn] + literal[int] )]= identifier[column]
keyword[return] identifier[df] | def to_df(self) -> pd.DataFrame:
"""Convert to pandas dataframe."""
df = pd.DataFrame(index=RangeIndex(0, self.shape[0], name=None))
for key in self.keys():
value = self[key]
for (icolumn, column) in enumerate(value.T):
df['{}{}'.format(key, icolumn + 1)] = column # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['key']]
return df |
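
Assuming each value is a 2-D array whose columns become numbered DataFrame columns ("key1", "key2", ...), a quick runnable check of that reshaping:

import numpy as np
import pandas as pd

data = {"x": np.arange(6).reshape(3, 2)}
df = pd.DataFrame(index=pd.RangeIndex(0, 3))
for key, value in data.items():
    for icolumn, column in enumerate(value.T):
        df["{}{}".format(key, icolumn + 1)] = column
print(df.columns.tolist())  # ['x1', 'x2']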
def subscribe(self, topics=(), pattern=None, listener=None):
"""Subscribe to a list of topics, or a topic regex pattern.
Partitions will be dynamically assigned via a group coordinator.
Topic subscriptions are not incremental: this list will replace the
current assignment (if there is one).
This method is incompatible with :meth:`~kafka.KafkaConsumer.assign`.
Arguments:
topics (list): List of topics for subscription.
pattern (str): Pattern to match available topics. You must provide
either topics or pattern, but not both.
listener (ConsumerRebalanceListener): Optionally include listener
callback, which will be called before and after each rebalance
operation.
As part of group management, the consumer will keep track of the
list of consumers that belong to a particular group and will
trigger a rebalance operation if one of the following events
trigger:
* Number of partitions change for any of the subscribed topics
* Topic is created or deleted
* An existing member of the consumer group dies
* A new member is added to the consumer group
When any of these events are triggered, the provided listener
will be invoked first to indicate that the consumer's assignment
has been revoked, and then again when the new assignment has
been received. Note that this listener will immediately override
any listener set in a previous call to subscribe. It is
guaranteed, however, that the partitions revoked/assigned
through this interface are from topics subscribed in this call.
Raises:
IllegalStateError: If called after previously calling
:meth:`~kafka.KafkaConsumer.assign`.
            AssertionError: If neither topics nor pattern is provided.
TypeError: If listener is not a ConsumerRebalanceListener.
"""
# SubscriptionState handles error checking
self._subscription.subscribe(topics=topics,
pattern=pattern,
listener=listener)
# Regex will need all topic metadata
if pattern is not None:
self._client.cluster.need_all_topic_metadata = True
self._client.set_topics([])
self._client.cluster.request_update()
log.debug("Subscribed to topic pattern: %s", pattern)
else:
self._client.cluster.need_all_topic_metadata = False
self._client.set_topics(self._subscription.group_subscription())
log.debug("Subscribed to topic(s): %s", topics) | def function[subscribe, parameter[self, topics, pattern, listener]]:
constant[Subscribe to a list of topics, or a topic regex pattern.
Partitions will be dynamically assigned via a group coordinator.
Topic subscriptions are not incremental: this list will replace the
current assignment (if there is one).
This method is incompatible with :meth:`~kafka.KafkaConsumer.assign`.
Arguments:
topics (list): List of topics for subscription.
pattern (str): Pattern to match available topics. You must provide
either topics or pattern, but not both.
listener (ConsumerRebalanceListener): Optionally include listener
callback, which will be called before and after each rebalance
operation.
As part of group management, the consumer will keep track of the
list of consumers that belong to a particular group and will
trigger a rebalance operation if one of the following events
trigger:
* Number of partitions change for any of the subscribed topics
* Topic is created or deleted
* An existing member of the consumer group dies
* A new member is added to the consumer group
When any of these events are triggered, the provided listener
will be invoked first to indicate that the consumer's assignment
has been revoked, and then again when the new assignment has
been received. Note that this listener will immediately override
any listener set in a previous call to subscribe. It is
guaranteed, however, that the partitions revoked/assigned
through this interface are from topics subscribed in this call.
Raises:
IllegalStateError: If called after previously calling
:meth:`~kafka.KafkaConsumer.assign`.
            AssertionError: If neither topics nor pattern is provided.
TypeError: If listener is not a ConsumerRebalanceListener.
]
call[name[self]._subscription.subscribe, parameter[]]
if compare[name[pattern] is_not constant[None]] begin[:]
name[self]._client.cluster.need_all_topic_metadata assign[=] constant[True]
call[name[self]._client.set_topics, parameter[list[[]]]]
call[name[self]._client.cluster.request_update, parameter[]]
call[name[log].debug, parameter[constant[Subscribed to topic pattern: %s], name[pattern]]] | keyword[def] identifier[subscribe] ( identifier[self] , identifier[topics] =(), identifier[pattern] = keyword[None] , identifier[listener] = keyword[None] ):
literal[string]
identifier[self] . identifier[_subscription] . identifier[subscribe] ( identifier[topics] = identifier[topics] ,
identifier[pattern] = identifier[pattern] ,
identifier[listener] = identifier[listener] )
keyword[if] identifier[pattern] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_client] . identifier[cluster] . identifier[need_all_topic_metadata] = keyword[True]
identifier[self] . identifier[_client] . identifier[set_topics] ([])
identifier[self] . identifier[_client] . identifier[cluster] . identifier[request_update] ()
identifier[log] . identifier[debug] ( literal[string] , identifier[pattern] )
keyword[else] :
identifier[self] . identifier[_client] . identifier[cluster] . identifier[need_all_topic_metadata] = keyword[False]
identifier[self] . identifier[_client] . identifier[set_topics] ( identifier[self] . identifier[_subscription] . identifier[group_subscription] ())
identifier[log] . identifier[debug] ( literal[string] , identifier[topics] ) | def subscribe(self, topics=(), pattern=None, listener=None):
"""Subscribe to a list of topics, or a topic regex pattern.
Partitions will be dynamically assigned via a group coordinator.
Topic subscriptions are not incremental: this list will replace the
current assignment (if there is one).
This method is incompatible with :meth:`~kafka.KafkaConsumer.assign`.
Arguments:
topics (list): List of topics for subscription.
pattern (str): Pattern to match available topics. You must provide
either topics or pattern, but not both.
listener (ConsumerRebalanceListener): Optionally include listener
callback, which will be called before and after each rebalance
operation.
As part of group management, the consumer will keep track of the
list of consumers that belong to a particular group and will
trigger a rebalance operation if one of the following events
trigger:
* Number of partitions change for any of the subscribed topics
* Topic is created or deleted
* An existing member of the consumer group dies
* A new member is added to the consumer group
When any of these events are triggered, the provided listener
will be invoked first to indicate that the consumer's assignment
has been revoked, and then again when the new assignment has
been received. Note that this listener will immediately override
any listener set in a previous call to subscribe. It is
guaranteed, however, that the partitions revoked/assigned
through this interface are from topics subscribed in this call.
Raises:
IllegalStateError: If called after previously calling
:meth:`~kafka.KafkaConsumer.assign`.
            AssertionError: If neither topics nor pattern is provided.
TypeError: If listener is not a ConsumerRebalanceListener.
"""
# SubscriptionState handles error checking
self._subscription.subscribe(topics=topics, pattern=pattern, listener=listener)
# Regex will need all topic metadata
if pattern is not None:
self._client.cluster.need_all_topic_metadata = True
self._client.set_topics([])
self._client.cluster.request_update()
log.debug('Subscribed to topic pattern: %s', pattern) # depends on [control=['if'], data=['pattern']]
else:
self._client.cluster.need_all_topic_metadata = False
self._client.set_topics(self._subscription.group_subscription())
log.debug('Subscribed to topic(s): %s', topics) |
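
A hedged usage sketch with kafka-python (the broker address, group id, and topic names are made up); note that topics and pattern are mutually exclusive, exactly as documented above:

from kafka import KafkaConsumer

consumer = KafkaConsumer(bootstrap_servers="localhost:9092",
                         group_id="my-group")
consumer.subscribe(topics=["orders", "payments"])  # explicit topic list
# ...or, instead, everything matching a regex:
# consumer.subscribe(pattern="^metrics-.*")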
def QA_fetch_stock_terminated(collections=DATABASE.stock_terminated):
    'Fetch basic stock information: the list of delisted (terminated) stocks'
    # 🛠todo convert into dataframe-type data
return pd.DataFrame([item for item in collections.find()]).drop('_id', axis=1, inplace=False).set_index('code', drop=False) | def function[QA_fetch_stock_terminated, parameter[collections]]:
    constant[Fetch basic stock information: the list of delisted (terminated) stocks]
return[call[call[call[name[pd].DataFrame, parameter[<ast.ListComp object at 0x7da1b1ff2110>]].drop, parameter[constant[_id]]].set_index, parameter[constant[code]]]] | keyword[def] identifier[QA_fetch_stock_terminated] ( identifier[collections] = identifier[DATABASE] . identifier[stock_terminated] ):
literal[string]
keyword[return] identifier[pd] . identifier[DataFrame] ([ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collections] . identifier[find] ()]). identifier[drop] ( literal[string] , identifier[axis] = literal[int] , identifier[inplace] = keyword[False] ). identifier[set_index] ( literal[string] , identifier[drop] = keyword[False] ) | def QA_fetch_stock_terminated(collections=DATABASE.stock_terminated):
"""获取股票基本信息 , 已经退市的股票列表"""
# 🛠todo 转变成 dataframe 类型数据
return pd.DataFrame([item for item in collections.find()]).drop('_id', axis=1, inplace=False).set_index('code', drop=False) |
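
The same find() -> DataFrame reshaping with plain dicts standing in for MongoDB documents (field values here are illustrative):

import pandas as pd

docs = [{"_id": 1, "code": "000001", "name": "PAB"},
        {"_id": 2, "code": "000002", "name": "Vanke"}]
df = pd.DataFrame(docs).drop("_id", axis=1).set_index("code", drop=False)
print(df.loc["000001", "name"])  # PAB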
def check_partition_column(partition_column, cols):
""" Check partition_column existence and type
Args:
partition_column: partition_column name
cols: dict with columns names and python types
Returns:
None
"""
for k, v in cols.items():
if k == partition_column:
if v == "int":
return
else:
raise InvalidPartitionColumn(
"partition_column must be int, and not {0}".format(v)
)
raise InvalidPartitionColumn(
"partition_column {0} not found in the query".format(partition_column)
) | def function[check_partition_column, parameter[partition_column, cols]]:
constant[ Check partition_column existence and type
Args:
partition_column: partition_column name
cols: dict with columns names and python types
Returns:
None
]
for taget[tuple[[<ast.Name object at 0x7da207f018a0>, <ast.Name object at 0x7da207f03a00>]]] in starred[call[name[cols].items, parameter[]]] begin[:]
if compare[name[k] equal[==] name[partition_column]] begin[:]
if compare[name[v] equal[==] constant[int]] begin[:]
return[None]
<ast.Raise object at 0x7da207f03880> | keyword[def] identifier[check_partition_column] ( identifier[partition_column] , identifier[cols] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[cols] . identifier[items] ():
keyword[if] identifier[k] == identifier[partition_column] :
keyword[if] identifier[v] == literal[string] :
keyword[return]
keyword[else] :
keyword[raise] identifier[InvalidPartitionColumn] (
literal[string] . identifier[format] ( identifier[v] )
)
keyword[raise] identifier[InvalidPartitionColumn] (
literal[string] . identifier[format] ( identifier[partition_column] )
) | def check_partition_column(partition_column, cols):
""" Check partition_column existence and type
Args:
partition_column: partition_column name
cols: dict with columns names and python types
Returns:
None
"""
for (k, v) in cols.items():
if k == partition_column:
if v == 'int':
return # depends on [control=['if'], data=[]]
else:
raise InvalidPartitionColumn('partition_column must be int, and not {0}'.format(v)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise InvalidPartitionColumn('partition_column {0} not found in the query'.format(partition_column)) |
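
Usage sketch, assuming check_partition_column and InvalidPartitionColumn from the record above are in scope: the check returns silently for an int column and raises otherwise.

cols = {"id": "int", "name": "str"}
check_partition_column("id", cols)        # passes, returns None
try:
    check_partition_column("name", cols)
except InvalidPartitionColumn as err:
    print(err)  # partition_column must be int, and not str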
def total_flux(F, A=None):
r"""Compute the total flux, or turnover flux, that is produced by
the flux sources and consumed by the flux sinks.
Parameters
----------
F : (M, M) ndarray
Matrix of flux values between pairs of states.
A : array_like (optional)
List of integer state labels for set A (reactant)
Returns
-------
F : float
The total flux, or turnover flux, that is produced by the flux
sources and consumed by the flux sinks
References
----------
.. [1] P. Metzner, C. Schuette and E. Vanden-Eijnden.
Transition Path Theory for Markov Jump Processes.
Multiscale Model Simul 7: 1192-1219 (2009)
"""
if issparse(F):
return sparse.tpt.total_flux(F, A=A)
elif isdense(F):
return dense.tpt.total_flux(F, A=A)
else:
raise _type_not_supported | def function[total_flux, parameter[F, A]]:
constant[Compute the total flux, or turnover flux, that is produced by
the flux sources and consumed by the flux sinks.
Parameters
----------
F : (M, M) ndarray
Matrix of flux values between pairs of states.
A : array_like (optional)
List of integer state labels for set A (reactant)
Returns
-------
F : float
The total flux, or turnover flux, that is produced by the flux
sources and consumed by the flux sinks
References
----------
.. [1] P. Metzner, C. Schuette and E. Vanden-Eijnden.
Transition Path Theory for Markov Jump Processes.
Multiscale Model Simul 7: 1192-1219 (2009)
]
if call[name[issparse], parameter[name[F]]] begin[:]
return[call[name[sparse].tpt.total_flux, parameter[name[F]]]] | keyword[def] identifier[total_flux] ( identifier[F] , identifier[A] = keyword[None] ):
literal[string]
keyword[if] identifier[issparse] ( identifier[F] ):
keyword[return] identifier[sparse] . identifier[tpt] . identifier[total_flux] ( identifier[F] , identifier[A] = identifier[A] )
keyword[elif] identifier[isdense] ( identifier[F] ):
keyword[return] identifier[dense] . identifier[tpt] . identifier[total_flux] ( identifier[F] , identifier[A] = identifier[A] )
keyword[else] :
keyword[raise] identifier[_type_not_supported] | def total_flux(F, A=None):
"""Compute the total flux, or turnover flux, that is produced by
the flux sources and consumed by the flux sinks.
Parameters
----------
F : (M, M) ndarray
Matrix of flux values between pairs of states.
A : array_like (optional)
List of integer state labels for set A (reactant)
Returns
-------
F : float
The total flux, or turnover flux, that is produced by the flux
sources and consumed by the flux sinks
References
----------
.. [1] P. Metzner, C. Schuette and E. Vanden-Eijnden.
Transition Path Theory for Markov Jump Processes.
Multiscale Model Simul 7: 1192-1219 (2009)
"""
if issparse(F):
return sparse.tpt.total_flux(F, A=A) # depends on [control=['if'], data=[]]
elif isdense(F):
return dense.tpt.total_flux(F, A=A) # depends on [control=['if'], data=[]]
else:
raise _type_not_supported |
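
For the dense case, the quantity in Metzner et al. reduces to the total flux leaving the source set A. A minimal numpy sketch of that sum (the flux matrix below is made up, and this mirrors the cited definition rather than the dense.tpt internals, which are not shown here):

import numpy as np

F = np.array([[0.0, 0.3, 0.1],
              [0.0, 0.0, 0.2],
              [0.0, 0.0, 0.0]])
A = [0]
notA = [i for i in range(F.shape[0]) if i not in A]
print(F[np.ix_(A, notA)].sum())  # 0.4, everything flowing out of state 0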
def exists_table_upgrades(self):
"""Return if the upgrades table exists
Returns
-------
bool
True if the table exists
            False if the table doesn't exist"""
query = """
SELECT EXISTS (
SELECT 1
FROM information_schema.tables
WHERE table_schema = '{}'
AND table_name = '{}'
);
""".format(self.upgrades_table[:self.upgrades_table.index('.')],
self.upgrades_table[self.upgrades_table.index('.')+1:])
self.cursor.execute(query)
return self.cursor.fetchone()[0] | def function[exists_table_upgrades, parameter[self]]:
constant[Return if the upgrades table exists
Returns
-------
bool
True if the table exists
            False if the table doesn't exist]
variable[query] assign[=] call[constant[
SELECT EXISTS (
SELECT 1
FROM information_schema.tables
WHERE table_schema = '{}'
AND table_name = '{}'
);
].format, parameter[call[name[self].upgrades_table][<ast.Slice object at 0x7da18eb547c0>], call[name[self].upgrades_table][<ast.Slice object at 0x7da20c992e30>]]]
call[name[self].cursor.execute, parameter[name[query]]]
return[call[call[name[self].cursor.fetchone, parameter[]]][constant[0]]] | keyword[def] identifier[exists_table_upgrades] ( identifier[self] ):
literal[string]
identifier[query] = literal[string] . identifier[format] ( identifier[self] . identifier[upgrades_table] [: identifier[self] . identifier[upgrades_table] . identifier[index] ( literal[string] )],
identifier[self] . identifier[upgrades_table] [ identifier[self] . identifier[upgrades_table] . identifier[index] ( literal[string] )+ literal[int] :])
identifier[self] . identifier[cursor] . identifier[execute] ( identifier[query] )
keyword[return] identifier[self] . identifier[cursor] . identifier[fetchone] ()[ literal[int] ] | def exists_table_upgrades(self):
"""Return if the upgrades table exists
Returns
-------
bool
True if the table exists
            False if the table doesn't exist"""
query = "\n SELECT EXISTS (\n SELECT 1\n FROM information_schema.tables\n WHERE table_schema = '{}'\n AND table_name = '{}'\n );\n ".format(self.upgrades_table[:self.upgrades_table.index('.')], self.upgrades_table[self.upgrades_table.index('.') + 1:])
self.cursor.execute(query)
return self.cursor.fetchone()[0] |
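
The same schema/table split and existence query, sketched with bound parameters (a hypothetical psycopg2-style cursor) instead of string formatting:

upgrades_table = "public.upgrades"
schema, table = upgrades_table.split(".", 1)
query = """
    SELECT EXISTS (
        SELECT 1
        FROM information_schema.tables
        WHERE table_schema = %s
        AND table_name = %s
    );
"""
# cursor.execute(query, (schema, table)); exists, = cursor.fetchone()
print(schema, table)  # public upgrades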
def to_float(self):
""" Converts to 32-bit data.
Returns
-------
:obj:`DepthImage`
depth image with 32 bit float data
"""
return DepthImage(self.data.astype(np.float32), frame=self.frame) | def function[to_float, parameter[self]]:
constant[ Converts to 32-bit data.
Returns
-------
:obj:`DepthImage`
depth image with 32 bit float data
]
return[call[name[DepthImage], parameter[call[name[self].data.astype, parameter[name[np].float32]]]]] | keyword[def] identifier[to_float] ( identifier[self] ):
literal[string]
keyword[return] identifier[DepthImage] ( identifier[self] . identifier[data] . identifier[astype] ( identifier[np] . identifier[float32] ), identifier[frame] = identifier[self] . identifier[frame] ) | def to_float(self):
""" Converts to 32-bit data.
Returns
-------
:obj:`DepthImage`
depth image with 32 bit float data
"""
return DepthImage(self.data.astype(np.float32), frame=self.frame) |
def make_device_class(spark_cloud, entries, timeout=30):
"""Returns a dynamic Device class based on what a GET device list from
the Spark Cloud returns.
spark_cloud parameter should be the caller instance of SparkCloud.
entries parameter should be the list of fields the Spark Cloud API is
returning.
"""
attrs = list(
set(
list(entries) + [
'requires_deep_update', 'functions', 'variables', 'api', 'status'
]
)
)
return type(
'Device',
(_BaseDevice, namedtuple('Device', attrs)),
{'__slots__': (), 'spark_cloud': spark_cloud, 'timeout' : timeout}
) | def function[make_device_class, parameter[spark_cloud, entries, timeout]]:
constant[Returns a dynamic Device class based on what a GET device list from
the Spark Cloud returns.
spark_cloud parameter should be the caller instance of SparkCloud.
entries parameter should be the list of fields the Spark Cloud API is
returning.
]
variable[attrs] assign[=] call[name[list], parameter[call[name[set], parameter[binary_operation[call[name[list], parameter[name[entries]]] + list[[<ast.Constant object at 0x7da18f58fa90>, <ast.Constant object at 0x7da18f58cb20>, <ast.Constant object at 0x7da18f58e530>, <ast.Constant object at 0x7da18f58d330>, <ast.Constant object at 0x7da18f58f640>]]]]]]]
return[call[name[type], parameter[constant[Device], tuple[[<ast.Name object at 0x7da18f58c7f0>, <ast.Call object at 0x7da18f58ea40>]], dictionary[[<ast.Constant object at 0x7da18f58fe80>, <ast.Constant object at 0x7da18f58d870>, <ast.Constant object at 0x7da18f58fcd0>], [<ast.Tuple object at 0x7da18f58e1d0>, <ast.Name object at 0x7da18f58d510>, <ast.Name object at 0x7da18f58ed40>]]]]] | keyword[def] identifier[make_device_class] ( identifier[spark_cloud] , identifier[entries] , identifier[timeout] = literal[int] ):
literal[string]
identifier[attrs] = identifier[list] (
identifier[set] (
identifier[list] ( identifier[entries] )+[
literal[string] , literal[string] , literal[string] , literal[string] , literal[string]
]
)
)
keyword[return] identifier[type] (
literal[string] ,
( identifier[_BaseDevice] , identifier[namedtuple] ( literal[string] , identifier[attrs] )),
{ literal[string] :(), literal[string] : identifier[spark_cloud] , literal[string] : identifier[timeout] }
) | def make_device_class(spark_cloud, entries, timeout=30):
"""Returns a dynamic Device class based on what a GET device list from
the Spark Cloud returns.
spark_cloud parameter should be the caller instance of SparkCloud.
entries parameter should be the list of fields the Spark Cloud API is
returning.
"""
attrs = list(set(list(entries) + ['requires_deep_update', 'functions', 'variables', 'api', 'status']))
return type('Device', (_BaseDevice, namedtuple('Device', attrs)), {'__slots__': (), 'spark_cloud': spark_cloud, 'timeout': timeout}) |
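
The three-argument type() call in isolation: a namedtuple subclass assembled at runtime from field names discovered elsewhere (the field names and the timeout value below are illustrative).

from collections import namedtuple

fields = ["id", "name", "connected"]
Device = type("Device",
              (namedtuple("Device", fields),),
              {"__slots__": (), "timeout": 30})

d = Device(id="42", name="core-1", connected=True)
print(d.name, Device.timeout)  # core-1 30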
def has_main_target (self, name):
"""Tells if a main target with the specified name exists."""
assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets()
return name in self.main_target_ | def function[has_main_target, parameter[self, name]]:
constant[Tells if a main target with the specified name exists.]
assert[call[name[isinstance], parameter[name[name], name[basestring]]]]
if <ast.UnaryOp object at 0x7da1b1f09840> begin[:]
call[name[self].build_main_targets, parameter[]]
return[compare[name[name] in name[self].main_target_]] | keyword[def] identifier[has_main_target] ( identifier[self] , identifier[name] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[name] , identifier[basestring] )
keyword[if] keyword[not] identifier[self] . identifier[built_main_targets_] :
identifier[self] . identifier[build_main_targets] ()
keyword[return] identifier[name] keyword[in] identifier[self] . identifier[main_target_] | def has_main_target(self, name):
"""Tells if a main target with the specified name exists."""
assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets() # depends on [control=['if'], data=[]]
return name in self.main_target_ |
def remove_root_metadata(docgraph):
"""
removes the ``metadata`` attribute of the root node of a document graph.
this is necessary for some exporters, as the attribute may contain
(nested) dictionaries.
"""
docgraph.node[docgraph.root].pop('metadata', None)
# delete metadata from the generic root node (which probably only exists
# when we merge graphs on the command line, cf. issue #89
if 'discoursegraph:root_node' in docgraph.node:
docgraph.node['discoursegraph:root_node'].pop('metadata', None)
# delete the metadata from all former root nodes which have been merged
# into this graph
if hasattr(docgraph, 'merged_rootnodes'):
for merged_rootnode in docgraph.merged_rootnodes:
try: # some of these nodes may not exist any longer
docgraph.node[merged_rootnode].pop('metadata', None)
except KeyError as e:
pass | def function[remove_root_metadata, parameter[docgraph]]:
constant[
removes the ``metadata`` attribute of the root node of a document graph.
this is necessary for some exporters, as the attribute may contain
(nested) dictionaries.
]
call[call[name[docgraph].node][name[docgraph].root].pop, parameter[constant[metadata], constant[None]]]
if compare[constant[discoursegraph:root_node] in name[docgraph].node] begin[:]
call[call[name[docgraph].node][constant[discoursegraph:root_node]].pop, parameter[constant[metadata], constant[None]]]
if call[name[hasattr], parameter[name[docgraph], constant[merged_rootnodes]]] begin[:]
for taget[name[merged_rootnode]] in starred[name[docgraph].merged_rootnodes] begin[:]
<ast.Try object at 0x7da1b2546680> | keyword[def] identifier[remove_root_metadata] ( identifier[docgraph] ):
literal[string]
identifier[docgraph] . identifier[node] [ identifier[docgraph] . identifier[root] ]. identifier[pop] ( literal[string] , keyword[None] )
keyword[if] literal[string] keyword[in] identifier[docgraph] . identifier[node] :
identifier[docgraph] . identifier[node] [ literal[string] ]. identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[hasattr] ( identifier[docgraph] , literal[string] ):
keyword[for] identifier[merged_rootnode] keyword[in] identifier[docgraph] . identifier[merged_rootnodes] :
keyword[try] :
identifier[docgraph] . identifier[node] [ identifier[merged_rootnode] ]. identifier[pop] ( literal[string] , keyword[None] )
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
keyword[pass] | def remove_root_metadata(docgraph):
"""
removes the ``metadata`` attribute of the root node of a document graph.
this is necessary for some exporters, as the attribute may contain
(nested) dictionaries.
"""
docgraph.node[docgraph.root].pop('metadata', None)
# delete metadata from the generic root node (which probably only exists
# when we merge graphs on the command line, cf. issue #89
if 'discoursegraph:root_node' in docgraph.node:
docgraph.node['discoursegraph:root_node'].pop('metadata', None) # depends on [control=['if'], data=[]]
# delete the metadata from all former root nodes which have been merged
# into this graph
if hasattr(docgraph, 'merged_rootnodes'):
for merged_rootnode in docgraph.merged_rootnodes:
try: # some of these nodes may not exist any longer
docgraph.node[merged_rootnode].pop('metadata', None) # depends on [control=['try'], data=[]]
except KeyError as e:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['merged_rootnode']] # depends on [control=['if'], data=[]] |
def bot(self, id):
"""
Retrieve a single bot.
Args:
id (str): UUID or username of the bot
Returns:
SkypeBotUser: resulting bot user object
"""
json = self.skype.conn("GET", "{0}/agents".format(SkypeConnection.API_BOT), params={"agentId": id},
auth=SkypeConnection.Auth.SkypeToken).json().get("agentDescriptions", [])
return self.merge(SkypeBotUser.fromRaw(self.skype, json[0])) if json else None | def function[bot, parameter[self, id]]:
constant[
Retrieve a single bot.
Args:
id (str): UUID or username of the bot
Returns:
SkypeBotUser: resulting bot user object
]
variable[json] assign[=] call[call[call[name[self].skype.conn, parameter[constant[GET], call[constant[{0}/agents].format, parameter[name[SkypeConnection].API_BOT]]]].json, parameter[]].get, parameter[constant[agentDescriptions], list[[]]]]
return[<ast.IfExp object at 0x7da18f722dd0>] | keyword[def] identifier[bot] ( identifier[self] , identifier[id] ):
literal[string]
identifier[json] = identifier[self] . identifier[skype] . identifier[conn] ( literal[string] , literal[string] . identifier[format] ( identifier[SkypeConnection] . identifier[API_BOT] ), identifier[params] ={ literal[string] : identifier[id] },
identifier[auth] = identifier[SkypeConnection] . identifier[Auth] . identifier[SkypeToken] ). identifier[json] (). identifier[get] ( literal[string] ,[])
keyword[return] identifier[self] . identifier[merge] ( identifier[SkypeBotUser] . identifier[fromRaw] ( identifier[self] . identifier[skype] , identifier[json] [ literal[int] ])) keyword[if] identifier[json] keyword[else] keyword[None] | def bot(self, id):
"""
Retrieve a single bot.
Args:
id (str): UUID or username of the bot
Returns:
SkypeBotUser: resulting bot user object
"""
json = self.skype.conn('GET', '{0}/agents'.format(SkypeConnection.API_BOT), params={'agentId': id}, auth=SkypeConnection.Auth.SkypeToken).json().get('agentDescriptions', [])
return self.merge(SkypeBotUser.fromRaw(self.skype, json[0])) if json else None |
def Add(self, service, method, request, global_params=None):
"""Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
          method: A string indicating the desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.method.
global_params: Optional additional parameters to pass into
method.PrepareHttpRequest.
Returns:
None
"""
# Retrieve the configs for the desired method and service.
method_config = service.GetMethodConfig(method)
upload_config = service.GetUploadConfig(method)
# Prepare the HTTP Request.
http_request = service.PrepareHttpRequest(
method_config, request, global_params=global_params,
upload_config=upload_config)
# Create the request and add it to our master list.
api_request = self.ApiCall(
http_request, self.retryable_codes, service, method_config)
self.api_requests.append(api_request) | def function[Add, parameter[self, service, method, request, global_params]]:
constant[Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
          method: A string indicating the desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.method.
global_params: Optional additional parameters to pass into
method.PrepareHttpRequest.
Returns:
None
]
variable[method_config] assign[=] call[name[service].GetMethodConfig, parameter[name[method]]]
variable[upload_config] assign[=] call[name[service].GetUploadConfig, parameter[name[method]]]
variable[http_request] assign[=] call[name[service].PrepareHttpRequest, parameter[name[method_config], name[request]]]
variable[api_request] assign[=] call[name[self].ApiCall, parameter[name[http_request], name[self].retryable_codes, name[service], name[method_config]]]
call[name[self].api_requests.append, parameter[name[api_request]]] | keyword[def] identifier[Add] ( identifier[self] , identifier[service] , identifier[method] , identifier[request] , identifier[global_params] = keyword[None] ):
literal[string]
identifier[method_config] = identifier[service] . identifier[GetMethodConfig] ( identifier[method] )
identifier[upload_config] = identifier[service] . identifier[GetUploadConfig] ( identifier[method] )
identifier[http_request] = identifier[service] . identifier[PrepareHttpRequest] (
identifier[method_config] , identifier[request] , identifier[global_params] = identifier[global_params] ,
identifier[upload_config] = identifier[upload_config] )
identifier[api_request] = identifier[self] . identifier[ApiCall] (
identifier[http_request] , identifier[self] . identifier[retryable_codes] , identifier[service] , identifier[method_config] )
identifier[self] . identifier[api_requests] . identifier[append] ( identifier[api_request] ) | def Add(self, service, method, request, global_params=None):
"""Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
method: A string indicating the desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.method.
global_params: Optional additional parameters to pass into
method.PrepareHttpRequest.
Returns:
None
"""
# Retrieve the configs for the desired method and service.
method_config = service.GetMethodConfig(method)
upload_config = service.GetUploadConfig(method)
# Prepare the HTTP Request.
http_request = service.PrepareHttpRequest(method_config, request, global_params=global_params, upload_config=upload_config)
# Create the request and add it to our master list.
api_request = self.ApiCall(http_request, self.retryable_codes, service, method_config)
self.api_requests.append(api_request) |
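A hedged usage sketch for Add(): it only queues work, and a companion Execute(http)-style method (assumed here) is what actually sends the accumulated api_requests. The batch constructor, client, service, and request type below are illustrative placeholders, not a definitive apitools invocation.

batch = BatchApiRequest()                     # assumed host class of Add()
batch.Add(client.objects, 'Get',
          ObjectsGetRequest(bucket='b', object='a.txt'))   # queued, not sent
batch.Add(client.objects, 'Get',
          ObjectsGetRequest(bucket='b', object='b.txt'))
responses = batch.Execute(client.http)        # assumed to flush the batch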
def write(config):
"""Commits any pending modifications, ie save a configuration file if
it has been marked "dirty" as a result of an normal
assignment. The modifications are written to the first
writable source in this config object.
.. note::
This is a static method, i.e. not a method on any object
instance. This is because all attribute access on a
LayeredConfig object is meant to retrieve configuration
settings.
:param config: The configuration object to save
:type config: layeredconfig.LayeredConfig
"""
root = config
while root._parent:
root = root._parent
for source in root._sources:
if source.writable and source.dirty:
source.save() | def function[write, parameter[config]]:
constant[Commits any pending modifications, i.e. saves a configuration file if
it has been marked "dirty" as a result of a normal
assignment. The modifications are written to the first
writable source in this config object.
.. note::
This is a static method, i.e. not a method on any object
instance. This is because all attribute access on a
LayeredConfig object is meant to retrieve configuration
settings.
:param config: The configuration object to save
:type config: layeredconfig.LayeredConfig
]
variable[root] assign[=] name[config]
while name[root]._parent begin[:]
variable[root] assign[=] name[root]._parent
for taget[name[source]] in starred[name[root]._sources] begin[:]
if <ast.BoolOp object at 0x7da1b1496380> begin[:]
call[name[source].save, parameter[]] | keyword[def] identifier[write] ( identifier[config] ):
literal[string]
identifier[root] = identifier[config]
keyword[while] identifier[root] . identifier[_parent] :
identifier[root] = identifier[root] . identifier[_parent]
keyword[for] identifier[source] keyword[in] identifier[root] . identifier[_sources] :
keyword[if] identifier[source] . identifier[writable] keyword[and] identifier[source] . identifier[dirty] :
identifier[source] . identifier[save] () | def write(config):
"""Commits any pending modifications, ie save a configuration file if
it has been marked "dirty" as a result of an normal
assignment. The modifications are written to the first
writable source in this config object.
.. note::
This is a static method, i.e. not a method on any object
instance. This is because all attribute access on a
LayeredConfig object is meant to retrieve configuration
settings.
:param config: The configuration object to save
:type config: layeredconfig.LayeredConfig
"""
root = config
while root._parent:
root = root._parent # depends on [control=['while'], data=[]]
for source in root._sources:
if source.writable and source.dirty:
source.save() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['source']] |
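A minimal runnable sketch of the intended call pattern, assuming the layeredconfig package with a writable INIFile source; the filename and setting name are placeholders.

from layeredconfig import LayeredConfig, INIFile

config = LayeredConfig(INIFile("myapp.ini"))  # first (and only) writable source
config.processes = 4                          # plain assignment marks the source dirty
LayeredConfig.write(config)                   # static call: persists to myapp.ini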
def add_message_event(proto_message, span, message_event_type, message_id=1):
"""Adds a MessageEvent to the span based off of the given protobuf
message
"""
span.add_time_event(
time_event=time_event.TimeEvent(
datetime.utcnow(),
message_event=time_event.MessageEvent(
message_id,
type=message_event_type,
uncompressed_size_bytes=proto_message.ByteSize()
)
)
) | def function[add_message_event, parameter[proto_message, span, message_event_type, message_id]]:
constant[Adds a MessageEvent to the span based off of the given protobuf
message
]
call[name[span].add_time_event, parameter[]] | keyword[def] identifier[add_message_event] ( identifier[proto_message] , identifier[span] , identifier[message_event_type] , identifier[message_id] = literal[int] ):
literal[string]
identifier[span] . identifier[add_time_event] (
identifier[time_event] = identifier[time_event] . identifier[TimeEvent] (
identifier[datetime] . identifier[utcnow] (),
identifier[message_event] = identifier[time_event] . identifier[MessageEvent] (
identifier[message_id] ,
identifier[type] = identifier[message_event_type] ,
identifier[uncompressed_size_bytes] = identifier[proto_message] . identifier[ByteSize] ()
)
)
) | def add_message_event(proto_message, span, message_event_type, message_id=1):
"""Adds a MessageEvent to the span based off of the given protobuf
message
"""
span.add_time_event(time_event=time_event.TimeEvent(datetime.utcnow(), message_event=time_event.MessageEvent(message_id, type=message_event_type, uncompressed_size_bytes=proto_message.ByteSize()))) |
def twice(self):
"""
Inspected function should be called two times
Return: self
"""
def check(): #pylint: disable=missing-docstring
return super(SinonExpectation, self).calledTwice
self.valid_list.append(check)
return self | def function[twice, parameter[self]]:
constant[
Inspected function should be called two times
Return: self
]
def function[check, parameter[]]:
return[call[name[super], parameter[name[SinonExpectation], name[self]]].calledTwice]
call[name[self].valid_list.append, parameter[name[check]]]
return[name[self]] | keyword[def] identifier[twice] ( identifier[self] ):
literal[string]
keyword[def] identifier[check] ():
keyword[return] identifier[super] ( identifier[SinonExpectation] , identifier[self] ). identifier[calledTwice]
identifier[self] . identifier[valid_list] . identifier[append] ( identifier[check] )
keyword[return] identifier[self] | def twice(self):
"""
Inspected function should be called two times
Return: self
"""
def check(): #pylint: disable=missing-docstring
return super(SinonExpectation, self).calledTwice
self.valid_list.append(check)
return self |
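A sketch of the fluent expectation style this method enables, assuming a sinon.py-like spy wrapped by SinonExpectation; the construction and the final verification step are assumptions about the surrounding API.

expectation = SinonExpectation(my_func)  # assumed construction over a spied function
expectation.twice()                      # queues the calledTwice check in valid_list
my_func(); my_func()
# A companion verify()-style method is assumed to run every queued check.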
def _symbol_extract(self, regex, plus = True, brackets=False):
"""Extracts a symbol or full symbol from the current line,
optionally including the character under the cursor.
:arg regex: the compiled regular expression to use for extraction.
:arg plus: when true, the character under the cursor *is* included.
:arg brackets: when true, matching pairs of brackets are first removed
before the regex is run.
"""
charplus = self.pos[1] + (1 if plus else -1)
consider = self.current_line[:charplus][::-1]
#We want to remove matching pairs of brackets so that derived types
#that have arrays still get intellisense.
if brackets==True:
#The string has already been reversed, just run through it.
rightb = []
lastchar = None
for i in range(len(consider)):
if consider[i] == ")":
rightb.append(i)
elif consider[i] == "(" and len(rightb) > 0:
lastchar = i
rightb.pop()
if lastchar is not None:
consider = '%' + consider[lastchar+1:]
rematch = regex.match(consider)
if rematch is not None:
return rematch.group("symbol")[::-1]
else:
return "" | def function[_symbol_extract, parameter[self, regex, plus, brackets]]:
constant[Extracts a symbol or full symbol from the current line,
optionally including the character under the cursor.
:arg regex: the compiled regular expression to use for extraction.
:arg plus: when true, the character under the cursor *is* included.
:arg brackets: when true, matching pairs of brackets are first removed
before the regex is run.
]
variable[charplus] assign[=] binary_operation[call[name[self].pos][constant[1]] + <ast.IfExp object at 0x7da1b25849a0>]
variable[consider] assign[=] call[call[name[self].current_line][<ast.Slice object at 0x7da1b2584970>]][<ast.Slice object at 0x7da1b2585c00>]
if compare[name[brackets] equal[==] constant[True]] begin[:]
variable[rightb] assign[=] list[[]]
variable[lastchar] assign[=] constant[None]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[consider]]]]]] begin[:]
if compare[call[name[consider]][name[i]] equal[==] constant[)]] begin[:]
call[name[rightb].append, parameter[name[i]]]
if compare[name[lastchar] is_not constant[None]] begin[:]
variable[consider] assign[=] binary_operation[constant[%] + call[name[consider]][<ast.Slice object at 0x7da1b25851b0>]]
variable[rematch] assign[=] call[name[regex].match, parameter[name[consider]]]
if compare[name[rematch] is_not constant[None]] begin[:]
return[call[call[name[rematch].group, parameter[constant[symbol]]]][<ast.Slice object at 0x7da1b25844f0>]] | keyword[def] identifier[_symbol_extract] ( identifier[self] , identifier[regex] , identifier[plus] = keyword[True] , identifier[brackets] = keyword[False] ):
literal[string]
identifier[charplus] = identifier[self] . identifier[pos] [ literal[int] ]+( literal[int] keyword[if] identifier[plus] keyword[else] - literal[int] )
identifier[consider] = identifier[self] . identifier[current_line] [: identifier[charplus] ][::- literal[int] ]
keyword[if] identifier[brackets] == keyword[True] :
identifier[rightb] =[]
identifier[lastchar] = keyword[None]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[consider] )):
keyword[if] identifier[consider] [ identifier[i] ]== literal[string] :
identifier[rightb] . identifier[append] ( identifier[i] )
keyword[elif] identifier[consider] [ identifier[i] ]== literal[string] keyword[and] identifier[len] ( identifier[rightb] )> literal[int] :
identifier[lastchar] = identifier[i]
identifier[rightb] . identifier[pop] ()
keyword[if] identifier[lastchar] keyword[is] keyword[not] keyword[None] :
identifier[consider] = literal[string] + identifier[consider] [ identifier[lastchar] + literal[int] :]
identifier[rematch] = identifier[regex] . identifier[match] ( identifier[consider] )
keyword[if] identifier[rematch] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[rematch] . identifier[group] ( literal[string] )[::- literal[int] ]
keyword[else] :
keyword[return] literal[string] | def _symbol_extract(self, regex, plus=True, brackets=False):
"""Extracts a symbol or full symbol from the current line,
optionally including the character under the cursor.
:arg regex: the compiled regular expression to use for extraction.
:arg plus: when true, the character under the cursor *is* included.
:arg brackets: when true, matching pairs of brackets are first removed
before the regex is run.
"""
charplus = self.pos[1] + (1 if plus else -1)
consider = self.current_line[:charplus][::-1]
#We want to remove matching pairs of brackets so that derived types
#that have arrays still get intellisense.
if brackets == True:
#The string has already been reversed, just run through it.
rightb = []
lastchar = None
for i in range(len(consider)):
if consider[i] == ')':
rightb.append(i) # depends on [control=['if'], data=[]]
elif consider[i] == '(' and len(rightb) > 0:
lastchar = i
rightb.pop() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if lastchar is not None:
consider = '%' + consider[lastchar + 1:] # depends on [control=['if'], data=['lastchar']] # depends on [control=['if'], data=[]]
rematch = regex.match(consider)
if rematch is not None:
return rematch.group('symbol')[::-1] # depends on [control=['if'], data=['rematch']]
else:
return '' |
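The bracket-removal step above is easier to see in isolation. This self-contained sketch re-implements just that loop on a reversed cursor prefix, showing how an array reference collapses so the member chain still matches the symbol regex.

def strip_matched_brackets(reversed_text):
    # Mirror of the loop above: scan the reversed prefix, pair ')' with '(',
    # and remember where the outermost matched pair opens.
    rightb, lastchar = [], None
    for i, ch in enumerate(reversed_text):
        if ch == ")":
            rightb.append(i)
        elif ch == "(" and rightb:
            lastchar = i
            rightb.pop()
    if lastchar is None:
        return reversed_text
    return "%" + reversed_text[lastchar + 1:]

prefix = "mytype%arr(i,j)%"                        # text left of the cursor
print(strip_matched_brackets(prefix[::-1])[::-1])  # -> mytype%arr%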
def get_hierarchy(ontology, ols_base=None):
"""Iterates over the parent-child relationships in an ontolog
:param str ontology: The name of the ontology
:param str ols_base: An optional, custom OLS base url
:rtype: iter[tuple[str,str]]
"""
client = OlsClient(ols_base=ols_base)
return client.iter_hierarchy(ontology) | def function[get_hierarchy, parameter[ontology, ols_base]]:
constant[Iterates over the parent-child relationships in an ontology
:param str ontology: The name of the ontology
:param str ols_base: An optional, custom OLS base url
:rtype: iter[tuple[str,str]]
]
variable[client] assign[=] call[name[OlsClient], parameter[]]
return[call[name[client].iter_hierarchy, parameter[name[ontology]]]] | keyword[def] identifier[get_hierarchy] ( identifier[ontology] , identifier[ols_base] = keyword[None] ):
literal[string]
identifier[client] = identifier[OlsClient] ( identifier[ols_base] = identifier[ols_base] )
keyword[return] identifier[client] . identifier[iter_hierarchy] ( identifier[ontology] ) | def get_hierarchy(ontology, ols_base=None):
"""Iterates over the parent-child relationships in an ontolog
:param str ontology: The name of the ontology
:param str ols_base: An optional, custom OLS base url
:rtype: iter[tuple[str,str]]
"""
client = OlsClient(ols_base=ols_base)
return client.iter_hierarchy(ontology) |
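A minimal usage sketch, assuming this module's OlsClient dependency and network access to the public EBI OLS instance; 'go' is an example ontology id.

for i, (parent, child) in enumerate(get_hierarchy('go')):
    print(parent, '->', child)
    if i >= 4:   # stop early; full hierarchies are large
        break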
def predict_proba(self, a, b, **kwargs):
"""Prediction method for pairwise causal inference using the ANM model.
Args:
a (numpy.ndarray): Variable 1
b (numpy.ndarray): Variable 2
Returns:
float: Causation score (Value : 1 if a->b and -1 if b->a)
"""
a = scale(a).reshape((-1, 1))
b = scale(b).reshape((-1, 1))
return self.anm_score(b, a) - self.anm_score(a, b) | def function[predict_proba, parameter[self, a, b]]:
constant[Prediction method for pairwise causal inference using the ANM model.
Args:
a (numpy.ndarray): Variable 1
b (numpy.ndarray): Variable 2
Returns:
float: Causation score (Value : 1 if a->b and -1 if b->a)
]
variable[a] assign[=] call[call[name[scale], parameter[name[a]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b0137a00>, <ast.Constant object at 0x7da1b0136e60>]]]]
variable[b] assign[=] call[call[name[scale], parameter[name[b]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da2049632e0>, <ast.Constant object at 0x7da204962380>]]]]
return[binary_operation[call[name[self].anm_score, parameter[name[b], name[a]]] - call[name[self].anm_score, parameter[name[a], name[b]]]]] | keyword[def] identifier[predict_proba] ( identifier[self] , identifier[a] , identifier[b] ,** identifier[kwargs] ):
literal[string]
identifier[a] = identifier[scale] ( identifier[a] ). identifier[reshape] ((- literal[int] , literal[int] ))
identifier[b] = identifier[scale] ( identifier[b] ). identifier[reshape] ((- literal[int] , literal[int] ))
keyword[return] identifier[self] . identifier[anm_score] ( identifier[b] , identifier[a] )- identifier[self] . identifier[anm_score] ( identifier[a] , identifier[b] ) | def predict_proba(self, a, b, **kwargs):
"""Prediction method for pairwise causal inference using the ANM model.
Args:
a (numpy.ndarray): Variable 1
b (numpy.ndarray): Variable 2
Returns:
float: Causation score (Value : 1 if a->b and -1 if b->a)
"""
a = scale(a).reshape((-1, 1))
b = scale(b).reshape((-1, 1))
return self.anm_score(b, a) - self.anm_score(a, b) |
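A sketch of pairwise scoring on synthetic data, assuming this method lives on a cdt-style ANM class and that numpy is available; the sample size and noise level are arbitrary.

import numpy as np

rng = np.random.RandomState(0)
a = rng.uniform(-1, 1, size=300)
b = a ** 2 + 0.05 * rng.normal(size=300)  # b is a noisy function of a
model = ANM()                             # assumed host class of predict_proba
print(model.predict_proba(a, b))          # positive score suggests a -> b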
def convert(self):
"""
:return: Converted value.
:raises typepy.TypeConversionError:
If the value cannot be converted.
"""
if self.is_type():
return self.force_convert()
raise TypeConversionError(
"failed to convert from {} to {}".format(type(self._data).__name__, self.typename)
) | def function[convert, parameter[self]]:
constant[
:return: Converted value.
:raises typepy.TypeConversionError:
If the value cannot be converted.
]
if call[name[self].is_type, parameter[]] begin[:]
return[call[name[self].force_convert, parameter[]]]
<ast.Raise object at 0x7da20c76c6a0> | keyword[def] identifier[convert] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_type] ():
keyword[return] identifier[self] . identifier[force_convert] ()
keyword[raise] identifier[TypeConversionError] (
literal[string] . identifier[format] ( identifier[type] ( identifier[self] . identifier[_data] ). identifier[__name__] , identifier[self] . identifier[typename] )
) | def convert(self):
"""
:return: Converted value.
:raises typepy.TypeConversionError:
If the value cannot be converted.
"""
if self.is_type():
return self.force_convert() # depends on [control=['if'], data=[]]
raise TypeConversionError('failed to convert from {} to {}'.format(type(self._data).__name__, self.typename)) |
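A runnable sketch with typepy's Integer checker, which implements this convert() contract; assumes typepy is installed. StrictLevel.MIN permits string-to-int conversion, while a non-numeric string still raises.

from typepy import Integer, StrictLevel, TypeConversionError

print(Integer("42", strict_level=StrictLevel.MIN).convert())  # -> 42
try:
    Integer("abc", strict_level=StrictLevel.MIN).convert()
except TypeConversionError as exc:
    print(exc)  # failed to convert from str to INTEGER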
def magic_help(keyhelp):
"""
returns a help message for a given magic key
"""
helpme = {}
helpme["er_location_name"] = "Name for location or drill site"
helpme["er_location_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["location_type"] = "Location type"
helpme["location_begin_lat"] = "Begin of section or core or outcrop -- latitude"
helpme["location_begin_lon"] = "Begin of section or core or outcrop -- longitude"
helpme["location_begin_elevation"] = "Begin of section or core or outcrop -- elevation relative to sealevel"
helpme["location_end_lat"] = "Ending of section or core -- latitude "
helpme["location_end_lon"] = "Ending of section or core -- longitude "
helpme["location_end_elevation"] = "Ending of section or core -- elevation relative to sealevel"
helpme["location_geoid"] = "Geoid used in determination of latitude and longitude: WGS84, GEOID03, USGG2003, GEOID99, G99SSS , G99BM, DEFLEC99 "
helpme["continent_ocean"] = "Name for continent or ocean island region"
helpme["ocean_sea"] = "Name for location in an ocean or sea"
helpme["country"] = "Country name"
helpme["region"] = "Region name"
helpme["plate_block"] = "Plate or tectonic block name"
helpme["terrane"] = "Terrane name"
helpme["tectonic_setting"] = "Tectonic setting"
helpme["location_description"] = "Detailed description"
helpme["location_url"] = "Website URL for the location explicitly"
helpme["er_scientist_mail_names"] = "Colon-delimited list of names for scientists who described location"
helpme["er_citation_names"] = "Colon-delimited list of citations"
helpme["er_formation_name"] = "Name for formation"
helpme["er_formation_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["formation_class"] = "General lithology class: igneous, metamorphic or sedimentary"
helpme["formation_lithology"] = "Lithology: e.g., basalt, sandstone, etc."
helpme["formation_paleo_enviroment"] = "Depositional environment"
helpme["formation_thickness"] = "Formation thickness"
helpme["er_member_name"] = "Name for member"
helpme["er_member_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["er_formation_name"] = "Name for formation"
helpme["member_class"] = "General lithology type"
helpme["member_lithology"] = "Lithology"
helpme["member_paleo_environment"] = "Depositional environment"
helpme["member_thickness"] = "Member thickness"
helpme["member_description"] = "Detailed description"
helpme["er_section_name"] = "Name for section or core"
helpme["er_section_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["er_expedition_name"] = "Name for seagoing or land expedition"
helpme["er_location_name"] = "Name for location or drill site"
helpme["er_formation_name"] = "Name for formation"
helpme["er_member_name"] = "Name for member"
helpme["section_definition"] = "General definition of section"
helpme["section_class"] = "General lithology type"
helpme["section_lithology"] = "Section lithology or archeological classification"
helpme["section_type"] = "Section type"
helpme["section_n"] = "Number of subsections included composite (stacked) section"
helpme["section_begin_lat"] = "Begin of section or core -- latitude"
helpme["section_begin_lon"] = "Begin of section or core -- longitude"
helpme["section_begin_elevation"] = "Begin of section or core -- elevation relative to sealevel"
helpme["section_begin_height"] = "Begin of section or core -- stratigraphic height"
helpme["section_begin_drill_depth"] = "Begin of section or core -- depth in MBSF as used by ODP"
helpme["section_begin_composite_depth"] = "Begin of section or core -- composite depth in MBSF as used by ODP"
helpme["section_end_lat"] = "End of section or core -- latitude "
helpme["section_end_lon"] = "End of section or core -- longitude "
helpme["section_end_elevation"] = "End of section or core -- elevation relative to sealevel"
helpme["section_end_height"] = "End of section or core -- stratigraphic height"
helpme["section_end_drill_depth"] = "End of section or core -- depth in MBSF as used by ODP"
helpme["section_end_composite_depth"] = "End of section or core -- composite depth in MBSF as used by ODP"
helpme["section_azimuth"] = "Section azimuth as measured clockwise from the north"
helpme["section_dip"] = "Section dip as measured into the outcrop"
helpme["section_description"] = "Detailed description"
helpme["er_site_name"] = "Name for site"
helpme["er_site_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["er_expedition_name"] = "Name for seagoing or land expedition"
helpme["er_location_name"] = "Name for location or drill site"
helpme["er_section_name"] = "Name for section or core"
helpme["er_formation_name"] = "Name for formation"
helpme["er_member_name"] = "Name for member"
helpme["site_definition"] = "General definition of site"
helpme["site_class"] = "[A]rchaeologic,[E]xtrusive,[I]ntrusive,[M]etamorphic,[S]edimentary"
helpme["site_lithology"] = "Site lithology or archeological classification"
helpme["site_type"] = "Site type: slag, lava flow, sediment layer, etc."
helpme["site_lat"] = "Site location -- latitude"
helpme["site_lon"] = "Site location -- longitude"
helpme["site_location_precision"] = "Site location -- precision in latitude and longitude"
helpme["site_elevation"] = "Site location -- elevation relative to sealevel"
helpme["site_height"] = "Site location -- stratigraphic height"
helpme["site_drill_depth"] = "Site location -- depth in MBSF as used by ODP"
helpme["site_composite_depth"] = "Site location -- composite depth in MBSF as used by ODP"
helpme["site_description"] = "Detailed description"
helpme["magic_method_codes"] = "Colon-delimited list of method codes"
helpme["er_sample_name"] = "Name for sample"
helpme["er_sample_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["er_expedition_name"] = "Name for seagoing or land expedition"
helpme["er_location_name"] = "Name for location or drill site"
helpme["er_section_name"] = "Name for section or core"
helpme["er_formation_name"] = "Name for formation"
helpme["er_member_name"] = "Name for member"
helpme["er_site_name"] = "Name for site"
helpme["sample_class"] = "General lithology type"
helpme["sample_lithology"] = "Sample lithology or archeological classification"
helpme["sample_type"] = "Sample type"
helpme["sample_texture"] = "Sample texture"
helpme["sample_alteration"] = "Sample alteration grade"
helpme["sample_alteration_type"] = "Sample alteration type"
helpme["sample_lat"] = "Sample location -- latitude"
helpme["sample_lon"] = "Sample location -- longitude"
helpme["sample_location_precision"] = "Sample location -- precision in latitude and longitude"
helpme["sample_elevation"] = "Sample location -- elevation relative to sealevel"
helpme["sample_height"] = "Sample location -- stratigraphic height"
helpme["sample_drill_depth"] = "Sample location -- depth in MBSF as used by ODP"
helpme["sample_composite_depth"] = "Sample location -- composite depth in MBSF as used by ODP"
helpme["sample_date"] = "Sampling date"
helpme["sample_time_zone"] = "Sampling time zone"
helpme["sample_azimuth"] = "Sample azimuth as measured clockwise from the north"
helpme["sample_dip"] = "Sample dip as measured into the outcrop"
helpme["sample_bed_dip_direction"] = "Direction of the dip of a paleo-horizontal plane in the bedding"
helpme["sample_bed_dip"] = "Dip of the bedding as measured to the right of strike direction"
helpme["sample_cooling_rate"] = "Estimated ancient in-situ cooling rate per Ma"
helpme["er_specimen_name"] = "Name for specimen"
helpme["er_specimen_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["er_expedition_name"] = "Name for seagoing or land expedition"
helpme["er_location_name"] = "Name for location or drill site"
helpme["er_section_name"] = "Name for section or core"
helpme["er_formation_name"] = "Name for formation"
helpme["er_member_name"] = "Name for member"
helpme["er_site_name"] = "Name for site"
helpme["er_sample_name"] = "Name for sample"
helpme["specimen_class"] = "General lithology type"
helpme["specimen_lithology"] = "Specimen lithology or archeological classification"
helpme["specimen_type"] = "Specimen type"
helpme["specimen_texture"] = "Specimen texture"
helpme["specimen_alteration"] = "Specimen alteration grade"
helpme["specimen_alteration_type"] = "Specimen alteration type"
helpme["specimen_elevation"] = "Specimen location -- elevation relative to sealevel"
helpme["specimen_height"] = "Specimen location -- stratigraphic height"
helpme["specimen_drill_depth"] = "Specimen location -- depth in MBSF as used by ODP"
helpme["specimen_composite_depth"] = "Specimen location -- composite depth in MBSF as used by ODP"
helpme["specimen_azimuth"] = "Specimen azimuth as measured clockwise from the north"
helpme["specimen_dip"] = "Specimen dip as measured into the outcrop"
helpme["specimen_volume"] = "Specimen volume"
helpme["specimen_weight"] = "Specimen weight"
helpme["specimen_density"] = "Specimen density"
helpme["specimen_size"] = "Specimen grain size fraction"
helpme["er_expedition_name"] = "Name for seagoing or land expedition"
helpme["er_location_name"] = "Name for location or drill site"
helpme["er_formation_name"] = "Name for formation"
helpme["er_member_name"] = "Name for member"
helpme["er_site_name"] = "Name for site"
helpme["er_sample_name"] = "Name for sample"
helpme["er_specimen_name"] = "Name for specimen"
helpme["er_fossil_name"] = "Name for fossil"
helpme["er_mineral_name"] = "Name for mineral"
helpme["GM-ALPHA"] = "Age determination by using alpha counting"
helpme["GM-ARAR"] = "40Ar/39Ar age determination"
helpme["GM-ARAR-AP"] = "40Ar/39Ar age determination: Age plateau"
helpme["GM-ARAR-II"] = "40Ar/39Ar age determination: Inverse isochron"
helpme["GM-ARAR-NI"] = "40Ar/39Ar age determination: Normal isochron"
helpme["GM-ARAR-TF"] = "40Ar/39Ar age determination: Total fusion or recombined age"
helpme["GM-C14"] = "Radiocarbon age determination"
helpme["GM-C14-AMS"] = "Radiocarbon age determination: AMS"
helpme["GM-C14-BETA"] = "Radiocarbon age determination: Beta decay counting"
helpme["GM-C14-CAL"] = "Radiocarbon age determination: Calibrated"
helpme["GM-CC"] = "Correlation chronology"
helpme["GM-CC-ARCH"] = "Correlation chronology: Archeology"
helpme["GM-CC-ARM"] = "Correlation chronology: ARM"
helpme["GM-CC-ASTRO"] = "Correlation chronology: Astronomical"
helpme["GM-CC-CACO3"] = "Correlation chronology: Calcium carbonate"
helpme["GM-CC-COLOR"] = "Correlation chronology: Color or reflectance"
helpme["GM-CC-GRAPE"] = "Correlation chronology: Gamma Ray Polarimeter Experiment"
helpme["GM-CC-IRM"] = "Correlation chronology: IRM"
helpme["GM-CC-ISO"] = "Correlation chronology: Stable isotopes"
helpme["GM-CC-REL"] = "Correlation chronology: Relative chronology other than stratigraphic successions"
helpme["GM-CC-STRAT"] = "Correlation chronology: Stratigraphic succession"
helpme["GM-CC-TECT"] = "Correlation chronology: Tectites and microtectites"
helpme["GM-CC-TEPH"] = "Correlation chronology: Tephrochronology"
helpme["GM-CC-X"] = "Correlation chronology: Susceptibility"
helpme["GM-CHEM"] = "Chemical chronology"
helpme["GM-CHEM-AAR"] = "Chemical chronology: Amino acid racemization"
helpme["GM-CHEM-OH"] = "Chemical chronology: Obsidian hydration"
helpme["GM-CHEM-SC"] = "Chemical chronology: Stoan coatings CaCO3"
helpme["GM-CHEM-TH"] = "Chemical chronology: Tephra hydration"
helpme["GM-COSMO"] = "Cosmogenic age determination"
helpme["GM-COSMO-AL26"] = "Cosmogenic age determination: 26Al"
helpme["GM-COSMO-AR39"] = "Cosmogenic age determination: 39Ar"
helpme["GM-COSMO-BE10"] = "Cosmogenic age determination: 10Be"
helpme["GM-COSMO-C14"] = "Cosmogenic age determination: 14C"
helpme["GM-COSMO-CL36"] = "Cosmogenic age determination: 36Cl"
helpme["GM-COSMO-HE3"] = "Cosmogenic age determination: 3He"
helpme["GM-COSMO-KR81"] = "Cosmogenic age determination: 81Kr"
helpme["GM-COSMO-NE21"] = "Cosmogenic age determination: 21Ne"
helpme["GM-COSMO-NI59"] = "Cosmogenic age determination: 59Ni"
helpme["GM-COSMO-SI32"] = "Cosmogenic age determination: 32Si"
helpme["GM-DENDRO"] = "Dendrochronology"
helpme["GM-ESR"] = "Electron Spin Resonance"
helpme["GM-FOSSIL"] = "Age determined from fossil record"
helpme["GM-FT"] = "Fission track age determination"
helpme["GM-HIST"] = "Historically recorded geological event"
helpme["GM-INT"] = "Age determination through interpolation between at least two geological units of known age"
helpme["GM-INT-L"] = "Age determination through interpolation between at least two geological units of known age: Linear"
helpme["GM-INT-S"] = "Age determination through interpolation between at least two geological units of known age: Cubic spline"
helpme["GM-ISO"] = "Age determined by isotopic dating, but no further details available"
helpme["GM-KAR"] = "40K-40Ar age determination"
helpme["GM-KAR-I"] = "40K-40Ar age determination: Isochron"
helpme["GM-KAR-MA"] = "40K-40Ar age determination: Model age"
helpme["GM-KCA"] = "40K-40Ca age determination"
helpme["GM-KCA-I"] = "40K-40Ca age determination: Isochron"
helpme["GM-KCA-MA"] = "40K-40Ca age determination: Model age"
helpme["GM-LABA"] = "138La-138Ba age determination"
helpme["GM-LABA-I"] = "138La-138Ba age determination: Isochron"
helpme["GM-LABA-MA"] = "138La-138Ba age determination: Model age"
helpme["GM-LACE"] = "138La-138Ce age determination"
helpme["GM-LACE-I"] = "138La-138Ce age determination: Isochron"
helpme["GM-LACE-MA"] = "138La-138Ce age determination: Model age"
helpme["GM-LICHE"] = "Lichenometry"
helpme["GM-LUHF"] = "176Lu-176Hf age determination"
helpme["GM-LUHF-I"] = "176Lu-176Hf age determination: Isochron"
helpme["GM-LUHF-MA"] = "176Lu-176Hf age determination: Model age"
helpme["GM-LUM"] = "Luminescence"
helpme["GM-LUM-IRS"] = "Luminescence: Infrared stimulated luminescence"
helpme["GM-LUM-OS"] = "Luminescence: Optically stimulated luminescence"
helpme["GM-LUM-TH"] = "Luminescence: Thermoluminescence"
helpme["GM-MOD"] = "Model curve fit to available age dates"
helpme["GM-MOD-L"] = "Model curve fit to available age dates: Linear"
helpme["GM-MOD-S"] = "Model curve fit to available age dates: Cubic spline"
helpme["GM-MORPH"] = "Geomorphic chronology"
helpme["GM-MORPH-DEF"] = "Geomorphic chronology: Rate of deformation"
helpme["GM-MORPH-DEP"] = "Geomorphic chronology: Rate of deposition"
helpme["GM-MORPH-POS"] = "Geomorphic chronology: Geomorphology position"
helpme["GM-MORPH-WEATH"] = "Geomorphic chronology: Rock and mineral weathering"
helpme["GM-NO"] = "Unknown geochronology method"
helpme["GM-O18"] = "Oxygen isotope dating"
helpme["GM-PBPB"] = "207Pb-206Pb age determination"
helpme["GM-PBPB-C"] = "207Pb-206Pb age determination: Common Pb"
helpme["GM-PBPB-I"] = "207Pb-206Pb age determination: Isochron"
helpme["GM-PLEO"] = "Pleochroic haloes"
helpme["GM-PMAG-ANOM"] = "Paleomagnetic age determination: Magnetic anomaly identification"
helpme["GM-PMAG-APWP"] = "Paleomagnetic age determination: Comparing paleomagnetic data to APWP"
helpme["GM-PMAG-ARCH"] = "Paleomagnetic age determination: Archeomagnetism"
helpme["GM-PMAG-DIR"] = "Paleomagnetic age determination: Directions"
helpme["GM-PMAG-POL"] = "Paleomagnetic age determination: Polarities"
helpme["GM-PMAG-REGSV"] = "Paleomagnetic age determination: Correlation to a regional secular variation curve"
helpme["GM-PMAG-RPI"] = "Paleomagnetic age determination: Relative paleointensity"
helpme["GM-PMAG-VEC"] = "Paleomagnetic age determination: Full vector"
helpme["GM-RATH"] = "226Ra-230Th age determination"
helpme["GM-RBSR"] = "87Rb-87Sr age determination"
helpme["GM-RBSR-I"] = "87Rb-87Sr age determination: Isochron"
helpme["GM-RBSR-MA"] = "87Rb-87Sr age determination: Model age"
helpme["GM-REOS"] = "187Re-187Os age determination"
helpme["GM-REOS-I"] = "187Re-187Os age determination: Isochron"
helpme["GM-REOS-MA"] = "187Re-187Os age determination: Model age"
helpme["GM-REOS-PT"] = "187Re-187Os age determination: Pt normalization of 186Os"
helpme["GM-SCLERO"] = "Screlochronology"
helpme["GM-SHRIMP"] = "SHRIMP age dating"
helpme["GM-SMND"] = "147Sm-143Nd age determination"
helpme["GM-SMND-I"] = "147Sm-143Nd age determination: Isochron"
helpme["GM-SMND-MA"] = "147Sm-143Nd age determination: Model age"
helpme["GM-THPB"] = "232Th-208Pb age determination"
helpme["GM-THPB-I"] = "232Th-208Pb age determination: Isochron"
helpme["GM-THPB-MA"] = "232Th-208Pb age determination: Model age"
helpme["GM-UPA"] = "235U-231Pa age determination"
helpme["GM-UPB"] = "U-Pb age determination"
helpme["GM-UPB-CC-T0"] = "U-Pb age determination: Concordia diagram age, upper intersection"
helpme["GM-UPB-CC-T1"] = "U-Pb age determination: Concordia diagram age, lower intersection"
helpme["GM-UPB-I-206"] = "U-Pb age determination: 238U-206Pb isochron"
helpme["GM-UPB-I-207"] = "U-Pb age determination: 235U-207Pb isochron"
helpme["GM-UPB-MA-206"] = "U-Pb age determination: 238U-206Pb model age"
helpme["GM-UPB-MA-207"] = "U-Pb age determination: 235U-207Pb model age"
helpme["GM-USD"] = "Uranium series disequilibrium age determination"
helpme["GM-USD-PA231-TH230"] = "Uranium series disequilibrium age determination: 231Pa-230Th"
helpme["GM-USD-PA231-U235"] = "Uranium series disequilibrium age determination: 231Pa-235U"
helpme["GM-USD-PB210"] = "Uranium series disequilibrium age determination: 210Pb"
helpme["GM-USD-RA226-TH230"] = "Uranium series disequilibrium age determination: 226Ra-230Th"
helpme["GM-USD-RA228-TH232"] = "Uranium series disequilibrium age determination: 228Ra-232Th"
helpme["GM-USD-TH228-TH232"] = "Uranium series disequilibrium age determination: 228Th-232Th"
helpme["GM-USD-TH230"] = "Uranium series disequilibrium age determination: 230Th"
helpme["GM-USD-TH230-TH232"] = "Uranium series disequilibrium age determination: 230Th-232Th"
helpme["GM-USD-TH230-U234"] = "Uranium series disequilibrium age determination: 230Th-234U"
helpme["GM-USD-TH230-U238"] = "Uranium series disequilibrium age determination: 230Th-238U"
helpme["GM-USD-U234-U238"] = "Uranium series disequilibrium age determination: 234U-238U"
helpme["GM-UTH"] = "238U-230Th age determination"
helpme["GM-UTHHE"] = "U-Th-He age determination"
helpme["GM-UTHPB"] = "U-Th-Pb age determination"
helpme["GM-UTHPB-CC-T0"] = "U-Th-Pb age determination: Concordia diagram intersection age, upper intercept"
helpme["GM-UTHPB-CC-T1"] = "U-Th-Pb age determination: Concordia diagram intersection age, lower intercept"
helpme["GM-VARVE"] = "Age determined by varve counting"
helpme["tiepoint_name"] = "Name for tiepoint horizon"
helpme["tiepoint_alternatives"] = "Colon-delimited list of alternative names and abbreviations"
helpme["tiepoint_height"] = "Tiepoint stratigraphic height relative to reference tiepoint"
helpme["tiepoint_height_sigma"] = "Tiepoint stratigraphic height uncertainty"
helpme["tiepoint_elevation"] = "Tiepoint elevation relative to sealevel"
helpme["tiepoint_type"] = "Tiepoint type"
helpme["age"] = "Age"
helpme["age_sigma"] = "Age -- uncertainty"
helpme["age_range_low"] = "Age -- low range"
helpme["age_range_high"] = "Age -- high range"
helpme["age_unit"] = "Age -- unit"
helpme["timescale_eon"] = "Timescale eon"
helpme["timescale_era"] = "Timescale era"
helpme["timescale_period"] = "Timescale period"
helpme["timescale_epoch"] = "Timescale epoch"
helpme["timescale_stage"] = "Timescale stage"
helpme["biostrat_zone"] = "Biostratigraphic zone"
helpme["conodont_zone"] = "Conodont zone"
helpme["magnetic_reversal_chron"] = "Magnetic reversal chron"
helpme["astronomical_stage"] = "Astronomical stage name"
helpme["oxygen_stage"] = "Oxygen stage name"
helpme["age_culture_name"] = "Age culture name"
return helpme[keyhelp] | def function[magic_help, parameter[keyhelp]]:
constant[
returns a help message for a given magic key
]
variable[helpme] assign[=] dictionary[[], []]
call[name[helpme]][constant[er_location_name]] assign[=] constant[Name for location or drill site]
call[name[helpme]][constant[er_location_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[location_type]] assign[=] constant[Location type]
call[name[helpme]][constant[location_begin_lat]] assign[=] constant[Begin of section or core or outcrop -- latitude]
call[name[helpme]][constant[location_begin_lon]] assign[=] constant[Begin of section or core or outcrop -- longitude]
call[name[helpme]][constant[location_begin_elevation]] assign[=] constant[Begin of section or core or outcrop -- elevation relative to sealevel]
call[name[helpme]][constant[location_end_lat]] assign[=] constant[Ending of section or core -- latitude ]
call[name[helpme]][constant[location_end_lon]] assign[=] constant[Ending of section or core -- longitude ]
call[name[helpme]][constant[location_end_elevation]] assign[=] constant[Ending of section or core -- elevation relative to sealevel]
call[name[helpme]][constant[location_geoid]] assign[=] constant[Geoid used in determination of latitude and longitude: WGS84, GEOID03, USGG2003, GEOID99, G99SSS , G99BM, DEFLEC99 ]
call[name[helpme]][constant[continent_ocean]] assign[=] constant[Name for continent or ocean island region]
call[name[helpme]][constant[ocean_sea]] assign[=] constant[Name for location in an ocean or sea]
call[name[helpme]][constant[country]] assign[=] constant[Country name]
call[name[helpme]][constant[region]] assign[=] constant[Region name]
call[name[helpme]][constant[plate_block]] assign[=] constant[Plate or tectonic block name]
call[name[helpme]][constant[terrane]] assign[=] constant[Terrane name]
call[name[helpme]][constant[tectonic_setting]] assign[=] constant[Tectonic setting]
call[name[helpme]][constant[location_description]] assign[=] constant[Detailed description]
call[name[helpme]][constant[location_url]] assign[=] constant[Website URL for the location explicitly]
call[name[helpme]][constant[er_scientist_mail_names]] assign[=] constant[Colon-delimited list of names for scientists who described location]
call[name[helpme]][constant[er_citation_names]] assign[=] constant[Colon-delimited list of citations]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[er_formation_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[formation_class]] assign[=] constant[General lithology class: igneous, metamorphic or sedimentary]
call[name[helpme]][constant[formation_lithology]] assign[=] constant[Lithology: e.g., basalt, sandstone, etc.]
call[name[helpme]][constant[formation_paleo_enviroment]] assign[=] constant[Depositional environment]
call[name[helpme]][constant[formation_thickness]] assign[=] constant[Formation thickness]
call[name[helpme]][constant[er_member_name]] assign[=] constant[Name for member]
call[name[helpme]][constant[er_member_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[member_class]] assign[=] constant[General lithology type]
call[name[helpme]][constant[member_lithology]] assign[=] constant[Lithology]
call[name[helpme]][constant[member_paleo_environment]] assign[=] constant[Depositional environment]
call[name[helpme]][constant[member_thickness]] assign[=] constant[Member thickness]
call[name[helpme]][constant[member_description]] assign[=] constant[Detailed description]
call[name[helpme]][constant[er_section_name]] assign[=] constant[Name for section or core]
call[name[helpme]][constant[er_section_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[er_expedition_name]] assign[=] constant[Name for seagoing or land expedition]
call[name[helpme]][constant[er_location_name]] assign[=] constant[Name for location or drill site]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[er_member_name]] assign[=] constant[Name for member]
call[name[helpme]][constant[section_definition]] assign[=] constant[General definition of section]
call[name[helpme]][constant[section_class]] assign[=] constant[General lithology type]
call[name[helpme]][constant[section_lithology]] assign[=] constant[Section lithology or archeological classification]
call[name[helpme]][constant[section_type]] assign[=] constant[Section type]
call[name[helpme]][constant[section_n]] assign[=] constant[Number of subsections included in composite (stacked) section]
call[name[helpme]][constant[section_begin_lat]] assign[=] constant[Begin of section or core -- latitude]
call[name[helpme]][constant[section_begin_lon]] assign[=] constant[Begin of section or core -- longitude]
call[name[helpme]][constant[section_begin_elevation]] assign[=] constant[Begin of section or core -- elevation relative to sealevel]
call[name[helpme]][constant[section_begin_height]] assign[=] constant[Begin of section or core -- stratigraphic height]
call[name[helpme]][constant[section_begin_drill_depth]] assign[=] constant[Begin of section or core -- depth in MBSF as used by ODP]
call[name[helpme]][constant[section_begin_composite_depth]] assign[=] constant[Begin of section or core -- composite depth in MBSF as used by ODP]
call[name[helpme]][constant[section_end_lat]] assign[=] constant[End of section or core -- latitude ]
call[name[helpme]][constant[section_end_lon]] assign[=] constant[End of section or core -- longitude ]
call[name[helpme]][constant[section_end_elevation]] assign[=] constant[End of section or core -- elevation relative to sealevel]
call[name[helpme]][constant[section_end_height]] assign[=] constant[End of section or core -- stratigraphic height]
call[name[helpme]][constant[section_end_drill_depth]] assign[=] constant[End of section or core -- depth in MBSF as used by ODP]
call[name[helpme]][constant[section_end_composite_depth]] assign[=] constant[End of section or core -- composite depth in MBSF as used by ODP]
call[name[helpme]][constant[section_azimuth]] assign[=] constant[Section azimuth as measured clockwise from the north]
call[name[helpme]][constant[section_dip]] assign[=] constant[Section dip as measured into the outcrop]
call[name[helpme]][constant[section_description]] assign[=] constant[Detailed description]
call[name[helpme]][constant[er_site_name]] assign[=] constant[Name for site]
call[name[helpme]][constant[er_site_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[er_expedition_name]] assign[=] constant[Name for seagoing or land expedition]
call[name[helpme]][constant[er_location_name]] assign[=] constant[Name for location or drill site]
call[name[helpme]][constant[er_section_name]] assign[=] constant[Name for section or core]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[er_member_name]] assign[=] constant[Name for member]
call[name[helpme]][constant[site_definition]] assign[=] constant[General definition of site]
call[name[helpme]][constant[site_class]] assign[=] constant[[A]rchaeologic,[E]xtrusive,[I]ntrusive,[M]etamorphic,[S]edimentary]
call[name[helpme]][constant[site_lithology]] assign[=] constant[Site lithology or archeological classification]
call[name[helpme]][constant[site_type]] assign[=] constant[Site type: slag, lava flow, sediment layer, etc.]
call[name[helpme]][constant[site_lat]] assign[=] constant[Site location -- latitude]
call[name[helpme]][constant[site_lon]] assign[=] constant[Site location -- longitude]
call[name[helpme]][constant[site_location_precision]] assign[=] constant[Site location -- precision in latitude and longitude]
call[name[helpme]][constant[site_elevation]] assign[=] constant[Site location -- elevation relative to sealevel]
call[name[helpme]][constant[site_height]] assign[=] constant[Site location -- stratigraphic height]
call[name[helpme]][constant[site_drill_depth]] assign[=] constant[Site location -- depth in MBSF as used by ODP]
call[name[helpme]][constant[site_composite_depth]] assign[=] constant[Site location -- composite depth in MBSF as used by ODP]
call[name[helpme]][constant[site_description]] assign[=] constant[Detailed description]
call[name[helpme]][constant[magic_method_codes]] assign[=] constant[Colon-delimited list of method codes]
call[name[helpme]][constant[er_sample_name]] assign[=] constant[Name for sample]
call[name[helpme]][constant[er_sample_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[er_expedition_name]] assign[=] constant[Name for seagoing or land expedition]
call[name[helpme]][constant[er_location_name]] assign[=] constant[Name for location or drill site]
call[name[helpme]][constant[er_section_name]] assign[=] constant[Name for section or core]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[er_member_name]] assign[=] constant[Name for member]
call[name[helpme]][constant[er_site_name]] assign[=] constant[Name for site]
call[name[helpme]][constant[sample_class]] assign[=] constant[General lithology type]
call[name[helpme]][constant[sample_lithology]] assign[=] constant[Sample lithology or archeological classification]
call[name[helpme]][constant[sample_type]] assign[=] constant[Sample type]
call[name[helpme]][constant[sample_texture]] assign[=] constant[Sample texture]
call[name[helpme]][constant[sample_alteration]] assign[=] constant[Sample alteration grade]
call[name[helpme]][constant[sample_alteration_type]] assign[=] constant[Sample alteration type]
call[name[helpme]][constant[sample_lat]] assign[=] constant[Sample location -- latitude]
call[name[helpme]][constant[sample_lon]] assign[=] constant[Sample location -- longitude]
call[name[helpme]][constant[sample_location_precision]] assign[=] constant[Sample location -- precision in latitude and longitude]
call[name[helpme]][constant[sample_elevation]] assign[=] constant[Sample location -- elevation relative to sealevel]
call[name[helpme]][constant[sample_height]] assign[=] constant[Sample location -- stratigraphic height]
call[name[helpme]][constant[sample_drill_depth]] assign[=] constant[Sample location -- depth in MBSF as used by ODP]
call[name[helpme]][constant[sample_composite_depth]] assign[=] constant[Sample location -- composite depth in MBSF as used by ODP]
call[name[helpme]][constant[sample_date]] assign[=] constant[Sampling date]
call[name[helpme]][constant[sample_time_zone]] assign[=] constant[Sampling time zone]
call[name[helpme]][constant[sample_azimuth]] assign[=] constant[Sample azimuth as measured clockwise from the north]
call[name[helpme]][constant[sample_dip]] assign[=] constant[Sample dip as measured into the outcrop]
call[name[helpme]][constant[sample_bed_dip_direction]] assign[=] constant[Direction of the dip of a paleo-horizontal plane in the bedding]
call[name[helpme]][constant[sample_bed_dip]] assign[=] constant[Dip of the bedding as measured to the right of strike direction]
call[name[helpme]][constant[sample_cooling_rate]] assign[=] constant[Estimated ancient in-situ cooling rate per Ma]
call[name[helpme]][constant[er_specimen_name]] assign[=] constant[Name for specimen]
call[name[helpme]][constant[er_specimen_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[er_expedition_name]] assign[=] constant[Name for seagoing or land expedition]
call[name[helpme]][constant[er_location_name]] assign[=] constant[Name for location or drill site]
call[name[helpme]][constant[er_section_name]] assign[=] constant[Name for section or core]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[er_member_name]] assign[=] constant[Name for member]
call[name[helpme]][constant[er_site_name]] assign[=] constant[Name for site]
call[name[helpme]][constant[er_sample_name]] assign[=] constant[Name for sample]
call[name[helpme]][constant[specimen_class]] assign[=] constant[General lithology type]
call[name[helpme]][constant[specimen_lithology]] assign[=] constant[Specimen lithology or archeological classification]
call[name[helpme]][constant[specimen_type]] assign[=] constant[Specimen type]
call[name[helpme]][constant[specimen_texture]] assign[=] constant[Specimen texture]
call[name[helpme]][constant[specimen_alteration]] assign[=] constant[Specimen alteration grade]
call[name[helpme]][constant[specimen_alteration_type]] assign[=] constant[Specimen alteration type]
call[name[helpme]][constant[specimen_elevation]] assign[=] constant[Specimen location -- elevation relative to sealevel]
call[name[helpme]][constant[specimen_height]] assign[=] constant[Specimen location -- stratigraphic height]
call[name[helpme]][constant[specimen_drill_depth]] assign[=] constant[Specimen location -- depth in MBSF as used by ODP]
call[name[helpme]][constant[specimen_composite_depth]] assign[=] constant[Specimen location -- composite depth in MBSF as used by ODP]
call[name[helpme]][constant[specimen_azimuth]] assign[=] constant[Specimen azimuth as measured clockwise from the north]
call[name[helpme]][constant[specimen_dip]] assign[=] constant[Specimen dip as measured into the outcrop]
call[name[helpme]][constant[specimen_volume]] assign[=] constant[Specimen volume]
call[name[helpme]][constant[specimen_weight]] assign[=] constant[Specimen weight]
call[name[helpme]][constant[specimen_density]] assign[=] constant[Specimen density]
call[name[helpme]][constant[specimen_size]] assign[=] constant[Specimen grain size fraction]
call[name[helpme]][constant[er_expedition_name]] assign[=] constant[Name for seagoing or land expedition]
call[name[helpme]][constant[er_location_name]] assign[=] constant[Name for location or drill site]
call[name[helpme]][constant[er_formation_name]] assign[=] constant[Name for formation]
call[name[helpme]][constant[er_member_name]] assign[=] constant[Name for member]
call[name[helpme]][constant[er_site_name]] assign[=] constant[Name for site]
call[name[helpme]][constant[er_sample_name]] assign[=] constant[Name for sample]
call[name[helpme]][constant[er_specimen_name]] assign[=] constant[Name for specimen]
call[name[helpme]][constant[er_fossil_name]] assign[=] constant[Name for fossil]
call[name[helpme]][constant[er_mineral_name]] assign[=] constant[Name for mineral]
call[name[helpme]][constant[GM-ALPHA]] assign[=] constant[Age determination by using alpha counting]
call[name[helpme]][constant[GM-ARAR]] assign[=] constant[40Ar/39Ar age determination]
call[name[helpme]][constant[GM-ARAR-AP]] assign[=] constant[40Ar/39Ar age determination: Age plateau]
call[name[helpme]][constant[GM-ARAR-II]] assign[=] constant[40Ar/39Ar age determination: Inverse isochron]
call[name[helpme]][constant[GM-ARAR-NI]] assign[=] constant[40Ar/39Ar age determination: Normal isochron]
call[name[helpme]][constant[GM-ARAR-TF]] assign[=] constant[40Ar/39Ar age determination: Total fusion or recombined age]
call[name[helpme]][constant[GM-C14]] assign[=] constant[Radiocarbon age determination]
call[name[helpme]][constant[GM-C14-AMS]] assign[=] constant[Radiocarbon age determination: AMS]
call[name[helpme]][constant[GM-C14-BETA]] assign[=] constant[Radiocarbon age determination: Beta decay counting]
call[name[helpme]][constant[GM-C14-CAL]] assign[=] constant[Radiocarbon age determination: Calibrated]
call[name[helpme]][constant[GM-CC]] assign[=] constant[Correlation chronology]
call[name[helpme]][constant[GM-CC-ARCH]] assign[=] constant[Correlation chronology: Archeology]
call[name[helpme]][constant[GM-CC-ARM]] assign[=] constant[Correlation chronology: ARM]
call[name[helpme]][constant[GM-CC-ASTRO]] assign[=] constant[Correlation chronology: Astronomical]
call[name[helpme]][constant[GM-CC-CACO3]] assign[=] constant[Correlation chronology: Calcium carbonate]
call[name[helpme]][constant[GM-CC-COLOR]] assign[=] constant[Correlation chronology: Color or reflectance]
call[name[helpme]][constant[GM-CC-GRAPE]] assign[=] constant[Correlation chronology: Gamma Ray Polarimeter Experiment]
call[name[helpme]][constant[GM-CC-IRM]] assign[=] constant[Correlation chronology: IRM]
call[name[helpme]][constant[GM-CC-ISO]] assign[=] constant[Correlation chronology: Stable isotopes]
call[name[helpme]][constant[GM-CC-REL]] assign[=] constant[Correlation chronology: Relative chronology other than stratigraphic successions]
call[name[helpme]][constant[GM-CC-STRAT]] assign[=] constant[Correlation chronology: Stratigraphic succession]
call[name[helpme]][constant[GM-CC-TECT]] assign[=] constant[Correlation chronology: Tectites and microtectites]
call[name[helpme]][constant[GM-CC-TEPH]] assign[=] constant[Correlation chronology: Tephrochronology]
call[name[helpme]][constant[GM-CC-X]] assign[=] constant[Correlation chronology: Susceptibility]
call[name[helpme]][constant[GM-CHEM]] assign[=] constant[Chemical chronology]
call[name[helpme]][constant[GM-CHEM-AAR]] assign[=] constant[Chemical chronology: Amino acid racemization]
call[name[helpme]][constant[GM-CHEM-OH]] assign[=] constant[Chemical chronology: Obsidian hydration]
call[name[helpme]][constant[GM-CHEM-SC]] assign[=] constant[Chemical chronology: Stone coatings CaCO3]
call[name[helpme]][constant[GM-CHEM-TH]] assign[=] constant[Chemical chronology: Tephra hydration]
call[name[helpme]][constant[GM-COSMO]] assign[=] constant[Cosmogenic age determination]
call[name[helpme]][constant[GM-COSMO-AL26]] assign[=] constant[Cosmogenic age determination: 26Al]
call[name[helpme]][constant[GM-COSMO-AR39]] assign[=] constant[Cosmogenic age determination: 39Ar]
call[name[helpme]][constant[GM-COSMO-BE10]] assign[=] constant[Cosmogenic age determination: 10Be]
call[name[helpme]][constant[GM-COSMO-C14]] assign[=] constant[Cosmogenic age determination: 14C]
call[name[helpme]][constant[GM-COSMO-CL36]] assign[=] constant[Cosmogenic age determination: 36Cl]
call[name[helpme]][constant[GM-COSMO-HE3]] assign[=] constant[Cosmogenic age determination: 3He]
call[name[helpme]][constant[GM-COSMO-KR81]] assign[=] constant[Cosmogenic age determination: 81Kr]
call[name[helpme]][constant[GM-COSMO-NE21]] assign[=] constant[Cosmogenic age determination: 21Ne]
call[name[helpme]][constant[GM-COSMO-NI59]] assign[=] constant[Cosmogenic age determination: 59Ni]
call[name[helpme]][constant[GM-COSMO-SI32]] assign[=] constant[Cosmogenic age determination: 32Si]
call[name[helpme]][constant[GM-DENDRO]] assign[=] constant[Dendrochronology]
call[name[helpme]][constant[GM-ESR]] assign[=] constant[Electron Spin Resonance]
call[name[helpme]][constant[GM-FOSSIL]] assign[=] constant[Age determined from fossil record]
call[name[helpme]][constant[GM-FT]] assign[=] constant[Fission track age determination]
call[name[helpme]][constant[GM-HIST]] assign[=] constant[Historically recorded geological event]
call[name[helpme]][constant[GM-INT]] assign[=] constant[Age determination through interpolation between at least two geological units of known age]
call[name[helpme]][constant[GM-INT-L]] assign[=] constant[Age determination through interpolation between at least two geological units of known age: Linear]
call[name[helpme]][constant[GM-INT-S]] assign[=] constant[Age determination through interpolation between at least two geological units of known age: Cubic spline]
call[name[helpme]][constant[GM-ISO]] assign[=] constant[Age determined by isotopic dating, but no further details available]
call[name[helpme]][constant[GM-KAR]] assign[=] constant[40K-40Ar age determination]
call[name[helpme]][constant[GM-KAR-I]] assign[=] constant[40K-40Ar age determination: Isochron]
call[name[helpme]][constant[GM-KAR-MA]] assign[=] constant[40K-40Ar age determination: Model age]
call[name[helpme]][constant[GM-KCA]] assign[=] constant[40K-40Ca age determination]
call[name[helpme]][constant[GM-KCA-I]] assign[=] constant[40K-40Ca age determination: Isochron]
call[name[helpme]][constant[GM-KCA-MA]] assign[=] constant[40K-40Ca age determination: Model age]
call[name[helpme]][constant[GM-LABA]] assign[=] constant[138La-138Ba age determination]
call[name[helpme]][constant[GM-LABA-I]] assign[=] constant[138La-138Ba age determination: Isochron]
call[name[helpme]][constant[GM-LABA-MA]] assign[=] constant[138La-138Ba age determination: Model age]
call[name[helpme]][constant[GM-LACE]] assign[=] constant[138La-138Ce age determination]
call[name[helpme]][constant[GM-LACE-I]] assign[=] constant[138La-138Ce age determination: Isochron]
call[name[helpme]][constant[GM-LACE-MA]] assign[=] constant[138La-138Ce age determination: Model age]
call[name[helpme]][constant[GM-LICHE]] assign[=] constant[Lichenometry]
call[name[helpme]][constant[GM-LUHF]] assign[=] constant[176Lu-176Hf age determination]
call[name[helpme]][constant[GM-LUHF-I]] assign[=] constant[176Lu-176Hf age determination: Isochron]
call[name[helpme]][constant[GM-LUHF-MA]] assign[=] constant[176Lu-176Hf age determination: Model age]
call[name[helpme]][constant[GM-LUM]] assign[=] constant[Luminescence]
call[name[helpme]][constant[GM-LUM-IRS]] assign[=] constant[Luminescence: Infrared stimulated luminescence]
call[name[helpme]][constant[GM-LUM-OS]] assign[=] constant[Luminescence: Optically stimulated luminescence]
call[name[helpme]][constant[GM-LUM-TH]] assign[=] constant[Luminescence: Thermoluminescence]
call[name[helpme]][constant[GM-MOD]] assign[=] constant[Model curve fit to available age dates]
call[name[helpme]][constant[GM-MOD-L]] assign[=] constant[Model curve fit to available age dates: Linear]
call[name[helpme]][constant[GM-MOD-S]] assign[=] constant[Model curve fit to available age dates: Cubic spline]
call[name[helpme]][constant[GM-MORPH]] assign[=] constant[Geomorphic chronology]
call[name[helpme]][constant[GM-MORPH-DEF]] assign[=] constant[Geomorphic chronology: Rate of deformation]
call[name[helpme]][constant[GM-MORPH-DEP]] assign[=] constant[Geomorphic chronology: Rate of deposition]
call[name[helpme]][constant[GM-MORPH-POS]] assign[=] constant[Geomorphic chronology: Geomorphology position]
call[name[helpme]][constant[GM-MORPH-WEATH]] assign[=] constant[Geomorphic chronology: Rock and mineral weathering]
call[name[helpme]][constant[GM-NO]] assign[=] constant[Unknown geochronology method]
call[name[helpme]][constant[GM-O18]] assign[=] constant[Oxygen isotope dating]
call[name[helpme]][constant[GM-PBPB]] assign[=] constant[207Pb-206Pb age determination]
call[name[helpme]][constant[GM-PBPB-C]] assign[=] constant[207Pb-206Pb age determination: Common Pb]
call[name[helpme]][constant[GM-PBPB-I]] assign[=] constant[207Pb-206Pb age determination: Isochron]
call[name[helpme]][constant[GM-PLEO]] assign[=] constant[Pleochroic haloes]
call[name[helpme]][constant[GM-PMAG-ANOM]] assign[=] constant[Paleomagnetic age determination: Magnetic anomaly identification]
call[name[helpme]][constant[GM-PMAG-APWP]] assign[=] constant[Paleomagnetic age determination: Comparing paleomagnetic data to APWP]
call[name[helpme]][constant[GM-PMAG-ARCH]] assign[=] constant[Paleomagnetic age determination: Archeomagnetism]
call[name[helpme]][constant[GM-PMAG-DIR]] assign[=] constant[Paleomagnetic age determination: Directions]
call[name[helpme]][constant[GM-PMAG-POL]] assign[=] constant[Paleomagnetic age determination: Polarities]
call[name[helpme]][constant[GM-PMAG-REGSV]] assign[=] constant[Paleomagnetic age determination: Correlation to a regional secular variation curve]
call[name[helpme]][constant[GM-PMAG-RPI]] assign[=] constant[Paleomagnetic age determination: Relative paleointensity]
call[name[helpme]][constant[GM-PMAG-VEC]] assign[=] constant[Paleomagnetic age determination: Full vector]
call[name[helpme]][constant[GM-RATH]] assign[=] constant[226Ra-230Th age determination]
call[name[helpme]][constant[GM-RBSR]] assign[=] constant[87Rb-87Sr age determination]
call[name[helpme]][constant[GM-RBSR-I]] assign[=] constant[87Rb-87Sr age determination: Isochron]
call[name[helpme]][constant[GM-RBSR-MA]] assign[=] constant[87Rb-87Sr age determination: Model age]
call[name[helpme]][constant[GM-REOS]] assign[=] constant[187Re-187Os age determination]
call[name[helpme]][constant[GM-REOS-I]] assign[=] constant[187Re-187Os age determination: Isochron]
call[name[helpme]][constant[GM-REOS-MA]] assign[=] constant[187Re-187Os age determination: Model age]
call[name[helpme]][constant[GM-REOS-PT]] assign[=] constant[187Re-187Os age determination: Pt normalization of 186Os]
call[name[helpme]][constant[GM-SCLERO]] assign[=] constant[Sclerochronology]
call[name[helpme]][constant[GM-SHRIMP]] assign[=] constant[SHRIMP age dating]
call[name[helpme]][constant[GM-SMND]] assign[=] constant[147Sm-143Nd age determination]
call[name[helpme]][constant[GM-SMND-I]] assign[=] constant[147Sm-143Nd age determination: Isochron]
call[name[helpme]][constant[GM-SMND-MA]] assign[=] constant[147Sm-143Nd age determination: Model age]
call[name[helpme]][constant[GM-THPB]] assign[=] constant[232Th-208Pb age determination]
call[name[helpme]][constant[GM-THPB-I]] assign[=] constant[232Th-208Pb age determination: Isochron]
call[name[helpme]][constant[GM-THPB-MA]] assign[=] constant[232Th-208Pb age determination: Model age]
call[name[helpme]][constant[GM-UPA]] assign[=] constant[235U-231Pa age determination]
call[name[helpme]][constant[GM-UPB]] assign[=] constant[U-Pb age determination]
call[name[helpme]][constant[GM-UPB-CC-T0]] assign[=] constant[U-Pb age determination: Concordia diagram age, upper intersection]
call[name[helpme]][constant[GM-UPB-CC-T1]] assign[=] constant[U-Pb age determination: Concordia diagram age, lower intersection]
call[name[helpme]][constant[GM-UPB-I-206]] assign[=] constant[U-Pb age determination: 238U-206Pb isochron]
call[name[helpme]][constant[GM-UPB-I-207]] assign[=] constant[U-Pb age determination: 235U-207Pb isochron]
call[name[helpme]][constant[GM-UPB-MA-206]] assign[=] constant[U-Pb age determination: 238U-206Pb model age]
call[name[helpme]][constant[GM-UPB-MA-207]] assign[=] constant[U-Pb age determination: 235U-207Pb model age]
call[name[helpme]][constant[GM-USD]] assign[=] constant[Uranium series disequilibrium age determination]
call[name[helpme]][constant[GM-USD-PA231-TH230]] assign[=] constant[Uranium series disequilibrium age determination: 231Pa-230Th]
call[name[helpme]][constant[GM-USD-PA231-U235]] assign[=] constant[Uranium series disequilibrium age determination: 231Pa-235U]
call[name[helpme]][constant[GM-USD-PB210]] assign[=] constant[Uranium series disequilibrium age determination: 210Pb]
call[name[helpme]][constant[GM-USD-RA226-TH230]] assign[=] constant[Uranium series disequilibrium age determination: 226Ra-230Th]
call[name[helpme]][constant[GM-USD-RA228-TH232]] assign[=] constant[Uranium series disequilibrium age determination: 228Ra-232Th]
call[name[helpme]][constant[GM-USD-TH228-TH232]] assign[=] constant[Uranium series disequilibrium age determination: 228Th-232Th]
call[name[helpme]][constant[GM-USD-TH230]] assign[=] constant[Uranium series disequilibrium age determination: 230Th]
call[name[helpme]][constant[GM-USD-TH230-TH232]] assign[=] constant[Uranium series disequilibrium age determination: 230Th-232Th]
call[name[helpme]][constant[GM-USD-TH230-U234]] assign[=] constant[Uranium series disequilibrium age determination: 230Th-234U]
call[name[helpme]][constant[GM-USD-TH230-U238]] assign[=] constant[Uranium series disequilibrium age determination: 230Th-238U]
call[name[helpme]][constant[GM-USD-U234-U238]] assign[=] constant[Uranium series disequilibrium age determination: 234U-238U]
call[name[helpme]][constant[GM-UTH]] assign[=] constant[238U-230Th age determination]
call[name[helpme]][constant[GM-UTHHE]] assign[=] constant[U-Th-He age determination]
call[name[helpme]][constant[GM-UTHPB]] assign[=] constant[U-Th-Pb age determination]
call[name[helpme]][constant[GM-UTHPB-CC-T0]] assign[=] constant[U-Th-Pb age determination: Concordia diagram intersection age, upper intercept]
call[name[helpme]][constant[GM-UTHPB-CC-T1]] assign[=] constant[U-Th-Pb age determination: Concordia diagram intersection age, lower intercept]
call[name[helpme]][constant[GM-VARVE]] assign[=] constant[Age determined by varve counting]
call[name[helpme]][constant[tiepoint_name]] assign[=] constant[Name for tiepoint horizon]
call[name[helpme]][constant[tiepoint_alternatives]] assign[=] constant[Colon-delimited list of alternative names and abbreviations]
call[name[helpme]][constant[tiepoint_height]] assign[=] constant[Tiepoint stratigraphic height relative to reference tiepoint]
call[name[helpme]][constant[tiepoint_height_sigma]] assign[=] constant[Tiepoint stratigraphic height uncertainty]
call[name[helpme]][constant[tiepoint_elevation]] assign[=] constant[Tiepoint elevation relative to sealevel]
call[name[helpme]][constant[tiepoint_type]] assign[=] constant[Tiepoint type]
call[name[helpme]][constant[age]] assign[=] constant[Age]
call[name[helpme]][constant[age_sigma]] assign[=] constant[Age -- uncertainty]
call[name[helpme]][constant[age_range_low]] assign[=] constant[Age -- low range]
call[name[helpme]][constant[age_range_high]] assign[=] constant[Age -- high range]
call[name[helpme]][constant[age_unit]] assign[=] constant[Age -- unit]
call[name[helpme]][constant[timescale_eon]] assign[=] constant[Timescale eon]
call[name[helpme]][constant[timescale_era]] assign[=] constant[Timescale era]
call[name[helpme]][constant[timescale_period]] assign[=] constant[Timescale period]
call[name[helpme]][constant[timescale_epoch]] assign[=] constant[Timescale epoch]
call[name[helpme]][constant[timescale_stage]] assign[=] constant[Timescale stage]
call[name[helpme]][constant[biostrat_zone]] assign[=] constant[Biostratigraphic zone]
call[name[helpme]][constant[conodont_zone]] assign[=] constant[Conodont zone]
call[name[helpme]][constant[magnetic_reversal_chron]] assign[=] constant[Magnetic reversal chron]
call[name[helpme]][constant[astronomical_stage]] assign[=] constant[Astronomical stage name]
call[name[helpme]][constant[oxygen_stage]] assign[=] constant[Oxygen stage name]
call[name[helpme]][constant[age_culture_name]] assign[=] constant[Age culture name]
return[call[name[helpme]][name[keyhelp]]] | keyword[def] identifier[magic_help] ( identifier[keyhelp] ):
literal[string]
identifier[helpme] ={}
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
identifier[helpme] [ literal[string] ]= literal[string]
keyword[return] identifier[helpme] [ identifier[keyhelp] ] | def magic_help(keyhelp):
"""
returns a help message for a given magic key
"""
helpme = {}
helpme['er_location_name'] = 'Name for location or drill site'
helpme['er_location_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['location_type'] = 'Location type'
helpme['location_begin_lat'] = 'Begin of section or core or outcrop -- latitude'
helpme['location_begin_lon'] = 'Begin of section or core or outcrop -- longitude'
helpme['location_begin_elevation'] = 'Begin of section or core or outcrop -- elevation relative to sealevel'
helpme['location_end_lat'] = 'Ending of section or core -- latitude '
helpme['location_end_lon'] = 'Ending of section or core -- longitude '
helpme['location_end_elevation'] = 'Ending of section or core -- elevation relative to sealevel'
helpme['location_geoid'] = 'Geoid used in determination of latitude and longitude: WGS84, GEOID03, USGG2003, GEOID99, G99SSS, G99BM, DEFLEC99'
helpme['continent_ocean'] = 'Name for continent or ocean island region'
helpme['ocean_sea'] = 'Name for location in an ocean or sea'
helpme['country'] = 'Country name'
helpme['region'] = 'Region name'
helpme['plate_block'] = 'Plate or tectonic block name'
helpme['terrane'] = 'Terrane name'
helpme['tectonic_setting'] = 'Tectonic setting'
helpme['location_description'] = 'Detailed description'
helpme['location_url'] = 'Website URL for the location explicitly'
helpme['er_scientist_mail_names'] = 'Colon-delimited list of names for scientists who described location'
helpme['er_citation_names'] = 'Colon-delimited list of citations'
helpme['er_formation_name'] = 'Name for formation'
helpme['er_formation_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['formation_class'] = 'General lithology class: igneous, metamorphic or sedimentary'
helpme['formation_lithology'] = 'Lithology: e.g., basalt, sandstone, etc.'
helpme['formation_paleo_enviroment'] = 'Depositional environment'
helpme['formation_thickness'] = 'Formation thickness'
helpme['er_member_name'] = 'Name for member'
helpme['er_member_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['er_formation_name'] = 'Name for formation'
helpme['member_class'] = 'General lithology type'
helpme['member_lithology'] = 'Lithology'
helpme['member_paleo_environment'] = 'Depositional environment'
helpme['member_thickness'] = 'Member thickness'
helpme['member_description'] = 'Detailed description'
helpme['er_section_name'] = 'Name for section or core'
helpme['er_section_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['er_expedition_name'] = 'Name for seagoing or land expedition'
helpme['er_location_name'] = 'Name for location or drill site'
helpme['er_formation_name'] = 'Name for formation'
helpme['er_member_name'] = 'Name for member'
helpme['section_definition'] = 'General definition of section'
helpme['section_class'] = 'General lithology type'
helpme['section_lithology'] = 'Section lithology or archeological classification'
helpme['section_type'] = 'Section type'
helpme['section_n'] = 'Number of subsections included composite (stacked) section'
helpme['section_begin_lat'] = 'Begin of section or core -- latitude'
helpme['section_begin_lon'] = 'Begin of section or core -- longitude'
helpme['section_begin_elevation'] = 'Begin of section or core -- elevation relative to sealevel'
helpme['section_begin_height'] = 'Begin of section or core -- stratigraphic height'
helpme['section_begin_drill_depth'] = 'Begin of section or core -- depth in MBSF as used by ODP'
helpme['section_begin_composite_depth'] = 'Begin of section or core -- composite depth in MBSF as used by ODP'
helpme['section_end_lat'] = 'End of section or core -- latitude '
helpme['section_end_lon'] = 'End of section or core -- longitude '
helpme['section_end_elevation'] = 'End of section or core -- elevation relative to sealevel'
helpme['section_end_height'] = 'End of section or core -- stratigraphic height'
helpme['section_end_drill_depth'] = 'End of section or core -- depth in MBSF as used by ODP'
helpme['section_end_composite_depth'] = 'End of section or core -- composite depth in MBSF as used by ODP'
helpme['section_azimuth'] = 'Section azimuth as measured clockwise from the north'
helpme['section_dip'] = 'Section dip as measured into the outcrop'
helpme['section_description'] = 'Detailed description'
helpme['er_site_name'] = 'Name for site'
helpme['er_site_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['er_expedition_name'] = 'Name for seagoing or land expedition'
helpme['er_location_name'] = 'Name for location or drill site'
helpme['er_section_name'] = 'Name for section or core'
helpme['er_formation_name'] = 'Name for formation'
helpme['er_member_name'] = 'Name for member'
helpme['site_definition'] = 'General definition of site'
helpme['site_class'] = '[A]rchaeologic,[E]xtrusive,[I]ntrusive,[M]etamorphic,[S]edimentary'
helpme['site_lithology'] = 'Site lithology or archeological classification'
helpme['site_type'] = 'Site type: slag, lava flow, sediment layer, etc.'
helpme['site_lat'] = 'Site location -- latitude'
helpme['site_lon'] = 'Site location -- longitude'
helpme['site_location_precision'] = 'Site location -- precision in latitude and longitude'
helpme['site_elevation'] = 'Site location -- elevation relative to sealevel'
helpme['site_height'] = 'Site location -- stratigraphic height'
helpme['site_drill_depth'] = 'Site location -- depth in MBSF as used by ODP'
helpme['site_composite_depth'] = 'Site location -- composite depth in MBSF as used by ODP'
helpme['site_description'] = 'Detailed description'
helpme['magic_method_codes'] = 'Colon-delimited list of method codes'
helpme['er_sample_name'] = 'Name for sample'
helpme['er_sample_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['er_expedition_name'] = 'Name for seagoing or land expedition'
helpme['er_location_name'] = 'Name for location or drill site'
helpme['er_section_name'] = 'Name for section or core'
helpme['er_formation_name'] = 'Name for formation'
helpme['er_member_name'] = 'Name for member'
helpme['er_site_name'] = 'Name for site'
helpme['sample_class'] = 'General lithology type'
helpme['sample_lithology'] = 'Sample lithology or archeological classification'
helpme['sample_type'] = 'Sample type'
helpme['sample_texture'] = 'Sample texture'
helpme['sample_alteration'] = 'Sample alteration grade'
helpme['sample_alteration_type'] = 'Sample alteration type'
helpme['sample_lat'] = 'Sample location -- latitude'
helpme['sample_lon'] = 'Sample location -- longitude'
helpme['sample_location_precision'] = 'Sample location -- precision in latitude and longitude'
helpme['sample_elevation'] = 'Sample location -- elevation relative to sealevel'
helpme['sample_height'] = 'Sample location -- stratigraphic height'
helpme['sample_drill_depth'] = 'Sample location -- depth in MBSF as used by ODP'
helpme['sample_composite_depth'] = 'Sample location -- composite depth in MBSF as used by ODP'
helpme['sample_date'] = 'Sampling date'
helpme['sample_time_zone'] = 'Sampling time zone'
helpme['sample_azimuth'] = 'Sample azimuth as measured clockwise from the north'
helpme['sample_dip'] = 'Sample dip as measured into the outcrop'
helpme['sample_bed_dip_direction'] = 'Direction of the dip of a paleo-horizontal plane in the bedding'
helpme['sample_bed_dip'] = 'Dip of the bedding as measured to the right of strike direction'
helpme['sample_cooling_rate'] = 'Estimated ancient in-situ cooling rate per Ma'
helpme['er_specimen_name'] = 'Name for specimen'
helpme['er_specimen_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['er_expedition_name'] = 'Name for seagoing or land expedition'
helpme['er_location_name'] = 'Name for location or drill site'
helpme['er_section_name'] = 'Name for section or core'
helpme['er_formation_name'] = 'Name for formation'
helpme['er_member_name'] = 'Name for member'
helpme['er_site_name'] = 'Name for site'
helpme['er_sample_name'] = 'Name for sample'
helpme['specimen_class'] = 'General lithology type'
helpme['specimen_lithology'] = 'Specimen lithology or archeological classification'
helpme['specimen_type'] = 'Specimen type'
helpme['specimen_texture'] = 'Specimen texture'
helpme['specimen_alteration'] = 'Specimen alteration grade'
helpme['specimen_alteration_type'] = 'Specimen alteration type'
helpme['specimen_elevation'] = 'Specimen location -- elevation relative to sealevel'
helpme['specimen_height'] = 'Specimen location -- stratigraphic height'
helpme['specimen_drill_depth'] = 'Specimen location -- depth in MBSF as used by ODP'
helpme['specimen_composite_depth'] = 'Specimen location -- composite depth in MBSF as used by ODP'
helpme['specimen_azimuth'] = 'Specimen azimuth as measured clockwise from the north'
helpme['specimen_dip'] = 'Specimen dip as measured into the outcrop'
helpme['specimen_volume'] = 'Specimen volume'
helpme['specimen_weight'] = 'Specimen weight'
helpme['specimen_density'] = 'Specimen density'
helpme['specimen_size'] = 'Specimen grain size fraction'
helpme['er_expedition_name'] = 'Name for seagoing or land expedition'
helpme['er_location_name'] = 'Name for location or drill site'
helpme['er_formation_name'] = 'Name for formation'
helpme['er_member_name'] = 'Name for member'
helpme['er_site_name'] = 'Name for site'
helpme['er_sample_name'] = 'Name for sample'
helpme['er_specimen_name'] = 'Name for specimen'
helpme['er_fossil_name'] = 'Name for fossil'
helpme['er_mineral_name'] = 'Name for mineral'
helpme['GM-ALPHA'] = 'Age determination by using alpha counting'
helpme['GM-ARAR'] = '40Ar/39Ar age determination'
helpme['GM-ARAR-AP'] = '40Ar/39Ar age determination: Age plateau'
helpme['GM-ARAR-II'] = '40Ar/39Ar age determination: Inverse isochron'
helpme['GM-ARAR-NI'] = '40Ar/39Ar age determination: Normal isochron'
helpme['GM-ARAR-TF'] = '40Ar/39Ar age determination: Total fusion or recombined age'
helpme['GM-C14'] = 'Radiocarbon age determination'
helpme['GM-C14-AMS'] = 'Radiocarbon age determination: AMS'
helpme['GM-C14-BETA'] = 'Radiocarbon age determination: Beta decay counting'
helpme['GM-C14-CAL'] = 'Radiocarbon age determination: Calibrated'
helpme['GM-CC'] = 'Correlation chronology'
helpme['GM-CC-ARCH'] = 'Correlation chronology: Archeology'
helpme['GM-CC-ARM'] = 'Correlation chronology: ARM'
helpme['GM-CC-ASTRO'] = 'Correlation chronology: Astronomical'
helpme['GM-CC-CACO3'] = 'Correlation chronology: Calcium carbonate'
helpme['GM-CC-COLOR'] = 'Correlation chronology: Color or reflectance'
helpme['GM-CC-GRAPE'] = 'Correlation chronology: Gamma Ray Polarimeter Experiment'
helpme['GM-CC-IRM'] = 'Correlation chronology: IRM'
helpme['GM-CC-ISO'] = 'Correlation chronology: Stable isotopes'
helpme['GM-CC-REL'] = 'Correlation chronology: Relative chronology other than stratigraphic successions'
helpme['GM-CC-STRAT'] = 'Correlation chronology: Stratigraphic succession'
helpme['GM-CC-TECT'] = 'Correlation chronology: Tektites and microtektites'
helpme['GM-CC-TEPH'] = 'Correlation chronology: Tephrochronology'
helpme['GM-CC-X'] = 'Correlation chronology: Susceptibility'
helpme['GM-CHEM'] = 'Chemical chronology'
helpme['GM-CHEM-AAR'] = 'Chemical chronology: Amino acid racemization'
helpme['GM-CHEM-OH'] = 'Chemical chronology: Obsidian hydration'
helpme['GM-CHEM-SC'] = 'Chemical chronology: Stone coatings CaCO3'
helpme['GM-CHEM-TH'] = 'Chemical chronology: Tephra hydration'
helpme['GM-COSMO'] = 'Cosmogenic age determination'
helpme['GM-COSMO-AL26'] = 'Cosmogenic age determination: 26Al'
helpme['GM-COSMO-AR39'] = 'Cosmogenic age determination: 39Ar'
helpme['GM-COSMO-BE10'] = 'Cosmogenic age determination: 10Be'
helpme['GM-COSMO-C14'] = 'Cosmogenic age determination: 14C'
helpme['GM-COSMO-CL36'] = 'Cosmogenic age determination: 36Cl'
helpme['GM-COSMO-HE3'] = 'Cosmogenic age determination: 3He'
helpme['GM-COSMO-KR81'] = 'Cosmogenic age determination: 81Kr'
helpme['GM-COSMO-NE21'] = 'Cosmogenic age determination: 21Ne'
helpme['GM-COSMO-NI59'] = 'Cosmogenic age determination: 59Ni'
helpme['GM-COSMO-SI32'] = 'Cosmogenic age determination: 32Si'
helpme['GM-DENDRO'] = 'Dendrochronology'
helpme['GM-ESR'] = 'Electron Spin Resonance'
helpme['GM-FOSSIL'] = 'Age determined from fossil record'
helpme['GM-FT'] = 'Fission track age determination'
helpme['GM-HIST'] = 'Historically recorded geological event'
helpme['GM-INT'] = 'Age determination through interpolation between at least two geological units of known age'
helpme['GM-INT-L'] = 'Age determination through interpolation between at least two geological units of known age: Linear'
helpme['GM-INT-S'] = 'Age determination through interpolation between at least two geological units of known age: Cubic spline'
helpme['GM-ISO'] = 'Age determined by isotopic dating, but no further details available'
helpme['GM-KAR'] = '40K-40Ar age determination'
helpme['GM-KAR-I'] = '40K-40Ar age determination: Isochron'
helpme['GM-KAR-MA'] = '40K-40Ar age determination: Model age'
helpme['GM-KCA'] = '40K-40Ca age determination'
helpme['GM-KCA-I'] = '40K-40Ca age determination: Isochron'
helpme['GM-KCA-MA'] = '40K-40Ca age determination: Model age'
helpme['GM-LABA'] = '138La-138Ba age determination'
helpme['GM-LABA-I'] = '138La-138Ba age determination: Isochron'
helpme['GM-LABA-MA'] = '138La-138Ba age determination: Model age'
helpme['GM-LACE'] = '138La-138Ce age determination'
helpme['GM-LACE-I'] = '138La-138Ce age determination: Isochron'
helpme['GM-LACE-MA'] = '138La-138Ce age determination: Model age'
helpme['GM-LICHE'] = 'Lichenometry'
helpme['GM-LUHF'] = '176Lu-176Hf age determination'
helpme['GM-LUHF-I'] = '176Lu-176Hf age determination: Isochron'
helpme['GM-LUHF-MA'] = '176Lu-176Hf age determination: Model age'
helpme['GM-LUM'] = 'Luminescence'
helpme['GM-LUM-IRS'] = 'Luminescence: Infrared stimulated luminescence'
helpme['GM-LUM-OS'] = 'Luminescence: Optically stimulated luminescence'
helpme['GM-LUM-TH'] = 'Luminescence: Thermoluminescence'
helpme['GM-MOD'] = 'Model curve fit to available age dates'
helpme['GM-MOD-L'] = 'Model curve fit to available age dates: Linear'
helpme['GM-MOD-S'] = 'Model curve fit to available age dates: Cubic spline'
helpme['GM-MORPH'] = 'Geomorphic chronology'
helpme['GM-MORPH-DEF'] = 'Geomorphic chronology: Rate of deformation'
helpme['GM-MORPH-DEP'] = 'Geomorphic chronology: Rate of deposition'
helpme['GM-MORPH-POS'] = 'Geomorphic chronology: Geomorphology position'
helpme['GM-MORPH-WEATH'] = 'Geomorphic chronology: Rock and mineral weathering'
helpme['GM-NO'] = 'Unknown geochronology method'
helpme['GM-O18'] = 'Oxygen isotope dating'
helpme['GM-PBPB'] = '207Pb-206Pb age determination'
helpme['GM-PBPB-C'] = '207Pb-206Pb age determination: Common Pb'
helpme['GM-PBPB-I'] = '207Pb-206Pb age determination: Isochron'
helpme['GM-PLEO'] = 'Pleochroic haloes'
helpme['GM-PMAG-ANOM'] = 'Paleomagnetic age determination: Magnetic anomaly identification'
helpme['GM-PMAG-APWP'] = 'Paleomagnetic age determination: Comparing paleomagnetic data to APWP'
helpme['GM-PMAG-ARCH'] = 'Paleomagnetic age determination: Archeomagnetism'
helpme['GM-PMAG-DIR'] = 'Paleomagnetic age determination: Directions'
helpme['GM-PMAG-POL'] = 'Paleomagnetic age determination: Polarities'
helpme['GM-PMAG-REGSV'] = 'Paleomagnetic age determination: Correlation to a regional secular variation curve'
helpme['GM-PMAG-RPI'] = 'Paleomagnetic age determination: Relative paleointensity'
helpme['GM-PMAG-VEC'] = 'Paleomagnetic age determination: Full vector'
helpme['GM-RATH'] = '226Ra-230Th age determination'
helpme['GM-RBSR'] = '87Rb-87Sr age determination'
helpme['GM-RBSR-I'] = '87Rb-87Sr age determination: Isochron'
helpme['GM-RBSR-MA'] = '87Rb-87Sr age determination: Model age'
helpme['GM-REOS'] = '187Re-187Os age determination'
helpme['GM-REOS-I'] = '187Re-187Os age determination: Isochron'
helpme['GM-REOS-MA'] = '187Re-187Os age determination: Model age'
helpme['GM-REOS-PT'] = '187Re-187Os age determination: Pt normalization of 186Os'
helpme['GM-SCLERO'] = 'Sclerochronology'
helpme['GM-SHRIMP'] = 'SHRIMP age dating'
helpme['GM-SMND'] = '147Sm-143Nd age determination'
helpme['GM-SMND-I'] = '147Sm-143Nd age determination: Isochron'
helpme['GM-SMND-MA'] = '147Sm-143Nd age determination: Model age'
helpme['GM-THPB'] = '232Th-208Pb age determination'
helpme['GM-THPB-I'] = '232Th-208Pb age determination: Isochron'
helpme['GM-THPB-MA'] = '232Th-208Pb age determination: Model age'
helpme['GM-UPA'] = '235U-231Pa age determination'
helpme['GM-UPB'] = 'U-Pb age determination'
helpme['GM-UPB-CC-T0'] = 'U-Pb age determination: Concordia diagram age, upper intersection'
helpme['GM-UPB-CC-T1'] = 'U-Pb age determination: Concordia diagram age, lower intersection'
helpme['GM-UPB-I-206'] = 'U-Pb age determination: 238U-206Pb isochron'
helpme['GM-UPB-I-207'] = 'U-Pb age determination: 235U-207Pb isochron'
helpme['GM-UPB-MA-206'] = 'U-Pb age determination: 238U-206Pb model age'
helpme['GM-UPB-MA-207'] = 'U-Pb age determination: 235U-207Pb model age'
helpme['GM-USD'] = 'Uranium series disequilibrium age determination'
helpme['GM-USD-PA231-TH230'] = 'Uranium series disequilibrium age determination: 231Pa-230Th'
helpme['GM-USD-PA231-U235'] = 'Uranium series disequilibrium age determination: 231Pa-235U'
helpme['GM-USD-PB210'] = 'Uranium series disequilibrium age determination: 210Pb'
helpme['GM-USD-RA226-TH230'] = 'Uranium series disequilibrium age determination: 226Ra-230Th'
helpme['GM-USD-RA228-TH232'] = 'Uranium series disequilibrium age determination: 228Ra-232Th'
helpme['GM-USD-TH228-TH232'] = 'Uranium series disequilibrium age determination: 228Th-232Th'
helpme['GM-USD-TH230'] = 'Uranium series disequilibrium age determination: 230Th'
helpme['GM-USD-TH230-TH232'] = 'Uranium series disequilibrium age determination: 230Th-232Th'
helpme['GM-USD-TH230-U234'] = 'Uranium series disequilibrium age determination: 230Th-234U'
helpme['GM-USD-TH230-U238'] = 'Uranium series disequilibrium age determination: 230Th-238U'
helpme['GM-USD-U234-U238'] = 'Uranium series disequilibrium age determination: 234U-238U'
helpme['GM-UTH'] = '238U-230Th age determination'
helpme['GM-UTHHE'] = 'U-Th-He age determination'
helpme['GM-UTHPB'] = 'U-Th-Pb age determination'
helpme['GM-UTHPB-CC-T0'] = 'U-Th-Pb age determination: Concordia diagram intersection age, upper intercept'
helpme['GM-UTHPB-CC-T1'] = 'U-Th-Pb age determination: Concordia diagram intersection age, lower intercept'
helpme['GM-VARVE'] = 'Age determined by varve counting'
helpme['tiepoint_name'] = 'Name for tiepoint horizon'
helpme['tiepoint_alternatives'] = 'Colon-delimited list of alternative names and abbreviations'
helpme['tiepoint_height'] = 'Tiepoint stratigraphic height relative to reference tiepoint'
helpme['tiepoint_height_sigma'] = 'Tiepoint stratigraphic height uncertainty'
helpme['tiepoint_elevation'] = 'Tiepoint elevation relative to sealevel'
helpme['tiepoint_type'] = 'Tiepoint type'
helpme['age'] = 'Age'
helpme['age_sigma'] = 'Age -- uncertainty'
helpme['age_range_low'] = 'Age -- low range'
helpme['age_range_high'] = 'Age -- high range'
helpme['age_unit'] = 'Age -- unit'
helpme['timescale_eon'] = 'Timescale eon'
helpme['timescale_era'] = 'Timescale era'
helpme['timescale_period'] = 'Timescale period'
helpme['timescale_epoch'] = 'Timescale epoch'
helpme['timescale_stage'] = 'Timescale stage'
helpme['biostrat_zone'] = 'Biostratigraphic zone'
helpme['conodont_zone'] = 'Conodont zone'
helpme['magnetic_reversal_chron'] = 'Magnetic reversal chron'
helpme['astronomical_stage'] = 'Astronomical stage name'
helpme['oxygen_stage'] = 'Oxygen stage name'
helpme['age_culture_name'] = 'Age culture name'
return helpme[keyhelp] |
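
A brief usage sketch for the lookup above (the wrapper name and default message below are illustrative, not part of the original source). Direct indexing raises KeyError for unknown keys, so a guarded variant can be handy:

def safe_magic_help(keyhelp, default='No help available for this key'):
    # Guarded lookup around magic_help; falls back instead of raising KeyError.
    try:
        return magic_help(keyhelp)
    except KeyError:
        return default

# magic_help('site_lat')          -> 'Site location -- latitude'
# safe_magic_help('no_such_key')  -> 'No help available for this key'
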
def filehash(path):
"""Make an MD5 hash of a file, ignoring any differences in line
ending characters."""
with open(path, "rU") as f:
return md5(py3compat.str_to_bytes(f.read())).hexdigest() | def function[filehash, parameter[path]]:
constant[Make an MD5 hash of a file, ignoring any differences in line
ending characters.]
with call[name[open], parameter[name[path], constant[rU]]] begin[:]
return[call[call[name[md5], parameter[call[name[py3compat].str_to_bytes, parameter[call[name[f].read, parameter[]]]]]].hexdigest, parameter[]]] | keyword[def] identifier[filehash] ( identifier[path] ):
literal[string]
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
keyword[return] identifier[md5] ( identifier[py3compat] . identifier[str_to_bytes] ( identifier[f] . identifier[read] ())). identifier[hexdigest] () | def filehash(path):
"""Make an MD5 hash of a file, ignoring any differences in line
ending characters."""
with open(path, 'rU') as f:
return md5(py3compat.str_to_bytes(f.read())).hexdigest() # depends on [control=['with'], data=['f']] |
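
A note on portability: the 'rU' mode above enables universal newlines under Python 2 but was removed in Python 3.11. A Python-3-only sketch of the same idea (the name filehash_py3 is hypothetical) reads:

import hashlib

def filehash_py3(path):
    # Text mode already normalizes line endings in Python 3, so 'r' suffices;
    # encode explicitly before hashing since md5 needs bytes.
    with open(path, 'r', encoding='utf-8') as f:
        return hashlib.md5(f.read().encode('utf-8')).hexdigest()
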
def _get_scope_versions(self, pkg_versions):
'''
Return the package versions that match the configured wildcard version pattern.
:return: The list of matching versions.
'''
get_in_versions = []
for p_version in pkg_versions:
if fnmatch.fnmatch(p_version, self.version):
get_in_versions.append(p_version)
return get_in_versions | def function[_get_scope_versions, parameter[self, pkg_versions]]:
constant[
Get available difference between next possible matches.
:return:
]
variable[get_in_versions] assign[=] list[[]]
for taget[name[p_version]] in starred[name[pkg_versions]] begin[:]
if call[name[fnmatch].fnmatch, parameter[name[p_version], name[self].version]] begin[:]
call[name[get_in_versions].append, parameter[name[p_version]]]
return[name[get_in_versions]] | keyword[def] identifier[_get_scope_versions] ( identifier[self] , identifier[pkg_versions] ):
literal[string]
identifier[get_in_versions] =[]
keyword[for] identifier[p_version] keyword[in] identifier[pkg_versions] :
keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[p_version] , identifier[self] . identifier[version] ):
identifier[get_in_versions] . identifier[append] ( identifier[p_version] )
keyword[return] identifier[get_in_versions] | def _get_scope_versions(self, pkg_versions):
"""
Get available difference between next possible matches.
:return:
"""
get_in_versions = []
for p_version in pkg_versions:
if fnmatch.fnmatch(p_version, self.version):
get_in_versions.append(p_version) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p_version']]
return get_in_versions |
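
The same fnmatch-based filtering works as a standalone helper; a minimal sketch with illustrative inputs (the name matching_versions is not from the original module):

import fnmatch

def matching_versions(pkg_versions, pattern):
    # Keep only the versions that satisfy the shell-style wildcard pattern.
    return [v for v in pkg_versions if fnmatch.fnmatch(v, pattern)]

# matching_versions(['1.2.0', '1.2.1', '1.3.0'], '1.2.*') -> ['1.2.0', '1.2.1']
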
def _build_parser(
cls,
dictionary,
parser,
section_name,
delimiter=DEFAULT_DELIMITER,
empty_sections=False,
):
""" Populates a parser instance with the content of a dictionary.
:param dict dictionary: The dictionary to use for populating the parser instance
:param configparser.ConfigParser parser: The parser instance
:param str section_name: The current section name to add the dictionary keys to
:param str delimiter: The nested dictionary delimiter character,
defaults to ":", optional
:param bool empty_sections: Flag to allow the representation of empty sections
to exist, defaults to False, optional
:return: The populated parser
:rtype: configparser.ConfigParser
"""
for (key, value) in dictionary.items():
if isinstance(value, dict):
nested_section = delimiter.join([section_name, key])
is_empty = all(isinstance(_, dict) for _ in value.values())
if not is_empty or empty_sections:
parser.add_section(nested_section)
cls._build_parser(value, parser, nested_section, delimiter=delimiter)
elif isinstance(value, (list, tuple, set, frozenset)):
if any(isinstance(_, dict) for _ in value):
raise ValueError(
f"INI files cannot support arrays with mappings, "
f"found in key {key!r}"
)
parser.set(
section_name, key, "\n".join(cls._encode_var(_) for _ in value)
)
else:
parser.set(section_name, key, cls._encode_var(value))
return parser | def function[_build_parser, parameter[cls, dictionary, parser, section_name, delimiter, empty_sections]]:
constant[ Populates a parser instance with the content of a dictionary.
:param dict dictionary: The dictionary to use for populating the parser instance
:param configparser.ConfigParser parser: The parser instance
:param str section_name: The current section name to add the dictionary keys to
:param str delimiter: The nested dictionary delimiter character,
defaults to ":", optional
:param bool empty_sections: Flag to allow the representation of empty sections
to exist, defaults to False, optional
:return: The populated parser
:rtype: configparser.ConfigParser
]
for taget[tuple[[<ast.Name object at 0x7da1b0c4e0b0>, <ast.Name object at 0x7da1b0c4d480>]]] in starred[call[name[dictionary].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[value], name[dict]]] begin[:]
variable[nested_section] assign[=] call[name[delimiter].join, parameter[list[[<ast.Name object at 0x7da1b0c4db10>, <ast.Name object at 0x7da1b0c4e9b0>]]]]
variable[is_empty] assign[=] call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b0c4d5a0>]]
if <ast.BoolOp object at 0x7da1b0c4e9e0> begin[:]
call[name[parser].add_section, parameter[name[nested_section]]]
call[name[cls]._build_parser, parameter[name[value], name[parser], name[nested_section]]]
return[name[parser]] | keyword[def] identifier[_build_parser] (
identifier[cls] ,
identifier[dictionary] ,
identifier[parser] ,
identifier[section_name] ,
identifier[delimiter] = identifier[DEFAULT_DELIMITER] ,
identifier[empty_sections] = keyword[False] ,
):
literal[string]
keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[dictionary] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ):
identifier[nested_section] = identifier[delimiter] . identifier[join] ([ identifier[section_name] , identifier[key] ])
identifier[is_empty] = identifier[all] ( identifier[isinstance] ( identifier[_] , identifier[dict] ) keyword[for] identifier[_] keyword[in] identifier[value] . identifier[values] ())
keyword[if] keyword[not] identifier[is_empty] keyword[or] identifier[empty_sections] :
identifier[parser] . identifier[add_section] ( identifier[nested_section] )
identifier[cls] . identifier[_build_parser] ( identifier[value] , identifier[parser] , identifier[nested_section] , identifier[delimiter] = identifier[delimiter] )
keyword[elif] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] , identifier[set] , identifier[frozenset] )):
keyword[if] identifier[any] ( identifier[isinstance] ( identifier[_] , identifier[dict] ) keyword[for] identifier[_] keyword[in] identifier[value] ):
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
)
identifier[parser] . identifier[set] (
identifier[section_name] , identifier[key] , literal[string] . identifier[join] ( identifier[cls] . identifier[_encode_var] ( identifier[_] ) keyword[for] identifier[_] keyword[in] identifier[value] )
)
keyword[else] :
identifier[parser] . identifier[set] ( identifier[section_name] , identifier[key] , identifier[cls] . identifier[_encode_var] ( identifier[value] ))
keyword[return] identifier[parser] | def _build_parser(cls, dictionary, parser, section_name, delimiter=DEFAULT_DELIMITER, empty_sections=False):
""" Populates a parser instance with the content of a dictionary.
:param dict dictionary: The dictionary to use for populating the parser instance
:param configparser.ConfigParser parser: The parser instance
:param str section_name: The current section name to add the dictionary keys to
:param str delimiter: The nested dictionary delimiter character,
defaults to ":", optional
:param bool empty_sections: Flag to allow the representation of empty sections
to exist, defaults to False, optional
:return: The populated parser
:rtype: configparser.ConfigParser
"""
for (key, value) in dictionary.items():
if isinstance(value, dict):
nested_section = delimiter.join([section_name, key])
is_empty = all((isinstance(_, dict) for _ in value.values()))
if not is_empty or empty_sections:
parser.add_section(nested_section) # depends on [control=['if'], data=[]]
cls._build_parser(value, parser, nested_section, delimiter=delimiter) # depends on [control=['if'], data=[]]
elif isinstance(value, (list, tuple, set, frozenset)):
if any((isinstance(_, dict) for _ in value)):
raise ValueError(f'INI files cannot support arrays with mappings, found in key {key!r}') # depends on [control=['if'], data=[]]
parser.set(section_name, key, '\n'.join((cls._encode_var(_) for _ in value))) # depends on [control=['if'], data=[]]
else:
parser.set(section_name, key, cls._encode_var(value)) # depends on [control=['for'], data=[]]
return parser |
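
A self-contained sketch of the same flattening idea with configparser (the names and the hard-coded ':' delimiter are illustrative; the original method lives on a class, honors DEFAULT_DELIMITER, and adds list encoding and empty-section handling):

import configparser

def dict_to_ini(dictionary, root='root', delimiter=':'):
    # Flatten a nested mapping into delimiter-joined INI sections.
    parser = configparser.ConfigParser()
    parser.add_section(root)

    def walk(d, section):
        for key, value in d.items():
            if isinstance(value, dict):
                nested = delimiter.join([section, key])
                parser.add_section(nested)
                walk(value, nested)
            else:
                parser.set(section, key, str(value))

    walk(dictionary, root)
    return parser

# dict_to_ini({'db': {'host': 'localhost', 'port': 5432}}) produces the
# sections [root] and [root:db], the latter holding the host and port keys.
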
def Associators(self, ObjectName, AssocClass=None, ResultClass=None,
Role=None, ResultRole=None, IncludeQualifiers=None,
IncludeClassOrigin=None, PropertyList=None, **extra):
# pylint: disable=invalid-name, line-too-long
"""
Retrieve the instances associated with a source instance, or the classes
associated with a source class.
This method performs the Associators operation
(see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all
methods performing such operations.
If the operation succeeds, this method returns.
Otherwise, this method raises an exception.
Parameters:
ObjectName:
The object path of the source object, selecting instance-level or
class-level use of this operation, as follows:
* For selecting instance-level use: The instance path of the
source instance, as a :class:`~pywbem.CIMInstanceName` object.
If this object does not specify a namespace, the default namespace
of the connection is used.
Its `host` attribute will be ignored.
* For selecting class-level use: The class path of the source
class, as a :term:`string` or :class:`~pywbem.CIMClassName` object:
If specified as a string, the string is interpreted as a class
name in the default namespace of the connection
(case independent).
If specified as a :class:`~pywbem.CIMClassName` object, its `host`
attribute will be ignored. If this object does not specify
a namespace, the default namespace of the connection is used.
AssocClass (:term:`string` or :class:`~pywbem.CIMClassName`):
Class name of an association class (case independent),
to filter the result to include only traversals of that association
class (or subclasses).
`None` means that no such filtering is performed.
ResultClass (:term:`string` or :class:`~pywbem.CIMClassName`):
Class name of an associated class (case independent),
to filter the result to include only traversals to that associated
class (or subclasses).
`None` means that no such filtering is performed.
Role (:term:`string`):
Role name (= property name) of the source end (case independent),
to filter the result to include only traversals from that source
role.
            `None` means that no such filtering is performed.
ResultRole (:term:`string`):
Role name (= property name) of the far end (case independent),
to filter the result to include only traversals to that far
role.
            `None` means that no such filtering is performed.
IncludeQualifiers (:class:`py:bool`):
Indicates that qualifiers are to be included in the returned
instances (or classes), as follows:
* If `False`, qualifiers are not included.
* If `True`, qualifiers are included if the WBEM server implements
support for this parameter.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default to be used. :term:`DSP0200`
defines that the server-implemented default is `False`.
This parameter has been deprecated in :term:`DSP0200`. Clients
cannot rely on qualifiers to be returned in this operation.
IncludeClassOrigin (:class:`py:bool`):
Indicates that class origin information is to be included on each
property or method in the returned instances (or classes), as
follows:
* If `False`, class origin information is not included.
* If `True`, class origin information is included.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default to be used. :term:`DSP0200`
defines that the server-implemented default is `False`.
This parameter has been deprecated in :term:`DSP0200` for
instance-level use. WBEM servers may either implement this
parameter as specified, or may treat any specified value as `False`.
PropertyList (:term:`string` or :term:`py:iterable` of :term:`string`):
An iterable specifying the names of the properties (or a string
that defines a single property) to be included in the returned
instances (or classes) (case independent).
An empty iterable indicates to include no properties.
If `None`, all properties are included.
**extra :
Additional keyword arguments are passed as additional operation
parameters to the WBEM server.
Note that :term:`DSP0200` does not define any additional parameters
for this operation.
Returns:
            : The returned list of objects depends on the usage:
* For instance-level use: A list of
:class:`~pywbem.CIMInstance` objects that are representations
of the associated instances.
The `path` attribute of each :class:`~pywbem.CIMInstance`
object is a :class:`~pywbem.CIMInstanceName` object with its
attributes set as follows:
* `classname`: Name of the creation class of the instance.
* `keybindings`: Keybindings of the instance.
* `namespace`: Name of the CIM namespace containing the instance.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace, or `None` if the server did not return host
information.
* For class-level use: A list of :func:`py:tuple` of
(classpath, class) objects that are representations of the
associated classes.
Each tuple represents one class and has these items:
* classpath (:class:`~pywbem.CIMClassName`): The class
path of the class, with its attributes set as follows:
* `classname`: Name of the class.
* `namespace`: Name of the CIM namespace containing the class.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace, or `None` if the server did not return host
information.
* class (:class:`~pywbem.CIMClass`): The representation of the
class, with its `path` attribute set to the `classpath` tuple
item.
Raises:
Exceptions described in :class:`~pywbem.WBEMConnection`.
""" # noqa: E501
exc = None
objects = None
method_name = 'Associators'
if self._operation_recorders:
self.operation_recorder_reset()
self.operation_recorder_stage_pywbem_args(
method=method_name,
ObjectName=ObjectName,
AssocClass=AssocClass,
ResultClass=ResultClass,
Role=Role,
ResultRole=ResultRole,
IncludeQualifiers=IncludeQualifiers,
IncludeClassOrigin=IncludeClassOrigin,
PropertyList=PropertyList,
**extra)
try:
stats = self.statistics.start_timer(method_name)
namespace = self._iparam_namespace_from_objectname(
ObjectName, 'ObjectName')
objectname = self._iparam_objectname(ObjectName, 'ObjectName')
PropertyList = _iparam_propertylist(PropertyList)
result = self._imethodcall(
method_name,
namespace,
ObjectName=objectname,
AssocClass=self._iparam_classname(AssocClass, 'AssocClass'),
ResultClass=self._iparam_classname(ResultClass, 'ResultClass'),
Role=Role,
ResultRole=ResultRole,
IncludeQualifiers=IncludeQualifiers,
IncludeClassOrigin=IncludeClassOrigin,
PropertyList=PropertyList,
**extra)
# instance-level invocation: list of CIMInstance
# class-level invocation: list of CIMClass
if result is None:
objects = []
else:
objects = [x[2] for x in result[0][2]]
if isinstance(objectname, CIMInstanceName):
# instance-level invocation
for instance in objects:
if not isinstance(instance, CIMInstance):
raise CIMXMLParseError(
_format("Expecting CIMInstance object in result "
"list, got {0} object",
instance.__class__.__name__),
conn_id=self.conn_id)
# path and namespace are already set
else:
# class-level invocation
for classpath, klass in objects:
if not isinstance(classpath, CIMClassName) or \
not isinstance(klass, CIMClass):
raise CIMXMLParseError(
_format("Expecting tuple (CIMClassName, CIMClass) "
"in result list, got tuple ({0}, {1})",
classpath.__class__.__name__,
klass.__class__.__name__),
conn_id=self.conn_id)
# path and namespace are already set
return objects
except (CIMXMLParseError, XMLParseError) as exce:
exce.request_data = self.last_raw_request
exce.response_data = self.last_raw_reply
exc = exce
raise
except Exception as exce:
exc = exce
raise
finally:
self._last_operation_time = stats.stop_timer(
self.last_request_len, self.last_reply_len,
self.last_server_response_time, exc)
if self._operation_recorders:
self.operation_recorder_stage_result(objects, exc) | def function[Associators, parameter[self, ObjectName, AssocClass, ResultClass, Role, ResultRole, IncludeQualifiers, IncludeClassOrigin, PropertyList]]:
constant[
Retrieve the instances associated to a source instance, or the classes
associated to a source class.
This method performs the Associators operation
(see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all
methods performing such operations.
If the operation succeeds, this method returns.
Otherwise, this method raises an exception.
Parameters:
ObjectName:
The object path of the source object, selecting instance-level or
class-level use of this operation, as follows:
* For selecting instance-level use: The instance path of the
source instance, as a :class:`~pywbem.CIMInstanceName` object.
If this object does not specify a namespace, the default namespace
of the connection is used.
Its `host` attribute will be ignored.
* For selecting class-level use: The class path of the source
class, as a :term:`string` or :class:`~pywbem.CIMClassName` object:
If specified as a string, the string is interpreted as a class
name in the default namespace of the connection
(case independent).
If specified as a :class:`~pywbem.CIMClassName` object, its `host`
attribute will be ignored. If this object does not specify
a namespace, the default namespace of the connection is used.
AssocClass (:term:`string` or :class:`~pywbem.CIMClassName`):
Class name of an association class (case independent),
to filter the result to include only traversals of that association
class (or subclasses).
            `None` means that no such filtering is performed.
ResultClass (:term:`string` or :class:`~pywbem.CIMClassName`):
Class name of an associated class (case independent),
to filter the result to include only traversals to that associated
class (or subclasses).
            `None` means that no such filtering is performed.
Role (:term:`string`):
Role name (= property name) of the source end (case independent),
to filter the result to include only traversals from that source
role.
            `None` means that no such filtering is performed.
ResultRole (:term:`string`):
Role name (= property name) of the far end (case independent),
to filter the result to include only traversals to that far
role.
            `None` means that no such filtering is performed.
IncludeQualifiers (:class:`py:bool`):
Indicates that qualifiers are to be included in the returned
instances (or classes), as follows:
* If `False`, qualifiers are not included.
* If `True`, qualifiers are included if the WBEM server implements
support for this parameter.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default to be used. :term:`DSP0200`
defines that the server-implemented default is `False`.
This parameter has been deprecated in :term:`DSP0200`. Clients
cannot rely on qualifiers to be returned in this operation.
IncludeClassOrigin (:class:`py:bool`):
Indicates that class origin information is to be included on each
property or method in the returned instances (or classes), as
follows:
* If `False`, class origin information is not included.
* If `True`, class origin information is included.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default to be used. :term:`DSP0200`
defines that the server-implemented default is `False`.
This parameter has been deprecated in :term:`DSP0200` for
instance-level use. WBEM servers may either implement this
parameter as specified, or may treat any specified value as `False`.
PropertyList (:term:`string` or :term:`py:iterable` of :term:`string`):
An iterable specifying the names of the properties (or a string
that defines a single property) to be included in the returned
instances (or classes) (case independent).
An empty iterable indicates to include no properties.
If `None`, all properties are included.
**extra :
Additional keyword arguments are passed as additional operation
parameters to the WBEM server.
Note that :term:`DSP0200` does not define any additional parameters
for this operation.
Returns:
            : The returned list of objects depends on the usage:
* For instance-level use: A list of
:class:`~pywbem.CIMInstance` objects that are representations
of the associated instances.
The `path` attribute of each :class:`~pywbem.CIMInstance`
object is a :class:`~pywbem.CIMInstanceName` object with its
attributes set as follows:
* `classname`: Name of the creation class of the instance.
* `keybindings`: Keybindings of the instance.
* `namespace`: Name of the CIM namespace containing the instance.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace, or `None` if the server did not return host
information.
* For class-level use: A list of :func:`py:tuple` of
(classpath, class) objects that are representations of the
associated classes.
Each tuple represents one class and has these items:
* classpath (:class:`~pywbem.CIMClassName`): The class
path of the class, with its attributes set as follows:
* `classname`: Name of the class.
* `namespace`: Name of the CIM namespace containing the class.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace, or `None` if the server did not return host
information.
* class (:class:`~pywbem.CIMClass`): The representation of the
class, with its `path` attribute set to the `classpath` tuple
item.
Raises:
Exceptions described in :class:`~pywbem.WBEMConnection`.
]
variable[exc] assign[=] constant[None]
variable[objects] assign[=] constant[None]
variable[method_name] assign[=] constant[Associators]
if name[self]._operation_recorders begin[:]
call[name[self].operation_recorder_reset, parameter[]]
call[name[self].operation_recorder_stage_pywbem_args, parameter[]]
<ast.Try object at 0x7da20c76c2b0> | keyword[def] identifier[Associators] ( identifier[self] , identifier[ObjectName] , identifier[AssocClass] = keyword[None] , identifier[ResultClass] = keyword[None] ,
identifier[Role] = keyword[None] , identifier[ResultRole] = keyword[None] , identifier[IncludeQualifiers] = keyword[None] ,
identifier[IncludeClassOrigin] = keyword[None] , identifier[PropertyList] = keyword[None] ,** identifier[extra] ):
literal[string]
identifier[exc] = keyword[None]
identifier[objects] = keyword[None]
identifier[method_name] = literal[string]
keyword[if] identifier[self] . identifier[_operation_recorders] :
identifier[self] . identifier[operation_recorder_reset] ()
identifier[self] . identifier[operation_recorder_stage_pywbem_args] (
identifier[method] = identifier[method_name] ,
identifier[ObjectName] = identifier[ObjectName] ,
identifier[AssocClass] = identifier[AssocClass] ,
identifier[ResultClass] = identifier[ResultClass] ,
identifier[Role] = identifier[Role] ,
identifier[ResultRole] = identifier[ResultRole] ,
identifier[IncludeQualifiers] = identifier[IncludeQualifiers] ,
identifier[IncludeClassOrigin] = identifier[IncludeClassOrigin] ,
identifier[PropertyList] = identifier[PropertyList] ,
** identifier[extra] )
keyword[try] :
identifier[stats] = identifier[self] . identifier[statistics] . identifier[start_timer] ( identifier[method_name] )
identifier[namespace] = identifier[self] . identifier[_iparam_namespace_from_objectname] (
identifier[ObjectName] , literal[string] )
identifier[objectname] = identifier[self] . identifier[_iparam_objectname] ( identifier[ObjectName] , literal[string] )
identifier[PropertyList] = identifier[_iparam_propertylist] ( identifier[PropertyList] )
identifier[result] = identifier[self] . identifier[_imethodcall] (
identifier[method_name] ,
identifier[namespace] ,
identifier[ObjectName] = identifier[objectname] ,
identifier[AssocClass] = identifier[self] . identifier[_iparam_classname] ( identifier[AssocClass] , literal[string] ),
identifier[ResultClass] = identifier[self] . identifier[_iparam_classname] ( identifier[ResultClass] , literal[string] ),
identifier[Role] = identifier[Role] ,
identifier[ResultRole] = identifier[ResultRole] ,
identifier[IncludeQualifiers] = identifier[IncludeQualifiers] ,
identifier[IncludeClassOrigin] = identifier[IncludeClassOrigin] ,
identifier[PropertyList] = identifier[PropertyList] ,
** identifier[extra] )
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[objects] =[]
keyword[else] :
identifier[objects] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[result] [ literal[int] ][ literal[int] ]]
keyword[if] identifier[isinstance] ( identifier[objectname] , identifier[CIMInstanceName] ):
keyword[for] identifier[instance] keyword[in] identifier[objects] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[instance] , identifier[CIMInstance] ):
keyword[raise] identifier[CIMXMLParseError] (
identifier[_format] ( literal[string]
literal[string] ,
identifier[instance] . identifier[__class__] . identifier[__name__] ),
identifier[conn_id] = identifier[self] . identifier[conn_id] )
keyword[else] :
keyword[for] identifier[classpath] , identifier[klass] keyword[in] identifier[objects] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[classpath] , identifier[CIMClassName] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[klass] , identifier[CIMClass] ):
keyword[raise] identifier[CIMXMLParseError] (
identifier[_format] ( literal[string]
literal[string] ,
identifier[classpath] . identifier[__class__] . identifier[__name__] ,
identifier[klass] . identifier[__class__] . identifier[__name__] ),
identifier[conn_id] = identifier[self] . identifier[conn_id] )
keyword[return] identifier[objects]
keyword[except] ( identifier[CIMXMLParseError] , identifier[XMLParseError] ) keyword[as] identifier[exce] :
identifier[exce] . identifier[request_data] = identifier[self] . identifier[last_raw_request]
identifier[exce] . identifier[response_data] = identifier[self] . identifier[last_raw_reply]
identifier[exc] = identifier[exce]
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[exce] :
identifier[exc] = identifier[exce]
keyword[raise]
keyword[finally] :
identifier[self] . identifier[_last_operation_time] = identifier[stats] . identifier[stop_timer] (
identifier[self] . identifier[last_request_len] , identifier[self] . identifier[last_reply_len] ,
identifier[self] . identifier[last_server_response_time] , identifier[exc] )
keyword[if] identifier[self] . identifier[_operation_recorders] :
identifier[self] . identifier[operation_recorder_stage_result] ( identifier[objects] , identifier[exc] ) | def Associators(self, ObjectName, AssocClass=None, ResultClass=None, Role=None, ResultRole=None, IncludeQualifiers=None, IncludeClassOrigin=None, PropertyList=None, **extra):
# pylint: disable=invalid-name, line-too-long
'\n Retrieve the instances associated to a source instance, or the classes\n associated to a source class.\n\n This method performs the Associators operation\n (see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all\n methods performing such operations.\n\n If the operation succeeds, this method returns.\n Otherwise, this method raises an exception.\n\n Parameters:\n\n ObjectName:\n The object path of the source object, selecting instance-level or\n class-level use of this operation, as follows:\n\n * For selecting instance-level use: The instance path of the\n source instance, as a :class:`~pywbem.CIMInstanceName` object.\n If this object does not specify a namespace, the default namespace\n of the connection is used.\n Its `host` attribute will be ignored.\n\n * For selecting class-level use: The class path of the source\n class, as a :term:`string` or :class:`~pywbem.CIMClassName` object:\n\n If specified as a string, the string is interpreted as a class\n name in the default namespace of the connection\n (case independent).\n\n If specified as a :class:`~pywbem.CIMClassName` object, its `host`\n attribute will be ignored. If this object does not specify\n a namespace, the default namespace of the connection is used.\n\n AssocClass (:term:`string` or :class:`~pywbem.CIMClassName`):\n Class name of an association class (case independent),\n to filter the result to include only traversals of that association\n class (or subclasses).\n\n `None` means that no such filtering is peformed.\n\n ResultClass (:term:`string` or :class:`~pywbem.CIMClassName`):\n Class name of an associated class (case independent),\n to filter the result to include only traversals to that associated\n class (or subclasses).\n\n `None` means that no such filtering is peformed.\n\n Role (:term:`string`):\n Role name (= property name) of the source end (case independent),\n to filter the result to include only traversals from that source\n role.\n\n `None` means that no such filtering is peformed.\n\n ResultRole (:term:`string`):\n Role name (= property name) of the far end (case independent),\n to filter the result to include only traversals to that far\n role.\n\n `None` means that no such filtering is peformed.\n\n IncludeQualifiers (:class:`py:bool`):\n Indicates that qualifiers are to be included in the returned\n instances (or classes), as follows:\n\n * If `False`, qualifiers are not included.\n * If `True`, qualifiers are included if the WBEM server implements\n support for this parameter.\n * If `None`, this parameter is not passed to the WBEM server, and\n causes the server-implemented default to be used. :term:`DSP0200`\n defines that the server-implemented default is `False`.\n\n This parameter has been deprecated in :term:`DSP0200`. Clients\n cannot rely on qualifiers to be returned in this operation.\n\n IncludeClassOrigin (:class:`py:bool`):\n Indicates that class origin information is to be included on each\n property or method in the returned instances (or classes), as\n follows:\n\n * If `False`, class origin information is not included.\n * If `True`, class origin information is included.\n * If `None`, this parameter is not passed to the WBEM server, and\n causes the server-implemented default to be used. :term:`DSP0200`\n defines that the server-implemented default is `False`.\n\n This parameter has been deprecated in :term:`DSP0200` for\n instance-level use. 
WBEM servers may either implement this\n parameter as specified, or may treat any specified value as `False`.\n\n PropertyList (:term:`string` or :term:`py:iterable` of :term:`string`):\n An iterable specifying the names of the properties (or a string\n that defines a single property) to be included in the returned\n instances (or classes) (case independent).\n\n An empty iterable indicates to include no properties.\n\n If `None`, all properties are included.\n\n **extra :\n Additional keyword arguments are passed as additional operation\n parameters to the WBEM server.\n Note that :term:`DSP0200` does not define any additional parameters\n for this operation.\n\n Returns:\n\n : The returned list of objects depend on the usage:\n\n * For instance-level use: A list of\n :class:`~pywbem.CIMInstance` objects that are representations\n of the associated instances.\n\n The `path` attribute of each :class:`~pywbem.CIMInstance`\n object is a :class:`~pywbem.CIMInstanceName` object with its\n attributes set as follows:\n\n * `classname`: Name of the creation class of the instance.\n * `keybindings`: Keybindings of the instance.\n * `namespace`: Name of the CIM namespace containing the instance.\n * `host`: Host and optionally port of the WBEM server containing\n the CIM namespace, or `None` if the server did not return host\n information.\n\n * For class-level use: A list of :func:`py:tuple` of\n (classpath, class) objects that are representations of the\n associated classes.\n\n Each tuple represents one class and has these items:\n\n * classpath (:class:`~pywbem.CIMClassName`): The class\n path of the class, with its attributes set as follows:\n\n * `classname`: Name of the class.\n * `namespace`: Name of the CIM namespace containing the class.\n * `host`: Host and optionally port of the WBEM server containing\n the CIM namespace, or `None` if the server did not return host\n information.\n\n * class (:class:`~pywbem.CIMClass`): The representation of the\n class, with its `path` attribute set to the `classpath` tuple\n item.\n\n Raises:\n\n Exceptions described in :class:`~pywbem.WBEMConnection`.\n ' # noqa: E501
exc = None
objects = None
method_name = 'Associators'
if self._operation_recorders:
self.operation_recorder_reset()
self.operation_recorder_stage_pywbem_args(method=method_name, ObjectName=ObjectName, AssocClass=AssocClass, ResultClass=ResultClass, Role=Role, ResultRole=ResultRole, IncludeQualifiers=IncludeQualifiers, IncludeClassOrigin=IncludeClassOrigin, PropertyList=PropertyList, **extra) # depends on [control=['if'], data=[]]
try:
stats = self.statistics.start_timer(method_name)
namespace = self._iparam_namespace_from_objectname(ObjectName, 'ObjectName')
objectname = self._iparam_objectname(ObjectName, 'ObjectName')
PropertyList = _iparam_propertylist(PropertyList)
result = self._imethodcall(method_name, namespace, ObjectName=objectname, AssocClass=self._iparam_classname(AssocClass, 'AssocClass'), ResultClass=self._iparam_classname(ResultClass, 'ResultClass'), Role=Role, ResultRole=ResultRole, IncludeQualifiers=IncludeQualifiers, IncludeClassOrigin=IncludeClassOrigin, PropertyList=PropertyList, **extra)
# instance-level invocation: list of CIMInstance
# class-level invocation: list of CIMClass
if result is None:
objects = [] # depends on [control=['if'], data=[]]
else:
objects = [x[2] for x in result[0][2]]
if isinstance(objectname, CIMInstanceName):
# instance-level invocation
for instance in objects:
if not isinstance(instance, CIMInstance):
raise CIMXMLParseError(_format('Expecting CIMInstance object in result list, got {0} object', instance.__class__.__name__), conn_id=self.conn_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['instance']] # depends on [control=['if'], data=[]]
else:
# path and namespace are already set
# class-level invocation
for (classpath, klass) in objects:
if not isinstance(classpath, CIMClassName) or not isinstance(klass, CIMClass):
raise CIMXMLParseError(_format('Expecting tuple (CIMClassName, CIMClass) in result list, got tuple ({0}, {1})', classpath.__class__.__name__, klass.__class__.__name__), conn_id=self.conn_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# path and namespace are already set
return objects # depends on [control=['try'], data=[]]
except (CIMXMLParseError, XMLParseError) as exce:
exce.request_data = self.last_raw_request
exce.response_data = self.last_raw_reply
exc = exce
raise # depends on [control=['except'], data=['exce']]
except Exception as exce:
exc = exce
raise # depends on [control=['except'], data=['exce']]
finally:
self._last_operation_time = stats.stop_timer(self.last_request_len, self.last_reply_len, self.last_server_response_time, exc)
if self._operation_recorders:
self.operation_recorder_stage_result(objects, exc) # depends on [control=['if'], data=[]] |
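A hedged usage sketch for the Associators operation documented above; the server URL, credentials, and CIM class names are placeholders, not values taken from the docstring.

import pywbem

conn = pywbem.WBEMConnection('https://cimserver.example.com',
                             creds=('user', 'password'))
for path in conn.EnumerateInstanceNames('CIM_ComputerSystem'):
    # traverse CIM_SystemDevice associations to the system's logical devices
    devices = conn.Associators(path,
                               AssocClass='CIM_SystemDevice',
                               ResultClass='CIM_LogicalDevice',
                               PropertyList=['DeviceID', 'Name'])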
def _assemble_and_send_request(self):
"""
Fires off the Fedex request.
@warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
WHICH RESIDES ON FedexBaseService AND IS INHERITED.
"""
# We get an exception like this when specifying an IntegratorId:
# suds.TypeNotFound: Type not found: 'IntegratorId'
# Setting it to None does not seem to appease it.
del self.ClientDetail.IntegratorId
self.logger.debug(self.WebAuthenticationDetail)
self.logger.debug(self.ClientDetail)
self.logger.debug(self.TransactionDetail)
self.logger.debug(self.VersionId)
# Fire off the query.
return self.client.service.addressValidation(
WebAuthenticationDetail=self.WebAuthenticationDetail,
ClientDetail=self.ClientDetail,
TransactionDetail=self.TransactionDetail,
Version=self.VersionId,
InEffectAsOfTimestamp=datetime.datetime.now(),
AddressesToValidate=self.AddressesToValidate) | def function[_assemble_and_send_request, parameter[self]]:
constant[
Fires off the Fedex request.
@warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
WHICH RESIDES ON FedexBaseService AND IS INHERITED.
]
<ast.Delete object at 0x7da1b1110a90>
call[name[self].logger.debug, parameter[name[self].WebAuthenticationDetail]]
call[name[self].logger.debug, parameter[name[self].ClientDetail]]
call[name[self].logger.debug, parameter[name[self].TransactionDetail]]
call[name[self].logger.debug, parameter[name[self].VersionId]]
return[call[name[self].client.service.addressValidation, parameter[]]] | keyword[def] identifier[_assemble_and_send_request] ( identifier[self] ):
literal[string]
keyword[del] identifier[self] . identifier[ClientDetail] . identifier[IntegratorId]
identifier[self] . identifier[logger] . identifier[debug] ( identifier[self] . identifier[WebAuthenticationDetail] )
identifier[self] . identifier[logger] . identifier[debug] ( identifier[self] . identifier[ClientDetail] )
identifier[self] . identifier[logger] . identifier[debug] ( identifier[self] . identifier[TransactionDetail] )
identifier[self] . identifier[logger] . identifier[debug] ( identifier[self] . identifier[VersionId] )
keyword[return] identifier[self] . identifier[client] . identifier[service] . identifier[addressValidation] (
identifier[WebAuthenticationDetail] = identifier[self] . identifier[WebAuthenticationDetail] ,
identifier[ClientDetail] = identifier[self] . identifier[ClientDetail] ,
identifier[TransactionDetail] = identifier[self] . identifier[TransactionDetail] ,
identifier[Version] = identifier[self] . identifier[VersionId] ,
identifier[InEffectAsOfTimestamp] = identifier[datetime] . identifier[datetime] . identifier[now] (),
identifier[AddressesToValidate] = identifier[self] . identifier[AddressesToValidate] ) | def _assemble_and_send_request(self):
"""
Fires off the Fedex request.
@warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
WHICH RESIDES ON FedexBaseService AND IS INHERITED.
"""
# We get an exception like this when specifying an IntegratorId:
# suds.TypeNotFound: Type not found: 'IntegratorId'
# Setting it to None does not seem to appease it.
del self.ClientDetail.IntegratorId
self.logger.debug(self.WebAuthenticationDetail)
self.logger.debug(self.ClientDetail)
self.logger.debug(self.TransactionDetail)
self.logger.debug(self.VersionId)
# Fire off the query.
return self.client.service.addressValidation(WebAuthenticationDetail=self.WebAuthenticationDetail, ClientDetail=self.ClientDetail, TransactionDetail=self.TransactionDetail, Version=self.VersionId, InEffectAsOfTimestamp=datetime.datetime.now(), AddressesToValidate=self.AddressesToValidate) |
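A hedged sketch of how this request class is normally driven; `FedexConfig` and `FedexAddressValidationRequest` are the usual python-fedex entry points but are assumptions here, since only the inherited `send_request()` is referenced in the docstring above.

from fedex.config import FedexConfig
from fedex.services.address_validation_service import FedexAddressValidationRequest

config = FedexConfig(key='...', password='...',
                     account_number='...', meter_number='...')
request = FedexAddressValidationRequest(config)
# addresses are appended to request.AddressesToValidate before sending
request.send_request()   # internally invokes _assemble_and_send_request()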
def clean(self):
"""Remove all of the terms from the section, and also remove them from the document"""
terms = list(self)
for t in terms:
self.doc.remove_term(t) | def function[clean, parameter[self]]:
constant[Remove all of the terms from the section, and also remove them from the document]
variable[terms] assign[=] call[name[list], parameter[name[self]]]
for taget[name[t]] in starred[name[terms]] begin[:]
call[name[self].doc.remove_term, parameter[name[t]]] | keyword[def] identifier[clean] ( identifier[self] ):
literal[string]
identifier[terms] = identifier[list] ( identifier[self] )
keyword[for] identifier[t] keyword[in] identifier[terms] :
identifier[self] . identifier[doc] . identifier[remove_term] ( identifier[t] ) | def clean(self):
"""Remove all of the terms from the section, and also remove them from the document"""
terms = list(self)
for t in terms:
self.doc.remove_term(t) # depends on [control=['for'], data=['t']] |
def make_new_subdomain_future(self, cursor, subdomain_rec):
"""
Recalculate the future for this subdomain from the current record
until the latest known record.
Returns the list of subdomain records we need to save.
"""
assert subdomain_rec.accepted, 'BUG: given subdomain record must already be accepted'
# what's the subdomain's future after this record?
fut = self.subdomain_db.get_subdomain_history(subdomain_rec.get_fqn(), include_unaccepted=True, start_sequence=subdomain_rec.n, start_zonefile_index=subdomain_rec.parent_zonefile_index, cur=cursor)
for i in range(0, len(fut)):
if fut[i].n == subdomain_rec.n and fut[i].parent_zonefile_index == subdomain_rec.parent_zonefile_index:
fut.pop(i)
break
if len(fut) == 0:
log.debug("At tip: {}".format(subdomain_rec))
return []
for i in range(0, len(fut)):
fut[i].accepted = False
fut = [subdomain_rec] + fut
fut.sort(lambda h1, h2: -1 if h1.n < h2.n or (h1.n == h2.n and h1.parent_zonefile_index < h2.parent_zonefile_index) \
else 0 if h1.n == h2.n and h1.parent_zonefile_index == h2.parent_zonefile_index \
else 1)
assert fut[0].accepted, 'BUG: initial subdomain record is not accepted: {}'.format(fut[0])
last_accepted = 0
for i in range(1, len(fut)):
if self.check_subdomain_transition(fut[last_accepted], fut[i]):
log.debug("Accept future update {}".format(fut[i]))
fut[i].accepted = True
last_accepted = i
else:
log.debug("Reject future update {}".format(fut[i]))
fut[i].accepted = False
return fut | def function[make_new_subdomain_future, parameter[self, cursor, subdomain_rec]]:
constant[
Recalculate the future for this subdomain from the current record
until the latest known record.
Returns the list of subdomain records we need to save.
]
assert[name[subdomain_rec].accepted]
variable[fut] assign[=] call[name[self].subdomain_db.get_subdomain_history, parameter[call[name[subdomain_rec].get_fqn, parameter[]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[fut]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b180f610> begin[:]
call[name[fut].pop, parameter[name[i]]]
break
if compare[call[name[len], parameter[name[fut]]] equal[==] constant[0]] begin[:]
call[name[log].debug, parameter[call[constant[At tip: {}].format, parameter[name[subdomain_rec]]]]]
return[list[[]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[fut]]]]]] begin[:]
call[name[fut]][name[i]].accepted assign[=] constant[False]
variable[fut] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b180f010>]] + name[fut]]
call[name[fut].sort, parameter[<ast.Lambda object at 0x7da1b180ea70>]]
assert[call[name[fut]][constant[0]].accepted]
variable[last_accepted] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[fut]]]]]] begin[:]
if call[name[self].check_subdomain_transition, parameter[call[name[fut]][name[last_accepted]], call[name[fut]][name[i]]]] begin[:]
call[name[log].debug, parameter[call[constant[Accept future update {}].format, parameter[call[name[fut]][name[i]]]]]]
call[name[fut]][name[i]].accepted assign[=] constant[True]
variable[last_accepted] assign[=] name[i]
return[name[fut]] | keyword[def] identifier[make_new_subdomain_future] ( identifier[self] , identifier[cursor] , identifier[subdomain_rec] ):
literal[string]
keyword[assert] identifier[subdomain_rec] . identifier[accepted] , literal[string]
identifier[fut] = identifier[self] . identifier[subdomain_db] . identifier[get_subdomain_history] ( identifier[subdomain_rec] . identifier[get_fqn] (), identifier[include_unaccepted] = keyword[True] , identifier[start_sequence] = identifier[subdomain_rec] . identifier[n] , identifier[start_zonefile_index] = identifier[subdomain_rec] . identifier[parent_zonefile_index] , identifier[cur] = identifier[cursor] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[fut] )):
keyword[if] identifier[fut] [ identifier[i] ]. identifier[n] == identifier[subdomain_rec] . identifier[n] keyword[and] identifier[fut] [ identifier[i] ]. identifier[parent_zonefile_index] == identifier[subdomain_rec] . identifier[parent_zonefile_index] :
identifier[fut] . identifier[pop] ( identifier[i] )
keyword[break]
keyword[if] identifier[len] ( identifier[fut] )== literal[int] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[subdomain_rec] ))
keyword[return] []
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[fut] )):
identifier[fut] [ identifier[i] ]. identifier[accepted] = keyword[False]
identifier[fut] =[ identifier[subdomain_rec] ]+ identifier[fut]
identifier[fut] . identifier[sort] ( keyword[lambda] identifier[h1] , identifier[h2] :- literal[int] keyword[if] identifier[h1] . identifier[n] < identifier[h2] . identifier[n] keyword[or] ( identifier[h1] . identifier[n] == identifier[h2] . identifier[n] keyword[and] identifier[h1] . identifier[parent_zonefile_index] < identifier[h2] . identifier[parent_zonefile_index] ) keyword[else] literal[int] keyword[if] identifier[h1] . identifier[n] == identifier[h2] . identifier[n] keyword[and] identifier[h1] . identifier[parent_zonefile_index] == identifier[h2] . identifier[parent_zonefile_index] keyword[else] literal[int] )
keyword[assert] identifier[fut] [ literal[int] ]. identifier[accepted] , literal[string] . identifier[format] ( identifier[fut] [ literal[int] ])
identifier[last_accepted] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[fut] )):
keyword[if] identifier[self] . identifier[check_subdomain_transition] ( identifier[fut] [ identifier[last_accepted] ], identifier[fut] [ identifier[i] ]):
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[fut] [ identifier[i] ]))
identifier[fut] [ identifier[i] ]. identifier[accepted] = keyword[True]
identifier[last_accepted] = identifier[i]
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[fut] [ identifier[i] ]))
identifier[fut] [ identifier[i] ]. identifier[accepted] = keyword[False]
keyword[return] identifier[fut] | def make_new_subdomain_future(self, cursor, subdomain_rec):
"""
Recalculate the future for this subdomain from the current record
until the latest known record.
Returns the list of subdomain records we need to save.
"""
assert subdomain_rec.accepted, 'BUG: given subdomain record must already be accepted'
# what's the subdomain's future after this record?
fut = self.subdomain_db.get_subdomain_history(subdomain_rec.get_fqn(), include_unaccepted=True, start_sequence=subdomain_rec.n, start_zonefile_index=subdomain_rec.parent_zonefile_index, cur=cursor)
for i in range(0, len(fut)):
if fut[i].n == subdomain_rec.n and fut[i].parent_zonefile_index == subdomain_rec.parent_zonefile_index:
fut.pop(i)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if len(fut) == 0:
log.debug('At tip: {}'.format(subdomain_rec))
return [] # depends on [control=['if'], data=[]]
for i in range(0, len(fut)):
fut[i].accepted = False # depends on [control=['for'], data=['i']]
fut = [subdomain_rec] + fut
fut.sort(lambda h1, h2: -1 if h1.n < h2.n or (h1.n == h2.n and h1.parent_zonefile_index < h2.parent_zonefile_index) else 0 if h1.n == h2.n and h1.parent_zonefile_index == h2.parent_zonefile_index else 1)
assert fut[0].accepted, 'BUG: initial subdomain record is not accepted: {}'.format(fut[0])
last_accepted = 0
for i in range(1, len(fut)):
if self.check_subdomain_transition(fut[last_accepted], fut[i]):
log.debug('Accept future update {}'.format(fut[i]))
fut[i].accepted = True
last_accepted = i # depends on [control=['if'], data=[]]
else:
log.debug('Reject future update {}'.format(fut[i]))
fut[i].accepted = False # depends on [control=['for'], data=['i']]
return fut |
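A stripped-down sketch of the replay rule implemented above: walk the ordered future and accept an update only when it transitions validly from the last accepted record. `check_transition` stands in for `check_subdomain_transition`, and the record objects are assumed to carry an `accepted` flag.

def replay(history, check_transition):
    # the first record is taken as the accepted starting point
    history[0].accepted = True
    last_accepted = 0
    for i in range(1, len(history)):
        history[i].accepted = check_transition(history[last_accepted], history[i])
        if history[i].accepted:
            last_accepted = i
    return history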
def enable_rights(self):
"""
Enables rights management provided by :class:`fatbotslim.handlers.RightsHandler`.
"""
if self.rights is None:
handler_instance = RightsHandler(self)
self.handlers.insert(len(self.default_handlers), handler_instance) | def function[enable_rights, parameter[self]]:
constant[
Enables rights management provided by :class:`fatbotslim.handlers.RightsHandler`.
]
if compare[name[self].rights is constant[None]] begin[:]
variable[handler_instance] assign[=] call[name[RightsHandler], parameter[name[self]]]
call[name[self].handlers.insert, parameter[call[name[len], parameter[name[self].default_handlers]], name[handler_instance]]] | keyword[def] identifier[enable_rights] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[rights] keyword[is] keyword[None] :
identifier[handler_instance] = identifier[RightsHandler] ( identifier[self] )
identifier[self] . identifier[handlers] . identifier[insert] ( identifier[len] ( identifier[self] . identifier[default_handlers] ), identifier[handler_instance] ) | def enable_rights(self):
"""
Enables rights management provided by :class:`fatbotslim.handlers.RightsHandler`.
"""
if self.rights is None:
handler_instance = RightsHandler(self)
self.handlers.insert(len(self.default_handlers), handler_instance) # depends on [control=['if'], data=[]] |
async def forward(self, chat_id, disable_notification=None) -> Message:
"""
    Forward this message
    :param chat_id: identifier of the target chat to forward the message to
    :param disable_notification: if True, sends the forwarded message silently
    :return: the forwarded :class:`Message` on success
"""
return await self.bot.forward_message(chat_id, self.chat.id, self.message_id, disable_notification) | <ast.AsyncFunctionDef object at 0x7da1b17d7970> | keyword[async] keyword[def] identifier[forward] ( identifier[self] , identifier[chat_id] , identifier[disable_notification] = keyword[None] )-> identifier[Message] :
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[bot] . identifier[forward_message] ( identifier[chat_id] , identifier[self] . identifier[chat] . identifier[id] , identifier[self] . identifier[message_id] , identifier[disable_notification] ) | async def forward(self, chat_id, disable_notification=None) -> Message:
"""
    Forward this message
    :param chat_id: identifier of the target chat to forward the message to
    :param disable_notification: if True, sends the forwarded message silently
    :return: the forwarded :class:`Message` on success
"""
return await self.bot.forward_message(chat_id, self.chat.id, self.message_id, disable_notification) |
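A hypothetical aiogram handler fragment using the helper above; it assumes the usual `dp`/`types` setup and a placeholder `TARGET_CHAT_ID`.

@dp.message_handler(commands=['mirror'])
async def mirror(message: types.Message):
    # forward the incoming message to another chat without a notification
    await message.forward(TARGET_CHAT_ID, disable_notification=True)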
def fetchall(self, query, *args):
"""
Returns all results of the given query.
:param query: The query to be executed as a `str`.
    :param args: A `tuple` of parameters that will be substituted for the
        placeholders in the query.
:return: A `list` of `tuple`s with each field being one element in the
`tuple`.
"""
cursor = self.connection.cursor()
try:
cursor.execute(query, args)
return cursor.fetchall()
finally:
cursor.close() | def function[fetchall, parameter[self, query]]:
constant[
Returns all results of the given query.
:param query: The query to be executed as a `str`.
    :param args: A `tuple` of parameters that will be substituted for the
        placeholders in the query.
:return: A `list` of `tuple`s with each field being one element in the
`tuple`.
]
variable[cursor] assign[=] call[name[self].connection.cursor, parameter[]]
<ast.Try object at 0x7da1b0f11a20> | keyword[def] identifier[fetchall] ( identifier[self] , identifier[query] ,* identifier[args] ):
literal[string]
identifier[cursor] = identifier[self] . identifier[connection] . identifier[cursor] ()
keyword[try] :
identifier[cursor] . identifier[execute] ( identifier[query] , identifier[args] )
keyword[return] identifier[cursor] . identifier[fetchall] ()
keyword[finally] :
identifier[cursor] . identifier[close] () | def fetchall(self, query, *args):
"""
Returns all results of the given query.
:param query: The query to be executed as a `str`.
    :param args: A `tuple` of parameters that will be substituted for the
        placeholders in the query.
:return: A `list` of `tuple`s with each field being one element in the
`tuple`.
"""
cursor = self.connection.cursor()
try:
cursor.execute(query, args)
return cursor.fetchall() # depends on [control=['try'], data=[]]
finally:
cursor.close() |
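A usage sketch, assuming `db` is an instance of the class above; parameters are passed positionally through `*args`, and the placeholder style (`%s` vs `?`) depends on the underlying DB-API driver.

rows = db.fetchall('SELECT id, name FROM users WHERE age > %s', 21)
for user_id, name in rows:
    print(user_id, name)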
def logging_stream_install(loglevel):
"""
    Install a logger that will output to stderr. If this function has already installed a handler, replace it.
:param loglevel: log level for the stream
"""
formatter = logging.Formatter(LOGGING_FORMAT)
logger = logging.getLogger()
logger.removeHandler(LOGGING_HANDLERS['stream'])
if loglevel == LOGGING_LOGNOTHING:
streamHandler = None
else:
streamHandler = logging.StreamHandler()
streamHandler.setLevel(loglevel)
streamHandler.setFormatter(formatter)
LOGGING_HANDLERS['stream'] = streamHandler
if streamHandler:
logger.addHandler(streamHandler) | def function[logging_stream_install, parameter[loglevel]]:
constant[
    Install a logger that will output to stderr. If this function has already installed a handler, replace it.
:param loglevel: log level for the stream
]
variable[formatter] assign[=] call[name[logging].Formatter, parameter[name[LOGGING_FORMAT]]]
variable[logger] assign[=] call[name[logging].getLogger, parameter[]]
call[name[logger].removeHandler, parameter[call[name[LOGGING_HANDLERS]][constant[stream]]]]
if compare[name[loglevel] equal[==] name[LOGGING_LOGNOTHING]] begin[:]
variable[streamHandler] assign[=] constant[None]
call[name[LOGGING_HANDLERS]][constant[stream]] assign[=] name[streamHandler]
if name[streamHandler] begin[:]
call[name[logger].addHandler, parameter[name[streamHandler]]] | keyword[def] identifier[logging_stream_install] ( identifier[loglevel] ):
literal[string]
identifier[formatter] = identifier[logging] . identifier[Formatter] ( identifier[LOGGING_FORMAT] )
identifier[logger] = identifier[logging] . identifier[getLogger] ()
identifier[logger] . identifier[removeHandler] ( identifier[LOGGING_HANDLERS] [ literal[string] ])
keyword[if] identifier[loglevel] == identifier[LOGGING_LOGNOTHING] :
identifier[streamHandler] = keyword[None]
keyword[else] :
identifier[streamHandler] = identifier[logging] . identifier[StreamHandler] ()
identifier[streamHandler] . identifier[setLevel] ( identifier[loglevel] )
identifier[streamHandler] . identifier[setFormatter] ( identifier[formatter] )
identifier[LOGGING_HANDLERS] [ literal[string] ]= identifier[streamHandler]
keyword[if] identifier[streamHandler] :
identifier[logger] . identifier[addHandler] ( identifier[streamHandler] ) | def logging_stream_install(loglevel):
"""
    Install a logger that will output to stderr. If this function has already installed a handler, replace it.
:param loglevel: log level for the stream
"""
formatter = logging.Formatter(LOGGING_FORMAT)
logger = logging.getLogger()
logger.removeHandler(LOGGING_HANDLERS['stream'])
if loglevel == LOGGING_LOGNOTHING:
streamHandler = None # depends on [control=['if'], data=[]]
else:
streamHandler = logging.StreamHandler()
streamHandler.setLevel(loglevel)
streamHandler.setFormatter(formatter)
LOGGING_HANDLERS['stream'] = streamHandler
if streamHandler:
logger.addHandler(streamHandler) # depends on [control=['if'], data=[]] |
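A short usage sketch of the installer above: swap in a DEBUG-level stderr handler, then remove it again via the module's `LOGGING_LOGNOTHING` sentinel.

import logging

logging_stream_install(logging.DEBUG)
logging.getLogger(__name__).debug('now visible on stderr')
logging_stream_install(LOGGING_LOGNOTHING)   # uninstalls the stream handler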
def get_parameter(self, twig=None, **kwargs):
"""
get a parameter from those that are variables
"""
kwargs['twig'] = twig
kwargs['check_default'] = False
kwargs['check_visible'] = False
ps = self.vars.filter(**kwargs)
if len(ps)==1:
return ps.get(check_visible=False, check_default=False)
elif len(ps) > 1:
# TODO: is this safe? Some constraints may have a parameter listed
# twice, so we can do this then, but maybe should check to make sure
# all items have the same uniqueid? Maybe check len(ps.uniqueids)?
return ps.to_list()[0]
else:
raise KeyError("no result found") | def function[get_parameter, parameter[self, twig]]:
constant[
get a parameter from those that are variables
]
call[name[kwargs]][constant[twig]] assign[=] name[twig]
call[name[kwargs]][constant[check_default]] assign[=] constant[False]
call[name[kwargs]][constant[check_visible]] assign[=] constant[False]
variable[ps] assign[=] call[name[self].vars.filter, parameter[]]
if compare[call[name[len], parameter[name[ps]]] equal[==] constant[1]] begin[:]
return[call[name[ps].get, parameter[]]] | keyword[def] identifier[get_parameter] ( identifier[self] , identifier[twig] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= identifier[twig]
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[kwargs] [ literal[string] ]= keyword[False]
identifier[ps] = identifier[self] . identifier[vars] . identifier[filter] (** identifier[kwargs] )
keyword[if] identifier[len] ( identifier[ps] )== literal[int] :
keyword[return] identifier[ps] . identifier[get] ( identifier[check_visible] = keyword[False] , identifier[check_default] = keyword[False] )
keyword[elif] identifier[len] ( identifier[ps] )> literal[int] :
keyword[return] identifier[ps] . identifier[to_list] ()[ literal[int] ]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] ) | def get_parameter(self, twig=None, **kwargs):
"""
get a parameter from those that are variables
"""
kwargs['twig'] = twig
kwargs['check_default'] = False
kwargs['check_visible'] = False
ps = self.vars.filter(**kwargs)
if len(ps) == 1:
return ps.get(check_visible=False, check_default=False) # depends on [control=['if'], data=[]]
elif len(ps) > 1:
# TODO: is this safe? Some constraints may have a parameter listed
# twice, so we can do this then, but maybe should check to make sure
# all items have the same uniqueid? Maybe check len(ps.uniqueids)?
return ps.to_list()[0] # depends on [control=['if'], data=[]]
else:
raise KeyError('no result found') |
def restoreXml(self, xml):
"""
Restores data from the xml entry.
:param xml | <xml.etree.ElementTree.Element>
:return <bool> | success
"""
if xml is None:
return False
# restore grouping
grps = xml.get('grouping')
if grps is not None:
self.setGroupingActive(True)
self.setGroupBy(grps.split(','))
# restore grouping enabled
grp_enabled = xml.get('groupingActive')
if grp_enabled is not None:
self.setGroupingActive(grp_enabled == 'True', autoRefresh=False)
# restore standard tree options
return super(XOrbTreeWidget, self).restoreXml(xml) | def function[restoreXml, parameter[self, xml]]:
constant[
Restores data from the xml entry.
:param xml | <xml.etree.ElementTree.Element>
:return <bool> | success
]
if compare[name[xml] is constant[None]] begin[:]
return[constant[False]]
variable[grps] assign[=] call[name[xml].get, parameter[constant[grouping]]]
if compare[name[grps] is_not constant[None]] begin[:]
call[name[self].setGroupingActive, parameter[constant[True]]]
call[name[self].setGroupBy, parameter[call[name[grps].split, parameter[constant[,]]]]]
variable[grp_enabled] assign[=] call[name[xml].get, parameter[constant[groupingActive]]]
if compare[name[grp_enabled] is_not constant[None]] begin[:]
call[name[self].setGroupingActive, parameter[compare[name[grp_enabled] equal[==] constant[True]]]]
return[call[call[name[super], parameter[name[XOrbTreeWidget], name[self]]].restoreXml, parameter[name[xml]]]] | keyword[def] identifier[restoreXml] ( identifier[self] , identifier[xml] ):
literal[string]
keyword[if] identifier[xml] keyword[is] keyword[None] :
keyword[return] keyword[False]
identifier[grps] = identifier[xml] . identifier[get] ( literal[string] )
keyword[if] identifier[grps] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[setGroupingActive] ( keyword[True] )
identifier[self] . identifier[setGroupBy] ( identifier[grps] . identifier[split] ( literal[string] ))
identifier[grp_enabled] = identifier[xml] . identifier[get] ( literal[string] )
keyword[if] identifier[grp_enabled] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[setGroupingActive] ( identifier[grp_enabled] == literal[string] , identifier[autoRefresh] = keyword[False] )
keyword[return] identifier[super] ( identifier[XOrbTreeWidget] , identifier[self] ). identifier[restoreXml] ( identifier[xml] ) | def restoreXml(self, xml):
"""
Restores data from the xml entry.
:param xml | <xml.etree.ElementTree.Element>
:return <bool> | success
"""
if xml is None:
return False # depends on [control=['if'], data=[]] # restore grouping
grps = xml.get('grouping')
if grps is not None:
self.setGroupingActive(True)
self.setGroupBy(grps.split(',')) # depends on [control=['if'], data=['grps']] # restore grouping enabled
grp_enabled = xml.get('groupingActive')
if grp_enabled is not None:
self.setGroupingActive(grp_enabled == 'True', autoRefresh=False) # depends on [control=['if'], data=['grp_enabled']] # restore standard tree options
return super(XOrbTreeWidget, self).restoreXml(xml) |
def log_and_dispatch(self, state_change):
""" Log and apply a state change.
This function will first write the state change to the write-ahead-log,
in case of a node crash the state change can be recovered and replayed
to restore the node state.
Events produced by applying state change are also saved.
"""
with self._lock:
timestamp = datetime.utcnow().isoformat(timespec='milliseconds')
state_change_id = self.storage.write_state_change(state_change, timestamp)
self.state_change_id = state_change_id
events = self.state_manager.dispatch(state_change)
self.storage.write_events(state_change_id, events, timestamp)
return events | def function[log_and_dispatch, parameter[self, state_change]]:
constant[ Log and apply a state change.
        This function will first write the state change to the write-ahead log,
        so that in case of a node crash the state change can be recovered and replayed
to restore the node state.
Events produced by applying state change are also saved.
]
with name[self]._lock begin[:]
variable[timestamp] assign[=] call[call[name[datetime].utcnow, parameter[]].isoformat, parameter[]]
variable[state_change_id] assign[=] call[name[self].storage.write_state_change, parameter[name[state_change], name[timestamp]]]
name[self].state_change_id assign[=] name[state_change_id]
variable[events] assign[=] call[name[self].state_manager.dispatch, parameter[name[state_change]]]
call[name[self].storage.write_events, parameter[name[state_change_id], name[events], name[timestamp]]]
return[name[events]] | keyword[def] identifier[log_and_dispatch] ( identifier[self] , identifier[state_change] ):
literal[string]
keyword[with] identifier[self] . identifier[_lock] :
identifier[timestamp] = identifier[datetime] . identifier[utcnow] (). identifier[isoformat] ( identifier[timespec] = literal[string] )
identifier[state_change_id] = identifier[self] . identifier[storage] . identifier[write_state_change] ( identifier[state_change] , identifier[timestamp] )
identifier[self] . identifier[state_change_id] = identifier[state_change_id]
identifier[events] = identifier[self] . identifier[state_manager] . identifier[dispatch] ( identifier[state_change] )
identifier[self] . identifier[storage] . identifier[write_events] ( identifier[state_change_id] , identifier[events] , identifier[timestamp] )
keyword[return] identifier[events] | def log_and_dispatch(self, state_change):
""" Log and apply a state change.
        This function will first write the state change to the write-ahead log,
        so that in case of a node crash the state change can be recovered and replayed
to restore the node state.
Events produced by applying state change are also saved.
"""
with self._lock:
timestamp = datetime.utcnow().isoformat(timespec='milliseconds')
state_change_id = self.storage.write_state_change(state_change, timestamp)
self.state_change_id = state_change_id
events = self.state_manager.dispatch(state_change)
self.storage.write_events(state_change_id, events, timestamp) # depends on [control=['with'], data=[]]
return events |
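A minimal sketch of the write-ahead ordering the method enforces; `lock`, `storage`, and `state_manager` are stand-ins for the attributes used above.

with lock:
    ts = datetime.utcnow().isoformat(timespec='milliseconds')
    change_id = storage.write_state_change(change, ts)   # 1. persist the change first
    events = state_manager.dispatch(change)              # 2. only then apply it
    storage.write_events(change_id, events, ts)          # 3. persist the resulting events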
def list_attached_team(context, id, sort, limit, where, verbose):
"""list_attached_team(context, id, sort, limit. where. verbose)
List teams attached to a topic.
>>> dcictl topic-list-team
:param string id: ID of the topic to list teams for [required]
:param string sort: Field to apply sort
:param integer limit: Max number of rows to return
    :param string where: An optional filter criterion
:param boolean verbose: Display verbose output
"""
result = topic.list_teams(context, id=id, sort=sort, limit=limit,
where=where)
utils.format_output(result, context.format, verbose=verbose) | def function[list_attached_team, parameter[context, id, sort, limit, where, verbose]]:
constant[list_attached_team(context, id, sort, limit. where. verbose)
List teams attached to a topic.
>>> dcictl topic-list-team
:param string id: ID of the topic to list teams for [required]
:param string sort: Field to apply sort
:param integer limit: Max number of rows to return
:param string where: An optional filter criteria
:param boolean verbose: Display verbose output
]
variable[result] assign[=] call[name[topic].list_teams, parameter[name[context]]]
call[name[utils].format_output, parameter[name[result], name[context].format]] | keyword[def] identifier[list_attached_team] ( identifier[context] , identifier[id] , identifier[sort] , identifier[limit] , identifier[where] , identifier[verbose] ):
literal[string]
identifier[result] = identifier[topic] . identifier[list_teams] ( identifier[context] , identifier[id] = identifier[id] , identifier[sort] = identifier[sort] , identifier[limit] = identifier[limit] ,
identifier[where] = identifier[where] )
identifier[utils] . identifier[format_output] ( identifier[result] , identifier[context] . identifier[format] , identifier[verbose] = identifier[verbose] ) | def list_attached_team(context, id, sort, limit, where, verbose):
"""list_attached_team(context, id, sort, limit. where. verbose)
List teams attached to a topic.
>>> dcictl topic-list-team
:param string id: ID of the topic to list teams for [required]
:param string sort: Field to apply sort
:param integer limit: Max number of rows to return
:param string where: An optional filter criteria
:param boolean verbose: Display verbose output
"""
result = topic.list_teams(context, id=id, sort=sort, limit=limit, where=where)
utils.format_output(result, context.format, verbose=verbose) |
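A hedged sketch of the underlying API call this CLI command wraps; the topic id and sort field are placeholders.

result = topic.list_teams(context, id='8a2b4f0e', sort='-created_at',
                          limit=20, where=None)
utils.format_output(result, context.format, verbose=False)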
def taxonomy_dict(self):
"""
:returns: a dict taxonomy string -> taxonomy index
"""
# .taxonomy must be set by the engine
tdict = {taxo: idx for idx, taxo in enumerate(self.taxonomy)}
return tdict | def function[taxonomy_dict, parameter[self]]:
constant[
:returns: a dict taxonomy string -> taxonomy index
]
variable[tdict] assign[=] <ast.DictComp object at 0x7da2054a5ea0>
return[name[tdict]] | keyword[def] identifier[taxonomy_dict] ( identifier[self] ):
literal[string]
identifier[tdict] ={ identifier[taxo] : identifier[idx] keyword[for] identifier[idx] , identifier[taxo] keyword[in] identifier[enumerate] ( identifier[self] . identifier[taxonomy] )}
keyword[return] identifier[tdict] | def taxonomy_dict(self):
"""
:returns: a dict taxonomy string -> taxonomy index
"""
# .taxonomy must be set by the engine
tdict = {taxo: idx for (idx, taxo) in enumerate(self.taxonomy)}
return tdict |
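A worked example of the mapping built above, with a made-up taxonomy list.

taxonomy = ['RC', 'W', 'URM']
tdict = {taxo: idx for idx, taxo in enumerate(taxonomy)}
assert tdict == {'RC': 0, 'W': 1, 'URM': 2}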
def update_or_create_all(cls, list_of_kwargs, keys=[]):
"""Batch method for updating a list of instances and
creating them if required
Args:
list_of_kwargs(list of dicts): A list of dicts where
each dict denotes the keyword args that you would pass
to the create method separately
keys (list, optional): A list of keys to use for the
initial finding step. Matching is done only on these
attributes.
Examples:
>>> Customer.update_or_create_all([
... {'name': 'Vicky', 'email': '[email protected]', 'age': 34},
... {'name': 'Ron', 'age': 40, 'email': '[email protected]',
... 'gender': 'Male'}], keys=['name', 'email'])
"""
objs = []
for kwargs in list_of_kwargs:
filter_kwargs = subdict(kwargs, keys)
if filter_kwargs == {}:
obj = None
else:
obj = cls.first(**filter_kwargs)
if obj is not None:
for key, value in kwargs.iteritems():
if (key not in keys and
key not in cls._no_overwrite_):
setattr(obj, key, value)
else:
obj = cls.new(**kwargs)
objs.append(obj)
try:
return cls.add_all(objs)
except:
cls.session.rollback()
raise | def function[update_or_create_all, parameter[cls, list_of_kwargs, keys]]:
constant[Batch method for updating a list of instances and
creating them if required
Args:
list_of_kwargs(list of dicts): A list of dicts where
each dict denotes the keyword args that you would pass
to the create method separately
keys (list, optional): A list of keys to use for the
initial finding step. Matching is done only on these
attributes.
Examples:
>>> Customer.update_or_create_all([
... {'name': 'Vicky', 'email': '[email protected]', 'age': 34},
... {'name': 'Ron', 'age': 40, 'email': '[email protected]',
... 'gender': 'Male'}], keys=['name', 'email'])
]
variable[objs] assign[=] list[[]]
for taget[name[kwargs]] in starred[name[list_of_kwargs]] begin[:]
variable[filter_kwargs] assign[=] call[name[subdict], parameter[name[kwargs], name[keys]]]
if compare[name[filter_kwargs] equal[==] dictionary[[], []]] begin[:]
variable[obj] assign[=] constant[None]
if compare[name[obj] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b24856c0>, <ast.Name object at 0x7da1b2485c30>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b2484f70> begin[:]
call[name[setattr], parameter[name[obj], name[key], name[value]]]
call[name[objs].append, parameter[name[obj]]]
<ast.Try object at 0x7da1b24ad330> | keyword[def] identifier[update_or_create_all] ( identifier[cls] , identifier[list_of_kwargs] , identifier[keys] =[]):
literal[string]
identifier[objs] =[]
keyword[for] identifier[kwargs] keyword[in] identifier[list_of_kwargs] :
identifier[filter_kwargs] = identifier[subdict] ( identifier[kwargs] , identifier[keys] )
keyword[if] identifier[filter_kwargs] =={}:
identifier[obj] = keyword[None]
keyword[else] :
identifier[obj] = identifier[cls] . identifier[first] (** identifier[filter_kwargs] )
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[iteritems] ():
keyword[if] ( identifier[key] keyword[not] keyword[in] identifier[keys] keyword[and]
identifier[key] keyword[not] keyword[in] identifier[cls] . identifier[_no_overwrite_] ):
identifier[setattr] ( identifier[obj] , identifier[key] , identifier[value] )
keyword[else] :
identifier[obj] = identifier[cls] . identifier[new] (** identifier[kwargs] )
identifier[objs] . identifier[append] ( identifier[obj] )
keyword[try] :
keyword[return] identifier[cls] . identifier[add_all] ( identifier[objs] )
keyword[except] :
identifier[cls] . identifier[session] . identifier[rollback] ()
keyword[raise] | def update_or_create_all(cls, list_of_kwargs, keys=[]):
"""Batch method for updating a list of instances and
creating them if required
Args:
list_of_kwargs(list of dicts): A list of dicts where
each dict denotes the keyword args that you would pass
to the create method separately
keys (list, optional): A list of keys to use for the
initial finding step. Matching is done only on these
attributes.
Examples:
>>> Customer.update_or_create_all([
... {'name': 'Vicky', 'email': '[email protected]', 'age': 34},
... {'name': 'Ron', 'age': 40, 'email': '[email protected]',
... 'gender': 'Male'}], keys=['name', 'email'])
"""
objs = []
for kwargs in list_of_kwargs:
filter_kwargs = subdict(kwargs, keys)
if filter_kwargs == {}:
obj = None # depends on [control=['if'], data=[]]
else:
obj = cls.first(**filter_kwargs)
if obj is not None:
for (key, value) in kwargs.iteritems():
if key not in keys and key not in cls._no_overwrite_:
setattr(obj, key, value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['obj']]
else:
obj = cls.new(**kwargs)
objs.append(obj) # depends on [control=['for'], data=['kwargs']]
try:
return cls.add_all(objs) # depends on [control=['try'], data=[]]
except:
cls.session.rollback()
raise # depends on [control=['except'], data=[]] |
def get_document(self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Retrieves the specified document.
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.DocumentsClient()
>>>
>>> name = client.document_path('[PROJECT]', '[KNOWLEDGE_BASE]', '[DOCUMENT]')
>>>
>>> response = client.get_document(name)
Args:
name (str): Required. The name of the document to retrieve.
Format ``projects/<Project ID>/knowledgeBases/<Knowledge Base
ID>/documents/<Document ID>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2beta1.types.Document` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'get_document' not in self._inner_api_calls:
self._inner_api_calls[
'get_document'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_document,
default_retry=self._method_configs['GetDocument'].retry,
default_timeout=self._method_configs['GetDocument']
.timeout,
client_info=self._client_info,
)
request = document_pb2.GetDocumentRequest(name=name, )
return self._inner_api_calls['get_document'](
request, retry=retry, timeout=timeout, metadata=metadata) | def function[get_document, parameter[self, name, retry, timeout, metadata]]:
constant[
Retrieves the specified document.
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.DocumentsClient()
>>>
>>> name = client.document_path('[PROJECT]', '[KNOWLEDGE_BASE]', '[DOCUMENT]')
>>>
>>> response = client.get_document(name)
Args:
name (str): Required. The name of the document to retrieve.
Format ``projects/<Project ID>/knowledgeBases/<Knowledge Base
ID>/documents/<Document ID>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2beta1.types.Document` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
]
if compare[constant[get_document] <ast.NotIn object at 0x7da2590d7190> name[self]._inner_api_calls] begin[:]
call[name[self]._inner_api_calls][constant[get_document]] assign[=] call[name[google].api_core.gapic_v1.method.wrap_method, parameter[name[self].transport.get_document]]
variable[request] assign[=] call[name[document_pb2].GetDocumentRequest, parameter[]]
return[call[call[name[self]._inner_api_calls][constant[get_document]], parameter[name[request]]]] | keyword[def] identifier[get_document] ( identifier[self] ,
identifier[name] ,
identifier[retry] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[timeout] = identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] ,
identifier[metadata] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_inner_api_calls] :
identifier[self] . identifier[_inner_api_calls] [
literal[string] ]= identifier[google] . identifier[api_core] . identifier[gapic_v1] . identifier[method] . identifier[wrap_method] (
identifier[self] . identifier[transport] . identifier[get_document] ,
identifier[default_retry] = identifier[self] . identifier[_method_configs] [ literal[string] ]. identifier[retry] ,
identifier[default_timeout] = identifier[self] . identifier[_method_configs] [ literal[string] ]
. identifier[timeout] ,
identifier[client_info] = identifier[self] . identifier[_client_info] ,
)
identifier[request] = identifier[document_pb2] . identifier[GetDocumentRequest] ( identifier[name] = identifier[name] ,)
keyword[return] identifier[self] . identifier[_inner_api_calls] [ literal[string] ](
identifier[request] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata] ) | def get_document(self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None):
"""
Retrieves the specified document.
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.DocumentsClient()
>>>
>>> name = client.document_path('[PROJECT]', '[KNOWLEDGE_BASE]', '[DOCUMENT]')
>>>
>>> response = client.get_document(name)
Args:
name (str): Required. The name of the document to retrieve.
Format ``projects/<Project ID>/knowledgeBases/<Knowledge Base
ID>/documents/<Document ID>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dialogflow_v2beta1.types.Document` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'get_document' not in self._inner_api_calls:
self._inner_api_calls['get_document'] = google.api_core.gapic_v1.method.wrap_method(self.transport.get_document, default_retry=self._method_configs['GetDocument'].retry, default_timeout=self._method_configs['GetDocument'].timeout, client_info=self._client_info) # depends on [control=['if'], data=[]]
request = document_pb2.GetDocumentRequest(name=name)
return self._inner_api_calls['get_document'](request, retry=retry, timeout=timeout, metadata=metadata) |
def log_request(request: str, trim_log_values: bool = False, **kwargs: Any) -> None:
"""Log a request"""
return log_(request, request_logger, logging.INFO, trim=trim_log_values, **kwargs) | def function[log_request, parameter[request, trim_log_values]]:
constant[Log a request]
return[call[name[log_], parameter[name[request], name[request_logger], name[logging].INFO]]] | keyword[def] identifier[log_request] ( identifier[request] : identifier[str] , identifier[trim_log_values] : identifier[bool] = keyword[False] ,** identifier[kwargs] : identifier[Any] )-> keyword[None] :
literal[string]
keyword[return] identifier[log_] ( identifier[request] , identifier[request_logger] , identifier[logging] . identifier[INFO] , identifier[trim] = identifier[trim_log_values] ,** identifier[kwargs] ) | def log_request(request: str, trim_log_values: bool=False, **kwargs: Any) -> None:
"""Log a request"""
return log_(request, request_logger, logging.INFO, trim=trim_log_values, **kwargs) |
def multivariate_neg_logposterior(self,beta):
""" Returns negative log posterior, for a model with a covariance matrix
Parameters
----------
beta : np.array
Contains untransformed starting values for latent_variables
Returns
----------
Negative log posterior
"""
post = self.neg_loglik(beta)
for k in range(0,self.z_no):
if self.latent_variables.z_list[k].prior.covariance_prior is True:
post += -self.latent_variables.z_list[k].prior.logpdf(self.custom_covariance(beta))
break
else:
post += -self.latent_variables.z_list[k].prior.logpdf(beta[k])
return post | def function[multivariate_neg_logposterior, parameter[self, beta]]:
constant[ Returns negative log posterior, for a model with a covariance matrix
Parameters
----------
beta : np.array
Contains untransformed starting values for latent_variables
Returns
----------
Negative log posterior
]
variable[post] assign[=] call[name[self].neg_loglik, parameter[name[beta]]]
for taget[name[k]] in starred[call[name[range], parameter[constant[0], name[self].z_no]]] begin[:]
if compare[call[name[self].latent_variables.z_list][name[k]].prior.covariance_prior is constant[True]] begin[:]
<ast.AugAssign object at 0x7da2044c2f50>
break
return[name[post]] | keyword[def] identifier[multivariate_neg_logposterior] ( identifier[self] , identifier[beta] ):
literal[string]
identifier[post] = identifier[self] . identifier[neg_loglik] ( identifier[beta] )
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[z_no] ):
keyword[if] identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[covariance_prior] keyword[is] keyword[True] :
identifier[post] +=- identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[logpdf] ( identifier[self] . identifier[custom_covariance] ( identifier[beta] ))
keyword[break]
keyword[else] :
identifier[post] +=- identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[logpdf] ( identifier[beta] [ identifier[k] ])
keyword[return] identifier[post] | def multivariate_neg_logposterior(self, beta):
""" Returns negative log posterior, for a model with a covariance matrix
Parameters
----------
beta : np.array
Contains untransformed starting values for latent_variables
Returns
----------
Negative log posterior
"""
post = self.neg_loglik(beta)
for k in range(0, self.z_no):
if self.latent_variables.z_list[k].prior.covariance_prior is True:
post += -self.latent_variables.z_list[k].prior.logpdf(self.custom_covariance(beta))
break # depends on [control=['if'], data=[]]
else:
post += -self.latent_variables.z_list[k].prior.logpdf(beta[k]) # depends on [control=['for'], data=['k']]
return post |
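The accumulation is the usual negative log posterior, -log p(beta | y) = -log L(beta) - sum_k log p(beta_k), with a covariance prior (when one is declared) evaluated once on the full covariance matrix instead of coordinate-wise. A self-contained toy version with standard normal priors (all names here are illustrative):

import math

def norm_logpdf(x, mu=0.0, sigma=1.0):
    # Log density of N(mu, sigma^2).
    return -0.5 * math.log(2 * math.pi * sigma ** 2) - (x - mu) ** 2 / (2 * sigma ** 2)

def neg_logposterior(beta, neg_loglik, prior_logpdfs):
    post = neg_loglik(beta)
    for b, logpdf in zip(beta, prior_logpdfs):
        post += -logpdf(b)
    return post

neg_loglik = lambda beta: sum(b * b for b in beta)  # stand-in likelihood term
print(neg_logposterior([0.5, -1.0], neg_loglik, [norm_logpdf, norm_logpdf]))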
def _filter_subgraph(self, subgraph, predicate):
"""
Given a subgraph of the manifest, and a predicate, filter
the subgraph using that predicate. Generates a list of nodes.
"""
to_return = []
for unique_id, item in subgraph.items():
if predicate(item):
to_return.append(item)
return to_return | def function[_filter_subgraph, parameter[self, subgraph, predicate]]:
constant[
Given a subgraph of the manifest, and a predicate, filter
the subgraph using that predicate. Generates a list of nodes.
]
variable[to_return] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1a54ac0>, <ast.Name object at 0x7da1b1a565f0>]]] in starred[call[name[subgraph].items, parameter[]]] begin[:]
if call[name[predicate], parameter[name[item]]] begin[:]
call[name[to_return].append, parameter[name[item]]]
return[name[to_return]] | keyword[def] identifier[_filter_subgraph] ( identifier[self] , identifier[subgraph] , identifier[predicate] ):
literal[string]
identifier[to_return] =[]
keyword[for] identifier[unique_id] , identifier[item] keyword[in] identifier[subgraph] . identifier[items] ():
keyword[if] identifier[predicate] ( identifier[item] ):
identifier[to_return] . identifier[append] ( identifier[item] )
keyword[return] identifier[to_return] | def _filter_subgraph(self, subgraph, predicate):
"""
Given a subgraph of the manifest, and a predicate, filter
the subgraph using that predicate. Generates a list of nodes.
"""
to_return = []
for (unique_id, item) in subgraph.items():
if predicate(item):
to_return.append(item) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return to_return |
def segment_text(text=os.path.join(DATA_PATH, 'goodreads-omniscient-books.txt'),
start=None, stop=r'^Rate\ this', ignore=r'^[\d]'):
""" Split text into segments (sections, paragraphs) using regular expressions to trigger breaks.start
"""
start = start if hasattr(start, 'match') else re.compile(start) if start else None
stop = stop if hasattr(stop, 'match') else re.compile(stop) if stop else None
ignore = ignore if hasattr(ignore, 'match') else re.compile(ignore) if ignore else None
segments = []
segment = []
with open(text) as fin:
for line in fin:
if start is not None and start.match(line):
segments += [segment] if len(segment) else []
segment = [line]
elif stop is not None and stop.match(line):
segments += [segment]
segment = []
elif ignore is not None and ignore.match(line):
continue
else:
                segment += [line]
    return segments | def function[segment_text, parameter[text, start, stop, ignore]]:
    constant[ Split text into segments (sections, paragraphs) using regular expressions to trigger breaks.
]
variable[start] assign[=] <ast.IfExp object at 0x7da1b24ec910>
variable[stop] assign[=] <ast.IfExp object at 0x7da1b24ecbb0>
variable[ignore] assign[=] <ast.IfExp object at 0x7da1b24ed180>
variable[segments] assign[=] list[[]]
variable[segment] assign[=] list[[]]
with call[name[open], parameter[name[text]]] begin[:]
for taget[name[line]] in starred[name[fin]] begin[:]
if <ast.BoolOp object at 0x7da1b24ed210> begin[:]
<ast.AugAssign object at 0x7da2054a41c0>
variable[segment] assign[=] list[[<ast.Name object at 0x7da2054a7250>]] | keyword[def] identifier[segment_text] ( identifier[text] = identifier[os] . identifier[path] . identifier[join] ( identifier[DATA_PATH] , literal[string] ),
identifier[start] = keyword[None] , identifier[stop] = literal[string] , identifier[ignore] = literal[string] ):
literal[string]
identifier[start] = identifier[start] keyword[if] identifier[hasattr] ( identifier[start] , literal[string] ) keyword[else] identifier[re] . identifier[compile] ( identifier[start] ) keyword[if] identifier[start] keyword[else] keyword[None]
identifier[stop] = identifier[stop] keyword[if] identifier[hasattr] ( identifier[stop] , literal[string] ) keyword[else] identifier[re] . identifier[compile] ( identifier[stop] ) keyword[if] identifier[stop] keyword[else] keyword[None]
identifier[ignore] = identifier[ignore] keyword[if] identifier[hasattr] ( identifier[ignore] , literal[string] ) keyword[else] identifier[re] . identifier[compile] ( identifier[ignore] ) keyword[if] identifier[ignore] keyword[else] keyword[None]
identifier[segments] =[]
identifier[segment] =[]
keyword[with] identifier[open] ( identifier[text] ) keyword[as] identifier[fin] :
keyword[for] identifier[line] keyword[in] identifier[fin] :
keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] keyword[and] identifier[start] . identifier[match] ( identifier[line] ):
identifier[segments] +=[ identifier[segment] ] keyword[if] identifier[len] ( identifier[segment] ) keyword[else] []
identifier[segment] =[ identifier[line] ]
keyword[elif] identifier[stop] keyword[is] keyword[not] keyword[None] keyword[and] identifier[stop] . identifier[match] ( identifier[line] ):
identifier[segments] +=[ identifier[segment] ]
identifier[segment] =[]
keyword[elif] identifier[ignore] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ignore] . identifier[match] ( identifier[line] ):
keyword[continue]
keyword[else] :
identifier[segment] +=[ identifier[segment] ] | def segment_text(text=os.path.join(DATA_PATH, 'goodreads-omniscient-books.txt'), start=None, stop='^Rate\\ this', ignore='^[\\d]'):
""" Split text into segments (sections, paragraphs) using regular expressions to trigger breaks.start
"""
start = start if hasattr(start, 'match') else re.compile(start) if start else None
stop = stop if hasattr(stop, 'match') else re.compile(stop) if stop else None
ignore = ignore if hasattr(ignore, 'match') else re.compile(ignore) if ignore else None
segments = []
segment = []
with open(text) as fin:
for line in fin:
if start is not None and start.match(line):
segments += [segment] if len(segment) else []
segment = [line] # depends on [control=['if'], data=[]]
elif stop is not None and stop.match(line):
segments += [segment]
segment = [] # depends on [control=['if'], data=[]]
elif ignore is not None and ignore.match(line):
continue # depends on [control=['if'], data=[]]
else:
                segment += [line] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['fin']]
    return segments
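A quick exercise of the segmenter on a throwaway file (explicit patterns are passed since the defaults above target the Goodreads scrape):

import tempfile

text = 'Intro line\nBook One\nby Author\nRate this\n12345\nBook Two\nRate this\n'
with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
    f.write(text)
segments = segment_text(f.name, start=None, stop=r'^Rate\ this', ignore=r'^[\d]')
# Two segments: the lines before each 'Rate this' marker,
# with the purely numeric line skipped by the ignore pattern.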
def user_disable_throw_rest_endpoint(self, username, url='rest/scriptrunner/latest/custom/disableUser',
param='userName'):
"""The disable method throw own rest enpoint"""
url = "{}?{}={}".format(url, param, username)
return self.get(path=url) | def function[user_disable_throw_rest_endpoint, parameter[self, username, url, param]]:
    constant[The disable method through its own REST endpoint.]
variable[url] assign[=] call[constant[{}?{}={}].format, parameter[name[url], name[param], name[username]]]
return[call[name[self].get, parameter[]]] | keyword[def] identifier[user_disable_throw_rest_endpoint] ( identifier[self] , identifier[username] , identifier[url] = literal[string] ,
identifier[param] = literal[string] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[url] , identifier[param] , identifier[username] )
keyword[return] identifier[self] . identifier[get] ( identifier[path] = identifier[url] ) | def user_disable_throw_rest_endpoint(self, username, url='rest/scriptrunner/latest/custom/disableUser', param='userName'):
"""The disable method throw own rest enpoint"""
url = '{}?{}={}'.format(url, param, username)
return self.get(path=url) |
def _patch_distribution_metadata():
"""Patch write_pkg_file and read_pkg_file for higher metadata standards"""
for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
new_val = getattr(setuptools.dist, attr)
setattr(distutils.dist.DistributionMetadata, attr, new_val) | def function[_patch_distribution_metadata, parameter[]]:
constant[Patch write_pkg_file and read_pkg_file for higher metadata standards]
for taget[name[attr]] in starred[tuple[[<ast.Constant object at 0x7da1b1b10880>, <ast.Constant object at 0x7da1b1b11780>, <ast.Constant object at 0x7da1b1b10d60>]]] begin[:]
variable[new_val] assign[=] call[name[getattr], parameter[name[setuptools].dist, name[attr]]]
call[name[setattr], parameter[name[distutils].dist.DistributionMetadata, name[attr], name[new_val]]] | keyword[def] identifier[_patch_distribution_metadata] ():
literal[string]
keyword[for] identifier[attr] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[new_val] = identifier[getattr] ( identifier[setuptools] . identifier[dist] , identifier[attr] )
identifier[setattr] ( identifier[distutils] . identifier[dist] . identifier[DistributionMetadata] , identifier[attr] , identifier[new_val] ) | def _patch_distribution_metadata():
"""Patch write_pkg_file and read_pkg_file for higher metadata standards"""
for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
new_val = getattr(setuptools.dist, attr)
setattr(distutils.dist.DistributionMetadata, attr, new_val) # depends on [control=['for'], data=['attr']] |
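The patch itself is a plain attribute-copy monkey-patch: setuptools' richer implementations are grafted onto the distutils class. The same pattern in isolation, with stand-in classes:

class Old:
    def describe(self):
        return 'old behaviour'

class New:
    def describe(self):
        return 'new behaviour'

for attr in ('describe',):
    setattr(Old, attr, getattr(New, attr))

print(Old().describe())  # -> 'new behaviour'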
def stream_index(self, bucket, index, startkey, endkey=None,
return_terms=None, max_results=None, continuation=None,
timeout=None):
"""
Streams a secondary index query.
"""
raise NotImplementedError | def function[stream_index, parameter[self, bucket, index, startkey, endkey, return_terms, max_results, continuation, timeout]]:
constant[
Streams a secondary index query.
]
<ast.Raise object at 0x7da18eb56590> | keyword[def] identifier[stream_index] ( identifier[self] , identifier[bucket] , identifier[index] , identifier[startkey] , identifier[endkey] = keyword[None] ,
identifier[return_terms] = keyword[None] , identifier[max_results] = keyword[None] , identifier[continuation] = keyword[None] ,
identifier[timeout] = keyword[None] ):
literal[string]
keyword[raise] identifier[NotImplementedError] | def stream_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None):
"""
Streams a secondary index query.
"""
raise NotImplementedError |
def get(key, default=-1):
"""Backport support for original codes."""
if isinstance(key, int):
return DI(key)
if key not in DI._member_map_:
extend_enum(DI, key, default)
return DI[key] | def function[get, parameter[key, default]]:
constant[Backport support for original codes.]
if call[name[isinstance], parameter[name[key], name[int]]] begin[:]
return[call[name[DI], parameter[name[key]]]]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[DI]._member_map_] begin[:]
call[name[extend_enum], parameter[name[DI], name[key], name[default]]]
return[call[name[DI]][name[key]]] | keyword[def] identifier[get] ( identifier[key] , identifier[default] =- literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[key] , identifier[int] ):
keyword[return] identifier[DI] ( identifier[key] )
keyword[if] identifier[key] keyword[not] keyword[in] identifier[DI] . identifier[_member_map_] :
identifier[extend_enum] ( identifier[DI] , identifier[key] , identifier[default] )
keyword[return] identifier[DI] [ identifier[key] ] | def get(key, default=-1):
"""Backport support for original codes."""
if isinstance(key, int):
return DI(key) # depends on [control=['if'], data=[]]
if key not in DI._member_map_:
extend_enum(DI, key, default) # depends on [control=['if'], data=['key']]
return DI[key] |
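`extend_enum` is the runtime-extension helper from the `aenum` package: unknown keys are registered on the fly with a default value before the lookup. A hedged sketch of the same fallback, assuming `aenum` is installed and using the public `__members__` mapping in place of the internal `_member_map_`:

from aenum import Enum, extend_enum

class DI(Enum):
    KNOWN = 1

def get(key, default=-1):
    if isinstance(key, int):
        return DI(key)
    if key not in DI.__members__:
        extend_enum(DI, key, default)
    return DI[key]

print(get('KNOWN'))      # existing member
print(get('BRAND_NEW'))  # created on the fly with value -1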
def _format_value(self, value):
"""
Format a value with packagename, if not already set
:param value:
:return:
"""
if len(value) > 0:
if value[0] == ".":
value = self.package + value
else:
v_dot = value.find(".")
if v_dot == 0:
value = self.package + "." + value
elif v_dot == -1:
value = self.package + "." + value
return value | def function[_format_value, parameter[self, value]]:
constant[
Format a value with packagename, if not already set
:param value:
:return:
]
if compare[call[name[len], parameter[name[value]]] greater[>] constant[0]] begin[:]
if compare[call[name[value]][constant[0]] equal[==] constant[.]] begin[:]
variable[value] assign[=] binary_operation[name[self].package + name[value]]
return[name[value]] | keyword[def] identifier[_format_value] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[len] ( identifier[value] )> literal[int] :
keyword[if] identifier[value] [ literal[int] ]== literal[string] :
identifier[value] = identifier[self] . identifier[package] + identifier[value]
keyword[else] :
identifier[v_dot] = identifier[value] . identifier[find] ( literal[string] )
keyword[if] identifier[v_dot] == literal[int] :
identifier[value] = identifier[self] . identifier[package] + literal[string] + identifier[value]
keyword[elif] identifier[v_dot] ==- literal[int] :
identifier[value] = identifier[self] . identifier[package] + literal[string] + identifier[value]
keyword[return] identifier[value] | def _format_value(self, value):
"""
Format a value with packagename, if not already set
:param value:
:return:
"""
if len(value) > 0:
if value[0] == '.':
value = self.package + value # depends on [control=['if'], data=[]]
else:
v_dot = value.find('.')
if v_dot == 0:
value = self.package + '.' + value # depends on [control=['if'], data=[]]
elif v_dot == -1:
value = self.package + '.' + value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return value |
def _on_sphinx_thread_html_ready(self, html_text):
"""Set our sphinx documentation based on thread result"""
self._sphinx_thread.wait()
self.set_rich_text_html(html_text, QUrl.fromLocalFile(self.css_path)) | def function[_on_sphinx_thread_html_ready, parameter[self, html_text]]:
constant[Set our sphinx documentation based on thread result]
call[name[self]._sphinx_thread.wait, parameter[]]
call[name[self].set_rich_text_html, parameter[name[html_text], call[name[QUrl].fromLocalFile, parameter[name[self].css_path]]]] | keyword[def] identifier[_on_sphinx_thread_html_ready] ( identifier[self] , identifier[html_text] ):
literal[string]
identifier[self] . identifier[_sphinx_thread] . identifier[wait] ()
identifier[self] . identifier[set_rich_text_html] ( identifier[html_text] , identifier[QUrl] . identifier[fromLocalFile] ( identifier[self] . identifier[css_path] )) | def _on_sphinx_thread_html_ready(self, html_text):
"""Set our sphinx documentation based on thread result"""
self._sphinx_thread.wait()
self.set_rich_text_html(html_text, QUrl.fromLocalFile(self.css_path)) |
def trending(self, rating=None, limit=DEFAULT_SEARCH_LIMIT):
"""
Retrieve GIFs currently trending online. The data returned mirrors
that used to create The Hot 100 list of GIFs on Giphy.
:param rating: limit results to those rated (y,g, pg, pg-13 or r).
:type rating: string
:param limit: Maximum number of results to yield
:type limit: int
"""
results_yielded = 0 # Count how many things we yield
page, per_page = 0, 25
params = {'rating': rating} if rating else {}
fetch = partial(self._fetch, 'trending', **params)
# Generate results until we 1) run out of pages 2) reach a limit
while True:
data = fetch(offset=page, limit=per_page)
page += per_page
# Guard for empty results
if not data['data']:
raise StopIteration
for item in data['data']:
results_yielded += 1
yield GiphyImage(item)
if limit is not None and results_yielded >= limit:
raise StopIteration
            # Check yielded limit and whether or not there are more items
if (page >= data['pagination']['total_count'] or
(limit is not None and results_yielded >= limit)):
raise StopIteration | def function[trending, parameter[self, rating, limit]]:
constant[
Retrieve GIFs currently trending online. The data returned mirrors
that used to create The Hot 100 list of GIFs on Giphy.
:param rating: limit results to those rated (y,g, pg, pg-13 or r).
:type rating: string
:param limit: Maximum number of results to yield
:type limit: int
]
variable[results_yielded] assign[=] constant[0]
<ast.Tuple object at 0x7da1b01c2a70> assign[=] tuple[[<ast.Constant object at 0x7da1b01c1270>, <ast.Constant object at 0x7da1b01c3790>]]
variable[params] assign[=] <ast.IfExp object at 0x7da1b01c1ff0>
variable[fetch] assign[=] call[name[partial], parameter[name[self]._fetch, constant[trending]]]
while constant[True] begin[:]
variable[data] assign[=] call[name[fetch], parameter[]]
<ast.AugAssign object at 0x7da1b01c35b0>
if <ast.UnaryOp object at 0x7da1b01c2170> begin[:]
<ast.Raise object at 0x7da1b01c3af0>
for taget[name[item]] in starred[call[name[data]][constant[data]]] begin[:]
<ast.AugAssign object at 0x7da1b01c0cd0>
<ast.Yield object at 0x7da1b01c03a0>
if <ast.BoolOp object at 0x7da1b01c17b0> begin[:]
<ast.Raise object at 0x7da1b01c2e60>
if <ast.BoolOp object at 0x7da1b01c26e0> begin[:]
<ast.Raise object at 0x7da1b01c3910> | keyword[def] identifier[trending] ( identifier[self] , identifier[rating] = keyword[None] , identifier[limit] = identifier[DEFAULT_SEARCH_LIMIT] ):
literal[string]
identifier[results_yielded] = literal[int]
identifier[page] , identifier[per_page] = literal[int] , literal[int]
identifier[params] ={ literal[string] : identifier[rating] } keyword[if] identifier[rating] keyword[else] {}
identifier[fetch] = identifier[partial] ( identifier[self] . identifier[_fetch] , literal[string] ,** identifier[params] )
keyword[while] keyword[True] :
identifier[data] = identifier[fetch] ( identifier[offset] = identifier[page] , identifier[limit] = identifier[per_page] )
identifier[page] += identifier[per_page]
keyword[if] keyword[not] identifier[data] [ literal[string] ]:
keyword[raise] identifier[StopIteration]
keyword[for] identifier[item] keyword[in] identifier[data] [ literal[string] ]:
identifier[results_yielded] += literal[int]
keyword[yield] identifier[GiphyImage] ( identifier[item] )
keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[results_yielded] >= identifier[limit] :
keyword[raise] identifier[StopIteration]
keyword[if] ( identifier[page] >= identifier[data] [ literal[string] ][ literal[string] ] keyword[or]
( identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[results_yielded] >= identifier[limit] )):
keyword[raise] identifier[StopIteration] | def trending(self, rating=None, limit=DEFAULT_SEARCH_LIMIT):
"""
Retrieve GIFs currently trending online. The data returned mirrors
that used to create The Hot 100 list of GIFs on Giphy.
:param rating: limit results to those rated (y,g, pg, pg-13 or r).
:type rating: string
:param limit: Maximum number of results to yield
:type limit: int
"""
results_yielded = 0 # Count how many things we yield
(page, per_page) = (0, 25)
params = {'rating': rating} if rating else {}
fetch = partial(self._fetch, 'trending', **params)
# Generate results until we 1) run out of pages 2) reach a limit
while True:
data = fetch(offset=page, limit=per_page)
page += per_page
# Guard for empty results
if not data['data']:
raise StopIteration # depends on [control=['if'], data=[]]
for item in data['data']:
results_yielded += 1
yield GiphyImage(item)
if limit is not None and results_yielded >= limit:
raise StopIteration # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
            # Check yielded limit and whether or not there are more items
if page >= data['pagination']['total_count'] or (limit is not None and results_yielded >= limit):
raise StopIteration # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
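The generator is a classic offset/limit pager: fetch a page, yield its items, and stop when either the caller's limit or the server's total count is exhausted. A self-contained version of the same loop against an in-memory 'API' (inside a generator, `return` is the PEP 479-safe spelling of the `raise StopIteration` used above on Python 3.7+):

def fake_fetch(offset, limit):
    items = list(range(100))
    return {'data': items[offset:offset + limit],
            'pagination': {'total_count': len(items)}}

def paged(limit=None, per_page=25):
    yielded, page = 0, 0
    while True:
        data = fake_fetch(offset=page, limit=per_page)
        page += per_page
        if not data['data']:
            return
        for item in data['data']:
            yielded += 1
            yield item
            if limit is not None and yielded >= limit:
                return
        if page >= data['pagination']['total_count']:
            return

print(len(list(paged(limit=30))))  # -> 30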
def public(self) -> 'PrettyDir':
"""Returns public attributes of the inspected object."""
return PrettyDir(
self.obj, [pattr for pattr in self.pattrs if not pattr.name.startswith('_')]
) | def function[public, parameter[self]]:
constant[Returns public attributes of the inspected object.]
return[call[name[PrettyDir], parameter[name[self].obj, <ast.ListComp object at 0x7da2054a4940>]]] | keyword[def] identifier[public] ( identifier[self] )-> literal[string] :
literal[string]
keyword[return] identifier[PrettyDir] (
identifier[self] . identifier[obj] ,[ identifier[pattr] keyword[for] identifier[pattr] keyword[in] identifier[self] . identifier[pattrs] keyword[if] keyword[not] identifier[pattr] . identifier[name] . identifier[startswith] ( literal[string] )]
) | def public(self) -> 'PrettyDir':
"""Returns public attributes of the inspected object."""
return PrettyDir(self.obj, [pattr for pattr in self.pattrs if not pattr.name.startswith('_')]) |
def nonce():
"""
Returns a new nonce to be used with the Piazza API.
"""
nonce_part1 = _int2base(int(_time()*1000), 36)
nonce_part2 = _int2base(round(_random()*1679616), 36)
return "{}{}".format(nonce_part1, nonce_part2) | def function[nonce, parameter[]]:
constant[
Returns a new nonce to be used with the Piazza API.
]
variable[nonce_part1] assign[=] call[name[_int2base], parameter[call[name[int], parameter[binary_operation[call[name[_time], parameter[]] * constant[1000]]]], constant[36]]]
variable[nonce_part2] assign[=] call[name[_int2base], parameter[call[name[round], parameter[binary_operation[call[name[_random], parameter[]] * constant[1679616]]]], constant[36]]]
return[call[constant[{}{}].format, parameter[name[nonce_part1], name[nonce_part2]]]] | keyword[def] identifier[nonce] ():
literal[string]
identifier[nonce_part1] = identifier[_int2base] ( identifier[int] ( identifier[_time] ()* literal[int] ), literal[int] )
identifier[nonce_part2] = identifier[_int2base] ( identifier[round] ( identifier[_random] ()* literal[int] ), literal[int] )
keyword[return] literal[string] . identifier[format] ( identifier[nonce_part1] , identifier[nonce_part2] ) | def nonce():
"""
Returns a new nonce to be used with the Piazza API.
"""
nonce_part1 = _int2base(int(_time() * 1000), 36)
nonce_part2 = _int2base(round(_random() * 1679616), 36)
return '{}{}'.format(nonce_part1, nonce_part2) |
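`_int2base` is not shown in this row; a plausible implementation (an assumption, not necessarily the module's actual helper) converts a non-negative integer to base 36, so the nonce is a base-36 timestamp concatenated with a base-36 random suffix:

import string

DIGITS = string.digits + string.ascii_lowercase  # 0-9a-z covers bases up to 36

def _int2base(n, base):
    # Repeated divmod, most significant digit first.
    if n == 0:
        return '0'
    out = []
    while n:
        n, r = divmod(n, base)
        out.append(DIGITS[r])
    return ''.join(reversed(out))

print(_int2base(1679615, 36))  # -> 'zzzz' (36**4 - 1)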
def _get_err(self, e, id=None, jsonrpc=DEFAULT_JSONRPC):
"""
Returns jsonrpc error message.
"""
# Do not respond to notifications when the request is valid.
if not id \
and not isinstance(e, ParseError) \
and not isinstance(e, InvalidRequestError):
return None
respond = {'id': id}
if isinstance(jsonrpc, int):
# v1.0 requires result to exist always.
# No error codes are defined in v1.0 so only use the message.
if jsonrpc == 10:
respond['result'] = None
respond['error'] = e.dumps()['message']
else:
self._fill_ver(jsonrpc, respond)
respond['error'] = e.dumps()
else:
respond['jsonrpc'] = jsonrpc
respond['error'] = e.dumps()
return respond | def function[_get_err, parameter[self, e, id, jsonrpc]]:
constant[
Returns jsonrpc error message.
]
if <ast.BoolOp object at 0x7da1b0ac2f80> begin[:]
return[constant[None]]
variable[respond] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ac21d0>], [<ast.Name object at 0x7da1b0ac2bf0>]]
if call[name[isinstance], parameter[name[jsonrpc], name[int]]] begin[:]
if compare[name[jsonrpc] equal[==] constant[10]] begin[:]
call[name[respond]][constant[result]] assign[=] constant[None]
call[name[respond]][constant[error]] assign[=] call[call[name[e].dumps, parameter[]]][constant[message]]
return[name[respond]] | keyword[def] identifier[_get_err] ( identifier[self] , identifier[e] , identifier[id] = keyword[None] , identifier[jsonrpc] = identifier[DEFAULT_JSONRPC] ):
literal[string]
keyword[if] keyword[not] identifier[id] keyword[and] keyword[not] identifier[isinstance] ( identifier[e] , identifier[ParseError] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[e] , identifier[InvalidRequestError] ):
keyword[return] keyword[None]
identifier[respond] ={ literal[string] : identifier[id] }
keyword[if] identifier[isinstance] ( identifier[jsonrpc] , identifier[int] ):
keyword[if] identifier[jsonrpc] == literal[int] :
identifier[respond] [ literal[string] ]= keyword[None]
identifier[respond] [ literal[string] ]= identifier[e] . identifier[dumps] ()[ literal[string] ]
keyword[else] :
identifier[self] . identifier[_fill_ver] ( identifier[jsonrpc] , identifier[respond] )
identifier[respond] [ literal[string] ]= identifier[e] . identifier[dumps] ()
keyword[else] :
identifier[respond] [ literal[string] ]= identifier[jsonrpc]
identifier[respond] [ literal[string] ]= identifier[e] . identifier[dumps] ()
keyword[return] identifier[respond] | def _get_err(self, e, id=None, jsonrpc=DEFAULT_JSONRPC):
"""
Returns jsonrpc error message.
"""
# Do not respond to notifications when the request is valid.
if not id and (not isinstance(e, ParseError)) and (not isinstance(e, InvalidRequestError)):
return None # depends on [control=['if'], data=[]]
respond = {'id': id}
if isinstance(jsonrpc, int):
# v1.0 requires result to exist always.
# No error codes are defined in v1.0 so only use the message.
if jsonrpc == 10:
respond['result'] = None
respond['error'] = e.dumps()['message'] # depends on [control=['if'], data=[]]
else:
self._fill_ver(jsonrpc, respond)
respond['error'] = e.dumps() # depends on [control=['if'], data=[]]
else:
respond['jsonrpc'] = jsonrpc
respond['error'] = e.dumps()
return respond |
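The branching tracks the spec differences between JSON-RPC versions: a v1.0 response must carry both `result` and `error`, with the error reduced to its message, while v2.0 carries a `jsonrpc` version field and a structured error object. Illustrative shapes (the exact payload of `e.dumps()` depends on the error class):

# v1.0 (jsonrpc == 10): 'result' is always present, 'error' is the bare message.
{'id': 1, 'result': None, 'error': 'Method not found'}

# v2.0: version string plus a structured error object.
{'id': 1, 'jsonrpc': '2.0',
 'error': {'code': -32601, 'message': 'Method not found'}}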
def open(hdfs_path, mode="r", buff_size=0, replication=0, blocksize=0,
user=None, encoding=None, errors=None):
"""
Open a file, returning an :class:`~.file.hdfs_file` object.
``hdfs_path`` and ``user`` are passed to :func:`~path.split`,
while the other args are passed to the :class:`~.file.hdfs_file`
constructor.
"""
host, port, path_ = path.split(hdfs_path, user)
fs = hdfs(host, port, user)
return fs.open_file(path_, mode, buff_size, replication, blocksize,
encoding, errors) | def function[open, parameter[hdfs_path, mode, buff_size, replication, blocksize, user, encoding, errors]]:
constant[
Open a file, returning an :class:`~.file.hdfs_file` object.
``hdfs_path`` and ``user`` are passed to :func:`~path.split`,
while the other args are passed to the :class:`~.file.hdfs_file`
constructor.
]
<ast.Tuple object at 0x7da1b13a4850> assign[=] call[name[path].split, parameter[name[hdfs_path], name[user]]]
variable[fs] assign[=] call[name[hdfs], parameter[name[host], name[port], name[user]]]
return[call[name[fs].open_file, parameter[name[path_], name[mode], name[buff_size], name[replication], name[blocksize], name[encoding], name[errors]]]] | keyword[def] identifier[open] ( identifier[hdfs_path] , identifier[mode] = literal[string] , identifier[buff_size] = literal[int] , identifier[replication] = literal[int] , identifier[blocksize] = literal[int] ,
identifier[user] = keyword[None] , identifier[encoding] = keyword[None] , identifier[errors] = keyword[None] ):
literal[string]
identifier[host] , identifier[port] , identifier[path_] = identifier[path] . identifier[split] ( identifier[hdfs_path] , identifier[user] )
identifier[fs] = identifier[hdfs] ( identifier[host] , identifier[port] , identifier[user] )
keyword[return] identifier[fs] . identifier[open_file] ( identifier[path_] , identifier[mode] , identifier[buff_size] , identifier[replication] , identifier[blocksize] ,
identifier[encoding] , identifier[errors] ) | def open(hdfs_path, mode='r', buff_size=0, replication=0, blocksize=0, user=None, encoding=None, errors=None):
"""
Open a file, returning an :class:`~.file.hdfs_file` object.
``hdfs_path`` and ``user`` are passed to :func:`~path.split`,
while the other args are passed to the :class:`~.file.hdfs_file`
constructor.
"""
(host, port, path_) = path.split(hdfs_path, user)
fs = hdfs(host, port, user)
return fs.open_file(path_, mode, buff_size, replication, blocksize, encoding, errors) |
def _do_resumable_upload(self, stream, metadata, num_retries):
"""Perform a resumable upload.
:type stream: IO[bytes]
:param stream: A bytes IO object open for reading.
:type metadata: dict
:param metadata: The metadata associated with the upload.
:type num_retries: int
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the final chunk
is uploaded.
"""
upload, transport = self._initiate_resumable_upload(
stream, metadata, num_retries
)
while not upload.finished:
response = upload.transmit_next_chunk(transport)
return response | def function[_do_resumable_upload, parameter[self, stream, metadata, num_retries]]:
constant[Perform a resumable upload.
:type stream: IO[bytes]
:param stream: A bytes IO object open for reading.
:type metadata: dict
:param metadata: The metadata associated with the upload.
:type num_retries: int
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the final chunk
is uploaded.
]
<ast.Tuple object at 0x7da20e9543a0> assign[=] call[name[self]._initiate_resumable_upload, parameter[name[stream], name[metadata], name[num_retries]]]
while <ast.UnaryOp object at 0x7da20e9551e0> begin[:]
variable[response] assign[=] call[name[upload].transmit_next_chunk, parameter[name[transport]]]
return[name[response]] | keyword[def] identifier[_do_resumable_upload] ( identifier[self] , identifier[stream] , identifier[metadata] , identifier[num_retries] ):
literal[string]
identifier[upload] , identifier[transport] = identifier[self] . identifier[_initiate_resumable_upload] (
identifier[stream] , identifier[metadata] , identifier[num_retries]
)
keyword[while] keyword[not] identifier[upload] . identifier[finished] :
identifier[response] = identifier[upload] . identifier[transmit_next_chunk] ( identifier[transport] )
keyword[return] identifier[response] | def _do_resumable_upload(self, stream, metadata, num_retries):
"""Perform a resumable upload.
:type stream: IO[bytes]
:param stream: A bytes IO object open for reading.
:type metadata: dict
:param metadata: The metadata associated with the upload.
:type num_retries: int
:param num_retries: Number of upload retries. (Deprecated: This
argument will be removed in a future release.)
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the final chunk
is uploaded.
"""
(upload, transport) = self._initiate_resumable_upload(stream, metadata, num_retries)
while not upload.finished:
response = upload.transmit_next_chunk(transport) # depends on [control=['while'], data=[]]
return response |
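The resumable pattern is: initiate once, then transmit fixed-size chunks until the upload object reports itself finished, keeping only the last response. A toy equivalent with an in-memory stream (the real `transmit_next_chunk` also takes a transport argument):

import io

class ToyUpload:
    def __init__(self, stream, chunk=4):
        self.stream, self.chunk, self.finished = stream, chunk, False
        self.sent = b''

    def transmit_next_chunk(self):
        piece = self.stream.read(self.chunk)
        if len(piece) < self.chunk:
            self.finished = True
        self.sent += piece
        return 'final 200 OK' if self.finished else 'chunk accepted'

upload = ToyUpload(io.BytesIO(b'hello world'))
while not upload.finished:
    response = upload.transmit_next_chunk()
print(response, upload.sent)  # -> final 200 OK b'hello world'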
def pop_all(self, event_name):
"""Return and remove all stored events of a specified name.
Pops all events from their queue. May miss the latest ones.
If no event is available, return immediately.
Args:
event_name: Name of the events to be popped.
Returns:
List of the desired events.
Raises:
IllegalStateError: Raised if pop is called before the dispatcher
starts polling.
"""
if not self.started:
raise IllegalStateError(("Dispatcher needs to be started before "
"popping."))
results = []
try:
self.lock.acquire()
while True:
e = self.event_dict[event_name].get(block=False)
results.append(e)
except (queue.Empty, KeyError):
return results
finally:
self.lock.release() | def function[pop_all, parameter[self, event_name]]:
constant[Return and remove all stored events of a specified name.
Pops all events from their queue. May miss the latest ones.
If no event is available, return immediately.
Args:
event_name: Name of the events to be popped.
Returns:
List of the desired events.
Raises:
IllegalStateError: Raised if pop is called before the dispatcher
starts polling.
]
if <ast.UnaryOp object at 0x7da1b08f8580> begin[:]
<ast.Raise object at 0x7da1b08f8a30>
variable[results] assign[=] list[[]]
<ast.Try object at 0x7da1b08fae90> | keyword[def] identifier[pop_all] ( identifier[self] , identifier[event_name] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[started] :
keyword[raise] identifier[IllegalStateError] (( literal[string]
literal[string] ))
identifier[results] =[]
keyword[try] :
identifier[self] . identifier[lock] . identifier[acquire] ()
keyword[while] keyword[True] :
identifier[e] = identifier[self] . identifier[event_dict] [ identifier[event_name] ]. identifier[get] ( identifier[block] = keyword[False] )
identifier[results] . identifier[append] ( identifier[e] )
keyword[except] ( identifier[queue] . identifier[Empty] , identifier[KeyError] ):
keyword[return] identifier[results]
keyword[finally] :
identifier[self] . identifier[lock] . identifier[release] () | def pop_all(self, event_name):
"""Return and remove all stored events of a specified name.
Pops all events from their queue. May miss the latest ones.
If no event is available, return immediately.
Args:
event_name: Name of the events to be popped.
Returns:
List of the desired events.
Raises:
IllegalStateError: Raised if pop is called before the dispatcher
starts polling.
"""
if not self.started:
raise IllegalStateError('Dispatcher needs to be started before popping.') # depends on [control=['if'], data=[]]
results = []
try:
self.lock.acquire()
while True:
e = self.event_dict[event_name].get(block=False)
results.append(e) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except (queue.Empty, KeyError):
return results # depends on [control=['except'], data=[]]
finally:
self.lock.release() |
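The drain loop leans on `queue.Queue.get(block=False)` raising `queue.Empty` once the queue is exhausted; the `KeyError` arm covers an event name that was never registered. The core idiom in isolation:

import queue

q = queue.Queue()
for e in ('a', 'b', 'c'):
    q.put(e)

results = []
try:
    while True:
        results.append(q.get(block=False))
except queue.Empty:
    pass

print(results)  # -> ['a', 'b', 'c']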
def get_version_text(self):
"""Return the version information from Unix host."""
try:
version_text = self.device.send('uname -sr', timeout=10)
except CommandError:
self.log("Non Unix jumphost type detected")
return None
return version_text | def function[get_version_text, parameter[self]]:
constant[Return the version information from Unix host.]
<ast.Try object at 0x7da20c76d060>
return[name[version_text]] | keyword[def] identifier[get_version_text] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[version_text] = identifier[self] . identifier[device] . identifier[send] ( literal[string] , identifier[timeout] = literal[int] )
keyword[except] identifier[CommandError] :
identifier[self] . identifier[log] ( literal[string] )
keyword[return] keyword[None]
keyword[raise] identifier[ConnectionError] ( literal[string] )
keyword[return] identifier[version_text] | def get_version_text(self):
"""Return the version information from Unix host."""
try:
version_text = self.device.send('uname -sr', timeout=10) # depends on [control=['try'], data=[]]
except CommandError:
self.log('Non Unix jumphost type detected')
        return None # depends on [control=['except'], data=[]]
return version_text |
def _set_next_host_location(self, context):
'''
A function which sets the next host location on the request, if applicable.
:param ~azure.storage.models.RetryContext context:
The retry context containing the previous host location and the request
to evaluate and possibly modify.
'''
if len(context.request.host_locations) > 1:
# If there's more than one possible location, retry to the alternative
if context.location_mode == LocationMode.PRIMARY:
context.location_mode = LocationMode.SECONDARY
# if targeting the emulator (with path style), change path instead of host
if context.is_emulated:
# replace the first instance of primary account name with the secondary account name
context.request.path = context.request.path.replace(DEV_ACCOUNT_NAME, DEV_ACCOUNT_SECONDARY_NAME, 1)
else:
context.request.host = context.request.host_locations.get(context.location_mode)
else:
context.location_mode = LocationMode.PRIMARY
# if targeting the emulator (with path style), change path instead of host
if context.is_emulated:
# replace the first instance of secondary account name with the primary account name
context.request.path = context.request.path.replace(DEV_ACCOUNT_SECONDARY_NAME, DEV_ACCOUNT_NAME, 1)
else:
context.request.host = context.request.host_locations.get(context.location_mode) | def function[_set_next_host_location, parameter[self, context]]:
constant[
A function which sets the next host location on the request, if applicable.
:param ~azure.storage.models.RetryContext context:
The retry context containing the previous host location and the request
to evaluate and possibly modify.
]
if compare[call[name[len], parameter[name[context].request.host_locations]] greater[>] constant[1]] begin[:]
if compare[name[context].location_mode equal[==] name[LocationMode].PRIMARY] begin[:]
name[context].location_mode assign[=] name[LocationMode].SECONDARY
if name[context].is_emulated begin[:]
name[context].request.path assign[=] call[name[context].request.path.replace, parameter[name[DEV_ACCOUNT_NAME], name[DEV_ACCOUNT_SECONDARY_NAME], constant[1]]] | keyword[def] identifier[_set_next_host_location] ( identifier[self] , identifier[context] ):
literal[string]
keyword[if] identifier[len] ( identifier[context] . identifier[request] . identifier[host_locations] )> literal[int] :
keyword[if] identifier[context] . identifier[location_mode] == identifier[LocationMode] . identifier[PRIMARY] :
identifier[context] . identifier[location_mode] = identifier[LocationMode] . identifier[SECONDARY]
keyword[if] identifier[context] . identifier[is_emulated] :
identifier[context] . identifier[request] . identifier[path] = identifier[context] . identifier[request] . identifier[path] . identifier[replace] ( identifier[DEV_ACCOUNT_NAME] , identifier[DEV_ACCOUNT_SECONDARY_NAME] , literal[int] )
keyword[else] :
identifier[context] . identifier[request] . identifier[host] = identifier[context] . identifier[request] . identifier[host_locations] . identifier[get] ( identifier[context] . identifier[location_mode] )
keyword[else] :
identifier[context] . identifier[location_mode] = identifier[LocationMode] . identifier[PRIMARY]
keyword[if] identifier[context] . identifier[is_emulated] :
identifier[context] . identifier[request] . identifier[path] = identifier[context] . identifier[request] . identifier[path] . identifier[replace] ( identifier[DEV_ACCOUNT_SECONDARY_NAME] , identifier[DEV_ACCOUNT_NAME] , literal[int] )
keyword[else] :
identifier[context] . identifier[request] . identifier[host] = identifier[context] . identifier[request] . identifier[host_locations] . identifier[get] ( identifier[context] . identifier[location_mode] ) | def _set_next_host_location(self, context):
"""
A function which sets the next host location on the request, if applicable.
:param ~azure.storage.models.RetryContext context:
The retry context containing the previous host location and the request
to evaluate and possibly modify.
"""
if len(context.request.host_locations) > 1:
# If there's more than one possible location, retry to the alternative
if context.location_mode == LocationMode.PRIMARY:
context.location_mode = LocationMode.SECONDARY
# if targeting the emulator (with path style), change path instead of host
if context.is_emulated:
# replace the first instance of primary account name with the secondary account name
context.request.path = context.request.path.replace(DEV_ACCOUNT_NAME, DEV_ACCOUNT_SECONDARY_NAME, 1) # depends on [control=['if'], data=[]]
else:
context.request.host = context.request.host_locations.get(context.location_mode) # depends on [control=['if'], data=[]]
else:
context.location_mode = LocationMode.PRIMARY
# if targeting the emulator (with path style), change path instead of host
if context.is_emulated:
# replace the first instance of secondary account name with the primary account name
context.request.path = context.request.path.replace(DEV_ACCOUNT_SECONDARY_NAME, DEV_ACCOUNT_NAME, 1) # depends on [control=['if'], data=[]]
else:
context.request.host = context.request.host_locations.get(context.location_mode) # depends on [control=['if'], data=[]] |
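Stripped of the emulator special-casing, the retry policy simply flips between two named endpoints on each attempt, falling back to primary when no secondary exists. A compact sketch of the toggle (hostnames are illustrative):

PRIMARY, SECONDARY = 'primary', 'secondary'

def next_location(mode, hosts):
    # hosts maps location mode -> hostname.
    if len(hosts) > 1:
        mode = SECONDARY if mode == PRIMARY else PRIMARY
    else:
        mode = PRIMARY
    return mode, hosts.get(mode)

hosts = {PRIMARY: 'acct.blob.core.windows.net',
         SECONDARY: 'acct-secondary.blob.core.windows.net'}
mode, host = next_location(PRIMARY, hosts)
print(mode, host)  # -> secondary acct-secondary.blob.core.windows.net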
def print_debug(*args, **kwargs):
"""
Print if and only if the debug flag is set true in the config.yaml file.
Args:
args : var args of print arguments.
"""
if WTF_CONFIG_READER.get("debug", False) == True:
print(*args, **kwargs) | def function[print_debug, parameter[]]:
constant[
Print if and only if the debug flag is set true in the config.yaml file.
Args:
args : var args of print arguments.
]
if compare[call[name[WTF_CONFIG_READER].get, parameter[constant[debug], constant[False]]] equal[==] constant[True]] begin[:]
call[name[print], parameter[<ast.Starred object at 0x7da1b1114af0>]] | keyword[def] identifier[print_debug] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[WTF_CONFIG_READER] . identifier[get] ( literal[string] , keyword[False] )== keyword[True] :
identifier[print] (* identifier[args] ,** identifier[kwargs] ) | def print_debug(*args, **kwargs):
"""
Print if and only if the debug flag is set true in the config.yaml file.
Args:
args : var args of print arguments.
"""
if WTF_CONFIG_READER.get('debug', False) == True:
print(*args, **kwargs) # depends on [control=['if'], data=[]] |
def indexFromItem(self, regItem, col=0):
""" Gets the index (with column=0) for the row that contains the regItem
If col is negative, it is counted from the end
"""
if col < 0:
            col = len(self.attrNames) + col
try:
row = self.registry.items.index(regItem)
except ValueError:
return QtCore.QModelIndex()
else:
return self.index(row, col) | def function[indexFromItem, parameter[self, regItem, col]]:
constant[ Gets the index (with column=0) for the row that contains the regItem
If col is negative, it is counted from the end
]
if compare[name[col] less[<] constant[0]] begin[:]
variable[col] assign[=] binary_operation[call[name[len], parameter[name[self].attrNames]] - name[col]]
<ast.Try object at 0x7da1b04d2800> | keyword[def] identifier[indexFromItem] ( identifier[self] , identifier[regItem] , identifier[col] = literal[int] ):
literal[string]
keyword[if] identifier[col] < literal[int] :
identifier[col] = identifier[len] ( identifier[self] . identifier[attrNames] )- identifier[col]
keyword[try] :
identifier[row] = identifier[self] . identifier[registry] . identifier[items] . identifier[index] ( identifier[regItem] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[QtCore] . identifier[QModelIndex] ()
keyword[else] :
keyword[return] identifier[self] . identifier[index] ( identifier[row] , identifier[col] ) | def indexFromItem(self, regItem, col=0):
""" Gets the index (with column=0) for the row that contains the regItem
If col is negative, it is counted from the end
"""
if col < 0:
            col = len(self.attrNames) + col # depends on [control=['if'], data=['col']]
try:
row = self.registry.items.index(regItem) # depends on [control=['try'], data=[]]
except ValueError:
return QtCore.QModelIndex() # depends on [control=['except'], data=[]]
else:
return self.index(row, col) |
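
One detail worth checking in the negative-column branch above: for col = -1, len(self.attrNames) - col evaluates to the length plus one, one past the last column, while conventional wrap-around indexing would be len(self.attrNames) + col. A standalone check with made-up attribute names:

attr_names = ["name", "library", "loaded"]   # hypothetical registry columns
col = -1
print(len(attr_names) - col)   # 4 -- as written above, one past the end
print(len(attr_names) + col)   # 2 -- the usual wrap-around for negatives
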
async def async_init(self) -> None:
"""Create a Tile session."""
if not self._client_established:
await self.request(
'put',
'clients/{0}'.format(self.client_uuid),
data={
'app_id': DEFAULT_APP_ID,
'app_version': DEFAULT_APP_VERSION,
'locale': self._locale
})
self._client_established = True
resp = await self.request(
'post',
'clients/{0}/sessions'.format(self.client_uuid),
data={
'email': self._email,
'password': self._password
})
if not self.user_uuid:
self.user_uuid = resp['result']['user']['user_uuid']
self._session_expiry = resp['result']['session_expiration_timestamp']
self.tiles = Tile(self.request, self.user_uuid) | <ast.AsyncFunctionDef object at 0x7da18dc04bb0> | keyword[async] keyword[def] identifier[async_init] ( identifier[self] )-> keyword[None] :
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_client_established] :
keyword[await] identifier[self] . identifier[request] (
literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[client_uuid] ),
identifier[data] ={
literal[string] : identifier[DEFAULT_APP_ID] ,
literal[string] : identifier[DEFAULT_APP_VERSION] ,
literal[string] : identifier[self] . identifier[_locale]
})
identifier[self] . identifier[_client_established] = keyword[True]
identifier[resp] = keyword[await] identifier[self] . identifier[request] (
literal[string] ,
literal[string] . identifier[format] ( identifier[self] . identifier[client_uuid] ),
identifier[data] ={
literal[string] : identifier[self] . identifier[_email] ,
literal[string] : identifier[self] . identifier[_password]
})
keyword[if] keyword[not] identifier[self] . identifier[user_uuid] :
identifier[self] . identifier[user_uuid] = identifier[resp] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[self] . identifier[_session_expiry] = identifier[resp] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[tiles] = identifier[Tile] ( identifier[self] . identifier[request] , identifier[self] . identifier[user_uuid] ) | async def async_init(self) -> None:
"""Create a Tile session."""
if not self._client_established:
await self.request('put', 'clients/{0}'.format(self.client_uuid), data={'app_id': DEFAULT_APP_ID, 'app_version': DEFAULT_APP_VERSION, 'locale': self._locale})
self._client_established = True # depends on [control=['if'], data=[]]
resp = await self.request('post', 'clients/{0}/sessions'.format(self.client_uuid), data={'email': self._email, 'password': self._password})
if not self.user_uuid:
self.user_uuid = resp['result']['user']['user_uuid'] # depends on [control=['if'], data=[]]
self._session_expiry = resp['result']['session_expiration_timestamp']
self.tiles = Tile(self.request, self.user_uuid) |
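
A runnable sketch of the two-step handshake in async_init above, with a fake request coroutine standing in for the HTTP layer; the endpoint strings and the canned response are hypothetical, shaped only to match the fields read back.

import asyncio

async def fake_request(method, endpoint, data=None):
    # Canned response shaped like the session payload read above.
    return {"result": {"user": {"user_uuid": "u-123"},
                       "session_expiration_timestamp": 1234567890}}

async def main():
    await fake_request("put", "clients/abc", data={"app_id": "demo"})
    resp = await fake_request("post", "clients/abc/sessions",
                              data={"email": "e", "password": "p"})
    print(resp["result"]["user"]["user_uuid"])

asyncio.run(main())
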
def window_opened_by(self, trigger_func, wait=None):
"""
Get the window that has been opened by the passed lambda. It will wait for it to be opened
(in the same way as other Capybara methods wait). It's better to use this method than
``windows[-1]`` `as order of windows isn't defined in some drivers`__.
__ https://dvcs.w3.org/hg/webdriver/raw-file/default/webdriver-spec.html#h_note_10
Args:
trigger_func (func): The function that should trigger the opening of a new window.
wait (int | float, optional): Maximum wait time. Defaults to
:data:`capybara.default_max_wait_time`.
Returns:
Window: The window that has been opened within the lambda.
Raises:
WindowError: If the lambda passed to this method hasn't opened a window or has opened more than one
window.
"""
old_handles = set(self.driver.window_handles)
trigger_func()
@self.document.synchronize(wait=wait, errors=(WindowError,))
def get_new_window():
opened_handles = set(self.driver.window_handles) - old_handles
if len(opened_handles) != 1:
raise WindowError("lambda passed to `window_opened_by` "
"opened {0} windows instead of 1".format(len(opened_handles)))
return Window(self, list(opened_handles)[0])
return get_new_window() | def function[window_opened_by, parameter[self, trigger_func, wait]]:
constant[
Get the window that has been opened by the passed lambda. It will wait for it to be opened
(in the same way as other Capybara methods wait). It's better to use this method than
``windows[-1]`` `as order of windows isn't defined in some drivers`__.
__ https://dvcs.w3.org/hg/webdriver/raw-file/default/webdriver-spec.html#h_note_10
Args:
trigger_func (func): The function that should trigger the opening of a new window.
wait (int | float, optional): Maximum wait time. Defaults to
:data:`capybara.default_max_wait_time`.
Returns:
Window: The window that has been opened within the lambda.
Raises:
WindowError: If the lambda passed to this method hasn't opened a window or has opened more than one
window.
]
variable[old_handles] assign[=] call[name[set], parameter[name[self].driver.window_handles]]
call[name[trigger_func], parameter[]]
def function[get_new_window, parameter[]]:
variable[opened_handles] assign[=] binary_operation[call[name[set], parameter[name[self].driver.window_handles]] - name[old_handles]]
if compare[call[name[len], parameter[name[opened_handles]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0215240>
return[call[name[Window], parameter[name[self], call[call[name[list], parameter[name[opened_handles]]]][constant[0]]]]]
return[call[name[get_new_window], parameter[]]] | keyword[def] identifier[window_opened_by] ( identifier[self] , identifier[trigger_func] , identifier[wait] = keyword[None] ):
literal[string]
identifier[old_handles] = identifier[set] ( identifier[self] . identifier[driver] . identifier[window_handles] )
identifier[trigger_func] ()
@ identifier[self] . identifier[document] . identifier[synchronize] ( identifier[wait] = identifier[wait] , identifier[errors] =( identifier[WindowError] ,))
keyword[def] identifier[get_new_window] ():
identifier[opened_handles] = identifier[set] ( identifier[self] . identifier[driver] . identifier[window_handles] )- identifier[old_handles]
keyword[if] identifier[len] ( identifier[opened_handles] )!= literal[int] :
keyword[raise] identifier[WindowError] ( literal[string]
literal[string] . identifier[format] ( identifier[len] ( identifier[opened_handles] )))
keyword[return] identifier[Window] ( identifier[self] , identifier[list] ( identifier[opened_handles] )[ literal[int] ])
keyword[return] identifier[get_new_window] () | def window_opened_by(self, trigger_func, wait=None):
"""
Get the window that has been opened by the passed lambda. It will wait for it to be opened
(in the same way as other Capybara methods wait). It's better to use this method than
``windows[-1]`` `as order of windows isn't defined in some drivers`__.
__ https://dvcs.w3.org/hg/webdriver/raw-file/default/webdriver-spec.html#h_note_10
Args:
trigger_func (func): The function that should trigger the opening of a new window.
wait (int | float, optional): Maximum wait time. Defaults to
:data:`capybara.default_max_wait_time`.
Returns:
Window: The window that has been opened within the lambda.
Raises:
WindowError: If the lambda passed to this method hasn't opened a window or has opened more than one
window.
"""
old_handles = set(self.driver.window_handles)
trigger_func()
@self.document.synchronize(wait=wait, errors=(WindowError,))
def get_new_window():
opened_handles = set(self.driver.window_handles) - old_handles
if len(opened_handles) != 1:
raise WindowError('lambda passed to `window_opened_by` opened {0} windows instead of 1'.format(len(opened_handles))) # depends on [control=['if'], data=[]]
return Window(self, list(opened_handles)[0])
return get_new_window() |
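
The heart of the synchronized block above is a set difference over window handles before and after the trigger; a standalone sketch with made-up handle ids:

old_handles = {"w1", "w2"}
new_handles = {"w1", "w2", "w3"}           # handles after trigger_func() ran
opened_handles = new_handles - old_handles
if len(opened_handles) != 1:
    raise RuntimeError("expected exactly one new window")
print(list(opened_handles)[0])             # w3
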
def add_environment_vars(config: MutableMapping[str, Any]):
"""Override config with environment variables
Environment variables have to be prefixed with BELBIO_
which will be stripped before splitting on '__' and lower-casing
the environment variable name that is left into keys for the
config dictionary.
Example:
BELBIO_BEL_API__SERVERS__API_URL=http://api.bel.bio
1. BELBIO_BEL_API__SERVERS__API_URL ==> BEL_API__SERVERS__API_URL
2. BEL_API__SERVERS__API_URL ==> bel_api__servers__api_url
3. bel_api__servers__api_url ==> [bel_api, servers, api_url]
4. [bel_api, servers, api_url] ==> config['bel_api']['servers']['api_url'] = http://api.bel.bio
"""
# TODO need to redo config - can't add value to dictionary without recursively building up the dict
# check into config libraries again
for e in os.environ:
if re.match("BELBIO_", e):
val = os.environ.get(e)
if val:
e.replace("BELBIO_", "")
env_keys = e.lower().split("__")
if len(env_keys) > 1:
joined = '"]["'.join(env_keys)
eval_config = f'config["{joined}"] = val'
try:
eval(eval_config)
except Exception as exc:
log.warn("Cannot process {e} into config")
else:
config[env_keys[0]] = val | def function[add_environment_vars, parameter[config]]:
constant[Override config with environment variables
Environment variables have to be prefixed with BELBIO_
which will be stripped before splitting on '__' and lower-casing
the environment variable name that is left into keys for the
config dictionary.
Example:
BELBIO_BEL_API__SERVERS__API_URL=http://api.bel.bio
1. BELBIO_BEL_API__SERVERS__API_URL ==> BEL_API__SERVERS__API_URL
2. BEL_API__SERVERS__API_URL ==> bel_api__servers__api_url
3. bel_api__servers__api_url ==> [bel_api, servers, api_url]
4. [bel_api, servers, api_url] ==> config['bel_api']['servers']['api_url'] = http://api.bel.bio
]
for taget[name[e]] in starred[name[os].environ] begin[:]
if call[name[re].match, parameter[constant[BELBIO_], name[e]]] begin[:]
variable[val] assign[=] call[name[os].environ.get, parameter[name[e]]]
if name[val] begin[:]
call[name[e].replace, parameter[constant[BELBIO_], constant[]]]
variable[env_keys] assign[=] call[call[name[e].lower, parameter[]].split, parameter[constant[__]]]
if compare[call[name[len], parameter[name[env_keys]]] greater[>] constant[1]] begin[:]
variable[joined] assign[=] call[constant["]["].join, parameter[name[env_keys]]]
variable[eval_config] assign[=] <ast.JoinedStr object at 0x7da1b1968d30>
<ast.Try object at 0x7da1b1968a90> | keyword[def] identifier[add_environment_vars] ( identifier[config] : identifier[MutableMapping] [ identifier[str] , identifier[Any] ]):
literal[string]
keyword[for] identifier[e] keyword[in] identifier[os] . identifier[environ] :
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[e] ):
identifier[val] = identifier[os] . identifier[environ] . identifier[get] ( identifier[e] )
keyword[if] identifier[val] :
identifier[e] . identifier[replace] ( literal[string] , literal[string] )
identifier[env_keys] = identifier[e] . identifier[lower] (). identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[env_keys] )> literal[int] :
identifier[joined] = literal[string] . identifier[join] ( identifier[env_keys] )
identifier[eval_config] = literal[string]
keyword[try] :
identifier[eval] ( identifier[eval_config] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[log] . identifier[warn] ( literal[string] )
keyword[else] :
identifier[config] [ identifier[env_keys] [ literal[int] ]]= identifier[val] | def add_environment_vars(config: MutableMapping[str, Any]):
"""Override config with environment variables
Environment variables have to be prefixed with BELBIO_
which will be stripped before splitting on '__' and lower-casing
the environment variable name that is left into keys for the
config dictionary.
Example:
BELBIO_BEL_API__SERVERS__API_URL=http://api.bel.bio
1. BELBIO_BEL_API__SERVERS__API_URL ==> BEL_API__SERVERS__API_URL
2. BEL_API__SERVERS__API_URL ==> bel_api__servers__api_url
3. bel_api__servers__api_url ==> [bel_api, servers, api_url]
4. [bel_api, servers, api_url] ==> config['bel_api']['servers']['api_url'] = http://api.bel.bio
"""
# TODO need to redo config - can't add value to dictionary without recursively building up the dict
# check into config libraries again
for e in os.environ:
if re.match('BELBIO_', e):
val = os.environ.get(e)
if val:
e.replace('BELBIO_', '')
env_keys = e.lower().split('__')
if len(env_keys) > 1:
joined = '"]["'.join(env_keys)
eval_config = f'config["{joined}"] = val'
try:
eval(eval_config) # depends on [control=['try'], data=[]]
except Exception as exc:
log.warn(f'Cannot process {e} into config') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
config[env_keys[0]] = val # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']] |
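
As the TODO above concedes, the eval-based assignment cannot work as written: eval() accepts only expressions, so evaluating `config[...] = val` raises SyntaxError, and the BELBIO_ prefix is never actually stripped because the result of e.replace() is discarded. A minimal sketch of the recursive dictionary build the comment asks for, with a hypothetical set_nested helper:

import os

def set_nested(config, keys, value):
    # Create intermediate dicts on the way down, then assign at the last key.
    for key in keys[:-1]:
        config = config.setdefault(key, {})
    config[keys[-1]] = value

config = {}
os.environ["BELBIO_BEL_API__SERVERS__API_URL"] = "http://api.bel.bio"
for name, value in os.environ.items():
    if name.startswith("BELBIO_") and value:
        keys = name[len("BELBIO_"):].lower().split("__")
        set_nested(config, keys, value)
print(config["bel_api"]["servers"]["api_url"])
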
def multiloss(losses, logging_namespace="multiloss", exclude_from_weighting=[]):
"""
Create a loss from multiple losses by mixing them.
This multi-loss implementation is inspired by the paper "Multi-Task Learning Using Uncertainty to Weight Losses
for Scene Geometry and Semantics" by Kendall, Gal and Cipolla.
:param losses: A dict containing all losses that should be merged.
:param logging_namespace: Variable scope in which multiloss lives.
:param exclude_from_weighting: A list of losses that are already weighted and should not be sigma weighted.
:return: A single loss.
"""
with tf.variable_scope(logging_namespace):
sum_loss = 0
for loss_name, loss in losses.items():
if loss_name not in exclude_from_weighting:
with tf.variable_scope(loss_name) as scope:
sum_loss += variance_corrected_loss(loss)
else:
sum_loss += loss
return sum_loss | def function[multiloss, parameter[losses, logging_namespace, exclude_from_weighting]]:
constant[
Create a loss from multiple losses by mixing them.
This multi-loss implementation is inspired by the paper "Multi-Task Learning Using Uncertainty to Weight Losses
for Scene Geometry and Semantics" by Kendall, Gal and Cipolla.
:param losses: A dict containing all losses that should be merged.
:param logging_namespace: Variable scope in which multiloss lives.
:param exclude_from_weighting: A list of losses that are already weighted and should not be sigma weighted.
:return: A single loss.
]
with call[name[tf].variable_scope, parameter[name[logging_namespace]]] begin[:]
variable[sum_loss] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b1b02f20>, <ast.Name object at 0x7da1b1b03f70>]]] in starred[call[name[losses].items, parameter[]]] begin[:]
if compare[name[loss_name] <ast.NotIn object at 0x7da2590d7190> name[exclude_from_weighting]] begin[:]
with call[name[tf].variable_scope, parameter[name[loss_name]]] begin[:]
<ast.AugAssign object at 0x7da1b1b01660>
return[name[sum_loss]] | keyword[def] identifier[multiloss] ( identifier[losses] , identifier[logging_namespace] = literal[string] , identifier[exclude_from_weighting] =[]):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[logging_namespace] ):
identifier[sum_loss] = literal[int]
keyword[for] identifier[loss_name] , identifier[loss] keyword[in] identifier[losses] . identifier[items] ():
keyword[if] identifier[loss_name] keyword[not] keyword[in] identifier[exclude_from_weighting] :
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[loss_name] ) keyword[as] identifier[scope] :
identifier[sum_loss] += identifier[variance_corrected_loss] ( identifier[loss] )
keyword[else] :
identifier[sum_loss] += identifier[loss]
keyword[return] identifier[sum_loss] | def multiloss(losses, logging_namespace='multiloss', exclude_from_weighting=[]):
"""
Create a loss from multiple losses by mixing them.
This multi-loss implementation is inspired by the paper "Multi-Task Learning Using Uncertainty to Weight Losses
for Scene Geometry and Semantics" by Kendall, Gal and Cipolla.
:param losses: A dict containing all losses that should be merged.
:param logging_namespace: Variable scope in which multiloss lives.
:param exclude_from_weighting: A list of losses that are already weighted and should not be sigma weighted.
:return: A single loss.
"""
with tf.variable_scope(logging_namespace):
sum_loss = 0
for (loss_name, loss) in losses.items():
if loss_name not in exclude_from_weighting:
with tf.variable_scope(loss_name) as scope:
sum_loss += variance_corrected_loss(loss) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['loss_name']]
else:
sum_loss += loss # depends on [control=['for'], data=[]]
return sum_loss # depends on [control=['with'], data=[]] |
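
A framework-free sketch of the weighting scheme above. The variance_corrected_loss here is a fixed-sigma stand-in for the learned-uncertainty term from Kendall et al., loss / (2 * sigma**2) + log(sigma); in the real graph, sigma is a trainable variable per loss, so the numbers below are illustrative only.

import math

def variance_corrected_loss(loss, sigma=1.0):
    # Fixed-sigma stand-in; the real version learns sigma per loss.
    return loss / (2.0 * sigma ** 2) + math.log(sigma)

losses = {"depth": 0.8, "segmentation": 1.3, "prior": 0.1}
exclude_from_weighting = ["prior"]         # already-weighted losses
sum_loss = 0.0
for loss_name, loss in losses.items():
    if loss_name not in exclude_from_weighting:
        sum_loss += variance_corrected_loss(loss)
    else:
        sum_loss += loss
print(sum_loss)                            # approximately 1.15 with sigma = 1.0
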
def _initialiseIteration(self):
"""
Starts a new iteration.
"""
self._searchIterator = self._search(
self._request.start,
self._request.end if self._request.end != 0 else None)
self._currentObject = next(self._searchIterator, None)
if self._currentObject is not None:
self._nextObject = next(self._searchIterator, None)
self._searchAnchor = self._request.start
self._distanceFromAnchor = 0
firstObjectStart = self._getStart(self._currentObject)
if firstObjectStart > self._request.start:
self._searchAnchor = firstObjectStart | def function[_initialiseIteration, parameter[self]]:
constant[
Starts a new iteration.
]
name[self]._searchIterator assign[=] call[name[self]._search, parameter[name[self]._request.start, <ast.IfExp object at 0x7da18ede4250>]]
name[self]._currentObject assign[=] call[name[next], parameter[name[self]._searchIterator, constant[None]]]
if compare[name[self]._currentObject is_not constant[None]] begin[:]
name[self]._nextObject assign[=] call[name[next], parameter[name[self]._searchIterator, constant[None]]]
name[self]._searchAnchor assign[=] name[self]._request.start
name[self]._distanceFromAnchor assign[=] constant[0]
variable[firstObjectStart] assign[=] call[name[self]._getStart, parameter[name[self]._currentObject]]
if compare[name[firstObjectStart] greater[>] name[self]._request.start] begin[:]
name[self]._searchAnchor assign[=] name[firstObjectStart] | keyword[def] identifier[_initialiseIteration] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_searchIterator] = identifier[self] . identifier[_search] (
identifier[self] . identifier[_request] . identifier[start] ,
identifier[self] . identifier[_request] . identifier[end] keyword[if] identifier[self] . identifier[_request] . identifier[end] != literal[int] keyword[else] keyword[None] )
identifier[self] . identifier[_currentObject] = identifier[next] ( identifier[self] . identifier[_searchIterator] , keyword[None] )
keyword[if] identifier[self] . identifier[_currentObject] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_nextObject] = identifier[next] ( identifier[self] . identifier[_searchIterator] , keyword[None] )
identifier[self] . identifier[_searchAnchor] = identifier[self] . identifier[_request] . identifier[start]
identifier[self] . identifier[_distanceFromAnchor] = literal[int]
identifier[firstObjectStart] = identifier[self] . identifier[_getStart] ( identifier[self] . identifier[_currentObject] )
keyword[if] identifier[firstObjectStart] > identifier[self] . identifier[_request] . identifier[start] :
identifier[self] . identifier[_searchAnchor] = identifier[firstObjectStart] | def _initialiseIteration(self):
"""
Starts a new iteration.
"""
self._searchIterator = self._search(self._request.start, self._request.end if self._request.end != 0 else None)
self._currentObject = next(self._searchIterator, None)
if self._currentObject is not None:
self._nextObject = next(self._searchIterator, None)
self._searchAnchor = self._request.start
self._distanceFromAnchor = 0
firstObjectStart = self._getStart(self._currentObject)
if firstObjectStart > self._request.start:
self._searchAnchor = firstObjectStart # depends on [control=['if'], data=['firstObjectStart']] # depends on [control=['if'], data=[]] |
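
The one-item lookahead above relies on next(iterator, None) returning a sentinel instead of raising StopIteration; a standalone sketch with a trivial search generator:

def search(start, end):
    for value in range(start, end if end is not None else start + 10):
        yield value

it = search(3, 6)
current_object = next(it, None)    # 3, or None if the search were empty
next_object = next(it, None)       # 4, the one-item lookahead
print(current_object, next_object)
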
def epcrparsethreads(self):
"""
Parse the ePCR results, and run BLAST on the parsed results
"""
from Bio import SeqIO
# Create the threads for the BLAST analysis
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
threads = Thread(target=self.epcrparse, args=())
threads.setDaemon(True)
threads.start()
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
if sample[self.analysistype].primers != 'NA':
# Initialise a dictionary to store the SeqIO records of each assembly
record = dict()
# Initialise dictionaries to store results in the object
sample[self.analysistype].blastresults = dict()
sample[self.analysistype].rawblastresults = dict()
# Load the records from the assembly into the dictionary
for rec in SeqIO.parse(sample.general.bestassemblyfile, 'fasta'):
record[rec.id] = str(rec.seq)
# Iterate through the ePCR results
for line in sample[self.analysistype].epcrresults:
# The data of interest is in the lines that do not start with a #
# TLH 2016-SEQ-0359_4_length_321195_cov_28.6354_ID_3773 + 227879 228086 0 0 208/1000-1000
if not line.startswith('#'):
# Add the variables to the queue
self.epcrparsequeue.put((sample, record, line))
self.epcrparsequeue.join() | def function[epcrparsethreads, parameter[self]]:
constant[
Parse the ePCR results, and run BLAST on the parsed results
]
from relative_module[Bio] import module[SeqIO]
for taget[name[sample]] in starred[name[self].metadata] begin[:]
if compare[name[sample].general.bestassemblyfile not_equal[!=] constant[NA]] begin[:]
variable[threads] assign[=] call[name[Thread], parameter[]]
call[name[threads].setDaemon, parameter[constant[True]]]
call[name[threads].start, parameter[]]
for taget[name[sample]] in starred[name[self].metadata] begin[:]
if compare[name[sample].general.bestassemblyfile not_equal[!=] constant[NA]] begin[:]
if compare[call[name[sample]][name[self].analysistype].primers not_equal[!=] constant[NA]] begin[:]
variable[record] assign[=] call[name[dict], parameter[]]
call[name[sample]][name[self].analysistype].blastresults assign[=] call[name[dict], parameter[]]
call[name[sample]][name[self].analysistype].rawblastresults assign[=] call[name[dict], parameter[]]
for taget[name[rec]] in starred[call[name[SeqIO].parse, parameter[name[sample].general.bestassemblyfile, constant[fasta]]]] begin[:]
call[name[record]][name[rec].id] assign[=] call[name[str], parameter[name[rec].seq]]
for taget[name[line]] in starred[call[name[sample]][name[self].analysistype].epcrresults] begin[:]
if <ast.UnaryOp object at 0x7da1b1eae5c0> begin[:]
call[name[self].epcrparsequeue.put, parameter[tuple[[<ast.Name object at 0x7da1b1eaca60>, <ast.Name object at 0x7da1b1eae0b0>, <ast.Name object at 0x7da1b1eac880>]]]]
call[name[self].epcrparsequeue.join, parameter[]] | keyword[def] identifier[epcrparsethreads] ( identifier[self] ):
literal[string]
keyword[from] identifier[Bio] keyword[import] identifier[SeqIO]
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[metadata] :
keyword[if] identifier[sample] . identifier[general] . identifier[bestassemblyfile] != literal[string] :
identifier[threads] = identifier[Thread] ( identifier[target] = identifier[self] . identifier[epcrparse] , identifier[args] =())
identifier[threads] . identifier[setDaemon] ( keyword[True] )
identifier[threads] . identifier[start] ()
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[metadata] :
keyword[if] identifier[sample] . identifier[general] . identifier[bestassemblyfile] != literal[string] :
keyword[if] identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[primers] != literal[string] :
identifier[record] = identifier[dict] ()
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[blastresults] = identifier[dict] ()
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[rawblastresults] = identifier[dict] ()
keyword[for] identifier[rec] keyword[in] identifier[SeqIO] . identifier[parse] ( identifier[sample] . identifier[general] . identifier[bestassemblyfile] , literal[string] ):
identifier[record] [ identifier[rec] . identifier[id] ]= identifier[str] ( identifier[rec] . identifier[seq] )
keyword[for] identifier[line] keyword[in] identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[epcrresults] :
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[self] . identifier[epcrparsequeue] . identifier[put] (( identifier[sample] , identifier[record] , identifier[line] ))
identifier[self] . identifier[epcrparsequeue] . identifier[join] () | def epcrparsethreads(self):
"""
Parse the ePCR results, and run BLAST on the parsed results
"""
from Bio import SeqIO
# Create the threads for the BLAST analysis
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
threads = Thread(target=self.epcrparse, args=())
threads.setDaemon(True)
threads.start() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']]
for sample in self.metadata:
if sample.general.bestassemblyfile != 'NA':
if sample[self.analysistype].primers != 'NA':
# Initialise a dictionary to store the SeqIO records of each assembly
record = dict()
# Initialise dictionaries to store results in the object
sample[self.analysistype].blastresults = dict()
sample[self.analysistype].rawblastresults = dict()
# Load the records from the assembly into the dictionary
for rec in SeqIO.parse(sample.general.bestassemblyfile, 'fasta'):
record[rec.id] = str(rec.seq) # depends on [control=['for'], data=['rec']]
# Iterate through the ePCR results
for line in sample[self.analysistype].epcrresults:
# The data of interest is in the lines that do not start with a #
# TLH 2016-SEQ-0359_4_length_321195_cov_28.6354_ID_3773 + 227879 228086 0 0 208/1000-1000
if not line.startswith('#'):
# Add the variables to the queue
self.epcrparsequeue.put((sample, record, line)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']]
self.epcrparsequeue.join() |
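
A condensed sketch of the queue-plus-daemon-thread pattern above, with the BLAST work elided; the sample tuple and the ePCR line are made up to mirror the comment's example.

import queue
import threading

epcrparsequeue = queue.Queue()

def epcrparse():
    while True:
        sample, record, line = epcrparsequeue.get()
        # ... run BLAST on the parsed ePCR hit here ...
        epcrparsequeue.task_done()

threading.Thread(target=epcrparse, daemon=True).start()

record = {"contig_1": "ACGT"}
for line in ["# header line", "TLH contig_1 + 227879 228086"]:
    if not line.startswith("#"):
        epcrparsequeue.put(("sample_1", record, line))
epcrparsequeue.join()
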
def get_port_area(self, view):
"""Calculates the drawing area affected by the (hovered) port
"""
state_v = self.parent
center = self.handle.pos
margin = self.port_side_size / 4.
if self.side in [SnappedSide.LEFT, SnappedSide.RIGHT]:
height, width = self.port_size
else:
width, height = self.port_size
upper_left = center[0] - width / 2 - margin, center[1] - height / 2 - margin
lower_right = center[0] + width / 2 + margin, center[1] + height / 2 + margin
port_upper_left = view.get_matrix_i2v(state_v).transform_point(*upper_left)
port_lower_right = view.get_matrix_i2v(state_v).transform_point(*lower_right)
size = port_lower_right[0] - port_upper_left[0], port_lower_right[1] - port_upper_left[1]
return port_upper_left[0], port_upper_left[1], size[0], size[1] | def function[get_port_area, parameter[self, view]]:
constant[Calculates the drawing area affected by the (hovered) port
]
variable[state_v] assign[=] name[self].parent
variable[center] assign[=] name[self].handle.pos
variable[margin] assign[=] binary_operation[name[self].port_side_size / constant[4.0]]
if compare[name[self].side in list[[<ast.Attribute object at 0x7da18eb54190>, <ast.Attribute object at 0x7da18eb574f0>]]] begin[:]
<ast.Tuple object at 0x7da18eb57460> assign[=] name[self].port_size
variable[upper_left] assign[=] tuple[[<ast.BinOp object at 0x7da18eb56200>, <ast.BinOp object at 0x7da18eb56830>]]
variable[lower_right] assign[=] tuple[[<ast.BinOp object at 0x7da18eb57f40>, <ast.BinOp object at 0x7da1b1b69420>]]
variable[port_upper_left] assign[=] call[call[name[view].get_matrix_i2v, parameter[name[state_v]]].transform_point, parameter[<ast.Starred object at 0x7da1b1b69750>]]
variable[port_lower_right] assign[=] call[call[name[view].get_matrix_i2v, parameter[name[state_v]]].transform_point, parameter[<ast.Starred object at 0x7da1b1b69930>]]
variable[size] assign[=] tuple[[<ast.BinOp object at 0x7da1b1b69a20>, <ast.BinOp object at 0x7da1b1b69b70>]]
return[tuple[[<ast.Subscript object at 0x7da1b1b69d20>, <ast.Subscript object at 0x7da1b1b69db0>, <ast.Subscript object at 0x7da1b1b69e40>, <ast.Subscript object at 0x7da1b1b69ed0>]]] | keyword[def] identifier[get_port_area] ( identifier[self] , identifier[view] ):
literal[string]
identifier[state_v] = identifier[self] . identifier[parent]
identifier[center] = identifier[self] . identifier[handle] . identifier[pos]
identifier[margin] = identifier[self] . identifier[port_side_size] / literal[int]
keyword[if] identifier[self] . identifier[side] keyword[in] [ identifier[SnappedSide] . identifier[LEFT] , identifier[SnappedSide] . identifier[RIGHT] ]:
identifier[height] , identifier[width] = identifier[self] . identifier[port_size]
keyword[else] :
identifier[width] , identifier[height] = identifier[self] . identifier[port_size]
identifier[upper_left] = identifier[center] [ literal[int] ]- identifier[width] / literal[int] - identifier[margin] , identifier[center] [ literal[int] ]- identifier[height] / literal[int] - identifier[margin]
identifier[lower_right] = identifier[center] [ literal[int] ]+ identifier[width] / literal[int] + identifier[margin] , identifier[center] [ literal[int] ]+ identifier[height] / literal[int] + identifier[margin]
identifier[port_upper_left] = identifier[view] . identifier[get_matrix_i2v] ( identifier[state_v] ). identifier[transform_point] (* identifier[upper_left] )
identifier[port_lower_right] = identifier[view] . identifier[get_matrix_i2v] ( identifier[state_v] ). identifier[transform_point] (* identifier[lower_right] )
identifier[size] = identifier[port_lower_right] [ literal[int] ]- identifier[port_upper_left] [ literal[int] ], identifier[port_lower_right] [ literal[int] ]- identifier[port_upper_left] [ literal[int] ]
keyword[return] identifier[port_upper_left] [ literal[int] ], identifier[port_upper_left] [ literal[int] ], identifier[size] [ literal[int] ], identifier[size] [ literal[int] ] | def get_port_area(self, view):
"""Calculates the drawing area affected by the (hovered) port
"""
state_v = self.parent
center = self.handle.pos
margin = self.port_side_size / 4.0
if self.side in [SnappedSide.LEFT, SnappedSide.RIGHT]:
(height, width) = self.port_size # depends on [control=['if'], data=[]]
else:
(width, height) = self.port_size
upper_left = (center[0] - width / 2 - margin, center[1] - height / 2 - margin)
lower_right = (center[0] + width / 2 + margin, center[1] + height / 2 + margin)
port_upper_left = view.get_matrix_i2v(state_v).transform_point(*upper_left)
port_lower_right = view.get_matrix_i2v(state_v).transform_point(*lower_right)
size = (port_lower_right[0] - port_upper_left[0], port_lower_right[1] - port_upper_left[1])
return (port_upper_left[0], port_upper_left[1], size[0], size[1]) |
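
Worked numbers for the bounding-box arithmetic above, assuming an identity view transform and made-up port dimensions:

center = (10.0, 20.0)
width, height = 8.0, 4.0
margin = 2.0 / 4.0                 # port_side_size / 4 with port_side_size = 2
upper_left = (center[0] - width / 2 - margin, center[1] - height / 2 - margin)
lower_right = (center[0] + width / 2 + margin, center[1] + height / 2 + margin)
size = (lower_right[0] - upper_left[0], lower_right[1] - upper_left[1])
print(upper_left, size)            # (5.5, 17.5) (9.0, 5.0)
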
def from_(self, selectable):
"""
Adds a table to the query. This function can only be called once and will raise an AttributeError if called a
second time.
:param selectable:
Type: ``Table``, ``Query``, or ``str``
When a ``str`` is passed, a table with the name matching the ``str`` value is used.
:returns
A copy of the query with the table added.
"""
self._from.append(Table(selectable) if isinstance(selectable, str) else selectable)
if isinstance(selectable, (QueryBuilder, _UnionQuery)) and selectable.alias is None:
if isinstance(selectable, QueryBuilder):
sub_query_count = selectable._subquery_count
else:
sub_query_count = 0
sub_query_count = max(self._subquery_count, sub_query_count)
selectable.alias = 'sq%d' % sub_query_count
self._subquery_count = sub_query_count + 1 | def function[from_, parameter[self, selectable]]:
constant[
Adds a table to the query. This function can only be called once and will raise an AttributeError if called a
second time.
:param selectable:
Type: ``Table``, ``Query``, or ``str``
When a ``str`` is passed, a table with the name matching the ``str`` value is used.
:returns
A copy of the query with the table added.
]
call[name[self]._from.append, parameter[<ast.IfExp object at 0x7da1b1d36950>]]
if <ast.BoolOp object at 0x7da1b1d36c50> begin[:]
if call[name[isinstance], parameter[name[selectable], name[QueryBuilder]]] begin[:]
variable[sub_query_count] assign[=] name[selectable]._subquery_count
variable[sub_query_count] assign[=] call[name[max], parameter[name[self]._subquery_count, name[sub_query_count]]]
name[selectable].alias assign[=] binary_operation[constant[sq%d] <ast.Mod object at 0x7da2590d6920> name[sub_query_count]]
name[self]._subquery_count assign[=] binary_operation[name[sub_query_count] + constant[1]] | keyword[def] identifier[from_] ( identifier[self] , identifier[selectable] ):
literal[string]
identifier[self] . identifier[_from] . identifier[append] ( identifier[Table] ( identifier[selectable] ) keyword[if] identifier[isinstance] ( identifier[selectable] , identifier[str] ) keyword[else] identifier[selectable] )
keyword[if] identifier[isinstance] ( identifier[selectable] ,( identifier[QueryBuilder] , identifier[_UnionQuery] )) keyword[and] identifier[selectable] . identifier[alias] keyword[is] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[selectable] , identifier[QueryBuilder] ):
identifier[sub_query_count] = identifier[selectable] . identifier[_subquery_count]
keyword[else] :
identifier[sub_query_count] = literal[int]
identifier[sub_query_count] = identifier[max] ( identifier[self] . identifier[_subquery_count] , identifier[sub_query_count] )
identifier[selectable] . identifier[alias] = literal[string] % identifier[sub_query_count]
identifier[self] . identifier[_subquery_count] = identifier[sub_query_count] + literal[int] | def from_(self, selectable):
"""
Adds a table to the query. This function can only be called once and will raise an AttributeError if called a
second time.
:param selectable:
Type: ``Table``, ``Query``, or ``str``
When a ``str`` is passed, a table with the name matching the ``str`` value is used.
:returns
A copy of the query with the table added.
"""
self._from.append(Table(selectable) if isinstance(selectable, str) else selectable)
if isinstance(selectable, (QueryBuilder, _UnionQuery)) and selectable.alias is None:
if isinstance(selectable, QueryBuilder):
sub_query_count = selectable._subquery_count # depends on [control=['if'], data=[]]
else:
sub_query_count = 0
sub_query_count = max(self._subquery_count, sub_query_count)
selectable.alias = 'sq%d' % sub_query_count
self._subquery_count = sub_query_count + 1 # depends on [control=['if'], data=[]] |
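
The aliasing scheme above numbers subqueries sq0, sq1, ... by taking the larger of the outer and inner counters; the same arithmetic in isolation, with hypothetical counter values:

outer_count = 1                    # subqueries this builder has already seen
inner_count = 3                    # the nested builder's own counter
n = max(outer_count, inner_count)
alias = 'sq%d' % n
outer_count = n + 1
print(alias, outer_count)          # sq3 4
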
def geocode(address):
'''Query function to obtain a latitude and longitude from a location
string such as `Houston, TX` or `Colombia`. This uses an online lookup,
currently wrapping the `geopy` library, and providing an on-disk cache
of queries.
Parameters
----------
address : str
Search string to retrieve the location, [-]
Returns
-------
latitude : float
Latitude of address, [degrees]
longitude : float
Longitude of address, [degrees]
Notes
-----
If a query has been retrieved before, this function will take under 1 ms;
it takes several seconds otherwise.
Examples
--------
>>> geocode('Fredericton, NB')
(45.966425, -66.645813)
'''
loc_tuple = None
try:
cache = geopy_cache()
loc_tuple = cache.cached_address(address)
except:
# Handle bugs in the cache, i.e. if there is no space on disk to create
# the database, by ignoring them
pass
if loc_tuple is not None:
return loc_tuple
else:
geocoder = geopy_geolocator()
if geocoder is None:
return geopy_missing_msg
location = geocoder.geocode(address)
try:
cache.cache_address(address, location.latitude, location.longitude)
except:
pass
return (location.latitude, location.longitude) | def function[geocode, parameter[address]]:
constant[Query function to obtain a latitude and longitude from a location
string such as `Houston, TX` or`Colombia`. This uses an online lookup,
currently wrapping the `geopy` library, and providing an on-disk cache
of queries.
Parameters
----------
address : str
Search string to retrieve the location, [-]
Returns
-------
latitude : float
Latitude of address, [degrees]
longitude : float
Longitude of address, [degrees]
Notes
-----
If a query has been retrieved before, this function will take under 1 ms;
it takes several seconds otherwise.
Examples
--------
>>> geocode('Fredericton, NB')
(45.966425, -66.645813)
]
variable[loc_tuple] assign[=] constant[None]
<ast.Try object at 0x7da18f09f190>
if compare[name[loc_tuple] is_not constant[None]] begin[:]
return[name[loc_tuple]] | keyword[def] identifier[geocode] ( identifier[address] ):
literal[string]
identifier[loc_tuple] = keyword[None]
keyword[try] :
identifier[cache] = identifier[geopy_cache] ()
identifier[loc_tuple] = identifier[cache] . identifier[cached_address] ( identifier[address] )
keyword[except] :
keyword[pass]
keyword[if] identifier[loc_tuple] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[loc_tuple]
keyword[else] :
identifier[geocoder] = identifier[geopy_geolocator] ()
keyword[if] identifier[geocoder] keyword[is] keyword[None] :
keyword[return] identifier[geopy_missing_msg]
identifier[location] = identifier[geocoder] . identifier[geocode] ( identifier[address] )
keyword[try] :
identifier[cache] . identifier[cache_address] ( identifier[address] , identifier[location] . identifier[latitude] , identifier[location] . identifier[longitude] )
keyword[except] :
keyword[pass]
keyword[return] ( identifier[location] . identifier[latitude] , identifier[location] . identifier[longitude] ) | def geocode(address):
"""Query function to obtain a latitude and longitude from a location
string such as `Houston, TX` or `Colombia`. This uses an online lookup,
currently wrapping the `geopy` library, and providing an on-disk cache
of queries.
Parameters
----------
address : str
Search string to retrieve the location, [-]
Returns
-------
latitude : float
Latitude of address, [degrees]
longitude : float
Longitude of address, [degrees]
Notes
-----
If a query has been retrieved before, this function will take under 1 ms;
it takes several seconds otherwise.
Examples
--------
>>> geocode('Fredericton, NB')
(45.966425, -66.645813)
"""
loc_tuple = None
try:
cache = geopy_cache()
loc_tuple = cache.cached_address(address) # depends on [control=['try'], data=[]]
except:
# Handle bugs in the cache, i.e. if there is no space on disk to create
# the database, by ignoring them
pass # depends on [control=['except'], data=[]]
if loc_tuple is not None:
return loc_tuple # depends on [control=['if'], data=['loc_tuple']]
else:
geocoder = geopy_geolocator()
if geocoder is None:
return geopy_missing_msg # depends on [control=['if'], data=[]]
location = geocoder.geocode(address)
try:
cache.cache_address(address, location.latitude, location.longitude) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
return (location.latitude, location.longitude) |
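
The control flow above is cache-first with a network fallback; a sketch that runs offline by replacing the geopy lookup with a canned coordinate (the values come from the docstring example):

_cache = {}

def geocode(address):
    if address in _cache:
        return _cache[address]                 # fast path, well under 1 ms
    location = (45.966425, -66.645813)         # pretend geopy lookup
    _cache[address] = location
    return location

print(geocode('Fredericton, NB'))
print(geocode('Fredericton, NB'))              # second call hits the cache
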
def attach_identifier_attributes(self, ast): # type: (Dict[str, Any]) -> Dict[str, Any]
""" Attach 5 flags to the AST.
- is dynamic: True if the identifier name can be determined by static analysis.
- is member: True if the identifier is a member of a subscription/dot/slice node.
- is declaring: True if the identifier is used to declare.
- is autoload: True if the identifier is declared with autoload.
- is function: True if the identifier is a function. Vim distinguish
between function identifiers and variable identifiers.
- is declarative parameter: True if the identifier is a declarative
parameter. For example, the identifier "param" in Func(param) is a
declarative parameter.
- is on string expression context: True if the variable is on the
string expression context. The string expression context is the
string content on the 2nd argument of the map or filter function.
- is lambda argument: True if the identifier is a lambda argument.
"""
redir_assignment_parser = RedirAssignmentParser()
ast_with_parsed_redir = redir_assignment_parser.process(ast)
map_and_filter_parser = CallNodeParser()
ast_with_parse_map_and_filter_and_redir = \
map_and_filter_parser.process(ast_with_parsed_redir)
traverse(
ast_with_parse_map_and_filter_and_redir,
on_enter=lambda node: self._enter_handler(
node,
is_on_lambda_str=None,
is_on_lambda_body=None,
)
)
return ast | def function[attach_identifier_attributes, parameter[self, ast]]:
constant[ Attach 8 flags to the AST.
- is dynamic: True if the identifier name can be determined by static analysis.
- is member: True if the identifier is a member of a subscription/dot/slice node.
- is declaring: True if the identifier is used to declare.
- is autoload: True if the identifier is declared with autoload.
- is function: True if the identifier is a function. Vim distinguishes
between function identifiers and variable identifiers.
- is declarative parameter: True if the identifier is a declarative
parameter. For example, the identifier "param" in Func(param) is a
declarative parameter.
- is on string expression context: True if the variable is on the
string expression context. The string expression context is the
string content on the 2nd argument of the map or filter function.
- is lambda argument: True if the identifier is a lambda argument.
]
variable[redir_assignment_parser] assign[=] call[name[RedirAssignmentParser], parameter[]]
variable[ast_with_parsed_redir] assign[=] call[name[redir_assignment_parser].process, parameter[name[ast]]]
variable[map_and_filter_parser] assign[=] call[name[CallNodeParser], parameter[]]
variable[ast_with_parse_map_and_filter_and_redir] assign[=] call[name[map_and_filter_parser].process, parameter[name[ast_with_parsed_redir]]]
call[name[traverse], parameter[name[ast_with_parse_map_and_filter_and_redir]]]
return[name[ast]] | keyword[def] identifier[attach_identifier_attributes] ( identifier[self] , identifier[ast] ):
literal[string]
identifier[redir_assignment_parser] = identifier[RedirAssignmentParser] ()
identifier[ast_with_parsed_redir] = identifier[redir_assignment_parser] . identifier[process] ( identifier[ast] )
identifier[map_and_filter_parser] = identifier[CallNodeParser] ()
identifier[ast_with_parse_map_and_filter_and_redir] = identifier[map_and_filter_parser] . identifier[process] ( identifier[ast_with_parsed_redir] )
identifier[traverse] (
identifier[ast_with_parse_map_and_filter_and_redir] ,
identifier[on_enter] = keyword[lambda] identifier[node] : identifier[self] . identifier[_enter_handler] (
identifier[node] ,
identifier[is_on_lambda_str] = keyword[None] ,
identifier[is_on_lambda_body] = keyword[None] ,
)
)
keyword[return] identifier[ast] | def attach_identifier_attributes(self, ast): # type: (Dict[str, Any]) -> Dict[str, Any]
' Attach 8 flags to the AST.\n\n - is dynamic: True if the identifier name can be determined by static analysis.\n - is member: True if the identifier is a member of a subscription/dot/slice node.\n - is declaring: True if the identifier is used to declare.\n - is autoload: True if the identifier is declared with autoload.\n - is function: True if the identifier is a function. Vim distinguishes\n between function identifiers and variable identifiers.\n - is declarative parameter: True if the identifier is a declarative\n parameter. For example, the identifier "param" in Func(param) is a\n declarative parameter.\n - is on string expression context: True if the variable is on the\n string expression context. The string expression context is the\n string content on the 2nd argument of the map or filter function.\n - is lambda argument: True if the identifier is a lambda argument.\n '
redir_assignment_parser = RedirAssignmentParser()
ast_with_parsed_redir = redir_assignment_parser.process(ast)
map_and_filter_parser = CallNodeParser()
ast_with_parse_map_and_filter_and_redir = map_and_filter_parser.process(ast_with_parsed_redir)
traverse(ast_with_parse_map_and_filter_and_redir, on_enter=lambda node: self._enter_handler(node, is_on_lambda_str=None, is_on_lambda_body=None))
return ast |
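
A tiny stand-in for the traverse(..., on_enter=...) call above: a depth-first walk over a dict-shaped AST that fires a callback per node. The node shape and node types here are hypothetical; the real traversal lives in the library.

def traverse(node, on_enter):
    on_enter(node)
    for child in node.get('body', []):
        traverse(child, on_enter)

ast = {'type': 'TOPLEVEL',
       'body': [{'type': 'LET', 'body': []},
                {'type': 'FUNCTION',
                 'body': [{'type': 'IDENTIFIER', 'body': []}]}]}
traverse(ast, on_enter=lambda node: print(node['type']))
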
def from_cbn_jgif(graph_jgif_dict):
"""Build a BEL graph from CBN JGIF.
Map the JGIF used by the Causal Biological Network Database to standard namespace and annotations, then
builds a BEL graph using :func:`pybel.from_jgif`.
:param dict graph_jgif_dict: The JSON object representing the graph in JGIF format
:rtype: BELGraph
Example:
>>> import requests
>>> from pybel import from_cbn_jgif
>>> apoptosis_url = 'http://causalbionet.com/Networks/GetJSONGraphFile?networkId=810385422'
>>> graph_jgif_dict = requests.get(apoptosis_url).json()
>>> graph = from_cbn_jgif(graph_jgif_dict)
.. warning::
Handling the annotations is not yet supported, since the CBN documents do not refer to the resources used
to create them. This may be added in the future, but the annotations must be stripped from the graph
before uploading to the network store using :func:`pybel.struct.mutation.strip_annotations`.
"""
graph_jgif_dict = map_cbn(graph_jgif_dict)
graph_jgif_dict['graph']['metadata'].update({
METADATA_AUTHORS: 'Causal Biological Networks Database',
METADATA_LICENSES: """
Please cite:
- www.causalbionet.com
- https://bionet.sbvimprover.com
as well as any relevant publications.
The sbv IMPROVER project, the website and the Symposia are part of a collaborative project
designed to enable scientists to learn about and contribute to the development of a new crowd
sourcing method for verification of scientific data and results. The current challenges, website
and biological network models were developed and are maintained as part of a collaboration among
Selventa, OrangeBus and ADS. The project is led and funded by Philip Morris International. For more
information on the focus of Philip Morris International’s research, please visit www.pmi.com.
""".replace('\n', '\t'),
METADATA_CONTACT: '[email protected]',
})
graph = from_jgif(graph_jgif_dict)
graph.namespace_url.update({
'HGNC': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/hgnc-human-genes/hgnc-human-genes-20150601.belns',
'GOBP': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/go-biological-process/go-biological-process-20150601.belns',
'SFAM': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-protein-families/selventa-protein-families-20150601.belns',
'GOCC': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/go-cellular-component/go-cellular-component-20170511.belns',
'MESHPP': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-processes/mesh-processes-20150601.belns',
'MGI': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mgi-mouse-genes/mgi-mouse-genes-20150601.belns',
'RGD': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/rgd-rat-genes/rgd-rat-genes-20150601.belns',
'CHEBI': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/chebi/chebi-20150601.belns',
'SCHEM': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-legacy-chemicals/selventa-legacy-chemicals-20150601.belns',
'EGID': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/entrez-gene-ids/entrez-gene-ids-20150601.belns',
'MESHD': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-diseases/mesh-diseases-20150601.belns',
'SDIS': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-legacy-diseases/selventa-legacy-diseases-20150601.belns',
'SCOMP': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-named-complexes/selventa-named-complexes-20150601.belns',
'MESHC': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-chemicals/mesh-chemicals-20170511.belns',
'GOBPID': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/go-biological-process-ids/go-biological-process-ids-20150601.belns',
'MESHCS': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-cell-structures/mesh-cell-structures-20150601.belns',
})
graph.annotation_url.update({
'Cell': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/cell-line/cell-line-20150601.belanno',
'Disease': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/disease/disease-20150601.belanno',
'Species': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/species-taxonomy-id/species-taxonomy-id-20170511.belanno',
'Tissue': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/mesh-anatomy/mesh-anatomy-20150601.belanno',
})
return graph | def function[from_cbn_jgif, parameter[graph_jgif_dict]]:
constant[Build a BEL graph from CBN JGIF.
Map the JGIF used by the Causal Biological Network Database to standard namespace and annotations, then
builds a BEL graph using :func:`pybel.from_jgif`.
:param dict graph_jgif_dict: The JSON object representing the graph in JGIF format
:rtype: BELGraph
Example:
>>> import requests
>>> from pybel import from_cbn_jgif
>>> apoptosis_url = 'http://causalbionet.com/Networks/GetJSONGraphFile?networkId=810385422'
>>> graph_jgif_dict = requests.get(apoptosis_url).json()
>>> graph = from_cbn_jgif(graph_jgif_dict)
.. warning::
Handling the annotations is not yet supported, since the CBN documents do not refer to the resources used
to create them. This may be added in the future, but the annotations must be stripped from the graph
before uploading to the network store using :func:`pybel.struct.mutation.strip_annotations`.
]
variable[graph_jgif_dict] assign[=] call[name[map_cbn], parameter[name[graph_jgif_dict]]]
call[call[call[name[graph_jgif_dict]][constant[graph]]][constant[metadata]].update, parameter[dictionary[[<ast.Name object at 0x7da1b0e47a60>, <ast.Name object at 0x7da1b0e45ab0>, <ast.Name object at 0x7da1b0e46470>], [<ast.Constant object at 0x7da1b0e45090>, <ast.Call object at 0x7da1b0e477c0>, <ast.Constant object at 0x7da1b0ebe290>]]]]
variable[graph] assign[=] call[name[from_jgif], parameter[name[graph_jgif_dict]]]
call[name[graph].namespace_url.update, parameter[dictionary[[<ast.Constant object at 0x7da1b0ebdcc0>, <ast.Constant object at 0x7da1b0ebc8b0>, <ast.Constant object at 0x7da1b0ebd0f0>, <ast.Constant object at 0x7da1b0ebe170>, <ast.Constant object at 0x7da1b0ebc880>, <ast.Constant object at 0x7da1b0ebcd00>, <ast.Constant object at 0x7da1b0ebd510>, <ast.Constant object at 0x7da1b0ebd3f0>, <ast.Constant object at 0x7da1b0ebd750>, <ast.Constant object at 0x7da1b0ebf160>, <ast.Constant object at 0x7da1b0ebef20>, <ast.Constant object at 0x7da1b0ebcfd0>, <ast.Constant object at 0x7da1b0ebf850>, <ast.Constant object at 0x7da1b0ebcf40>, <ast.Constant object at 0x7da1b0ebeec0>, <ast.Constant object at 0x7da1b0ebffa0>], [<ast.Constant object at 0x7da1b0ebe920>, <ast.Constant object at 0x7da1b0ebed40>, <ast.Constant object at 0x7da1b0ebcee0>, <ast.Constant object at 0x7da1b0ebfb20>, <ast.Constant object at 0x7da1b0ebdc30>, <ast.Constant object at 0x7da1b0ebfbe0>, <ast.Constant object at 0x7da1b0ebfeb0>, <ast.Constant object at 0x7da1b0ebc730>, <ast.Constant object at 0x7da1b0ebf970>, <ast.Constant object at 0x7da1b0ebc970>, <ast.Constant object at 0x7da1b0ebe140>, <ast.Constant object at 0x7da1b0ebd0c0>, <ast.Constant object at 0x7da1b0ebfdf0>, <ast.Constant object at 0x7da1b0ebd1e0>, <ast.Constant object at 0x7da1b0ebe1d0>, <ast.Constant object at 0x7da1b0ebee00>]]]]
call[name[graph].annotation_url.update, parameter[dictionary[[<ast.Constant object at 0x7da1b0ebd8d0>, <ast.Constant object at 0x7da1b0ebdc60>, <ast.Constant object at 0x7da1b0ebd720>, <ast.Constant object at 0x7da20c6a97b0>], [<ast.Constant object at 0x7da20c6a9060>, <ast.Constant object at 0x7da20c6a8610>, <ast.Constant object at 0x7da20c6a87c0>, <ast.Constant object at 0x7da20c6a9180>]]]]
return[name[graph]] | keyword[def] identifier[from_cbn_jgif] ( identifier[graph_jgif_dict] ):
literal[string]
identifier[graph_jgif_dict] = identifier[map_cbn] ( identifier[graph_jgif_dict] )
identifier[graph_jgif_dict] [ literal[string] ][ literal[string] ]. identifier[update] ({
identifier[METADATA_AUTHORS] : literal[string] ,
identifier[METADATA_LICENSES] : literal[string] . identifier[replace] ( literal[string] , literal[string] ),
identifier[METADATA_CONTACT] : literal[string] ,
})
identifier[graph] = identifier[from_jgif] ( identifier[graph_jgif_dict] )
identifier[graph] . identifier[namespace_url] . identifier[update] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[graph] . identifier[annotation_url] . identifier[update] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
keyword[return] identifier[graph] | def from_cbn_jgif(graph_jgif_dict):
"""Build a BEL graph from CBN JGIF.
Map the JGIF used by the Causal Biological Network Database to standard namespace and annotations, then
builds a BEL graph using :func:`pybel.from_jgif`.
:param dict graph_jgif_dict: The JSON object representing the graph in JGIF format
:rtype: BELGraph
Example:
>>> import requests
>>> from pybel import from_cbn_jgif
>>> apoptosis_url = 'http://causalbionet.com/Networks/GetJSONGraphFile?networkId=810385422'
>>> graph_jgif_dict = requests.get(apoptosis_url).json()
>>> graph = from_cbn_jgif(graph_jgif_dict)
.. warning::
Handling the annotations is not yet supported, since the CBN documents do not refer to the resources used
to create them. This may be added in the future, but the annotations must be stripped from the graph
before uploading to the network store using :func:`pybel.struct.mutation.strip_annotations`.
"""
graph_jgif_dict = map_cbn(graph_jgif_dict)
graph_jgif_dict['graph']['metadata'].update({METADATA_AUTHORS: 'Causal Biological Networks Database', METADATA_LICENSES: '\n Please cite:\n\n - www.causalbionet.com\n - https://bionet.sbvimprover.com\n\n as well as any relevant publications.\n\n The sbv IMPROVER project, the website and the Symposia are part of a collaborative project\n designed to enable scientists to learn about and contribute to the development of a new crowd\n sourcing method for verification of scientific data and results. The current challenges, website\n and biological network models were developed and are maintained as part of a collaboration among\n Selventa, OrangeBus and ADS. The project is led and funded by Philip Morris International. For more\n information on the focus of Philip Morris International’s research, please visit www.pmi.com.\n '.replace('\n', '\t'), METADATA_CONTACT: '[email protected]'})
graph = from_jgif(graph_jgif_dict)
graph.namespace_url.update({'HGNC': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/hgnc-human-genes/hgnc-human-genes-20150601.belns', 'GOBP': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/go-biological-process/go-biological-process-20150601.belns', 'SFAM': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-protein-families/selventa-protein-families-20150601.belns', 'GOCC': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/go-cellular-component/go-cellular-component-20170511.belns', 'MESHPP': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-processes/mesh-processes-20150601.belns', 'MGI': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mgi-mouse-genes/mgi-mouse-genes-20150601.belns', 'RGD': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/rgd-rat-genes/rgd-rat-genes-20150601.belns', 'CHEBI': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/chebi/chebi-20150601.belns', 'SCHEM': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-legacy-chemicals/selventa-legacy-chemicals-20150601.belns', 'EGID': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/entrez-gene-ids/entrez-gene-ids-20150601.belns', 'MESHD': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-diseases/mesh-diseases-20150601.belns', 'SDIS': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-legacy-diseases/selventa-legacy-diseases-20150601.belns', 'SCOMP': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/selventa-named-complexes/selventa-named-complexes-20150601.belns', 'MESHC': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-chemicals/mesh-chemicals-20170511.belns', 'GOBPID': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/go-biological-process-ids/go-biological-process-ids-20150601.belns', 'MESHCS': 'https://arty.scai.fraunhofer.de/artifactory/bel/namespace/mesh-cell-structures/mesh-cell-structures-20150601.belns'})
graph.annotation_url.update({'Cell': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/cell-line/cell-line-20150601.belanno', 'Disease': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/disease/disease-20150601.belanno', 'Species': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/species-taxonomy-id/species-taxonomy-id-20170511.belanno', 'Tissue': 'https://arty.scai.fraunhofer.de/artifactory/bel/annotation/mesh-anatomy/mesh-anatomy-20150601.belanno'})
return graph |
def _maybe_launch_index_impl_thread(self):
"""Attempts to launch a thread to compute index_impl().
This may not launch a new thread if one is already running to compute
index_impl(); in that case, this function is a no-op.
"""
# Try to acquire the lock for computing index_impl(), without blocking.
if self._index_impl_lock.acquire(False):
# We got the lock. Start the thread, which will unlock the lock when done.
self._index_impl_thread = threading.Thread(
target=self._async_index_impl,
name='TextPluginIndexImplThread')
self._index_impl_thread.start() | def function[_maybe_launch_index_impl_thread, parameter[self]]:
constant[Attempts to launch a thread to compute index_impl().
This may not launch a new thread if one is already running to compute
index_impl(); in that case, this function is a no-op.
]
if call[name[self]._index_impl_lock.acquire, parameter[constant[False]]] begin[:]
name[self]._index_impl_thread assign[=] call[name[threading].Thread, parameter[]]
call[name[self]._index_impl_thread.start, parameter[]] | keyword[def] identifier[_maybe_launch_index_impl_thread] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_index_impl_lock] . identifier[acquire] ( keyword[False] ):
identifier[self] . identifier[_index_impl_thread] = identifier[threading] . identifier[Thread] (
identifier[target] = identifier[self] . identifier[_async_index_impl] ,
identifier[name] = literal[string] )
identifier[self] . identifier[_index_impl_thread] . identifier[start] () | def _maybe_launch_index_impl_thread(self):
"""Attempts to launch a thread to compute index_impl().
This may not launch a new thread if one is already running to compute
index_impl(); in that case, this function is a no-op.
"""
# Try to acquire the lock for computing index_impl(), without blocking.
if self._index_impl_lock.acquire(False):
# We got the lock. Start the thread, which will unlock the lock when done.
self._index_impl_thread = threading.Thread(target=self._async_index_impl, name='TextPluginIndexImplThread')
self._index_impl_thread.start() # depends on [control=['if'], data=[]] |
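
The non-blocking guard above uses Lock.acquire(False), which returns immediately with False when the lock is already held; a standalone sketch in which the worker releases the lock when it finishes, as the plugin's thread is expected to do:

import threading

index_impl_lock = threading.Lock()

def async_index_impl():
    try:
        pass                         # ... recompute the index here ...
    finally:
        index_impl_lock.release()    # unlock when done

if index_impl_lock.acquire(False):   # non-blocking; a no-op if already held
    t = threading.Thread(target=async_index_impl,
                         name='TextPluginIndexImplThread')
    t.start()
    t.join()
print(index_impl_lock.acquire(False))    # True again after the release
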