code | code_sememe | token_type | code_dependency
---|---|---|---|
def get_reviews(self, listing_id, offset=0, limit=20):
"""
Get reviews for a given listing
"""
params = {
'_order': 'language_country',
'listing_id': str(listing_id),
'_offset': str(offset),
'role': 'all',
'_limit': str(limit),
'_format': 'for_mobile_client',
}
print(self._session.headers)
r = self._session.get(API_URL + "/reviews", params=params)
r.raise_for_status()
return r.json() | def function[get_reviews, parameter[self, listing_id, offset, limit]]:
constant[
Get reviews for a given listing
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5540>, <ast.Constant object at 0x7da20c6c66e0>, <ast.Constant object at 0x7da20c6c4160>, <ast.Constant object at 0x7da20c6c4c10>, <ast.Constant object at 0x7da20c6c5fc0>, <ast.Constant object at 0x7da20c6c7700>], [<ast.Constant object at 0x7da20c6c5ae0>, <ast.Call object at 0x7da20c6c4940>, <ast.Call object at 0x7da20c6c5840>, <ast.Constant object at 0x7da20c6c70d0>, <ast.Call object at 0x7da20c6c7d60>, <ast.Constant object at 0x7da20c6c7340>]]
call[name[print], parameter[name[self]._session.headers]]
variable[r] assign[=] call[name[self]._session.get, parameter[binary_operation[name[API_URL] + constant[/reviews]]]]
call[name[r].raise_for_status, parameter[]]
return[call[name[r].json, parameter[]]] | keyword[def] identifier[get_reviews] ( identifier[self] , identifier[listing_id] , identifier[offset] = literal[int] , identifier[limit] = literal[int] ):
literal[string]
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : identifier[str] ( identifier[listing_id] ),
literal[string] : identifier[str] ( identifier[offset] ),
literal[string] : literal[string] ,
literal[string] : identifier[str] ( identifier[limit] ),
literal[string] : literal[string] ,
}
identifier[print] ( identifier[self] . identifier[_session] . identifier[headers] )
identifier[r] = identifier[self] . identifier[_session] . identifier[get] ( identifier[API_URL] + literal[string] , identifier[params] = identifier[params] )
identifier[r] . identifier[raise_for_status] ()
keyword[return] identifier[r] . identifier[json] () | def get_reviews(self, listing_id, offset=0, limit=20):
"""
Get reviews for a given listing
"""
params = {'_order': 'language_country', 'listing_id': str(listing_id), '_offset': str(offset), 'role': 'all', '_limit': str(limit), '_format': 'for_mobile_client'}
print(self._session.headers)
r = self._session.get(API_URL + '/reviews', params=params)
r.raise_for_status()
return r.json() |
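A self-contained usage sketch for the `get_reviews` row above, showing the paging pattern its `offset`/`limit` parameters support. The stub client, the `reviews` response key, and the listing id are assumptions for illustration; a real caller would hold a `requests.Session` against the live API.

```python
# Stub with the same get_reviews signature, used to show the paging pattern.
class StubClient:
    def get_reviews(self, listing_id, offset=0, limit=20):
        data = [{'comments': 'Great stay'}, {'comments': 'Would return'}]
        return {'reviews': data[offset:offset + limit]}

client = StubClient()
offset, limit = 0, 20
while True:
    page = client.get_reviews(12345, offset=offset, limit=limit)
    reviews = page.get('reviews', [])   # response key is an assumption
    if not reviews:
        break
    for review in reviews:
        print(review['comments'])
    offset += limit
```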
def p_pkg_desc_1(self, p):
"""pkg_desc : PKG_DESC TEXT"""
try:
if six.PY2:
value = p[2].decode(encoding='utf-8')
else:
value = p[2]
self.builder.set_pkg_desc(self.document, value)
except CardinalityError:
self.more_than_one_error('PackageDescription', p.lineno(1))
except OrderError:
self.order_error('PackageDescription', 'PackageFileName', p.lineno(1)) | def function[p_pkg_desc_1, parameter[self, p]]:
constant[pkg_desc : PKG_DESC TEXT]
<ast.Try object at 0x7da1b0159120> | keyword[def] identifier[p_pkg_desc_1] ( identifier[self] , identifier[p] ):
literal[string]
keyword[try] :
keyword[if] identifier[six] . identifier[PY2] :
identifier[value] = identifier[p] [ literal[int] ]. identifier[decode] ( identifier[encoding] = literal[string] )
keyword[else] :
identifier[value] = identifier[p] [ literal[int] ]
identifier[self] . identifier[builder] . identifier[set_pkg_desc] ( identifier[self] . identifier[document] , identifier[value] )
keyword[except] identifier[CardinalityError] :
identifier[self] . identifier[more_than_one_error] ( literal[string] , identifier[p] . identifier[lineno] ( literal[int] ))
keyword[except] identifier[OrderError] :
identifier[self] . identifier[order_error] ( literal[string] , literal[string] , identifier[p] . identifier[lineno] ( literal[int] )) | def p_pkg_desc_1(self, p):
"""pkg_desc : PKG_DESC TEXT"""
try:
if six.PY2:
value = p[2].decode(encoding='utf-8') # depends on [control=['if'], data=[]]
else:
value = p[2]
self.builder.set_pkg_desc(self.document, value) # depends on [control=['try'], data=[]]
except CardinalityError:
self.more_than_one_error('PackageDescription', p.lineno(1)) # depends on [control=['except'], data=[]]
except OrderError:
self.order_error('PackageDescription', 'PackageFileName', p.lineno(1)) # depends on [control=['except'], data=[]] |
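The `p_pkg_desc_1` row above is a PLY grammar action: the docstring is the production and `p.lineno(1)` reports the source line of the rule's first symbol. A minimal sketch of that mechanism follows; the token names, regexes, and input string are invented for illustration and are not part of the dataset row.

```python
# Minimal PLY sketch; the docstring of each p_* function is its production.
import ply.lex as lex
import ply.yacc as yacc

tokens = ('PKG_DESC', 'TEXT')

t_PKG_DESC = r'PackageDescription:'
t_TEXT = r'<text>[^<]*</text>'
t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

def p_pkg_desc(p):
    """pkg_desc : PKG_DESC TEXT"""
    p[0] = p[2]              # the rule's value is the TEXT token

def p_error(p):
    pass

parser = yacc.yacc()
print(parser.parse('PackageDescription: <text>a demo package</text>',
                   lexer=lex.lex()))
```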
def define_new(cls, name, members, is_abstract=False):
"""
Define a new struct type derived from the current type.
Parameters
----------
name: str
Name of the struct type
members: {member_name : type}
Dictionary of struct member types.
is_abstract: bool
If set, marks the struct as abstract.
"""
m = OrderedDict(cls._members)
# Make sure derivation does not have any overlapping keys with its parent
if set(m.keys()) & set(members.keys()):
raise ValueError("'members' contains keys that overlap with parent")
m.update(members)
dct = {
'_members' : m,
'_is_abstract': is_abstract,
}
newcls = type(name, (cls,), dct)
return newcls | def function[define_new, parameter[cls, name, members, is_abstract]]:
constant[
Define a new struct type derived from the current type.
Parameters
----------
name: str
Name of the struct type
members: {member_name : type}
Dictionary of struct member types.
is_abstract: bool
If set, marks the struct as abstract.
]
variable[m] assign[=] call[name[OrderedDict], parameter[name[cls]._members]]
if binary_operation[call[name[set], parameter[call[name[m].keys, parameter[]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[members].keys, parameter[]]]]] begin[:]
<ast.Raise object at 0x7da1b0da2920>
call[name[m].update, parameter[name[members]]]
variable[dct] assign[=] dictionary[[<ast.Constant object at 0x7da1b0da28c0>, <ast.Constant object at 0x7da1b0da19c0>], [<ast.Name object at 0x7da1b0da3700>, <ast.Name object at 0x7da1b0da1de0>]]
variable[newcls] assign[=] call[name[type], parameter[name[name], tuple[[<ast.Name object at 0x7da1b0da2560>]], name[dct]]]
return[name[newcls]] | keyword[def] identifier[define_new] ( identifier[cls] , identifier[name] , identifier[members] , identifier[is_abstract] = keyword[False] ):
literal[string]
identifier[m] = identifier[OrderedDict] ( identifier[cls] . identifier[_members] )
keyword[if] identifier[set] ( identifier[m] . identifier[keys] ())& identifier[set] ( identifier[members] . identifier[keys] ()):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[m] . identifier[update] ( identifier[members] )
identifier[dct] ={
literal[string] : identifier[m] ,
literal[string] : identifier[is_abstract] ,
}
identifier[newcls] = identifier[type] ( identifier[name] ,( identifier[cls] ,), identifier[dct] )
keyword[return] identifier[newcls] | def define_new(cls, name, members, is_abstract=False):
"""
Define a new struct type derived from the current type.
Parameters
----------
name: str
Name of the struct type
members: {member_name : type}
Dictionary of struct member types.
is_abstract: bool
If set, marks the struct as abstract.
"""
m = OrderedDict(cls._members)
# Make sure derivation does not have any overlapping keys with its parent
if set(m.keys()) & set(members.keys()):
raise ValueError("'members' contains keys that overlap with parent") # depends on [control=['if'], data=[]]
m.update(members)
dct = {'_members': m, '_is_abstract': is_abstract}
newcls = type(name, (cls,), dct)
return newcls |
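A usage sketch for `define_new` above, assuming the function is bound as a classmethod on a struct base class; the `Struct` base and the member types here are illustrative, not from the dataset row.

```python
# Hypothetical wiring: define_new is the function from the row above,
# assumed to be in scope at module level.
from collections import OrderedDict

class Struct:
    _members = OrderedDict()
    _is_abstract = True
    define_new = classmethod(define_new)

Point = Struct.define_new('Point', {'x': float, 'y': float})
Point3D = Point.define_new('Point3D', {'z': float})
print(list(Point3D._members))          # ['x', 'y', 'z']
# Point.define_new('Bad', {'x': int}) # ValueError: overlapping member
```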
def pkginfo(name, version, arch, repoid, install_date=None, install_date_time_t=None):
'''
Build and return a pkginfo namedtuple
'''
pkginfo_tuple = collections.namedtuple(
'PkgInfo',
('name', 'version', 'arch', 'repoid', 'install_date',
'install_date_time_t')
)
return pkginfo_tuple(name, version, arch, repoid, install_date,
install_date_time_t) | def function[pkginfo, parameter[name, version, arch, repoid, install_date, install_date_time_t]]:
constant[
Build and return a pkginfo namedtuple
]
variable[pkginfo_tuple] assign[=] call[name[collections].namedtuple, parameter[constant[PkgInfo], tuple[[<ast.Constant object at 0x7da1b2121f30>, <ast.Constant object at 0x7da1b2122170>, <ast.Constant object at 0x7da1b2121d50>, <ast.Constant object at 0x7da1b2122320>, <ast.Constant object at 0x7da1b21208b0>, <ast.Constant object at 0x7da1b2122cb0>]]]]
return[call[name[pkginfo_tuple], parameter[name[name], name[version], name[arch], name[repoid], name[install_date], name[install_date_time_t]]]] | keyword[def] identifier[pkginfo] ( identifier[name] , identifier[version] , identifier[arch] , identifier[repoid] , identifier[install_date] = keyword[None] , identifier[install_date_time_t] = keyword[None] ):
literal[string]
identifier[pkginfo_tuple] = identifier[collections] . identifier[namedtuple] (
literal[string] ,
( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] )
)
keyword[return] identifier[pkginfo_tuple] ( identifier[name] , identifier[version] , identifier[arch] , identifier[repoid] , identifier[install_date] ,
identifier[install_date_time_t] ) | def pkginfo(name, version, arch, repoid, install_date=None, install_date_time_t=None):
"""
Build and return a pkginfo namedtuple
"""
pkginfo_tuple = collections.namedtuple('PkgInfo', ('name', 'version', 'arch', 'repoid', 'install_date', 'install_date_time_t'))
return pkginfo_tuple(name, version, arch, repoid, install_date, install_date_time_t) |
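One note on the row above: the namedtuple class is rebuilt on every call. A sketch of the equivalent with the class hoisted to module level, which is cheaper and lets `isinstance` checks work across calls:

```python
import collections

# Module-level class: created once, reused by every call.
PkgInfo = collections.namedtuple(
    'PkgInfo',
    ('name', 'version', 'arch', 'repoid', 'install_date',
     'install_date_time_t'))

info = PkgInfo('bash', '5.1', 'x86_64', 'base', None, None)
print(info.name, info.arch)    # bash x86_64
```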
def t2T(self, seg, t):
"""returns the path parameter T which corresponds to the segment
parameter t. In other words, for any Path object, path, and any
segment in path, seg, T(t) = path.t2T(seg, t) is the unique
reparameterization such that path.point(T(t)) == seg.point(t) for all
0 <= t <= 1.
Input Note: seg can be a segment in the Path object or its
corresponding index."""
self._calc_lengths()
# Accept an index or a segment for seg
if isinstance(seg, int):
seg_idx = seg
else:
try:
seg_idx = self.index(seg)
except ValueError:
assert is_path_segment(seg) or isinstance(seg, int)
raise
segment_start = sum(self._lengths[:seg_idx])
segment_end = segment_start + self._lengths[seg_idx]
T = (segment_end - segment_start)*t + segment_start
return T | def function[t2T, parameter[self, seg, t]]:
constant[returns the path parameter T which corresponds to the segment
parameter t. In other words, for any Path object, path, and any
segment in path, seg, T(t) = path.t2T(seg, t) is the unique
reparameterization such that path.point(T(t)) == seg.point(t) for all
0 <= t <= 1.
Input Note: seg can be a segment in the Path object or its
corresponding index.]
call[name[self]._calc_lengths, parameter[]]
if call[name[isinstance], parameter[name[seg], name[int]]] begin[:]
variable[seg_idx] assign[=] name[seg]
variable[segment_start] assign[=] call[name[sum], parameter[call[name[self]._lengths][<ast.Slice object at 0x7da20c6e74f0>]]]
variable[segment_end] assign[=] binary_operation[name[segment_start] + call[name[self]._lengths][name[seg_idx]]]
variable[T] assign[=] binary_operation[binary_operation[binary_operation[name[segment_end] - name[segment_start]] * name[t]] + name[segment_start]]
return[name[T]] | keyword[def] identifier[t2T] ( identifier[self] , identifier[seg] , identifier[t] ):
literal[string]
identifier[self] . identifier[_calc_lengths] ()
keyword[if] identifier[isinstance] ( identifier[seg] , identifier[int] ):
identifier[seg_idx] = identifier[seg]
keyword[else] :
keyword[try] :
identifier[seg_idx] = identifier[self] . identifier[index] ( identifier[seg] )
keyword[except] identifier[ValueError] :
keyword[assert] identifier[is_path_segment] ( identifier[seg] ) keyword[or] identifier[isinstance] ( identifier[seg] , identifier[int] )
keyword[raise]
identifier[segment_start] = identifier[sum] ( identifier[self] . identifier[_lengths] [: identifier[seg_idx] ])
identifier[segment_end] = identifier[segment_start] + identifier[self] . identifier[_lengths] [ identifier[seg_idx] ]
identifier[T] =( identifier[segment_end] - identifier[segment_start] )* identifier[t] + identifier[segment_start]
keyword[return] identifier[T] | def t2T(self, seg, t):
"""returns the path parameter T which corresponds to the segment
parameter t. In other words, for any Path object, path, and any
segment in path, seg, T(t) = path.t2T(seg, t) is the unique
reparameterization such that path.point(T(t)) == seg.point(t) for all
0 <= t <= 1.
Input Note: seg can be a segment in the Path object or its
corresponding index."""
self._calc_lengths()
# Accept an index or a segment for seg
if isinstance(seg, int):
seg_idx = seg # depends on [control=['if'], data=[]]
else:
try:
seg_idx = self.index(seg) # depends on [control=['try'], data=[]]
except ValueError:
assert is_path_segment(seg) or isinstance(seg, int)
raise # depends on [control=['except'], data=[]]
segment_start = sum(self._lengths[:seg_idx])
segment_end = segment_start + self._lengths[seg_idx]
T = (segment_end - segment_start) * t + segment_start
return T |
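The row above matches the `Path.t2T` method of svgpathtools; assuming that library is installed, a round trip that checks the defining property `path.point(T) == seg.point(t)`:

```python
from svgpathtools import Line, Path

# Two equal-length segments, so the midpoint of the second sits at T = 0.75.
path = Path(Line(0 + 0j, 1 + 0j), Line(1 + 0j, 1 + 1j))
seg = path[1]
T = path.t2T(seg, 0.5)
assert abs(path.point(T) - seg.point(0.5)) < 1e-9
print(T)                                # 0.75
```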
def register(self, *actions):
"""Register `actions` in the current application. All `actions` must be
an instance of :class:`.Action` or one of its subclasses.
If `overwrite` is `True`, then it is allowed to overwrite an
existing action with same name and category; else `ValueError`
is raised.
"""
assert self.installed(), "Actions not enabled on this application"
assert all(isinstance(a, Action) for a in actions)
for action in actions:
cat = action.category
reg = self._state["categories"].setdefault(cat, [])
reg.append(action) | def function[register, parameter[self]]:
constant[Register `actions` in the current application. All `actions` must be
an instance of :class:`.Action` or one of its subclasses.
If `overwrite` is `True`, then it is allowed to overwrite an
existing action with same name and category; else `ValueError`
is raised.
]
assert[call[name[self].installed, parameter[]]]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da20c6c5540>]]]
for taget[name[action]] in starred[name[actions]] begin[:]
variable[cat] assign[=] name[action].category
variable[reg] assign[=] call[call[name[self]._state][constant[categories]].setdefault, parameter[name[cat], list[[]]]]
call[name[reg].append, parameter[name[action]]] | keyword[def] identifier[register] ( identifier[self] ,* identifier[actions] ):
literal[string]
keyword[assert] identifier[self] . identifier[installed] (), literal[string]
keyword[assert] identifier[all] ( identifier[isinstance] ( identifier[a] , identifier[Action] ) keyword[for] identifier[a] keyword[in] identifier[actions] )
keyword[for] identifier[action] keyword[in] identifier[actions] :
identifier[cat] = identifier[action] . identifier[category]
identifier[reg] = identifier[self] . identifier[_state] [ literal[string] ]. identifier[setdefault] ( identifier[cat] ,[])
identifier[reg] . identifier[append] ( identifier[action] ) | def register(self, *actions):
"""Register `actions` in the current application. All `actions` must be
an instance of :class:`.Action` or one of its subclasses.
If `overwrite` is `True`, then it is allowed to overwrite an
existing action with same name and category; else `ValueError`
is raised.
"""
assert self.installed(), 'Actions not enabled on this application'
assert all((isinstance(a, Action) for a in actions))
for action in actions:
cat = action.category
reg = self._state['categories'].setdefault(cat, [])
reg.append(action) # depends on [control=['for'], data=['action']] |
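A self-contained sketch of the category-registry bookkeeping in the row above, with a stub `Action`; the real `Action` class and the `_state` plumbing belong to the surrounding framework and are assumed here.

```python
# Stub illustrating register(): group actions into per-category lists.
class Action:
    def __init__(self, category, name):
        self.category = category
        self.name = name

state = {'categories': {}}
for action in (Action('admin', 'delete-user'), Action('admin', 'export')):
    state['categories'].setdefault(action.category, []).append(action)

print([a.name for a in state['categories']['admin']])
# ['delete-user', 'export']
```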
def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
if isinstance(self.parsed_version, packaging.version.Version):
spec = "%s==%s" % (self.project_name, self.parsed_version)
else:
spec = "%s===%s" % (self.project_name, self.parsed_version)
return Requirement.parse(spec) | def function[as_requirement, parameter[self]]:
constant[Return a ``Requirement`` that matches this distribution exactly]
if call[name[isinstance], parameter[name[self].parsed_version, name[packaging].version.Version]] begin[:]
variable[spec] assign[=] binary_operation[constant[%s==%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6c4df0>, <ast.Attribute object at 0x7da20c6c7310>]]]
return[call[name[Requirement].parse, parameter[name[spec]]]] | keyword[def] identifier[as_requirement] ( identifier[self] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[parsed_version] , identifier[packaging] . identifier[version] . identifier[Version] ):
identifier[spec] = literal[string] %( identifier[self] . identifier[project_name] , identifier[self] . identifier[parsed_version] )
keyword[else] :
identifier[spec] = literal[string] %( identifier[self] . identifier[project_name] , identifier[self] . identifier[parsed_version] )
keyword[return] identifier[Requirement] . identifier[parse] ( identifier[spec] ) | def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
if isinstance(self.parsed_version, packaging.version.Version):
spec = '%s==%s' % (self.project_name, self.parsed_version) # depends on [control=['if'], data=[]]
else:
spec = '%s===%s' % (self.project_name, self.parsed_version)
return Requirement.parse(spec) |
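The `==`/`===` split above follows PEP 440: `===` is arbitrary (string) equality, the fallback when a version does not parse as a standard version. A small pkg_resources check of the spec format:

```python
from pkg_resources import Requirement

req = Requirement.parse('example-pkg===1.2.3')
print(req.project_name, req.specs)      # example-pkg [('===', '1.2.3')]
```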
def mehring_values(self):
"""
Returns: the Chemical shielding tensor in Mehring Notation
"""
pas = self.principal_axis_system
sigma_iso = pas.trace() / 3
sigma_11, sigma_22, sigma_33 = np.diag(pas)
return self.MehringNotation(sigma_iso, sigma_11, sigma_22, sigma_33) | def function[mehring_values, parameter[self]]:
constant[
Returns: the Chemical shielding tensor in Mehring Notation
]
variable[pas] assign[=] name[self].principal_axis_system
variable[sigma_iso] assign[=] binary_operation[call[name[pas].trace, parameter[]] / constant[3]]
<ast.Tuple object at 0x7da1b1c58040> assign[=] call[name[np].diag, parameter[name[pas]]]
return[call[name[self].MehringNotation, parameter[name[sigma_iso], name[sigma_11], name[sigma_22], name[sigma_33]]]] | keyword[def] identifier[mehring_values] ( identifier[self] ):
literal[string]
identifier[pas] = identifier[self] . identifier[principal_axis_system]
identifier[sigma_iso] = identifier[pas] . identifier[trace] ()/ literal[int]
identifier[sigma_11] , identifier[sigma_22] , identifier[sigma_33] = identifier[np] . identifier[diag] ( identifier[pas] )
keyword[return] identifier[self] . identifier[MehringNotation] ( identifier[sigma_iso] , identifier[sigma_11] , identifier[sigma_22] , identifier[sigma_33] ) | def mehring_values(self):
"""
Returns: the Chemical shielding tensor in Mehring Notation
"""
pas = self.principal_axis_system
sigma_iso = pas.trace() / 3
(sigma_11, sigma_22, sigma_33) = np.diag(pas)
return self.MehringNotation(sigma_iso, sigma_11, sigma_22, sigma_33) |
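A numeric sketch of the quantities computed above, for an illustrative diagonal shielding tensor:

```python
import numpy as np

pas = np.diag([10.0, 20.0, 30.0])               # principal axis system (ppm)
sigma_iso = pas.trace() / 3                     # isotropic value: 20.0
sigma_11, sigma_22, sigma_33 = np.diag(pas)
print(sigma_iso, sigma_11, sigma_22, sigma_33)  # 20.0 10.0 20.0 30.0
```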
def get_task(self, name):
"""Get task by name or create it if it does not exists."""
if name in self.tasks.keys():
task = self.tasks[name]
else:
task = Task(name)
self.tasks[name] = task
return task | def function[get_task, parameter[self, name]]:
constant[Get task by name or create it if it does not exist.]
if compare[name[name] in call[name[self].tasks.keys, parameter[]]] begin[:]
variable[task] assign[=] call[name[self].tasks][name[name]]
return[name[task]] | keyword[def] identifier[get_task] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[tasks] . identifier[keys] ():
identifier[task] = identifier[self] . identifier[tasks] [ identifier[name] ]
keyword[else] :
identifier[task] = identifier[Task] ( identifier[name] )
identifier[self] . identifier[tasks] [ identifier[name] ]= identifier[task]
keyword[return] identifier[task] | def get_task(self, name):
"""Get task by name or create it if it does not exists."""
if name in self.tasks.keys():
task = self.tasks[name] # depends on [control=['if'], data=['name']]
else:
task = Task(name)
self.tasks[name] = task
return task |
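The row above is a get-or-create lookup. For cheap, side-effect-free constructors the same thing can be written with `dict.setdefault`, at the cost of always building the default `Task` even when the name already exists:

```python
class Task:
    def __init__(self, name):
        self.name = name

tasks = {}
first = tasks.setdefault('build', Task('build'))    # created and stored
again = tasks.setdefault('build', Task('build'))    # existing one returned
assert first is again
```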
def duplicate(self):
'''
Returns a copy of the current contact element.
@returns: Contact
'''
return self.__class__(name=self.name, identifier=self.identifier,
phone=self.phone, require_id=self.__require_id,
address=self.address.duplicate()) | def function[duplicate, parameter[self]]:
constant[
Returns a copy of the current contact element.
@returns: Contact
]
return[call[name[self].__class__, parameter[]]] | keyword[def] identifier[duplicate] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] ( identifier[name] = identifier[self] . identifier[name] , identifier[identifier] = identifier[self] . identifier[identifier] ,
identifier[phone] = identifier[self] . identifier[phone] , identifier[require_id] = identifier[self] . identifier[__require_id] ,
identifier[address] = identifier[self] . identifier[address] . identifier[duplicate] ()) | def duplicate(self):
"""
Returns a copy of the current contact element.
@returns: Contact
"""
return self.__class__(name=self.name, identifier=self.identifier, phone=self.phone, require_id=self.__require_id, address=self.address.duplicate()) |
def get_ADC_value(bus, addr, channel):
"""
This method selects a channel and initiates conversion
The ADC operates at 240 SPS (12 bits) with 1x gain
One shot conversions are used, meaning a wait period is needed
in order to acquire new data. This is done via a constant poll
of the ready bit.
Upon completion, a voltage value is returned to the caller.
Usage - get_ADC_value(bus, SensorCluster.ADC_addr, channel_to_read)
IMPORTANT NOTE:
The ADC uses a 2.048V voltage reference
"""
if channel == 1:
INIT = 0b10000000
elif channel == 2:
INIT = 0b10100000
elif channel == 3:
INIT = 0b11000000
elif channel == 4:
INIT = 0b11100000
bus.write_byte(addr, INIT)
data = bus.read_i2c_block_data(addr, 0, 3)
status = (data[2] & 0b10000000) >> 7
while(status == 1):
data = bus.read_i2c_block_data(addr, 0, 3)
status = (data[2] & 0b10000000) >> 7
sign = data[0] & 0b00001000
val = ((data[0] & 0b0000111) << 8) | (data[1])
if sign != 0:
val = (val ^ 0x3ff) + 1 # compute 2s complement for 12 bit val
# Convert val to a ratiometric ADC reading
return float(val) * 2.048 / float(2047) | def function[get_ADC_value, parameter[bus, addr, channel]]:
constant[
This method selects a channel and initiates conversion
The ADC operates at 240 SPS (12 bits) with 1x gain
One shot conversions are used, meaning a wait period is needed
in order to acquire new data. This is done via a constant poll
of the ready bit.
Upon completion, a voltage value is returned to the caller.
Usage - get_ADC_value(bus, SensorCluster.ADC_addr, channel_to_read)
IMPORTANT NOTE:
The ADC uses a 2.048V voltage reference
]
if compare[name[channel] equal[==] constant[1]] begin[:]
variable[INIT] assign[=] constant[128]
call[name[bus].write_byte, parameter[name[addr], name[INIT]]]
variable[data] assign[=] call[name[bus].read_i2c_block_data, parameter[name[addr], constant[0], constant[3]]]
variable[status] assign[=] binary_operation[binary_operation[call[name[data]][constant[2]] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] <ast.RShift object at 0x7da2590d6a40> constant[7]]
while compare[name[status] equal[==] constant[1]] begin[:]
variable[data] assign[=] call[name[bus].read_i2c_block_data, parameter[name[addr], constant[0], constant[3]]]
variable[status] assign[=] binary_operation[binary_operation[call[name[data]][constant[2]] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] <ast.RShift object at 0x7da2590d6a40> constant[7]]
variable[sign] assign[=] binary_operation[call[name[data]][constant[0]] <ast.BitAnd object at 0x7da2590d6b60> constant[8]]
variable[val] assign[=] binary_operation[binary_operation[binary_operation[call[name[data]][constant[0]] <ast.BitAnd object at 0x7da2590d6b60> constant[7]] <ast.LShift object at 0x7da2590d69e0> constant[8]] <ast.BitOr object at 0x7da2590d6aa0> call[name[data]][constant[1]]]
if compare[name[sign] not_equal[!=] constant[0]] begin[:]
variable[val] assign[=] binary_operation[binary_operation[name[val] <ast.BitXor object at 0x7da2590d6b00> constant[1023]] + constant[1]]
return[binary_operation[binary_operation[call[name[float], parameter[name[val]]] * constant[2.048]] / call[name[float], parameter[constant[2047]]]]] | keyword[def] identifier[get_ADC_value] ( identifier[bus] , identifier[addr] , identifier[channel] ):
literal[string]
keyword[if] identifier[channel] == literal[int] :
identifier[INIT] = literal[int]
keyword[elif] identifier[channel] == literal[int] :
identifier[INIT] = literal[int]
keyword[elif] identifier[channel] == literal[int] :
identifier[INIT] = literal[int]
keyword[elif] identifier[channel] == literal[int] :
identifier[INIT] = literal[int]
identifier[bus] . identifier[write_byte] ( identifier[addr] , identifier[INIT] )
identifier[data] = identifier[bus] . identifier[read_i2c_block_data] ( identifier[addr] , literal[int] , literal[int] )
identifier[status] =( identifier[data] [ literal[int] ]& literal[int] )>> literal[int]
keyword[while] ( identifier[status] == literal[int] ):
identifier[data] = identifier[bus] . identifier[read_i2c_block_data] ( identifier[addr] , literal[int] , literal[int] )
identifier[status] =( identifier[data] [ literal[int] ]& literal[int] )>> literal[int]
identifier[sign] = identifier[data] [ literal[int] ]& literal[int]
identifier[val] =(( identifier[data] [ literal[int] ]& literal[int] )<< literal[int] )|( identifier[data] [ literal[int] ])
keyword[if] ( identifier[sign] != literal[int] ):
identifier[val] =( identifier[val] ^ literal[int] )+ literal[int]
keyword[return] identifier[float] ( identifier[val] )* literal[int] / identifier[float] ( literal[int] ) | def get_ADC_value(bus, addr, channel):
"""
This method selects a channel and initiates conversion
The ADC operates at 240 SPS (12 bits) with 1x gain
One shot conversions are used, meaning a wait period is needed
in order to acquire new data. This is done via a constant poll
of the ready bit.
Upon completion, a voltage value is returned to the caller.
Usage - get_ADC_value(bus, SensorCluster.ADC_addr, channel_to_read)
IMPORTANT NOTE:
The ADC uses a 2.048V voltage reference
"""
if channel == 1:
INIT = 128 # depends on [control=['if'], data=[]]
elif channel == 2:
INIT = 160 # depends on [control=['if'], data=[]]
elif channel == 3:
INIT = 192 # depends on [control=['if'], data=[]]
elif channel == 4:
INIT = 224 # depends on [control=['if'], data=[]]
bus.write_byte(addr, INIT)
data = bus.read_i2c_block_data(addr, 0, 3)
status = (data[2] & 128) >> 7
while status == 1:
data = bus.read_i2c_block_data(addr, 0, 3)
status = (data[2] & 128) >> 7 # depends on [control=['while'], data=['status']]
sign = data[0] & 8
val = (data[0] & 7) << 8 | data[1]
if sign != 0:
val = (val ^ 1023) + 1 # compute 2s complement for 12 bit val # depends on [control=['if'], data=[]] # Convert val to a ratiometric ADC reading
return float(val) * 2.048 / float(2047) |
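A usage sketch for the ADC reader above on a Raspberry Pi-style I2C bus; it needs real hardware to run. The bus number and device address are assumptions (0x68 is a common MCP342x default, and the register layout above resembles an MCP342x with its 2.048 V reference).

```python
# Hypothetical scan of all four channels using get_ADC_value from the
# row above; address and bus number are assumptions.
import smbus

bus = smbus.SMBus(1)            # I2C bus 1
ADC_ADDR = 0x68                 # placeholder device address
for channel in range(1, 5):
    volts = get_ADC_value(bus, ADC_ADDR, channel)
    print('channel %d: %.4f V' % (channel, volts))
```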
def cycle(arrays, descs=None, cadence=0.6, toworlds=None,
drawoverlay=None, yflip=False, tostatuses=None, run_main=True,
save_after_viewing=None):
"""Interactively display a series of 2D data arrays.
arrays
An iterable of 2D arrays (a 3D array works).
descs
An iterable of text descriptions, one for each array
cadence
The time delay before the next array is shown, in seconds.
tostatuses
An iterable of functions that convert cursor positions to a textual
status output corresponding to that position. FIXME details needed.
toworlds
An iterable of functions that convert cursor positions to a
latitude/longitude pair that is displayed in the status output.
The `tostatuses` keyword is a more generic version of this.
FIXME details needed.
drawoverlay
An optional function that draws an overlay on the display after
the underlying data image is presented. FIXME details needed.
yflip
If true, have the numerical *y* coordinates have 0 refer to the
bottom of the image. Note that the data array is still drawn such
that its first row appears at the top!
run_main
If true, run the Gtk mainloop explicitly so that the function does
not return until the window is closed. If false, no mainloop is run.
If the application happens to already be running a mainloop in the
background, the window will appear and the user will be able to
interact with it while this thread continues executing.
save_after_viewing
If set to a string containing an integer percent-formatting specifier,
the data will be written to a series of PNG files after the window is
closed.
"""
n = len(arrays)
amin = amax = h = w = None
if toworlds is not None and tostatuses is not None:
raise ValueError('only one of "toworlds" and "tostatuses" may be given')
if descs is None:
descs = [''] * n
for array in arrays:
thish, thisw = array.shape
thismin, thismax = array.min(), array.max()
if not np.isfinite(thismin):
thismin = array[np.ma.where(np.isfinite(array))].min()
if not np.isfinite(thismax):
thismax = array[np.ma.where(np.isfinite(array))].max()
if amin is None:
w, h, amin, amax = thisw, thish, thismin, thismax
else:
if thisw != w:
raise ValueError('array widths not all equal')
if thish != h:
raise ValueError('array heights not all equal')
amin = min(amin, thismin)
amax = max(amax, thismax)
stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32, w)
assert stride % 4 == 0 # stride is in bytes
imgdata = np.empty((n, h, stride // 4), dtype=np.uint32)
fixed = np.empty((n, h, w), dtype=np.int32)
antimask = np.empty((n, h, w), dtype=np.bool_)
surfaces = [None] * n
imgdata.fill(0xFF000000)
for i, array in enumerate(arrays):
surfaces[i] = cairo.ImageSurface.create_for_data(imgdata[i], cairo.FORMAT_ARGB32,
w, h, stride)
if np.ma.is_masked(array):
filled = array.filled(amin)
antimask[i] = ~array.mask
else:
filled = array
antimask[i].fill(True)
fixed[i] = (filled - amin) * (0x0FFFFFF0 / (amax - amin))
def getn():
return n
def getshapei(i):
return w, h
def getdesci(i):
return descs[i]
clipped = np.zeros((h, w), dtype=np.int32) # scratch arrays -- two needed
clipped2 = np.zeros((h, w), dtype=np.uint32) # to make numpy ufunc casting happy
def settuningi(i, tunerx, tunery):
np.bitwise_and(imgdata[i], 0xFF000000, imgdata[i])
fmin = int(0x0FFFFFF0 * tunerx)
fmax = int(0x0FFFFFF0 * tunery)
if fmin == fmax:
np.add(imgdata[i], 255 * (fixed[i] > fmin).astype(np.uint32), imgdata[i])
else:
np.clip(fixed[i], fmin, fmax, clipped)
np.subtract(clipped, fmin, clipped)
np.multiply(clipped, 255. / (fmax - fmin), clipped2, casting='unsafe')
np.add(imgdata[i], clipped2, imgdata[i])
np.multiply(imgdata[i], antimask[i], imgdata[i])
def getsurfacei(i, xoffset, yoffset, width, height):
return surfaces[i], xoffset, yoffset
# see comment in view()
nomasks = [not np.ma.is_masked(a) or a.mask is np.ma.nomask
for a in arrays]
if tostatuses is None:
if toworlds is None:
tostatuses = [None] * n
else:
from .astutil import fmthours, fmtdeglat
def make_status_func(toworld):
def status(y_and_x):
lat, lon = toworld(y_and_x)
return 'lat=%s lon=%s' % (fmtdeglat(lat),
fmthours(lon))
return status
tostatuses = [make_status_func(toworlds[i]) for i in range(n)]
def fmtstatusi(i, x, y):
s = ''
row = int(np.floor(y + 0.5))
col = int(np.floor(x + 0.5))
if row >= 0 and col >= 0 and row < h and col < w:
if nomasks[i] or not arrays[i].mask[row,col]:
s += '%g ' % arrays[i][row,col]
if yflip:
y = h - 1 - y
row = h - 1 - row
s += '[%d,%d] x=%.1f y=%.1f' % (row, col, x, y)
if tostatuses[i] is not None:
s += ' ' + tostatuses[i](np.array([y, x]))
return s
cycler = Cycler()
cycler.set_n_getter(getn)
cycler.set_shape_getter(getshapei)
cycler.set_desc_getter(getdesci)
cycler.set_tuning_setter(settuningi)
cycler.set_surface_getter(getsurfacei)
cycler.set_status_formatter(fmtstatusi)
cycler.set_overlay_drawer(drawoverlay)
cycler.win.show_all()
if run_main:
cycler.win.connect('destroy', Gtk.main_quit)
Gtk.main()
else:
cycler.win.connect('destroy', lambda e: cycler.win.destroy())
if save_after_viewing is not None:
for i in range(n):
filename = save_after_viewing % (i,)
settuningi(i, cycler.last_tunerx, cycler.last_tunery)
surface, xoffset, yoffset = getsurfacei(i, 0, 0, w, h)
surface.write_to_png(filename) | def function[cycle, parameter[arrays, descs, cadence, toworlds, drawoverlay, yflip, tostatuses, run_main, save_after_viewing]]:
constant[Interactively display a series of 2D data arrays.
arrays
An iterable of 2D arrays (a 3D array works).
descs
An iterable of text descriptions, one for each array
cadence
The time delay before the next array is shown, in seconds.
tostatuses
An iterable of functions that convert cursor positions to a textual
status output corresponding to that position. FIXME details needed.
toworlds
An iterable of functions that convert cursor positions to a
latitude/longitude pair that is displayed in the status output.
The `tostatuses` keyword is a more generic version of this.
FIXME details needed.
drawoverlay
An optional function that draws an overlay on the display after
the underlying data image is presented. FIXME details needed.
yflip
If true, have the numerical *y* coordinates have 0 refer to the
bottom of the image. Note that the data array is still drawn such
that its first row appears at the top!
run_main
If true, run the Gtk mainloop explicitly so that the function does
not return until the window is closed. If false, no mainloop is run.
If the application happens to already be running a mainloop in the
background, the window will appear and the user will be able to
interact with it while this thread continues executing.
save_after_viewing
If set to a string containing an integer percent-formatting specifier,
the data will be written to a series of PNG files after the window is
closed.
]
variable[n] assign[=] call[name[len], parameter[name[arrays]]]
variable[amin] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b269f940> begin[:]
<ast.Raise object at 0x7da1b269f7f0>
if compare[name[descs] is constant[None]] begin[:]
variable[descs] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b269f5b0>]] * name[n]]
for taget[name[array]] in starred[name[arrays]] begin[:]
<ast.Tuple object at 0x7da1b269f460> assign[=] name[array].shape
<ast.Tuple object at 0x7da1b269f340> assign[=] tuple[[<ast.Call object at 0x7da1b269f280>, <ast.Call object at 0x7da1b269f1f0>]]
if <ast.UnaryOp object at 0x7da1b269f130> begin[:]
variable[thismin] assign[=] call[call[name[array]][call[name[np].ma.where, parameter[call[name[np].isfinite, parameter[name[array]]]]]].min, parameter[]]
if <ast.UnaryOp object at 0x7da1b269ed70> begin[:]
variable[thismax] assign[=] call[call[name[array]][call[name[np].ma.where, parameter[call[name[np].isfinite, parameter[name[array]]]]]].max, parameter[]]
if compare[name[amin] is constant[None]] begin[:]
<ast.Tuple object at 0x7da1b269e8f0> assign[=] tuple[[<ast.Name object at 0x7da1b269e7d0>, <ast.Name object at 0x7da1b269e7a0>, <ast.Name object at 0x7da1b269e770>, <ast.Name object at 0x7da1b269e740>]]
variable[stride] assign[=] call[name[cairo].ImageSurface.format_stride_for_width, parameter[name[cairo].FORMAT_ARGB32, name[w]]]
assert[compare[binary_operation[name[stride] <ast.Mod object at 0x7da2590d6920> constant[4]] equal[==] constant[0]]]
variable[imgdata] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da1b27a6c80>, <ast.Name object at 0x7da1b27a5c00>, <ast.BinOp object at 0x7da1b27a78b0>]]]]
variable[fixed] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da1b27a78e0>, <ast.Name object at 0x7da1b27a7c40>, <ast.Name object at 0x7da1b27a6fe0>]]]]
variable[antimask] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da1b27a6980>, <ast.Name object at 0x7da1b27a7b80>, <ast.Name object at 0x7da1b269e0b0>]]]]
variable[surfaces] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b269df30>]] * name[n]]
call[name[imgdata].fill, parameter[constant[4278190080]]]
for taget[tuple[[<ast.Name object at 0x7da1b261bb80>, <ast.Name object at 0x7da1b261bb50>]]] in starred[call[name[enumerate], parameter[name[arrays]]]] begin[:]
call[name[surfaces]][name[i]] assign[=] call[name[cairo].ImageSurface.create_for_data, parameter[call[name[imgdata]][name[i]], name[cairo].FORMAT_ARGB32, name[w], name[h], name[stride]]]
if call[name[np].ma.is_masked, parameter[name[array]]] begin[:]
variable[filled] assign[=] call[name[array].filled, parameter[name[amin]]]
call[name[antimask]][name[i]] assign[=] <ast.UnaryOp object at 0x7da1b261b460>
call[name[fixed]][name[i]] assign[=] binary_operation[binary_operation[name[filled] - name[amin]] * binary_operation[constant[268435440] / binary_operation[name[amax] - name[amin]]]]
def function[getn, parameter[]]:
return[name[n]]
def function[getshapei, parameter[i]]:
return[tuple[[<ast.Name object at 0x7da1b261ad70>, <ast.Name object at 0x7da1b261ad40>]]]
def function[getdesci, parameter[i]]:
return[call[name[descs]][name[i]]]
variable[clipped] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b261aa40>, <ast.Name object at 0x7da1b261aa10>]]]]
variable[clipped2] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b261a830>, <ast.Name object at 0x7da1b261a800>]]]]
def function[settuningi, parameter[i, tunerx, tunery]]:
call[name[np].bitwise_and, parameter[call[name[imgdata]][name[i]], constant[4278190080], call[name[imgdata]][name[i]]]]
variable[fmin] assign[=] call[name[int], parameter[binary_operation[constant[268435440] * name[tunerx]]]]
variable[fmax] assign[=] call[name[int], parameter[binary_operation[constant[268435440] * name[tunery]]]]
if compare[name[fmin] equal[==] name[fmax]] begin[:]
call[name[np].add, parameter[call[name[imgdata]][name[i]], binary_operation[constant[255] * call[compare[call[name[fixed]][name[i]] greater[>] name[fmin]].astype, parameter[name[np].uint32]]], call[name[imgdata]][name[i]]]]
call[name[np].multiply, parameter[call[name[imgdata]][name[i]], call[name[antimask]][name[i]], call[name[imgdata]][name[i]]]]
def function[getsurfacei, parameter[i, xoffset, yoffset, width, height]]:
return[tuple[[<ast.Subscript object at 0x7da1b2618f70>, <ast.Name object at 0x7da1b2618ee0>, <ast.Name object at 0x7da1b2618eb0>]]]
variable[nomasks] assign[=] <ast.ListComp object at 0x7da1b26180d0>
if compare[name[tostatuses] is constant[None]] begin[:]
if compare[name[toworlds] is constant[None]] begin[:]
variable[tostatuses] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b2618670>]] * name[n]]
def function[fmtstatusi, parameter[i, x, y]]:
variable[s] assign[=] constant[]
variable[row] assign[=] call[name[int], parameter[call[name[np].floor, parameter[binary_operation[name[y] + constant[0.5]]]]]]
variable[col] assign[=] call[name[int], parameter[call[name[np].floor, parameter[binary_operation[name[x] + constant[0.5]]]]]]
if <ast.BoolOp object at 0x7da1b266ba90> begin[:]
if <ast.BoolOp object at 0x7da1b266b7f0> begin[:]
<ast.AugAssign object at 0x7da1b266b580>
if name[yflip] begin[:]
variable[y] assign[=] binary_operation[binary_operation[name[h] - constant[1]] - name[y]]
variable[row] assign[=] binary_operation[binary_operation[name[h] - constant[1]] - name[row]]
<ast.AugAssign object at 0x7da1b266b070>
if compare[call[name[tostatuses]][name[i]] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b266ada0>
return[name[s]]
variable[cycler] assign[=] call[name[Cycler], parameter[]]
call[name[cycler].set_n_getter, parameter[name[getn]]]
call[name[cycler].set_shape_getter, parameter[name[getshapei]]]
call[name[cycler].set_desc_getter, parameter[name[getdesci]]]
call[name[cycler].set_tuning_setter, parameter[name[settuningi]]]
call[name[cycler].set_surface_getter, parameter[name[getsurfacei]]]
call[name[cycler].set_status_formatter, parameter[name[fmtstatusi]]]
call[name[cycler].set_overlay_drawer, parameter[name[drawoverlay]]]
call[name[cycler].win.show_all, parameter[]]
if name[run_main] begin[:]
call[name[cycler].win.connect, parameter[constant[destroy], name[Gtk].main_quit]]
call[name[Gtk].main, parameter[]]
if compare[name[save_after_viewing] is_not constant[None]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
variable[filename] assign[=] binary_operation[name[save_after_viewing] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26688e0>]]]
call[name[settuningi], parameter[name[i], name[cycler].last_tunerx, name[cycler].last_tunery]]
<ast.Tuple object at 0x7da1b26686d0> assign[=] call[name[getsurfacei], parameter[name[i], constant[0], constant[0], name[w], name[h]]]
call[name[surface].write_to_png, parameter[name[filename]]] | keyword[def] identifier[cycle] ( identifier[arrays] , identifier[descs] = keyword[None] , identifier[cadence] = literal[int] , identifier[toworlds] = keyword[None] ,
identifier[drawoverlay] = keyword[None] , identifier[yflip] = keyword[False] , identifier[tostatuses] = keyword[None] , identifier[run_main] = keyword[True] ,
identifier[save_after_viewing] = keyword[None] ):
literal[string]
identifier[n] = identifier[len] ( identifier[arrays] )
identifier[amin] = identifier[amax] = identifier[h] = identifier[w] = keyword[None]
keyword[if] identifier[toworlds] keyword[is] keyword[not] keyword[None] keyword[and] identifier[tostatuses] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[descs] keyword[is] keyword[None] :
identifier[descs] =[ literal[string] ]* identifier[n]
keyword[for] identifier[array] keyword[in] identifier[arrays] :
identifier[thish] , identifier[thisw] = identifier[array] . identifier[shape]
identifier[thismin] , identifier[thismax] = identifier[array] . identifier[min] (), identifier[array] . identifier[max] ()
keyword[if] keyword[not] identifier[np] . identifier[isfinite] ( identifier[thismin] ):
identifier[thismin] = identifier[array] [ identifier[np] . identifier[ma] . identifier[where] ( identifier[np] . identifier[isfinite] ( identifier[array] ))]. identifier[min] ()
keyword[if] keyword[not] identifier[np] . identifier[isfinite] ( identifier[thismax] ):
identifier[thismax] = identifier[array] [ identifier[np] . identifier[ma] . identifier[where] ( identifier[np] . identifier[isfinite] ( identifier[array] ))]. identifier[max] ()
keyword[if] identifier[amin] keyword[is] keyword[None] :
identifier[w] , identifier[h] , identifier[amin] , identifier[amax] = identifier[thisw] , identifier[thish] , identifier[thismin] , identifier[thismax]
keyword[else] :
keyword[if] identifier[thisw] != identifier[w] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[thish] != identifier[h] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[amin] = identifier[min] ( identifier[amin] , identifier[thismin] )
identifier[amax] = identifier[max] ( identifier[amax] , identifier[thismax] )
identifier[stride] = identifier[cairo] . identifier[ImageSurface] . identifier[format_stride_for_width] ( identifier[cairo] . identifier[FORMAT_ARGB32] , identifier[w] )
keyword[assert] identifier[stride] % literal[int] == literal[int]
identifier[imgdata] = identifier[np] . identifier[empty] (( identifier[n] , identifier[h] , identifier[stride] // literal[int] ), identifier[dtype] = identifier[np] . identifier[uint32] )
identifier[fixed] = identifier[np] . identifier[empty] (( identifier[n] , identifier[h] , identifier[w] ), identifier[dtype] = identifier[np] . identifier[int32] )
identifier[antimask] = identifier[np] . identifier[empty] (( identifier[n] , identifier[h] , identifier[w] ), identifier[dtype] = identifier[np] . identifier[bool_] )
identifier[surfaces] =[ keyword[None] ]* identifier[n]
identifier[imgdata] . identifier[fill] ( literal[int] )
keyword[for] identifier[i] , identifier[array] keyword[in] identifier[enumerate] ( identifier[arrays] ):
identifier[surfaces] [ identifier[i] ]= identifier[cairo] . identifier[ImageSurface] . identifier[create_for_data] ( identifier[imgdata] [ identifier[i] ], identifier[cairo] . identifier[FORMAT_ARGB32] ,
identifier[w] , identifier[h] , identifier[stride] )
keyword[if] identifier[np] . identifier[ma] . identifier[is_masked] ( identifier[array] ):
identifier[filled] = identifier[array] . identifier[filled] ( identifier[amin] )
identifier[antimask] [ identifier[i] ]=~ identifier[array] . identifier[mask]
keyword[else] :
identifier[filled] = identifier[array]
identifier[antimask] [ identifier[i] ]. identifier[fill] ( keyword[True] )
identifier[fixed] [ identifier[i] ]=( identifier[filled] - identifier[amin] )*( literal[int] /( identifier[amax] - identifier[amin] ))
keyword[def] identifier[getn] ():
keyword[return] identifier[n]
keyword[def] identifier[getshapei] ( identifier[i] ):
keyword[return] identifier[w] , identifier[h]
keyword[def] identifier[getdesci] ( identifier[i] ):
keyword[return] identifier[descs] [ identifier[i] ]
identifier[clipped] = identifier[np] . identifier[zeros] (( identifier[h] , identifier[w] ), identifier[dtype] = identifier[np] . identifier[int32] )
identifier[clipped2] = identifier[np] . identifier[zeros] (( identifier[h] , identifier[w] ), identifier[dtype] = identifier[np] . identifier[uint32] )
keyword[def] identifier[settuningi] ( identifier[i] , identifier[tunerx] , identifier[tunery] ):
identifier[np] . identifier[bitwise_and] ( identifier[imgdata] [ identifier[i] ], literal[int] , identifier[imgdata] [ identifier[i] ])
identifier[fmin] = identifier[int] ( literal[int] * identifier[tunerx] )
identifier[fmax] = identifier[int] ( literal[int] * identifier[tunery] )
keyword[if] identifier[fmin] == identifier[fmax] :
identifier[np] . identifier[add] ( identifier[imgdata] [ identifier[i] ], literal[int] *( identifier[fixed] [ identifier[i] ]> identifier[fmin] ). identifier[astype] ( identifier[np] . identifier[uint32] ), identifier[imgdata] [ identifier[i] ])
keyword[else] :
identifier[np] . identifier[clip] ( identifier[fixed] [ identifier[i] ], identifier[fmin] , identifier[fmax] , identifier[clipped] )
identifier[np] . identifier[subtract] ( identifier[clipped] , identifier[fmin] , identifier[clipped] )
identifier[np] . identifier[multiply] ( identifier[clipped] , literal[int] /( identifier[fmax] - identifier[fmin] ), identifier[clipped2] , identifier[casting] = literal[string] )
identifier[np] . identifier[add] ( identifier[imgdata] [ identifier[i] ], identifier[clipped2] , identifier[imgdata] [ identifier[i] ])
identifier[np] . identifier[multiply] ( identifier[imgdata] [ identifier[i] ], identifier[antimask] [ identifier[i] ], identifier[imgdata] [ identifier[i] ])
keyword[def] identifier[getsurfacei] ( identifier[i] , identifier[xoffset] , identifier[yoffset] , identifier[width] , identifier[height] ):
keyword[return] identifier[surfaces] [ identifier[i] ], identifier[xoffset] , identifier[yoffset]
identifier[nomasks] =[ keyword[not] identifier[np] . identifier[ma] . identifier[is_masked] ( identifier[a] ) keyword[or] identifier[a] . identifier[mask] keyword[is] identifier[np] . identifier[ma] . identifier[nomask]
keyword[for] identifier[a] keyword[in] identifier[arrays] ]
keyword[if] identifier[tostatuses] keyword[is] keyword[None] :
keyword[if] identifier[toworlds] keyword[is] keyword[None] :
identifier[tostatuses] =[ keyword[None] ]* identifier[n]
keyword[else] :
keyword[from] . identifier[astutil] keyword[import] identifier[fmthours] , identifier[fmtdeglat]
keyword[def] identifier[make_status_func] ( identifier[toworld] ):
keyword[def] identifier[status] ( identifier[y_and_x] ):
identifier[lat] , identifier[lon] = identifier[toworld] ( identifier[y_and_x] )
keyword[return] literal[string] %( identifier[fmtdeglat] ( identifier[lat] ),
identifier[fmthours] ( identifier[lon] ))
keyword[return] identifier[status]
identifier[tostatuses] =[ identifier[make_status_func] ( identifier[toworlds] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] )]
keyword[def] identifier[fmtstatusi] ( identifier[i] , identifier[x] , identifier[y] ):
identifier[s] = literal[string]
identifier[row] = identifier[int] ( identifier[np] . identifier[floor] ( identifier[y] + literal[int] ))
identifier[col] = identifier[int] ( identifier[np] . identifier[floor] ( identifier[x] + literal[int] ))
keyword[if] identifier[row] >= literal[int] keyword[and] identifier[col] >= literal[int] keyword[and] identifier[row] < identifier[h] keyword[and] identifier[col] < identifier[w] :
keyword[if] identifier[nomasks] [ identifier[i] ] keyword[or] keyword[not] identifier[arrays] [ identifier[i] ]. identifier[mask] [ identifier[row] , identifier[col] ]:
identifier[s] += literal[string] % identifier[arrays] [ identifier[i] ][ identifier[row] , identifier[col] ]
keyword[if] identifier[yflip] :
identifier[y] = identifier[h] - literal[int] - identifier[y]
identifier[row] = identifier[h] - literal[int] - identifier[row]
identifier[s] += literal[string] %( identifier[row] , identifier[col] , identifier[x] , identifier[y] )
keyword[if] identifier[tostatuses] [ identifier[i] ] keyword[is] keyword[not] keyword[None] :
identifier[s] += literal[string] + identifier[tostatuses] [ identifier[i] ]( identifier[np] . identifier[array] ([ identifier[y] , identifier[x] ]))
keyword[return] identifier[s]
identifier[cycler] = identifier[Cycler] ()
identifier[cycler] . identifier[set_n_getter] ( identifier[getn] )
identifier[cycler] . identifier[set_shape_getter] ( identifier[getshapei] )
identifier[cycler] . identifier[set_desc_getter] ( identifier[getdesci] )
identifier[cycler] . identifier[set_tuning_setter] ( identifier[settuningi] )
identifier[cycler] . identifier[set_surface_getter] ( identifier[getsurfacei] )
identifier[cycler] . identifier[set_status_formatter] ( identifier[fmtstatusi] )
identifier[cycler] . identifier[set_overlay_drawer] ( identifier[drawoverlay] )
identifier[cycler] . identifier[win] . identifier[show_all] ()
keyword[if] identifier[run_main] :
identifier[cycler] . identifier[win] . identifier[connect] ( literal[string] , identifier[Gtk] . identifier[main_quit] )
identifier[Gtk] . identifier[main] ()
keyword[else] :
identifier[cycler] . identifier[win] . identifier[connect] ( literal[string] , keyword[lambda] identifier[e] : identifier[cycler] . identifier[win] . identifier[destroy] ())
keyword[if] identifier[save_after_viewing] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[filename] = identifier[save_after_viewing] %( identifier[i] ,)
identifier[settuningi] ( identifier[i] , identifier[cycler] . identifier[last_tunerx] , identifier[cycler] . identifier[last_tunery] )
identifier[surface] , identifier[xoffset] , identifier[yoffset] = identifier[getsurfacei] ( identifier[i] , literal[int] , literal[int] , identifier[w] , identifier[h] )
identifier[surface] . identifier[write_to_png] ( identifier[filename] ) | def cycle(arrays, descs=None, cadence=0.6, toworlds=None, drawoverlay=None, yflip=False, tostatuses=None, run_main=True, save_after_viewing=None):
"""Interactively display a series of 2D data arrays.
arrays
An iterable of 2D arrays (a 3D array works).
descs
An iterable of text descriptions, one for each array
cadence
The time delay before the next array is shown, in seconds.
tostatuses
An iterable of functions that convert cursor positions to a textual
status output corresponding to that position. FIXME details needed.
toworlds
An iterable of functions that convert cursor positions to a
latitude/longitude pair that is displayed in the status output.
The `tostatuses` keyword is a more generic version of this.
FIXME details needed.
drawoverlay
An optional function that draws an overlay on the display after
the underlying data image is presented. FIXME details needed.
yflip
If true, have the numerical *y* coordinates have 0 refer to the
bottom of the image. Note that the data array is still drawn such
that its first row appears at the top!
run_main
If true, run the Gtk mainloop explicitly so that the function does
not return until the window is closed. If false, no mainloop is run.
If the application happens to already be running a mainloop in the
background, the window will appear and the user will be able to
interact with it while this thread continues executing.
save_after_viewing
If set to a string containing an integer percent-formatting specifier,
the data will be written to a series of PNG files after the window is
closed.
"""
n = len(arrays)
amin = amax = h = w = None
if toworlds is not None and tostatuses is not None:
raise ValueError('only one of "toworlds" and "tostatuses" may be given') # depends on [control=['if'], data=[]]
if descs is None:
descs = [''] * n # depends on [control=['if'], data=['descs']]
for array in arrays:
(thish, thisw) = array.shape
(thismin, thismax) = (array.min(), array.max())
if not np.isfinite(thismin):
thismin = array[np.ma.where(np.isfinite(array))].min() # depends on [control=['if'], data=[]]
if not np.isfinite(thismax):
thismax = array[np.ma.where(np.isfinite(array))].max() # depends on [control=['if'], data=[]]
if amin is None:
(w, h, amin, amax) = (thisw, thish, thismin, thismax) # depends on [control=['if'], data=['amin']]
else:
if thisw != w:
raise ValueError('array widths not all equal') # depends on [control=['if'], data=[]]
if thish != h:
raise ValueError('array heights not all equal') # depends on [control=['if'], data=[]]
amin = min(amin, thismin)
amax = max(amax, thismax) # depends on [control=['for'], data=['array']]
stride = cairo.ImageSurface.format_stride_for_width(cairo.FORMAT_ARGB32, w)
assert stride % 4 == 0 # stride is in bytes
imgdata = np.empty((n, h, stride // 4), dtype=np.uint32)
fixed = np.empty((n, h, w), dtype=np.int32)
antimask = np.empty((n, h, w), dtype=np.bool_)
surfaces = [None] * n
imgdata.fill(4278190080)
for (i, array) in enumerate(arrays):
surfaces[i] = cairo.ImageSurface.create_for_data(imgdata[i], cairo.FORMAT_ARGB32, w, h, stride)
if np.ma.is_masked(array):
filled = array.filled(amin)
antimask[i] = ~array.mask # depends on [control=['if'], data=[]]
else:
filled = array
antimask[i].fill(True)
fixed[i] = (filled - amin) * (268435440 / (amax - amin)) # depends on [control=['for'], data=[]]
def getn():
return n
def getshapei(i):
return (w, h)
def getdesci(i):
return descs[i]
clipped = np.zeros((h, w), dtype=np.int32) # scratch arrays -- two needed
clipped2 = np.zeros((h, w), dtype=np.uint32) # to make numpy ufunc casting happy
def settuningi(i, tunerx, tunery):
np.bitwise_and(imgdata[i], 4278190080, imgdata[i])
fmin = int(268435440 * tunerx)
fmax = int(268435440 * tunery)
if fmin == fmax:
np.add(imgdata[i], 255 * (fixed[i] > fmin).astype(np.uint32), imgdata[i]) # depends on [control=['if'], data=['fmin']]
else:
np.clip(fixed[i], fmin, fmax, clipped)
np.subtract(clipped, fmin, clipped)
np.multiply(clipped, 255.0 / (fmax - fmin), clipped2, casting='unsafe')
np.add(imgdata[i], clipped2, imgdata[i])
np.multiply(imgdata[i], antimask[i], imgdata[i])
def getsurfacei(i, xoffset, yoffset, width, height):
return (surfaces[i], xoffset, yoffset)
# see comment in view()
nomasks = [not np.ma.is_masked(a) or a.mask is np.ma.nomask for a in arrays]
if tostatuses is None:
if toworlds is None:
tostatuses = [None] * n # depends on [control=['if'], data=[]]
else:
from .astutil import fmthours, fmtdeglat
def make_status_func(toworld):
def status(y_and_x):
(lat, lon) = toworld(y_and_x)
return 'lat=%s lon=%s' % (fmtdeglat(lat), fmthours(lon))
return status
tostatuses = [make_status_func(toworlds[i]) for i in range(n)] # depends on [control=['if'], data=['tostatuses']]
def fmtstatusi(i, x, y):
s = ''
row = int(np.floor(y + 0.5))
col = int(np.floor(x + 0.5))
if row >= 0 and col >= 0 and (row < h) and (col < w):
if nomasks[i] or not arrays[i].mask[row, col]:
s += '%g ' % arrays[i][row, col] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if yflip:
y = h - 1 - y
row = h - 1 - row # depends on [control=['if'], data=[]]
s += '[%d,%d] x=%.1f y=%.1f' % (row, col, x, y)
if tostatuses[i] is not None:
s += ' ' + tostatuses[i](np.array([y, x])) # depends on [control=['if'], data=[]]
return s
cycler = Cycler()
cycler.set_n_getter(getn)
cycler.set_shape_getter(getshapei)
cycler.set_desc_getter(getdesci)
cycler.set_tuning_setter(settuningi)
cycler.set_surface_getter(getsurfacei)
cycler.set_status_formatter(fmtstatusi)
cycler.set_overlay_drawer(drawoverlay)
cycler.win.show_all()
if run_main:
cycler.win.connect('destroy', Gtk.main_quit)
Gtk.main() # depends on [control=['if'], data=[]]
else:
cycler.win.connect('destroy', lambda e: cycler.win.destroy())
if save_after_viewing is not None:
for i in range(n):
filename = save_after_viewing % (i,)
settuningi(i, cycler.last_tunerx, cycler.last_tunery)
(surface, xoffset, yoffset) = getsurfacei(i, 0, 0, w, h)
surface.write_to_png(filename) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['save_after_viewing']] |
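A usage sketch for `cycle` above with synthetic frames; it needs a Gtk/cairo display environment, all arrays must share one shape, and the call blocks in the Gtk main loop while `run_main=True`.

```python
import numpy as np

# Five synthetic 128x128 frames with a drifting bright square.
frames = np.random.normal(size=(5, 128, 128))
for i in range(5):
    frames[i, 10 * i:10 * i + 12, 60:72] += 5.0
cycle(frames, descs=['frame %d' % i for i in range(5)], cadence=0.3)
```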
def parse_auth_token(self, auth_token):
"""
Break auth_token up into its constituent values.
**Parameters:**
- **auth_token:** Auth_token string
**Returns:** dict with Auth Token constituents
"""
# remove the random security key value from the front of the auth_token
auth_token_cleaned = auth_token.split('-', 1)[1]
# URL Decode the Auth Token
auth_token_decoded = self.url_decode(auth_token_cleaned)
# Create a new dict to hold the response.
auth_dict = {}
# Parse the token
for key_value in auth_token_decoded.split("&"):
key_value_list = key_value.split("=")
# check for valid token parts
if len(key_value_list) == 2 and type(key_value_list[0]) in [text_type, binary_type]:
auth_dict[key_value_list[0]] = key_value_list[1]
# Return the dict of key/values in the token.
return auth_dict | def function[parse_auth_token, parameter[self, auth_token]]:
constant[
        Break auth_token up into its constituent values.
**Parameters:**
- **auth_token:** Auth_token string
**Returns:** dict with Auth Token constituents
]
variable[auth_token_cleaned] assign[=] call[call[name[auth_token].split, parameter[constant[-], constant[1]]]][constant[1]]
variable[auth_token_decoded] assign[=] call[name[self].url_decode, parameter[name[auth_token_cleaned]]]
variable[auth_dict] assign[=] dictionary[[], []]
for taget[name[key_value]] in starred[call[name[auth_token_decoded].split, parameter[constant[&]]]] begin[:]
variable[key_value_list] assign[=] call[name[key_value].split, parameter[constant[=]]]
if <ast.BoolOp object at 0x7da1b0feeda0> begin[:]
call[name[auth_dict]][call[name[key_value_list]][constant[0]]] assign[=] call[name[key_value_list]][constant[1]]
return[name[auth_dict]] | keyword[def] identifier[parse_auth_token] ( identifier[self] , identifier[auth_token] ):
literal[string]
identifier[auth_token_cleaned] = identifier[auth_token] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
identifier[auth_token_decoded] = identifier[self] . identifier[url_decode] ( identifier[auth_token_cleaned] )
identifier[auth_dict] ={}
keyword[for] identifier[key_value] keyword[in] identifier[auth_token_decoded] . identifier[split] ( literal[string] ):
identifier[key_value_list] = identifier[key_value] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[key_value_list] )== literal[int] keyword[and] identifier[type] ( identifier[key_value_list] [ literal[int] ]) keyword[in] [ identifier[text_type] , identifier[binary_type] ]:
identifier[auth_dict] [ identifier[key_value_list] [ literal[int] ]]= identifier[key_value_list] [ literal[int] ]
keyword[return] identifier[auth_dict] | def parse_auth_token(self, auth_token):
"""
        Break auth_token up into its constituent values.
**Parameters:**
- **auth_token:** Auth_token string
**Returns:** dict with Auth Token constituents
"""
# remove the random security key value from the front of the auth_token
auth_token_cleaned = auth_token.split('-', 1)[1]
# URL Decode the Auth Token
auth_token_decoded = self.url_decode(auth_token_cleaned)
# Create a new dict to hold the response.
auth_dict = {}
# Parse the token
for key_value in auth_token_decoded.split('&'):
key_value_list = key_value.split('=')
# check for valid token parts
if len(key_value_list) == 2 and type(key_value_list[0]) in [text_type, binary_type]:
auth_dict[key_value_list[0]] = key_value_list[1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key_value']]
# Return the dict of key/values in the token.
return auth_dict |
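A minimal usage sketch (the token layout and the bound object `api` are hypothetical; `url_decode` is assumed to be the class's standard percent-decoding helper):

    token = 'x9f2-user%3Djdoe%26tenant%3Dacme'
    api.parse_auth_token(token)
    # -> {'user': 'jdoe', 'tenant': 'acme'}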
def _get_user(self) -> Dict:
"""
Same thing as for `_get_chat()` but for the user related to the
message.
"""
if 'callback_query' in self._update:
return self._update['callback_query']['from']
elif 'inline_query' in self._update:
return self._update['inline_query']['from']
elif 'message' in self._update:
return self._update['message']['from'] | def function[_get_user, parameter[self]]:
constant[
Same thing as for `_get_chat()` but for the user related to the
message.
]
if compare[constant[callback_query] in name[self]._update] begin[:]
return[call[call[name[self]._update][constant[callback_query]]][constant[from]]] | keyword[def] identifier[_get_user] ( identifier[self] )-> identifier[Dict] :
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_update] :
keyword[return] identifier[self] . identifier[_update] [ literal[string] ][ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[_update] :
keyword[return] identifier[self] . identifier[_update] [ literal[string] ][ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[self] . identifier[_update] :
keyword[return] identifier[self] . identifier[_update] [ literal[string] ][ literal[string] ] | def _get_user(self) -> Dict:
"""
Same thing as for `_get_chat()` but for the user related to the
message.
"""
if 'callback_query' in self._update:
return self._update['callback_query']['from'] # depends on [control=['if'], data=[]]
elif 'inline_query' in self._update:
return self._update['inline_query']['from'] # depends on [control=['if'], data=[]]
elif 'message' in self._update:
return self._update['message']['from'] # depends on [control=['if'], data=[]] |
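A short illustration with a hypothetical Telegram-style update dict:

    update = {'message': {'from': {'id': 42, 'first_name': 'Ada'}, 'text': 'hi'}}
    # with self._update == update, _get_user() takes the 'message' branch
    # and returns {'id': 42, 'first_name': 'Ada'}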
def is_struct(declaration):
"""
Returns True if declaration represents a C++ struct
Args:
declaration (declaration_t): the declaration to be checked.
Returns:
bool: True if declaration represents a C++ struct
"""
if not is_class(declaration):
return False
decl = class_traits.get_declaration(declaration)
return decl.class_type == class_declaration.CLASS_TYPES.STRUCT | def function[is_struct, parameter[declaration]]:
constant[
Returns True if declaration represents a C++ struct
Args:
declaration (declaration_t): the declaration to be checked.
Returns:
bool: True if declaration represents a C++ struct
]
if <ast.UnaryOp object at 0x7da1b13040a0> begin[:]
return[constant[False]]
variable[decl] assign[=] call[name[class_traits].get_declaration, parameter[name[declaration]]]
return[compare[name[decl].class_type equal[==] name[class_declaration].CLASS_TYPES.STRUCT]] | keyword[def] identifier[is_struct] ( identifier[declaration] ):
literal[string]
keyword[if] keyword[not] identifier[is_class] ( identifier[declaration] ):
keyword[return] keyword[False]
identifier[decl] = identifier[class_traits] . identifier[get_declaration] ( identifier[declaration] )
keyword[return] identifier[decl] . identifier[class_type] == identifier[class_declaration] . identifier[CLASS_TYPES] . identifier[STRUCT] | def is_struct(declaration):
"""
Returns True if declaration represents a C++ struct
Args:
declaration (declaration_t): the declaration to be checked.
Returns:
bool: True if declaration represents a C++ struct
"""
if not is_class(declaration):
return False # depends on [control=['if'], data=[]]
decl = class_traits.get_declaration(declaration)
return decl.class_type == class_declaration.CLASS_TYPES.STRUCT |
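A hedged usage sketch with pygccxml (the header path and generator configuration are environment-specific assumptions):

    from pygccxml import parser, declarations

    config = parser.xml_generator_configuration_t()  # point at castxml in practice
    decls = parser.parse(['point.h'], config)
    global_ns = declarations.get_global_namespace(decls)
    for cls in global_ns.classes():
        print(cls.name, is_struct(cls))  # True only for declarations parsed as 'struct'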
def getLogger(name='generalLoggerName', dr='', lvl=20, addFH=True, addSH=True):
"""This will either return the logging object already
instantiated, or instantiate a new one and return it.
**Use this function to both create and return any logger** to avoid
accidentally adding additional handlers by using the setUpLogger function
instead.
Args:
name (str): The name for the logging object and
name.log will be the output file written to disk.
lvl (int): The severity level of messages printed to the screen with
the stream handler, default = 20.
addFH (boolean): Add a file handler to this logger? Default severity
level for it will be 1, and it will be named following
name+'.log'. Default = True.
addSH (boolean): Add a stream handler to this logger? Severity set with
the lvl argument. Default = True.
Returns:
log (KMlogger object): A KMlogger object that was either
freshly instantiated or determined to
already exist, then returned.
"""
log = False
try:
log = log_dict[name]
    except KeyError:
        log = setUpLogger(name, dr, lvl, addFH, addSH)
return log | def function[getLogger, parameter[name, dr, lvl, addFH, addSH]]:
constant[This will either return the logging object already
instantiated, or instantiate a new one and return it.
**Use this function to both create and return any logger** to avoid
accidentally adding additional handlers by using the setUpLogger function
instead.
Args:
name (str): The name for the logging object and
name.log will be the output file written to disk.
lvl (int): The severity level of messages printed to the screen with
the stream handler, default = 20.
addFH (boolean): Add a file handler to this logger? Default severity
level for it will be 1, and it will be named following
name+'.log'. Default = True.
addSH (boolean): Add a stream handler to this logger? Severity set with
the lvl argument. Default = True.
Returns:
log (KMlogger object): A KMlogger object that was either
freshly instantiated or determined to
already exist, then returned.
]
variable[log] assign[=] constant[False]
<ast.Try object at 0x7da1b14c6680>
return[name[log]] | keyword[def] identifier[getLogger] ( identifier[name] = literal[string] , identifier[dr] = literal[string] , identifier[lvl] = literal[int] , identifier[addFH] = keyword[True] , identifier[addSH] = keyword[True] ,):
literal[string]
identifier[log] = keyword[False]
keyword[try] :
identifier[log] = identifier[log_dict] [ identifier[name] ]
keyword[except] identifier[KeyError] :
identifier[log] = identifier[setUpLogger] ( identifier[name] , identifier[dr] , identifier[lvl] , identifier[addFH] , identifier[addSH] )
keyword[return] identifier[log] | def getLogger(name='generalLoggerName', dr='', lvl=20, addFH=True, addSH=True):
"""This will either return the logging object already
instantiated, or instantiate a new one and return it.
**Use this function to both create and return any logger** to avoid
accidentally adding additional handlers by using the setUpLogger function
instead.
Args:
name (str): The name for the logging object and
name.log will be the output file written to disk.
lvl (int): The severity level of messages printed to the screen with
the stream handler, default = 20.
addFH (boolean): Add a file handler to this logger? Default severity
level for it will be 1, and it will be named following
name+'.log'. Default = True.
addSH (boolean): Add a stream handler to this logger? Severity set with
the lvl argument. Default = True.
Returns:
log (KMlogger object): A KMlogger object that was either
freshly instantiated or determined to
already exist, then returned.
"""
log = False
try:
log = log_dict[name] # depends on [control=['try'], data=[]]
    except KeyError:
log = setUpLogger(name, dr, lvl, addFH, addSH) # depends on [control=['except'], data=[]]
return log |
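A minimal usage sketch (assumes the module-level log_dict cache and the companion setUpLogger referenced above):

    log = getLogger('demo', dr='/tmp/logs', lvl=30)
    log.info('below the screen level of 30: file handler only')
    log.warning('printed to the screen and written to demo.log')
    assert getLogger('demo') is log  # second call returns the cached logger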
def contains_list(longer, shorter):
"""Check if longer list starts with shorter list"""
if len(longer) <= len(shorter):
return False
for a, b in zip(shorter, longer):
if a != b:
return False
return True | def function[contains_list, parameter[longer, shorter]]:
    constant[Check that longer is strictly longer than shorter and starts with it]
if compare[call[name[len], parameter[name[longer]]] less_or_equal[<=] call[name[len], parameter[name[shorter]]]] begin[:]
return[constant[False]]
for taget[tuple[[<ast.Name object at 0x7da2045669e0>, <ast.Name object at 0x7da204565f00>]]] in starred[call[name[zip], parameter[name[shorter], name[longer]]]] begin[:]
if compare[name[a] not_equal[!=] name[b]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[contains_list] ( identifier[longer] , identifier[shorter] ):
literal[string]
keyword[if] identifier[len] ( identifier[longer] )<= identifier[len] ( identifier[shorter] ):
keyword[return] keyword[False]
keyword[for] identifier[a] , identifier[b] keyword[in] identifier[zip] ( identifier[shorter] , identifier[longer] ):
keyword[if] identifier[a] != identifier[b] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def contains_list(longer, shorter):
"""Check if longer list starts with shorter list"""
if len(longer) <= len(shorter):
return False # depends on [control=['if'], data=[]]
for (a, b) in zip(shorter, longer):
if a != b:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return True |
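Quick examples of the three outcomes:

    contains_list([1, 2, 3], [1, 2])  # True: strictly longer and prefix matches
    contains_list([1, 2], [1, 2])     # False: equal length is rejected
    contains_list([1, 9, 3], [1, 2])  # False: prefix mismatch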
def _gccalc(lon, lat, azimuth, maxdist=None):
"""
Original javascript on http://williams.best.vwh.net/gccalc.htm
Translated into python by Thomas Lecocq
This function is a black box, because trigonometry is difficult
"""
glat1 = lat * np.pi / 180.
glon1 = lon * np.pi / 180.
s = maxdist / 1.852243
faz = azimuth * np.pi / 180.
EPS = 0.00000000005
if ((np.abs(np.cos(glat1)) < EPS) and not (np.abs(np.sin(faz)) < EPS)):
raise CourseException("Only North-South courses are meaningful")
a = 6378.137 / 1.852243
f = 1 / 298.257223563
r = 1 - f
tu = r * np.tan(glat1)
sf = np.sin(faz)
cf = np.cos(faz)
if (cf == 0):
b = 0.
else:
b = 2. * np.arctan2 (tu, cf)
cu = 1. / np.sqrt(1 + tu * tu)
su = tu * cu
sa = cu * sf
c2a = 1 - sa * sa
x = 1. + np.sqrt(1. + c2a * (1. / (r * r) - 1.))
x = (x - 2.) / x
c = 1. - x
c = (x * x / 4. + 1.) / c
d = (0.375 * x * x - 1.) * x
tu = s / (r * a * c)
y = tu
c = y + 1
while (np.abs (y - c) > EPS):
sy = np.sin(y)
cy = np.cos(y)
cz = np.cos(b + y)
e = 2. * cz * cz - 1.
c = y
x = e * cy
y = e + e - 1.
y = (((sy * sy * 4. - 3.) * y * cz * d / 6. + x) *
d / 4. - cz) * sy * d + tu
b = cu * cy * cf - su * sy
c = r * np.sqrt(sa * sa + b * b)
d = su * cy + cu * sy * cf
glat2 = (np.arctan2(d, c) + np.pi) % (2*np.pi) - np.pi
c = cu * cy - su * sy * cf
x = np.arctan2(sy * sf, c)
c = ((-3. * c2a + 4.) * f + 4.) * c2a * f / 16.
d = ((e * cy * c + cz) * sy * c + y) * sa
glon2 = ((glon1 + x - (1. - c) * d * f + np.pi) % (2*np.pi)) - np.pi
baz = (np.arctan2(sa, b) + np.pi) % (2 * np.pi)
glon2 *= 180./np.pi
glat2 *= 180./np.pi
baz *= 180./np.pi
return (glon2, glat2, baz) | def function[_gccalc, parameter[lon, lat, azimuth, maxdist]]:
constant[
Original javascript on http://williams.best.vwh.net/gccalc.htm
Translated into python by Thomas Lecocq
This function is a black box, because trigonometry is difficult
]
variable[glat1] assign[=] binary_operation[binary_operation[name[lat] * name[np].pi] / constant[180.0]]
variable[glon1] assign[=] binary_operation[binary_operation[name[lon] * name[np].pi] / constant[180.0]]
variable[s] assign[=] binary_operation[name[maxdist] / constant[1.852243]]
variable[faz] assign[=] binary_operation[binary_operation[name[azimuth] * name[np].pi] / constant[180.0]]
variable[EPS] assign[=] constant[5e-11]
if <ast.BoolOp object at 0x7da2041d8f70> begin[:]
<ast.Raise object at 0x7da2041dbfa0>
variable[a] assign[=] binary_operation[constant[6378.137] / constant[1.852243]]
variable[f] assign[=] binary_operation[constant[1] / constant[298.257223563]]
variable[r] assign[=] binary_operation[constant[1] - name[f]]
variable[tu] assign[=] binary_operation[name[r] * call[name[np].tan, parameter[name[glat1]]]]
variable[sf] assign[=] call[name[np].sin, parameter[name[faz]]]
variable[cf] assign[=] call[name[np].cos, parameter[name[faz]]]
if compare[name[cf] equal[==] constant[0]] begin[:]
variable[b] assign[=] constant[0.0]
variable[cu] assign[=] binary_operation[constant[1.0] / call[name[np].sqrt, parameter[binary_operation[constant[1] + binary_operation[name[tu] * name[tu]]]]]]
variable[su] assign[=] binary_operation[name[tu] * name[cu]]
variable[sa] assign[=] binary_operation[name[cu] * name[sf]]
variable[c2a] assign[=] binary_operation[constant[1] - binary_operation[name[sa] * name[sa]]]
variable[x] assign[=] binary_operation[constant[1.0] + call[name[np].sqrt, parameter[binary_operation[constant[1.0] + binary_operation[name[c2a] * binary_operation[binary_operation[constant[1.0] / binary_operation[name[r] * name[r]]] - constant[1.0]]]]]]]
variable[x] assign[=] binary_operation[binary_operation[name[x] - constant[2.0]] / name[x]]
variable[c] assign[=] binary_operation[constant[1.0] - name[x]]
variable[c] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[x] * name[x]] / constant[4.0]] + constant[1.0]] / name[c]]
variable[d] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[0.375] * name[x]] * name[x]] - constant[1.0]] * name[x]]
variable[tu] assign[=] binary_operation[name[s] / binary_operation[binary_operation[name[r] * name[a]] * name[c]]]
variable[y] assign[=] name[tu]
variable[c] assign[=] binary_operation[name[y] + constant[1]]
while compare[call[name[np].abs, parameter[binary_operation[name[y] - name[c]]]] greater[>] name[EPS]] begin[:]
variable[sy] assign[=] call[name[np].sin, parameter[name[y]]]
variable[cy] assign[=] call[name[np].cos, parameter[name[y]]]
variable[cz] assign[=] call[name[np].cos, parameter[binary_operation[name[b] + name[y]]]]
variable[e] assign[=] binary_operation[binary_operation[binary_operation[constant[2.0] * name[cz]] * name[cz]] - constant[1.0]]
variable[c] assign[=] name[y]
variable[x] assign[=] binary_operation[name[e] * name[cy]]
variable[y] assign[=] binary_operation[binary_operation[name[e] + name[e]] - constant[1.0]]
variable[y] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[sy] * name[sy]] * constant[4.0]] - constant[3.0]] * name[y]] * name[cz]] * name[d]] / constant[6.0]] + name[x]] * name[d]] / constant[4.0]] - name[cz]] * name[sy]] * name[d]] + name[tu]]
variable[b] assign[=] binary_operation[binary_operation[binary_operation[name[cu] * name[cy]] * name[cf]] - binary_operation[name[su] * name[sy]]]
variable[c] assign[=] binary_operation[name[r] * call[name[np].sqrt, parameter[binary_operation[binary_operation[name[sa] * name[sa]] + binary_operation[name[b] * name[b]]]]]]
variable[d] assign[=] binary_operation[binary_operation[name[su] * name[cy]] + binary_operation[binary_operation[name[cu] * name[sy]] * name[cf]]]
variable[glat2] assign[=] binary_operation[binary_operation[binary_operation[call[name[np].arctan2, parameter[name[d], name[c]]] + name[np].pi] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] * name[np].pi]] - name[np].pi]
variable[c] assign[=] binary_operation[binary_operation[name[cu] * name[cy]] - binary_operation[binary_operation[name[su] * name[sy]] * name[cf]]]
variable[x] assign[=] call[name[np].arctan2, parameter[binary_operation[name[sy] * name[sf]], name[c]]]
variable[c] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da204960610> * name[c2a]] + constant[4.0]] * name[f]] + constant[4.0]] * name[c2a]] * name[f]] / constant[16.0]]
variable[d] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[e] * name[cy]] * name[c]] + name[cz]] * name[sy]] * name[c]] + name[y]] * name[sa]]
variable[glon2] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[glon1] + name[x]] - binary_operation[binary_operation[binary_operation[constant[1.0] - name[c]] * name[d]] * name[f]]] + name[np].pi] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] * name[np].pi]] - name[np].pi]
variable[baz] assign[=] binary_operation[binary_operation[call[name[np].arctan2, parameter[name[sa], name[b]]] + name[np].pi] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] * name[np].pi]]
<ast.AugAssign object at 0x7da18f58c310>
<ast.AugAssign object at 0x7da18f58e590>
<ast.AugAssign object at 0x7da18f58d180>
return[tuple[[<ast.Name object at 0x7da18f58f0d0>, <ast.Name object at 0x7da18f58f520>, <ast.Name object at 0x7da18f58f9d0>]]] | keyword[def] identifier[_gccalc] ( identifier[lon] , identifier[lat] , identifier[azimuth] , identifier[maxdist] = keyword[None] ):
literal[string]
identifier[glat1] = identifier[lat] * identifier[np] . identifier[pi] / literal[int]
identifier[glon1] = identifier[lon] * identifier[np] . identifier[pi] / literal[int]
identifier[s] = identifier[maxdist] / literal[int]
identifier[faz] = identifier[azimuth] * identifier[np] . identifier[pi] / literal[int]
identifier[EPS] = literal[int]
keyword[if] (( identifier[np] . identifier[abs] ( identifier[np] . identifier[cos] ( identifier[glat1] ))< identifier[EPS] ) keyword[and] keyword[not] ( identifier[np] . identifier[abs] ( identifier[np] . identifier[sin] ( identifier[faz] ))< identifier[EPS] )):
keyword[raise] identifier[CourseException] ( literal[string] )
identifier[a] = literal[int] / literal[int]
identifier[f] = literal[int] / literal[int]
identifier[r] = literal[int] - identifier[f]
identifier[tu] = identifier[r] * identifier[np] . identifier[tan] ( identifier[glat1] )
identifier[sf] = identifier[np] . identifier[sin] ( identifier[faz] )
identifier[cf] = identifier[np] . identifier[cos] ( identifier[faz] )
keyword[if] ( identifier[cf] == literal[int] ):
identifier[b] = literal[int]
keyword[else] :
identifier[b] = literal[int] * identifier[np] . identifier[arctan2] ( identifier[tu] , identifier[cf] )
identifier[cu] = literal[int] / identifier[np] . identifier[sqrt] ( literal[int] + identifier[tu] * identifier[tu] )
identifier[su] = identifier[tu] * identifier[cu]
identifier[sa] = identifier[cu] * identifier[sf]
identifier[c2a] = literal[int] - identifier[sa] * identifier[sa]
identifier[x] = literal[int] + identifier[np] . identifier[sqrt] ( literal[int] + identifier[c2a] *( literal[int] /( identifier[r] * identifier[r] )- literal[int] ))
identifier[x] =( identifier[x] - literal[int] )/ identifier[x]
identifier[c] = literal[int] - identifier[x]
identifier[c] =( identifier[x] * identifier[x] / literal[int] + literal[int] )/ identifier[c]
identifier[d] =( literal[int] * identifier[x] * identifier[x] - literal[int] )* identifier[x]
identifier[tu] = identifier[s] /( identifier[r] * identifier[a] * identifier[c] )
identifier[y] = identifier[tu]
identifier[c] = identifier[y] + literal[int]
keyword[while] ( identifier[np] . identifier[abs] ( identifier[y] - identifier[c] )> identifier[EPS] ):
identifier[sy] = identifier[np] . identifier[sin] ( identifier[y] )
identifier[cy] = identifier[np] . identifier[cos] ( identifier[y] )
identifier[cz] = identifier[np] . identifier[cos] ( identifier[b] + identifier[y] )
identifier[e] = literal[int] * identifier[cz] * identifier[cz] - literal[int]
identifier[c] = identifier[y]
identifier[x] = identifier[e] * identifier[cy]
identifier[y] = identifier[e] + identifier[e] - literal[int]
identifier[y] =((( identifier[sy] * identifier[sy] * literal[int] - literal[int] )* identifier[y] * identifier[cz] * identifier[d] / literal[int] + identifier[x] )*
identifier[d] / literal[int] - identifier[cz] )* identifier[sy] * identifier[d] + identifier[tu]
identifier[b] = identifier[cu] * identifier[cy] * identifier[cf] - identifier[su] * identifier[sy]
identifier[c] = identifier[r] * identifier[np] . identifier[sqrt] ( identifier[sa] * identifier[sa] + identifier[b] * identifier[b] )
identifier[d] = identifier[su] * identifier[cy] + identifier[cu] * identifier[sy] * identifier[cf]
identifier[glat2] =( identifier[np] . identifier[arctan2] ( identifier[d] , identifier[c] )+ identifier[np] . identifier[pi] )%( literal[int] * identifier[np] . identifier[pi] )- identifier[np] . identifier[pi]
identifier[c] = identifier[cu] * identifier[cy] - identifier[su] * identifier[sy] * identifier[cf]
identifier[x] = identifier[np] . identifier[arctan2] ( identifier[sy] * identifier[sf] , identifier[c] )
identifier[c] =((- literal[int] * identifier[c2a] + literal[int] )* identifier[f] + literal[int] )* identifier[c2a] * identifier[f] / literal[int]
identifier[d] =(( identifier[e] * identifier[cy] * identifier[c] + identifier[cz] )* identifier[sy] * identifier[c] + identifier[y] )* identifier[sa]
identifier[glon2] =(( identifier[glon1] + identifier[x] -( literal[int] - identifier[c] )* identifier[d] * identifier[f] + identifier[np] . identifier[pi] )%( literal[int] * identifier[np] . identifier[pi] ))- identifier[np] . identifier[pi]
identifier[baz] =( identifier[np] . identifier[arctan2] ( identifier[sa] , identifier[b] )+ identifier[np] . identifier[pi] )%( literal[int] * identifier[np] . identifier[pi] )
identifier[glon2] *= literal[int] / identifier[np] . identifier[pi]
identifier[glat2] *= literal[int] / identifier[np] . identifier[pi]
identifier[baz] *= literal[int] / identifier[np] . identifier[pi]
keyword[return] ( identifier[glon2] , identifier[glat2] , identifier[baz] ) | def _gccalc(lon, lat, azimuth, maxdist=None):
"""
Original javascript on http://williams.best.vwh.net/gccalc.htm
Translated into python by Thomas Lecocq
This function is a black box, because trigonometry is difficult
"""
glat1 = lat * np.pi / 180.0
glon1 = lon * np.pi / 180.0
s = maxdist / 1.852243
faz = azimuth * np.pi / 180.0
EPS = 5e-11
if np.abs(np.cos(glat1)) < EPS and (not np.abs(np.sin(faz)) < EPS):
raise CourseException('Only North-South courses are meaningful') # depends on [control=['if'], data=[]]
a = 6378.137 / 1.852243
f = 1 / 298.257223563
r = 1 - f
tu = r * np.tan(glat1)
sf = np.sin(faz)
cf = np.cos(faz)
if cf == 0:
b = 0.0 # depends on [control=['if'], data=[]]
else:
b = 2.0 * np.arctan2(tu, cf)
cu = 1.0 / np.sqrt(1 + tu * tu)
su = tu * cu
sa = cu * sf
c2a = 1 - sa * sa
x = 1.0 + np.sqrt(1.0 + c2a * (1.0 / (r * r) - 1.0))
x = (x - 2.0) / x
c = 1.0 - x
c = (x * x / 4.0 + 1.0) / c
d = (0.375 * x * x - 1.0) * x
tu = s / (r * a * c)
y = tu
c = y + 1
while np.abs(y - c) > EPS:
sy = np.sin(y)
cy = np.cos(y)
cz = np.cos(b + y)
e = 2.0 * cz * cz - 1.0
c = y
x = e * cy
y = e + e - 1.0
y = (((sy * sy * 4.0 - 3.0) * y * cz * d / 6.0 + x) * d / 4.0 - cz) * sy * d + tu # depends on [control=['while'], data=[]]
b = cu * cy * cf - su * sy
c = r * np.sqrt(sa * sa + b * b)
d = su * cy + cu * sy * cf
glat2 = (np.arctan2(d, c) + np.pi) % (2 * np.pi) - np.pi
c = cu * cy - su * sy * cf
x = np.arctan2(sy * sf, c)
c = ((-3.0 * c2a + 4.0) * f + 4.0) * c2a * f / 16.0
d = ((e * cy * c + cz) * sy * c + y) * sa
glon2 = (glon1 + x - (1.0 - c) * d * f + np.pi) % (2 * np.pi) - np.pi
baz = (np.arctan2(sa, b) + np.pi) % (2 * np.pi)
glon2 *= 180.0 / np.pi
glat2 *= 180.0 / np.pi
baz *= 180.0 / np.pi
return (glon2, glat2, baz) |
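A minimal sketch: the endpoint and back azimuth 100 km due east of (lon 0, lat 45). The 1.852243 conversions above imply maxdist is in kilometres; lon, lat and azimuth are in degrees.

    lon2, lat2, baz = _gccalc(lon=0.0, lat=45.0, azimuth=90.0, maxdist=100.0)
    # roughly lon2 = 1.27, lat2 = 44.99, baz = 270.9 on the WGS84 ellipsoid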
def run_forever(self):
"""Run the bot, blocking forever."""
res = self.slack.rtm.start()
self.log.info("current channels: %s",
','.join(c['name'] for c in res.body['channels']
if c['is_member']))
self.id = res.body['self']['id']
self.name = res.body['self']['name']
self.my_mention = "<@%s>" % self.id
self.ws = websocket.WebSocketApp(
res.body['url'],
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close,
on_open=self._on_open)
self.prepare_connection(self.config)
self.ws.run_forever() | def function[run_forever, parameter[self]]:
constant[Run the bot, blocking forever.]
variable[res] assign[=] call[name[self].slack.rtm.start, parameter[]]
call[name[self].log.info, parameter[constant[current channels: %s], call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da1b191c370>]]]]
name[self].id assign[=] call[call[name[res].body][constant[self]]][constant[id]]
name[self].name assign[=] call[call[name[res].body][constant[self]]][constant[name]]
name[self].my_mention assign[=] binary_operation[constant[<@%s>] <ast.Mod object at 0x7da2590d6920> name[self].id]
name[self].ws assign[=] call[name[websocket].WebSocketApp, parameter[call[name[res].body][constant[url]]]]
call[name[self].prepare_connection, parameter[name[self].config]]
call[name[self].ws.run_forever, parameter[]] | keyword[def] identifier[run_forever] ( identifier[self] ):
literal[string]
identifier[res] = identifier[self] . identifier[slack] . identifier[rtm] . identifier[start] ()
identifier[self] . identifier[log] . identifier[info] ( literal[string] ,
literal[string] . identifier[join] ( identifier[c] [ literal[string] ] keyword[for] identifier[c] keyword[in] identifier[res] . identifier[body] [ literal[string] ]
keyword[if] identifier[c] [ literal[string] ]))
identifier[self] . identifier[id] = identifier[res] . identifier[body] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[name] = identifier[res] . identifier[body] [ literal[string] ][ literal[string] ]
identifier[self] . identifier[my_mention] = literal[string] % identifier[self] . identifier[id]
identifier[self] . identifier[ws] = identifier[websocket] . identifier[WebSocketApp] (
identifier[res] . identifier[body] [ literal[string] ],
identifier[on_message] = identifier[self] . identifier[_on_message] ,
identifier[on_error] = identifier[self] . identifier[_on_error] ,
identifier[on_close] = identifier[self] . identifier[_on_close] ,
identifier[on_open] = identifier[self] . identifier[_on_open] )
identifier[self] . identifier[prepare_connection] ( identifier[self] . identifier[config] )
identifier[self] . identifier[ws] . identifier[run_forever] () | def run_forever(self):
"""Run the bot, blocking forever."""
res = self.slack.rtm.start()
self.log.info('current channels: %s', ','.join((c['name'] for c in res.body['channels'] if c['is_member'])))
self.id = res.body['self']['id']
self.name = res.body['self']['name']
self.my_mention = '<@%s>' % self.id
self.ws = websocket.WebSocketApp(res.body['url'], on_message=self._on_message, on_error=self._on_error, on_close=self._on_close, on_open=self._on_open)
self.prepare_connection(self.config)
self.ws.run_forever() |
def unseen_videos_reset(self):
"""Reset the unseen videos counter."""
url = RESET_CAM_ENDPOINT.format(self.unique_id)
ret = self._session.query(url).get('success')
return ret | def function[unseen_videos_reset, parameter[self]]:
constant[Reset the unseen videos counter.]
variable[url] assign[=] call[name[RESET_CAM_ENDPOINT].format, parameter[name[self].unique_id]]
variable[ret] assign[=] call[call[name[self]._session.query, parameter[name[url]]].get, parameter[constant[success]]]
return[name[ret]] | keyword[def] identifier[unseen_videos_reset] ( identifier[self] ):
literal[string]
identifier[url] = identifier[RESET_CAM_ENDPOINT] . identifier[format] ( identifier[self] . identifier[unique_id] )
identifier[ret] = identifier[self] . identifier[_session] . identifier[query] ( identifier[url] ). identifier[get] ( literal[string] )
keyword[return] identifier[ret] | def unseen_videos_reset(self):
"""Reset the unseen videos counter."""
url = RESET_CAM_ENDPOINT.format(self.unique_id)
ret = self._session.query(url).get('success')
return ret |
def left_release(self, event):
"""
Callback for the release of the left button.
:param event: Tkinter event
"""
self.config(cursor="")
if len(self.canvas.find_withtag("current")) != 0 and self.current is not None:
self.canvas.itemconfigure(tk.CURRENT, fill=self.item_colors[self.current][1]) | def function[left_release, parameter[self, event]]:
constant[
Callback for the release of the left button.
:param event: Tkinter event
]
call[name[self].config, parameter[]]
if <ast.BoolOp object at 0x7da1b2362e00> begin[:]
call[name[self].canvas.itemconfigure, parameter[name[tk].CURRENT]] | keyword[def] identifier[left_release] ( identifier[self] , identifier[event] ):
literal[string]
identifier[self] . identifier[config] ( identifier[cursor] = literal[string] )
keyword[if] identifier[len] ( identifier[self] . identifier[canvas] . identifier[find_withtag] ( literal[string] ))!= literal[int] keyword[and] identifier[self] . identifier[current] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[canvas] . identifier[itemconfigure] ( identifier[tk] . identifier[CURRENT] , identifier[fill] = identifier[self] . identifier[item_colors] [ identifier[self] . identifier[current] ][ literal[int] ]) | def left_release(self, event):
"""
Callback for the release of the left button.
:param event: Tkinter event
"""
self.config(cursor='')
if len(self.canvas.find_withtag('current')) != 0 and self.current is not None:
self.canvas.itemconfigure(tk.CURRENT, fill=self.item_colors[self.current][1]) # depends on [control=['if'], data=[]] |
def get_all(jail=None):
'''
Return a list of all available services
.. versionchanged:: 2016.3.4
jail: optional jid or jail name
CLI Example:
.. code-block:: bash
salt '*' service.get_all
'''
ret = []
service = _cmd(jail)
for srv in __salt__['cmd.run']('{0} -l'.format(service)).splitlines():
if not srv.isupper():
ret.append(srv)
return sorted(ret) | def function[get_all, parameter[jail]]:
constant[
Return a list of all available services
.. versionchanged:: 2016.3.4
jail: optional jid or jail name
CLI Example:
.. code-block:: bash
salt '*' service.get_all
]
variable[ret] assign[=] list[[]]
variable[service] assign[=] call[name[_cmd], parameter[name[jail]]]
for taget[name[srv]] in starred[call[call[call[name[__salt__]][constant[cmd.run]], parameter[call[constant[{0} -l].format, parameter[name[service]]]]].splitlines, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da1b2169ba0> begin[:]
call[name[ret].append, parameter[name[srv]]]
return[call[name[sorted], parameter[name[ret]]]] | keyword[def] identifier[get_all] ( identifier[jail] = keyword[None] ):
literal[string]
identifier[ret] =[]
identifier[service] = identifier[_cmd] ( identifier[jail] )
keyword[for] identifier[srv] keyword[in] identifier[__salt__] [ literal[string] ]( literal[string] . identifier[format] ( identifier[service] )). identifier[splitlines] ():
keyword[if] keyword[not] identifier[srv] . identifier[isupper] ():
identifier[ret] . identifier[append] ( identifier[srv] )
keyword[return] identifier[sorted] ( identifier[ret] ) | def get_all(jail=None):
"""
Return a list of all available services
.. versionchanged:: 2016.3.4
jail: optional jid or jail name
CLI Example:
.. code-block:: bash
salt '*' service.get_all
"""
ret = []
service = _cmd(jail)
for srv in __salt__['cmd.run']('{0} -l'.format(service)).splitlines():
if not srv.isupper():
ret.append(srv) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['srv']]
return sorted(ret) |
def require_attribute(self, attribute: str, typ: Type = _Any) -> None:
"""Require an attribute on the node to exist.
If `typ` is given, the attribute must have this type.
Args:
attribute: The name of the attribute / mapping key.
typ: The type the attribute must have.
"""
attr_nodes = [
value_node for key_node, value_node in self.yaml_node.value
if key_node.value == attribute
]
if len(attr_nodes) == 0:
raise RecognitionError(
('{}{}Missing required attribute {}').format(
self.yaml_node.start_mark, os.linesep, attribute))
attr_node = attr_nodes[0]
if typ != _Any:
recognized_types, message = self.__recognizer.recognize(
attr_node, cast(Type, typ))
if len(recognized_types) == 0:
raise RecognitionError(message) | def function[require_attribute, parameter[self, attribute, typ]]:
constant[Require an attribute on the node to exist.
If `typ` is given, the attribute must have this type.
Args:
attribute: The name of the attribute / mapping key.
typ: The type the attribute must have.
]
variable[attr_nodes] assign[=] <ast.ListComp object at 0x7da20c6c6f50>
if compare[call[name[len], parameter[name[attr_nodes]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c6c5390>
variable[attr_node] assign[=] call[name[attr_nodes]][constant[0]]
if compare[name[typ] not_equal[!=] name[_Any]] begin[:]
<ast.Tuple object at 0x7da20c6c47f0> assign[=] call[name[self].__recognizer.recognize, parameter[name[attr_node], call[name[cast], parameter[name[Type], name[typ]]]]]
if compare[call[name[len], parameter[name[recognized_types]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c6c70a0> | keyword[def] identifier[require_attribute] ( identifier[self] , identifier[attribute] : identifier[str] , identifier[typ] : identifier[Type] = identifier[_Any] )-> keyword[None] :
literal[string]
identifier[attr_nodes] =[
identifier[value_node] keyword[for] identifier[key_node] , identifier[value_node] keyword[in] identifier[self] . identifier[yaml_node] . identifier[value]
keyword[if] identifier[key_node] . identifier[value] == identifier[attribute]
]
keyword[if] identifier[len] ( identifier[attr_nodes] )== literal[int] :
keyword[raise] identifier[RecognitionError] (
( literal[string] ). identifier[format] (
identifier[self] . identifier[yaml_node] . identifier[start_mark] , identifier[os] . identifier[linesep] , identifier[attribute] ))
identifier[attr_node] = identifier[attr_nodes] [ literal[int] ]
keyword[if] identifier[typ] != identifier[_Any] :
identifier[recognized_types] , identifier[message] = identifier[self] . identifier[__recognizer] . identifier[recognize] (
identifier[attr_node] , identifier[cast] ( identifier[Type] , identifier[typ] ))
keyword[if] identifier[len] ( identifier[recognized_types] )== literal[int] :
keyword[raise] identifier[RecognitionError] ( identifier[message] ) | def require_attribute(self, attribute: str, typ: Type=_Any) -> None:
"""Require an attribute on the node to exist.
If `typ` is given, the attribute must have this type.
Args:
attribute: The name of the attribute / mapping key.
typ: The type the attribute must have.
"""
attr_nodes = [value_node for (key_node, value_node) in self.yaml_node.value if key_node.value == attribute]
if len(attr_nodes) == 0:
raise RecognitionError('{}{}Missing required attribute {}'.format(self.yaml_node.start_mark, os.linesep, attribute)) # depends on [control=['if'], data=[]]
attr_node = attr_nodes[0]
if typ != _Any:
(recognized_types, message) = self.__recognizer.recognize(attr_node, cast(Type, typ))
if len(recognized_types) == 0:
raise RecognitionError(message) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['typ']] |
def cudnnSetTensor4dDescriptor(tensorDesc, format, dataType, n, c, h, w):
"""
Initialize a previously created Tensor 4D object.
This function initializes a previously created Tensor4D descriptor object. The strides of
the four dimensions are inferred from the format parameter and set in such a way that
the data is contiguous in memory with no padding between dimensions.
Parameters
----------
tensorDesc : cudnnTensorDescriptor
Handle to a previously created tensor descriptor.
format : cudnnTensorFormat
Type of format.
dataType : cudnnDataType
Data type.
n : int
Number of images.
c : int
Number of feature maps per image.
h : int
Height of each feature map.
w : int
Width of each feature map.
"""
status = _libcudnn.cudnnSetTensor4dDescriptor(tensorDesc, format, dataType,
n, c, h, w)
cudnnCheckStatus(status) | def function[cudnnSetTensor4dDescriptor, parameter[tensorDesc, format, dataType, n, c, h, w]]:
constant[
Initialize a previously created Tensor 4D object.
This function initializes a previously created Tensor4D descriptor object. The strides of
the four dimensions are inferred from the format parameter and set in such a way that
the data is contiguous in memory with no padding between dimensions.
Parameters
----------
tensorDesc : cudnnTensorDescriptor
Handle to a previously created tensor descriptor.
format : cudnnTensorFormat
Type of format.
dataType : cudnnDataType
Data type.
n : int
Number of images.
c : int
Number of feature maps per image.
h : int
Height of each feature map.
w : int
Width of each feature map.
]
variable[status] assign[=] call[name[_libcudnn].cudnnSetTensor4dDescriptor, parameter[name[tensorDesc], name[format], name[dataType], name[n], name[c], name[h], name[w]]]
call[name[cudnnCheckStatus], parameter[name[status]]] | keyword[def] identifier[cudnnSetTensor4dDescriptor] ( identifier[tensorDesc] , identifier[format] , identifier[dataType] , identifier[n] , identifier[c] , identifier[h] , identifier[w] ):
literal[string]
identifier[status] = identifier[_libcudnn] . identifier[cudnnSetTensor4dDescriptor] ( identifier[tensorDesc] , identifier[format] , identifier[dataType] ,
identifier[n] , identifier[c] , identifier[h] , identifier[w] )
identifier[cudnnCheckStatus] ( identifier[status] ) | def cudnnSetTensor4dDescriptor(tensorDesc, format, dataType, n, c, h, w):
"""
Initialize a previously created Tensor 4D object.
This function initializes a previously created Tensor4D descriptor object. The strides of
the four dimensions are inferred from the format parameter and set in such a way that
the data is contiguous in memory with no padding between dimensions.
Parameters
----------
tensorDesc : cudnnTensorDescriptor
Handle to a previously created tensor descriptor.
format : cudnnTensorFormat
Type of format.
dataType : cudnnDataType
Data type.
n : int
Number of images.
c : int
Number of feature maps per image.
h : int
Height of each feature map.
w : int
Width of each feature map.
"""
status = _libcudnn.cudnnSetTensor4dDescriptor(tensorDesc, format, dataType, n, c, h, w)
cudnnCheckStatus(status) |
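A hedged sketch, assuming the companion wrappers in the same module (cudnnCreateTensorDescriptor, the cudnnTensorFormat/cudnnDataType enum dicts, cudnnDestroyTensorDescriptor) follow the usual cudnn-python-wrappers naming:

    desc = cudnnCreateTensorDescriptor()
    cudnnSetTensor4dDescriptor(desc,
                               cudnnTensorFormat['CUDNN_TENSOR_NCHW'],
                               cudnnDataType['CUDNN_DATA_FLOAT'],
                               16, 3, 224, 224)  # NCHW: batch, maps, height, width
    cudnnDestroyTensorDescriptor(desc)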
def fuzzy_subset(str_):
"""
converts a string into an argument to list_take
"""
if str_ is None:
return str_
if ':' in str_:
return smart_cast(str_, slice)
if str_.startswith('['):
return smart_cast(str_[1:-1], list)
else:
return smart_cast(str_, list) | def function[fuzzy_subset, parameter[str_]]:
constant[
converts a string into an argument to list_take
]
if compare[name[str_] is constant[None]] begin[:]
return[name[str_]]
if compare[constant[:] in name[str_]] begin[:]
return[call[name[smart_cast], parameter[name[str_], name[slice]]]]
if call[name[str_].startswith, parameter[constant[[]]] begin[:]
return[call[name[smart_cast], parameter[call[name[str_]][<ast.Slice object at 0x7da1b24e24a0>], name[list]]]] | keyword[def] identifier[fuzzy_subset] ( identifier[str_] ):
literal[string]
keyword[if] identifier[str_] keyword[is] keyword[None] :
keyword[return] identifier[str_]
keyword[if] literal[string] keyword[in] identifier[str_] :
keyword[return] identifier[smart_cast] ( identifier[str_] , identifier[slice] )
keyword[if] identifier[str_] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[smart_cast] ( identifier[str_] [ literal[int] :- literal[int] ], identifier[list] )
keyword[else] :
keyword[return] identifier[smart_cast] ( identifier[str_] , identifier[list] ) | def fuzzy_subset(str_):
"""
converts a string into an argument to list_take
"""
if str_ is None:
return str_ # depends on [control=['if'], data=['str_']]
if ':' in str_:
return smart_cast(str_, slice) # depends on [control=['if'], data=['str_']]
if str_.startswith('['):
return smart_cast(str_[1:-1], list) # depends on [control=['if'], data=[]]
else:
return smart_cast(str_, list) |
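Examples of each branch (assuming smart_cast parses ':'-separated slices and comma-separated lists, as its use here implies):

    fuzzy_subset('3:7')      # -> slice(3, 7)
    fuzzy_subset('[1,2,5]')  # -> [1, 2, 5], surrounding brackets stripped
    fuzzy_subset('1,2,5')    # -> [1, 2, 5]
    fuzzy_subset(None)       # -> None, passed straight through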
def besttype(x, encoding="utf-8", percentify=True):
"""Convert string x to the most useful type, i.e. int, float or unicode string.
If x is a quoted string (single or double quotes) then the quotes are
stripped and the enclosed string returned. The string can contain any
number of quotes, it is only important that it begins and ends with either
single or double quotes.
*percentify* = ``True`` turns "34.4%" into the float 0.344.
.. Note::
Strings will be returned as Unicode strings (using
:func:`unicode`), based on the *encoding* argument, which is
utf-8 by default.
"""
def unicodify(x):
return to_unicode(x, encoding)
def percent(x):
try:
if x.endswith("%"):
x = float(x[:-1]) / 100.
else:
raise ValueError
except (AttributeError, ValueError):
raise ValueError
return x
x = unicodify(x) # make unicode as soon as possible
try:
x = x.strip()
except AttributeError:
pass
m = re.match(r"""(?P<quote>['"])(?P<value>.*)(?P=quote)$""", x) # matches "<value>" or '<value>' where <value> COULD contain " or '!
if m is None:
# not a quoted string, try different types
for converter in int, float, percent, unicodify: # try them in increasing order of lenience
try:
return converter(x)
except ValueError:
pass
else:
# quoted string
x = unicodify(m.group('value'))
return x | def function[besttype, parameter[x, encoding, percentify]]:
constant[Convert string x to the most useful type, i.e. int, float or unicode string.
If x is a quoted string (single or double quotes) then the quotes are
stripped and the enclosed string returned. The string can contain any
number of quotes, it is only important that it begins and ends with either
single or double quotes.
*percentify* = ``True`` turns "34.4%" into the float 0.344.
.. Note::
Strings will be returned as Unicode strings (using
:func:`unicode`), based on the *encoding* argument, which is
utf-8 by default.
]
def function[unicodify, parameter[x]]:
return[call[name[to_unicode], parameter[name[x], name[encoding]]]]
def function[percent, parameter[x]]:
<ast.Try object at 0x7da20cabebf0>
return[name[x]]
variable[x] assign[=] call[name[unicodify], parameter[name[x]]]
<ast.Try object at 0x7da20cabf400>
variable[m] assign[=] call[name[re].match, parameter[constant[(?P<quote>['"])(?P<value>.*)(?P=quote)$], name[x]]]
if compare[name[m] is constant[None]] begin[:]
for taget[name[converter]] in starred[tuple[[<ast.Name object at 0x7da20cabfb20>, <ast.Name object at 0x7da20cabc6d0>, <ast.Name object at 0x7da20cabfe50>, <ast.Name object at 0x7da20cabc370>]]] begin[:]
<ast.Try object at 0x7da20c796650>
return[name[x]] | keyword[def] identifier[besttype] ( identifier[x] , identifier[encoding] = literal[string] , identifier[percentify] = keyword[True] ):
literal[string]
keyword[def] identifier[unicodify] ( identifier[x] ):
keyword[return] identifier[to_unicode] ( identifier[x] , identifier[encoding] )
keyword[def] identifier[percent] ( identifier[x] ):
keyword[try] :
keyword[if] identifier[x] . identifier[endswith] ( literal[string] ):
identifier[x] = identifier[float] ( identifier[x] [:- literal[int] ])/ literal[int]
keyword[else] :
keyword[raise] identifier[ValueError]
keyword[except] ( identifier[AttributeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError]
keyword[return] identifier[x]
identifier[x] = identifier[unicodify] ( identifier[x] )
keyword[try] :
identifier[x] = identifier[x] . identifier[strip] ()
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[x] )
keyword[if] identifier[m] keyword[is] keyword[None] :
keyword[for] identifier[converter] keyword[in] identifier[int] , identifier[float] , identifier[percent] , identifier[unicodify] :
keyword[try] :
keyword[return] identifier[converter] ( identifier[x] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[else] :
identifier[x] = identifier[unicodify] ( identifier[m] . identifier[group] ( literal[string] ))
keyword[return] identifier[x] | def besttype(x, encoding='utf-8', percentify=True):
"""Convert string x to the most useful type, i.e. int, float or unicode string.
If x is a quoted string (single or double quotes) then the quotes are
stripped and the enclosed string returned. The string can contain any
number of quotes, it is only important that it begins and ends with either
single or double quotes.
*percentify* = ``True`` turns "34.4%" into the float 0.344.
.. Note::
Strings will be returned as Unicode strings (using
:func:`unicode`), based on the *encoding* argument, which is
utf-8 by default.
"""
def unicodify(x):
return to_unicode(x, encoding)
def percent(x):
try:
if x.endswith('%'):
x = float(x[:-1]) / 100.0 # depends on [control=['if'], data=[]]
else:
raise ValueError # depends on [control=['try'], data=[]]
except (AttributeError, ValueError):
raise ValueError # depends on [control=['except'], data=[]]
return x
x = unicodify(x) # make unicode as soon as possible
try:
x = x.strip() # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
m = re.match('(?P<quote>[\'"])(?P<value>.*)(?P=quote)$', x) # matches "<value>" or '<value>' where <value> COULD contain " or '!
if m is None:
# not a quoted string, try different types
for converter in (int, float, percent, unicodify): # try them in increasing order of lenience
try:
return converter(x) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['converter']] # depends on [control=['if'], data=[]]
else:
# quoted string
x = unicodify(m.group('value'))
return x |
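Examples, in the converters' order of increasing lenience:

    besttype('42')       # -> 42
    besttype(' 3.5 ')    # -> 3.5 after stripping
    besttype('34.4%')    # -> 0.344 via the percent converter
    besttype('"a str"')  # -> 'a str' (as unicode), quotes removed by the regex
    besttype('hello')    # -> 'hello'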
def get(self, queue_get):
"""
        Merge the given consumer states into this one.
        This function is used for merging the results of parallelized MC runs.
The first state is used for merging in place. The states must be disjoint.
:param object queue_get: second consumer state
"""
for (c, cs) in izip(self.consumers, queue_get):
c.get(cs)
self.result = [c.result for c in self.consumers] | def function[get, parameter[self, queue_get]]:
constant[
        Merge the given consumer states into this one.
        This function is used for merging the results of parallelized MC runs.
The first state is used for merging in place. The states must be disjoint.
:param object queue_get: second consumer state
]
for taget[tuple[[<ast.Name object at 0x7da20cabee00>, <ast.Name object at 0x7da20cabd330>]]] in starred[call[name[izip], parameter[name[self].consumers, name[queue_get]]]] begin[:]
call[name[c].get, parameter[name[cs]]]
name[self].result assign[=] <ast.ListComp object at 0x7da20cabfbb0> | keyword[def] identifier[get] ( identifier[self] , identifier[queue_get] ):
literal[string]
keyword[for] ( identifier[c] , identifier[cs] ) keyword[in] identifier[izip] ( identifier[self] . identifier[consumers] , identifier[queue_get] ):
identifier[c] . identifier[get] ( identifier[cs] )
identifier[self] . identifier[result] =[ identifier[c] . identifier[result] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[consumers] ] | def get(self, queue_get):
"""
        Merge the given consumer states into this one.
        This function is used for merging the results of parallelized MC runs.
The first state is used for merging in place. The states must be disjoint.
:param object queue_get: second consumer state
"""
for (c, cs) in izip(self.consumers, queue_get):
c.get(cs) # depends on [control=['for'], data=[]]
self.result = [c.result for c in self.consumers] |
def bindmount(self, mountpoint):
"""Bind mounts the volume to another mountpoint. Only works if the volume is already mounted.
:raises NotMountedError: when the volume is not yet mounted
:raises SubsystemError: when the underlying command failed
"""
if not self.mountpoint:
raise NotMountedError(self)
try:
_util.check_call_(['mount', '--bind', self.mountpoint, mountpoint], stdout=subprocess.PIPE)
if 'bindmounts' in self._paths:
self._paths['bindmounts'].append(mountpoint)
else:
self._paths['bindmounts'] = [mountpoint]
return True
except Exception as e:
logger.exception("Error bind mounting {0}.".format(self))
raise SubsystemError(e) | def function[bindmount, parameter[self, mountpoint]]:
constant[Bind mounts the volume to another mountpoint. Only works if the volume is already mounted.
:raises NotMountedError: when the volume is not yet mounted
:raises SubsystemError: when the underlying command failed
]
if <ast.UnaryOp object at 0x7da1b0405570> begin[:]
<ast.Raise object at 0x7da1b0405150>
<ast.Try object at 0x7da1b0407220> | keyword[def] identifier[bindmount] ( identifier[self] , identifier[mountpoint] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[mountpoint] :
keyword[raise] identifier[NotMountedError] ( identifier[self] )
keyword[try] :
identifier[_util] . identifier[check_call_] ([ literal[string] , literal[string] , identifier[self] . identifier[mountpoint] , identifier[mountpoint] ], identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
keyword[if] literal[string] keyword[in] identifier[self] . identifier[_paths] :
identifier[self] . identifier[_paths] [ literal[string] ]. identifier[append] ( identifier[mountpoint] )
keyword[else] :
identifier[self] . identifier[_paths] [ literal[string] ]=[ identifier[mountpoint] ]
keyword[return] keyword[True]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] ( literal[string] . identifier[format] ( identifier[self] ))
keyword[raise] identifier[SubsystemError] ( identifier[e] ) | def bindmount(self, mountpoint):
"""Bind mounts the volume to another mountpoint. Only works if the volume is already mounted.
:raises NotMountedError: when the volume is not yet mounted
:raises SubsystemError: when the underlying command failed
"""
if not self.mountpoint:
raise NotMountedError(self) # depends on [control=['if'], data=[]]
try:
_util.check_call_(['mount', '--bind', self.mountpoint, mountpoint], stdout=subprocess.PIPE)
if 'bindmounts' in self._paths:
self._paths['bindmounts'].append(mountpoint) # depends on [control=['if'], data=[]]
else:
self._paths['bindmounts'] = [mountpoint]
return True # depends on [control=['try'], data=[]]
except Exception as e:
logger.exception('Error bind mounting {0}.'.format(self))
raise SubsystemError(e) # depends on [control=['except'], data=['e']] |
def execute(self):
"""
Execute the http request to the export service
        :return: ads-classic formatted export string
"""
url = os.path.join(self.HTTP_ENDPOINT, self.format)
self.response = ExportResponse.load_http_response(
self.session.post(url, data=self.json_payload)
)
return self.response.result | def function[execute, parameter[self]]:
constant[
Execute the http request to the export service
        :return: ads-classic formatted export string
]
variable[url] assign[=] call[name[os].path.join, parameter[name[self].HTTP_ENDPOINT, name[self].format]]
name[self].response assign[=] call[name[ExportResponse].load_http_response, parameter[call[name[self].session.post, parameter[name[url]]]]]
return[name[self].response.result] | keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
identifier[url] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[HTTP_ENDPOINT] , identifier[self] . identifier[format] )
identifier[self] . identifier[response] = identifier[ExportResponse] . identifier[load_http_response] (
identifier[self] . identifier[session] . identifier[post] ( identifier[url] , identifier[data] = identifier[self] . identifier[json_payload] )
)
keyword[return] identifier[self] . identifier[response] . identifier[result] | def execute(self):
"""
Execute the http request to the export service
        :return: ads-classic formatted export string
"""
url = os.path.join(self.HTTP_ENDPOINT, self.format)
self.response = ExportResponse.load_http_response(self.session.post(url, data=self.json_payload))
return self.response.result |
def get_an(n, mc, dl, F, e):
"""
Compute a_n from Eq. 22 of Taylor et al. (2015).
:param n: Harmonic number
:param mc: Chirp mass of binary [Solar Mass]
:param dl: Luminosity distance [Mpc]
:param F: Orbital frequency of binary [Hz]
:param e: Orbital Eccentricity
:returns: a_n
"""
# convert to seconds
mc *= SOLAR2S
dl *= MPC2S
omega = 2 * np.pi * F
amp = n * mc**(5/3) * omega**(2/3) / dl
ret = -amp * (ss.jn(n-2,n*e) - 2*e*ss.jn(n-1,n*e) +
(2/n)*ss.jn(n,n*e) + 2*e*ss.jn(n+1,n*e) -
ss.jn(n+2,n*e))
return ret | def function[get_an, parameter[n, mc, dl, F, e]]:
constant[
Compute a_n from Eq. 22 of Taylor et al. (2015).
:param n: Harmonic number
:param mc: Chirp mass of binary [Solar Mass]
:param dl: Luminosity distance [Mpc]
:param F: Orbital frequency of binary [Hz]
:param e: Orbital Eccentricity
:returns: a_n
]
<ast.AugAssign object at 0x7da2044c2080>
<ast.AugAssign object at 0x7da2044c15d0>
variable[omega] assign[=] binary_operation[binary_operation[constant[2] * name[np].pi] * name[F]]
variable[amp] assign[=] binary_operation[binary_operation[binary_operation[name[n] * binary_operation[name[mc] ** binary_operation[constant[5] / constant[3]]]] * binary_operation[name[omega] ** binary_operation[constant[2] / constant[3]]]] / name[dl]]
variable[ret] assign[=] binary_operation[<ast.UnaryOp object at 0x7da2044c3520> * binary_operation[binary_operation[binary_operation[binary_operation[call[name[ss].jn, parameter[binary_operation[name[n] - constant[2]], binary_operation[name[n] * name[e]]]] - binary_operation[binary_operation[constant[2] * name[e]] * call[name[ss].jn, parameter[binary_operation[name[n] - constant[1]], binary_operation[name[n] * name[e]]]]]] + binary_operation[binary_operation[constant[2] / name[n]] * call[name[ss].jn, parameter[name[n], binary_operation[name[n] * name[e]]]]]] + binary_operation[binary_operation[constant[2] * name[e]] * call[name[ss].jn, parameter[binary_operation[name[n] + constant[1]], binary_operation[name[n] * name[e]]]]]] - call[name[ss].jn, parameter[binary_operation[name[n] + constant[2]], binary_operation[name[n] * name[e]]]]]]
return[name[ret]] | keyword[def] identifier[get_an] ( identifier[n] , identifier[mc] , identifier[dl] , identifier[F] , identifier[e] ):
literal[string]
identifier[mc] *= identifier[SOLAR2S]
identifier[dl] *= identifier[MPC2S]
identifier[omega] = literal[int] * identifier[np] . identifier[pi] * identifier[F]
identifier[amp] = identifier[n] * identifier[mc] **( literal[int] / literal[int] )* identifier[omega] **( literal[int] / literal[int] )/ identifier[dl]
identifier[ret] =- identifier[amp] *( identifier[ss] . identifier[jn] ( identifier[n] - literal[int] , identifier[n] * identifier[e] )- literal[int] * identifier[e] * identifier[ss] . identifier[jn] ( identifier[n] - literal[int] , identifier[n] * identifier[e] )+
( literal[int] / identifier[n] )* identifier[ss] . identifier[jn] ( identifier[n] , identifier[n] * identifier[e] )+ literal[int] * identifier[e] * identifier[ss] . identifier[jn] ( identifier[n] + literal[int] , identifier[n] * identifier[e] )-
identifier[ss] . identifier[jn] ( identifier[n] + literal[int] , identifier[n] * identifier[e] ))
keyword[return] identifier[ret] | def get_an(n, mc, dl, F, e):
"""
Compute a_n from Eq. 22 of Taylor et al. (2015).
:param n: Harmonic number
:param mc: Chirp mass of binary [Solar Mass]
:param dl: Luminosity distance [Mpc]
:param F: Orbital frequency of binary [Hz]
:param e: Orbital Eccentricity
:returns: a_n
"""
# convert to seconds
mc *= SOLAR2S
dl *= MPC2S
omega = 2 * np.pi * F
amp = n * mc ** (5 / 3) * omega ** (2 / 3) / dl
ret = -amp * (ss.jn(n - 2, n * e) - 2 * e * ss.jn(n - 1, n * e) + 2 / n * ss.jn(n, n * e) + 2 * e * ss.jn(n + 1, n * e) - ss.jn(n + 2, n * e))
return ret |
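
The get_an routine above evaluates Eq. 22 of Taylor et al. (2015) with Bessel functions of the first kind. A minimal driver sketch follows; the unit-conversion constants and the ss alias for scipy.special are assumptions about the surrounding module, and the binary parameters are purely illustrative.

import numpy as np
import scipy.special as ss

SOLAR2S = 4.92549095e-06   # assumed: G*Msun/c**3 expressed in seconds
MPC2S = 1.02927125e+14     # assumed: one megaparsec in light-travel seconds

def get_an(n, mc, dl, F, e):
    # condensed copy of the routine above so the module-level constants resolve
    mc = mc * SOLAR2S
    dl = dl * MPC2S
    omega = 2 * np.pi * F
    amp = n * mc ** (5 / 3) * omega ** (2 / 3) / dl
    return -amp * (ss.jn(n - 2, n * e) - 2 * e * ss.jn(n - 1, n * e)
                   + (2 / n) * ss.jn(n, n * e) + 2 * e * ss.jn(n + 1, n * e)
                   - ss.jn(n + 2, n * e))

# 1e9 solar-mass chirp mass at 100 Mpc, 10 nHz orbital frequency, e = 0.3
print(get_an(2, 1e9, 100.0, 1e-8, 0.3))
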
def _covar_mstep_full(gmm, X, responsibilities, weighted_X_sum, norm,
min_covar):
"""Performing the covariance M step for full cases"""
# Eq. 12 from K. Murphy, "Fitting a Conditional Linear Gaussian
# Distribution"
n_features = X.shape[1]
cv = np.empty((gmm.n_components, n_features, n_features))
for c in range(gmm.n_components):
post = responsibilities[:, c]
mu = gmm.means_[c]
diff = X - mu
with np.errstate(under='ignore'):
# Underflow Errors in doing post * X.T are not important
avg_cv = np.dot(post * diff.T, diff) / (post.sum() + 10 * EPS)
cv[c] = avg_cv + min_covar * np.eye(n_features)
return cv | def function[_covar_mstep_full, parameter[gmm, X, responsibilities, weighted_X_sum, norm, min_covar]]:
constant[Performing the covariance M step for full cases]
variable[n_features] assign[=] call[name[X].shape][constant[1]]
variable[cv] assign[=] call[name[np].empty, parameter[tuple[[<ast.Attribute object at 0x7da207f03ca0>, <ast.Name object at 0x7da207f02740>, <ast.Name object at 0x7da207f00be0>]]]]
for taget[name[c]] in starred[call[name[range], parameter[name[gmm].n_components]]] begin[:]
variable[post] assign[=] call[name[responsibilities]][tuple[[<ast.Slice object at 0x7da207f98f70>, <ast.Name object at 0x7da207f9a950>]]]
variable[mu] assign[=] call[name[gmm].means_][name[c]]
variable[diff] assign[=] binary_operation[name[X] - name[mu]]
with call[name[np].errstate, parameter[]] begin[:]
variable[avg_cv] assign[=] binary_operation[call[name[np].dot, parameter[binary_operation[name[post] * name[diff].T], name[diff]]] / binary_operation[call[name[post].sum, parameter[]] + binary_operation[constant[10] * name[EPS]]]]
call[name[cv]][name[c]] assign[=] binary_operation[name[avg_cv] + binary_operation[name[min_covar] * call[name[np].eye, parameter[name[n_features]]]]]
return[name[cv]] | keyword[def] identifier[_covar_mstep_full] ( identifier[gmm] , identifier[X] , identifier[responsibilities] , identifier[weighted_X_sum] , identifier[norm] ,
identifier[min_covar] ):
literal[string]
identifier[n_features] = identifier[X] . identifier[shape] [ literal[int] ]
identifier[cv] = identifier[np] . identifier[empty] (( identifier[gmm] . identifier[n_components] , identifier[n_features] , identifier[n_features] ))
keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[gmm] . identifier[n_components] ):
identifier[post] = identifier[responsibilities] [:, identifier[c] ]
identifier[mu] = identifier[gmm] . identifier[means_] [ identifier[c] ]
identifier[diff] = identifier[X] - identifier[mu]
keyword[with] identifier[np] . identifier[errstate] ( identifier[under] = literal[string] ):
identifier[avg_cv] = identifier[np] . identifier[dot] ( identifier[post] * identifier[diff] . identifier[T] , identifier[diff] )/( identifier[post] . identifier[sum] ()+ literal[int] * identifier[EPS] )
identifier[cv] [ identifier[c] ]= identifier[avg_cv] + identifier[min_covar] * identifier[np] . identifier[eye] ( identifier[n_features] )
keyword[return] identifier[cv] | def _covar_mstep_full(gmm, X, responsibilities, weighted_X_sum, norm, min_covar):
"""Performing the covariance M step for full cases"""
# Eq. 12 from K. Murphy, "Fitting a Conditional Linear Gaussian
# Distribution"
n_features = X.shape[1]
cv = np.empty((gmm.n_components, n_features, n_features))
for c in range(gmm.n_components):
post = responsibilities[:, c]
mu = gmm.means_[c]
diff = X - mu
with np.errstate(under='ignore'):
# Underflow Errors in doing post * X.T are not important
avg_cv = np.dot(post * diff.T, diff) / (post.sum() + 10 * EPS) # depends on [control=['with'], data=[]]
cv[c] = avg_cv + min_covar * np.eye(n_features) # depends on [control=['for'], data=['c']]
return cv |
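
A quick smoke test for the covariance M-step above. EPS is a module-level constant in the original whose exact value is an assumption here, and the mock object carries only the two attributes the function actually reads (n_components and means_); weighted_X_sum and norm are accepted but unused on this path.

import numpy as np
from types import SimpleNamespace

EPS = np.finfo(float).eps   # assumed value of the module-level EPS

rng = np.random.default_rng(0)
X = rng.normal(size=(200, 2))
resp = rng.dirichlet(np.ones(3), size=200)   # soft assignments, each row sums to 1
gmm = SimpleNamespace(n_components=3, means_=np.zeros((3, 2)))

# _covar_mstep_full as defined above, assumed pasted alongside this driver
cv = _covar_mstep_full(gmm, X, resp, weighted_X_sum=None, norm=None,
                       min_covar=1e-3)
print(cv.shape)   # (3, 2, 2): one regularized covariance per component
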
def Approval(self, username, approval_id):
"""Returns a reference to an approval."""
return ClientApprovalRef(
client_id=self.client_id,
username=username,
approval_id=approval_id,
context=self._context) | def function[Approval, parameter[self, username, approval_id]]:
constant[Returns a reference to an approval.]
return[call[name[ClientApprovalRef], parameter[]]] | keyword[def] identifier[Approval] ( identifier[self] , identifier[username] , identifier[approval_id] ):
literal[string]
keyword[return] identifier[ClientApprovalRef] (
identifier[client_id] = identifier[self] . identifier[client_id] ,
identifier[username] = identifier[username] ,
identifier[approval_id] = identifier[approval_id] ,
identifier[context] = identifier[self] . identifier[_context] ) | def Approval(self, username, approval_id):
"""Returns a reference to an approval."""
return ClientApprovalRef(client_id=self.client_id, username=username, approval_id=approval_id, context=self._context) |
def uniform_crossover(parents):
"""Perform uniform crossover on two parent chromosomes.
Randomly take genes from one parent or the other.
Ex. p1 = xxxxx, p2 = yyyyy, child = xyxxy
"""
chromosome_length = len(parents[0])
children = [[], []]
for i in range(chromosome_length):
selected_parent = random.randint(0, 1)
# Take from the selected parent, and add it to child 1
# Take from the other parent, and add it to child 2
children[0].append(parents[selected_parent][i])
children[1].append(parents[1 - selected_parent][i])
return children | def function[uniform_crossover, parameter[parents]]:
constant[Perform uniform crossover on two parent chromosomes.
Randomly take genes from one parent or the other.
Ex. p1 = xxxxx, p2 = yyyyy, child = xyxxy
]
variable[chromosome_length] assign[=] call[name[len], parameter[call[name[parents]][constant[0]]]]
variable[children] assign[=] list[[<ast.List object at 0x7da1b04cbd60>, <ast.List object at 0x7da1b04cbac0>]]
for taget[name[i]] in starred[call[name[range], parameter[name[chromosome_length]]]] begin[:]
variable[selected_parent] assign[=] call[name[random].randint, parameter[constant[0], constant[1]]]
call[call[name[children]][constant[0]].append, parameter[call[call[name[parents]][name[selected_parent]]][name[i]]]]
call[call[name[children]][constant[1]].append, parameter[call[call[name[parents]][binary_operation[constant[1] - name[selected_parent]]]][name[i]]]]
return[name[children]] | keyword[def] identifier[uniform_crossover] ( identifier[parents] ):
literal[string]
identifier[chromosome_length] = identifier[len] ( identifier[parents] [ literal[int] ])
identifier[children] =[[],[]]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[chromosome_length] ):
identifier[selected_parent] = identifier[random] . identifier[randint] ( literal[int] , literal[int] )
identifier[children] [ literal[int] ]. identifier[append] ( identifier[parents] [ identifier[selected_parent] ][ identifier[i] ])
identifier[children] [ literal[int] ]. identifier[append] ( identifier[parents] [ literal[int] - identifier[selected_parent] ][ identifier[i] ])
keyword[return] identifier[children] | def uniform_crossover(parents):
"""Perform uniform crossover on two parent chromosomes.
Randomly take genes from one parent or the other.
Ex. p1 = xxxxx, p2 = yyyyy, child = xyxxy
"""
chromosome_length = len(parents[0])
children = [[], []]
for i in range(chromosome_length):
selected_parent = random.randint(0, 1)
# Take from the selected parent, and add it to child 1
# Take from the other parent, and add it to child 2
children[0].append(parents[selected_parent][i])
children[1].append(parents[1 - selected_parent][i]) # depends on [control=['for'], data=['i']]
return children |
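
A short driver for the uniform crossover above; seeding random makes the per-locus coin flips repeatable, and the two children are complements of each other by construction.

import random

random.seed(7)
parents = [list("xxxxx"), list("yyyyy")]
c1, c2 = uniform_crossover(parents)   # the function above, assumed in scope
print("".join(c1), "".join(c2))       # e.g. xyxyy / yxyxx
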
def _wc_hard_wrap(line, length):
"""
Wrap text to length characters, breaking when target length is reached,
taking into account character width.
Used to wrap lines which cannot be wrapped on whitespace.
"""
chars = []
chars_len = 0
for char in line:
char_len = wcwidth(char)
if chars_len + char_len > length:
yield "".join(chars)
chars = []
chars_len = 0
chars.append(char)
chars_len += char_len
if chars:
yield "".join(chars) | def function[_wc_hard_wrap, parameter[line, length]]:
constant[
Wrap text to length characters, breaking when target length is reached,
taking into account character width.
Used to wrap lines which cannot be wrapped on whitespace.
]
variable[chars] assign[=] list[[]]
variable[chars_len] assign[=] constant[0]
for taget[name[char]] in starred[name[line]] begin[:]
variable[char_len] assign[=] call[name[wcwidth], parameter[name[char]]]
if compare[binary_operation[name[chars_len] + name[char_len]] greater[>] name[length]] begin[:]
<ast.Yield object at 0x7da20c76e020>
variable[chars] assign[=] list[[]]
variable[chars_len] assign[=] constant[0]
call[name[chars].append, parameter[name[char]]]
<ast.AugAssign object at 0x7da20c76ca60>
if name[chars] begin[:]
<ast.Yield object at 0x7da20c76d6c0> | keyword[def] identifier[_wc_hard_wrap] ( identifier[line] , identifier[length] ):
literal[string]
identifier[chars] =[]
identifier[chars_len] = literal[int]
keyword[for] identifier[char] keyword[in] identifier[line] :
identifier[char_len] = identifier[wcwidth] ( identifier[char] )
keyword[if] identifier[chars_len] + identifier[char_len] > identifier[length] :
keyword[yield] literal[string] . identifier[join] ( identifier[chars] )
identifier[chars] =[]
identifier[chars_len] = literal[int]
identifier[chars] . identifier[append] ( identifier[char] )
identifier[chars_len] += identifier[char_len]
keyword[if] identifier[chars] :
keyword[yield] literal[string] . identifier[join] ( identifier[chars] ) | def _wc_hard_wrap(line, length):
"""
Wrap text to length characters, breaking when target length is reached,
taking into account character width.
Used to wrap lines which cannot be wrapped on whitespace.
"""
chars = []
chars_len = 0
for char in line:
char_len = wcwidth(char)
if chars_len + char_len > length:
yield ''.join(chars)
chars = []
chars_len = 0 # depends on [control=['if'], data=[]]
chars.append(char)
chars_len += char_len # depends on [control=['for'], data=['char']]
if chars:
yield ''.join(chars) # depends on [control=['if'], data=[]] |
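
The generator above needs a width-aware wcwidth; wrapping a CJK string to four display columns (two double-width glyphs per line) shows why a plain len would be wrong.

from wcwidth import wcwidth   # assumed source of the wcwidth() used above

for chunk in _wc_hard_wrap("こんにちは", 4):   # each glyph occupies 2 columns
    print(chunk)
# prints three lines: "こん", "にち", "は"
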
def generate_vars(project_name, project_dir):
"""
Generates the variables to replace in files
"""
out = vary_name(project_name)
out['random_key'] = make_random_key()
out['settings_file'] = make_file_path(
project_dir,
project_name,
path.join('src', project_name),
'settings.py',
)
return out | def function[generate_vars, parameter[project_name, project_dir]]:
constant[
Generates the variables to replace in files
]
variable[out] assign[=] call[name[vary_name], parameter[name[project_name]]]
call[name[out]][constant[random_key]] assign[=] call[name[make_random_key], parameter[]]
call[name[out]][constant[settings_file]] assign[=] call[name[make_file_path], parameter[name[project_dir], name[project_name], call[name[path].join, parameter[constant[src], name[project_name]]], constant[settings.py]]]
return[name[out]] | keyword[def] identifier[generate_vars] ( identifier[project_name] , identifier[project_dir] ):
literal[string]
identifier[out] = identifier[vary_name] ( identifier[project_name] )
identifier[out] [ literal[string] ]= identifier[make_random_key] ()
identifier[out] [ literal[string] ]= identifier[make_file_path] (
identifier[project_dir] ,
identifier[project_name] ,
identifier[path] . identifier[join] ( literal[string] , identifier[project_name] ),
literal[string] ,
)
keyword[return] identifier[out] | def generate_vars(project_name, project_dir):
"""
Generates the variables to replace in files
"""
out = vary_name(project_name)
out['random_key'] = make_random_key()
out['settings_file'] = make_file_path(project_dir, project_name, path.join('src', project_name), 'settings.py')
return out |
def save_new_channel(self):
"""
It saves new channel according to specified channel features.
"""
form_info = self.input['form']
channel = Channel(typ=15, name=form_info['name'],
description=form_info['description'],
owner_id=form_info['owner_id'])
channel.blocking_save()
self.current.task_data['target_channel_key'] = channel.key | def function[save_new_channel, parameter[self]]:
constant[
It saves new channel according to specified channel features.
]
variable[form_info] assign[=] call[name[self].input][constant[form]]
variable[channel] assign[=] call[name[Channel], parameter[]]
call[name[channel].blocking_save, parameter[]]
call[name[self].current.task_data][constant[target_channel_key]] assign[=] name[channel].key | keyword[def] identifier[save_new_channel] ( identifier[self] ):
literal[string]
identifier[form_info] = identifier[self] . identifier[input] [ literal[string] ]
identifier[channel] = identifier[Channel] ( identifier[typ] = literal[int] , identifier[name] = identifier[form_info] [ literal[string] ],
identifier[description] = identifier[form_info] [ literal[string] ],
identifier[owner_id] = identifier[form_info] [ literal[string] ])
identifier[channel] . identifier[blocking_save] ()
identifier[self] . identifier[current] . identifier[task_data] [ literal[string] ]= identifier[channel] . identifier[key] | def save_new_channel(self):
"""
It saves new channel according to specified channel features.
"""
form_info = self.input['form']
channel = Channel(typ=15, name=form_info['name'], description=form_info['description'], owner_id=form_info['owner_id'])
channel.blocking_save()
self.current.task_data['target_channel_key'] = channel.key |
def generate_data_key(self, name, key_type, context="", nonce="", bits=256, mount_point=DEFAULT_MOUNT_POINT):
"""Generates a new high-entropy key and the value encrypted with the named key.
Optionally return the plaintext of the key as well. Whether plaintext is returned depends on the path; as a
result, you can use Vault ACL policies to control whether a user is allowed to retrieve the plaintext value of a
key. This is useful if you want an untrusted user or operation to generate keys that are then made available to
trusted users.
Supported methods:
POST: /{mount_point}/datakey/{key_type}/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to use to encrypt the datakey. This is specified as part
of the URL.
:type name: str | unicode
:param key_type: Specifies the type of key to generate. If plaintext, the plaintext key will be returned along
with the ciphertext. If wrapped, only the ciphertext value will be returned. This is specified as part of
the URL.
:type key_type: str | unicode
:param context: Specifies the key derivation context, provided as a base64-encoded string. This must be provided
if derivation is enabled.
:type context: str | unicode
:param nonce: Specifies a nonce value, provided as base64 encoded. Must be provided if convergent encryption is
enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created in 0.6.2+.
The value must be exactly 96 bits (12 bytes) long and the user must ensure that for any given context (and
thus, any given encryption key) this nonce value is never reused.
:type nonce: str | unicode
:param bits: Specifies the number of bits in the desired key. Can be 128, 256, or 512.
:type bits: int
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if key_type not in transit_constants.ALLOWED_DATA_KEY_TYPES:
error_msg = 'invalid key_type argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=key_type,
allowed_types=', '.join(transit_constants.ALLOWED_DATA_KEY_TYPES),
))
if bits not in transit_constants.ALLOWED_DATA_KEY_BITS:
error_msg = 'invalid bits argument provided "{arg}", supported values: "{allowed_values}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=bits,
allowed_values=', '.join([str(b) for b in transit_constants.ALLOWED_DATA_KEY_BITS]),
))
params = {
'context': context,
'nonce': nonce,
'bits': bits,
}
api_path = '/v1/{mount_point}/datakey/{key_type}/{name}'.format(
mount_point=mount_point,
key_type=key_type,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json() | def function[generate_data_key, parameter[self, name, key_type, context, nonce, bits, mount_point]]:
constant[Generates a new high-entropy key and the value encrypted with the named key.
Optionally return the plaintext of the key as well. Whether plaintext is returned depends on the path; as a
result, you can use Vault ACL policies to control whether a user is allowed to retrieve the plaintext value of a
key. This is useful if you want an untrusted user or operation to generate keys that are then made available to
trusted users.
Supported methods:
POST: /{mount_point}/datakey/{key_type}/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to use to encrypt the datakey. This is specified as part
of the URL.
:type name: str | unicode
:param key_type: Specifies the type of key to generate. If plaintext, the plaintext key will be returned along
with the ciphertext. If wrapped, only the ciphertext value will be returned. This is specified as part of
the URL.
:type key_type: str | unicode
:param context: Specifies the key derivation context, provided as a base64-encoded string. This must be provided
if derivation is enabled.
:type context: str | unicode
:param nonce: Specifies a nonce value, provided as base64 encoded. Must be provided if convergent encryption is
enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created in 0.6.2+.
The value must be exactly 96 bits (12 bytes) long and the user must ensure that for any given context (and
thus, any given encryption key) this nonce value is never reused.
:type nonce: str | unicode
:param bits: Specifies the number of bits in the desired key. Can be 128, 256, or 512.
:type bits: int
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
]
if compare[name[key_type] <ast.NotIn object at 0x7da2590d7190> name[transit_constants].ALLOWED_DATA_KEY_TYPES] begin[:]
variable[error_msg] assign[=] constant[invalid key_type argument provided "{arg}", supported types: "{allowed_types}"]
<ast.Raise object at 0x7da20c993af0>
if compare[name[bits] <ast.NotIn object at 0x7da2590d7190> name[transit_constants].ALLOWED_DATA_KEY_BITS] begin[:]
variable[error_msg] assign[=] constant[invalid bits argument provided "{arg}", supported values: "{allowed_values}"]
<ast.Raise object at 0x7da20c9911e0>
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c992fe0>, <ast.Constant object at 0x7da20c991f00>, <ast.Constant object at 0x7da20c990700>], [<ast.Name object at 0x7da20c9910f0>, <ast.Name object at 0x7da20c992c20>, <ast.Name object at 0x7da20c990bb0>]]
variable[api_path] assign[=] call[constant[/v1/{mount_point}/datakey/{key_type}/{name}].format, parameter[]]
variable[response] assign[=] call[name[self]._adapter.post, parameter[]]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[generate_data_key] ( identifier[self] , identifier[name] , identifier[key_type] , identifier[context] = literal[string] , identifier[nonce] = literal[string] , identifier[bits] = literal[int] , identifier[mount_point] = identifier[DEFAULT_MOUNT_POINT] ):
literal[string]
keyword[if] identifier[key_type] keyword[not] keyword[in] identifier[transit_constants] . identifier[ALLOWED_DATA_KEY_TYPES] :
identifier[error_msg] = literal[string]
keyword[raise] identifier[exceptions] . identifier[ParamValidationError] ( identifier[error_msg] . identifier[format] (
identifier[arg] = identifier[key_type] ,
identifier[allowed_types] = literal[string] . identifier[join] ( identifier[transit_constants] . identifier[ALLOWED_DATA_KEY_TYPES] ),
))
keyword[if] identifier[bits] keyword[not] keyword[in] identifier[transit_constants] . identifier[ALLOWED_DATA_KEY_BITS] :
identifier[error_msg] = literal[string]
keyword[raise] identifier[exceptions] . identifier[ParamValidationError] ( identifier[error_msg] . identifier[format] (
identifier[arg] = identifier[bits] ,
identifier[allowed_values] = literal[string] . identifier[join] ([ identifier[str] ( identifier[b] ) keyword[for] identifier[b] keyword[in] identifier[transit_constants] . identifier[ALLOWED_DATA_KEY_BITS] ]),
))
identifier[params] ={
literal[string] : identifier[context] ,
literal[string] : identifier[nonce] ,
literal[string] : identifier[bits] ,
}
identifier[api_path] = literal[string] . identifier[format] (
identifier[mount_point] = identifier[mount_point] ,
identifier[key_type] = identifier[key_type] ,
identifier[name] = identifier[name] ,
)
identifier[response] = identifier[self] . identifier[_adapter] . identifier[post] (
identifier[url] = identifier[api_path] ,
identifier[json] = identifier[params] ,
)
keyword[return] identifier[response] . identifier[json] () | def generate_data_key(self, name, key_type, context='', nonce='', bits=256, mount_point=DEFAULT_MOUNT_POINT):
"""Generates a new high-entropy key and the value encrypted with the named key.
Optionally return the plaintext of the key as well. Whether plaintext is returned depends on the path; as a
result, you can use Vault ACL policies to control whether a user is allowed to retrieve the plaintext value of a
key. This is useful if you want an untrusted user or operation to generate keys that are then made available to
trusted users.
Supported methods:
POST: /{mount_point}/datakey/{key_type}/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to use to encrypt the datakey. This is specified as part
of the URL.
:type name: str | unicode
:param key_type: Specifies the type of key to generate. If plaintext, the plaintext key will be returned along
with the ciphertext. If wrapped, only the ciphertext value will be returned. This is specified as part of
the URL.
:type key_type: str | unicode
:param context: Specifies the key derivation context, provided as a base64-encoded string. This must be provided
if derivation is enabled.
:type context: str | unicode
:param nonce: Specifies a nonce value, provided as base64 encoded. Must be provided if convergent encryption is
enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created in 0.6.2+.
The value must be exactly 96 bits (12 bytes) long and the user must ensure that for any given context (and
thus, any given encryption key) this nonce value is never reused.
:type nonce: str | unicode
:param bits: Specifies the number of bits in the desired key. Can be 128, 256, or 512.
:type bits: int
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if key_type not in transit_constants.ALLOWED_DATA_KEY_TYPES:
error_msg = 'invalid key_type argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(arg=key_type, allowed_types=', '.join(transit_constants.ALLOWED_DATA_KEY_TYPES))) # depends on [control=['if'], data=['key_type']]
if bits not in transit_constants.ALLOWED_DATA_KEY_BITS:
error_msg = 'invalid bits argument provided "{arg}", supported values: "{allowed_values}"'
raise exceptions.ParamValidationError(error_msg.format(arg=bits, allowed_values=', '.join([str(b) for b in transit_constants.ALLOWED_DATA_KEY_BITS]))) # depends on [control=['if'], data=['bits']]
params = {'context': context, 'nonce': nonce, 'bits': bits}
api_path = '/v1/{mount_point}/datakey/{key_type}/{name}'.format(mount_point=mount_point, key_type=key_type, name=name)
response = self._adapter.post(url=api_path, json=params)
return response.json() |
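
The method above matches the shape of hvac's Transit secrets engine; under that assumption, calling it looks roughly like the sketch below (server address, token, and key name are placeholders).

import hvac   # assumption: the method above is hvac's Transit.generate_data_key

client = hvac.Client(url="http://127.0.0.1:8200", token="<your-token>")
resp = client.secrets.transit.generate_data_key(
    name="my-key",
    key_type="plaintext",   # "plaintext" also returns the key; "wrapped" does not
    bits=256,               # validated against 128 / 256 / 512
)
print(resp["data"]["ciphertext"])
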
def _dict_to_report_line(cls, report_dict):
'''Takes a report_dict as input and returns a report line'''
return '\t'.join([str(report_dict[x]) for x in report.columns]) | def function[_dict_to_report_line, parameter[cls, report_dict]]:
constant[Takes a report_dict as input and returns a report line]
return[call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18fe92c80>]]] | keyword[def] identifier[_dict_to_report_line] ( identifier[cls] , identifier[report_dict] ):
literal[string]
keyword[return] literal[string] . identifier[join] ([ identifier[str] ( identifier[report_dict] [ identifier[x] ]) keyword[for] identifier[x] keyword[in] identifier[report] . identifier[columns] ]) | def _dict_to_report_line(cls, report_dict):
"""Takes a report_dict as input and returns a report line"""
return '\t'.join([str(report_dict[x]) for x in report.columns]) |
def run_xenon_simple(workflow, machine, worker_config):
"""Run a workflow using a single Xenon remote worker.
:param workflow: |Workflow| or |PromisedObject| to evaluate.
:param machine: |Machine| instance.
:param worker_config: Configuration for the pilot job."""
scheduler = Scheduler()
return scheduler.run(
xenon_interactive_worker(machine, worker_config),
get_workflow(workflow)
) | def function[run_xenon_simple, parameter[workflow, machine, worker_config]]:
constant[Run a workflow using a single Xenon remote worker.
:param workflow: |Workflow| or |PromisedObject| to evaluate.
:param machine: |Machine| instance.
:param worker_config: Configuration for the pilot job.]
variable[scheduler] assign[=] call[name[Scheduler], parameter[]]
return[call[name[scheduler].run, parameter[call[name[xenon_interactive_worker], parameter[name[machine], name[worker_config]]], call[name[get_workflow], parameter[name[workflow]]]]]] | keyword[def] identifier[run_xenon_simple] ( identifier[workflow] , identifier[machine] , identifier[worker_config] ):
literal[string]
identifier[scheduler] = identifier[Scheduler] ()
keyword[return] identifier[scheduler] . identifier[run] (
identifier[xenon_interactive_worker] ( identifier[machine] , identifier[worker_config] ),
identifier[get_workflow] ( identifier[workflow] )
) | def run_xenon_simple(workflow, machine, worker_config):
"""Run a workflow using a single Xenon remote worker.
:param workflow: |Workflow| or |PromisedObject| to evaluate.
:param machine: |Machine| instance.
:param worker_config: Configuration for the pilot job."""
scheduler = Scheduler()
return scheduler.run(xenon_interactive_worker(machine, worker_config), get_workflow(workflow)) |
def _initialize_initial_state_fluents(self):
'''Returns the initial state-fluents instantiated.'''
state_fluents = self.rddl.domain.state_fluents
initializer = self.rddl.instance.init_state
self.initial_state_fluents = self._initialize_pvariables(
state_fluents,
self.rddl.domain.state_fluent_ordering,
initializer)
return self.initial_state_fluents | def function[_initialize_initial_state_fluents, parameter[self]]:
constant[Returns the initial state-fluents instantiated.]
variable[state_fluents] assign[=] name[self].rddl.domain.state_fluents
variable[initializer] assign[=] name[self].rddl.instance.init_state
name[self].initial_state_fluents assign[=] call[name[self]._initialize_pvariables, parameter[name[state_fluents], name[self].rddl.domain.state_fluent_ordering, name[initializer]]]
return[name[self].initial_state_fluents] | keyword[def] identifier[_initialize_initial_state_fluents] ( identifier[self] ):
literal[string]
identifier[state_fluents] = identifier[self] . identifier[rddl] . identifier[domain] . identifier[state_fluents]
identifier[initializer] = identifier[self] . identifier[rddl] . identifier[instance] . identifier[init_state]
identifier[self] . identifier[initial_state_fluents] = identifier[self] . identifier[_initialize_pvariables] (
identifier[state_fluents] ,
identifier[self] . identifier[rddl] . identifier[domain] . identifier[state_fluent_ordering] ,
identifier[initializer] )
keyword[return] identifier[self] . identifier[initial_state_fluents] | def _initialize_initial_state_fluents(self):
"""Returns the initial state-fluents instantiated."""
state_fluents = self.rddl.domain.state_fluents
initializer = self.rddl.instance.init_state
self.initial_state_fluents = self._initialize_pvariables(state_fluents, self.rddl.domain.state_fluent_ordering, initializer)
return self.initial_state_fluents |
def universe(self):
"""
Data universe available at the current time.
Universe contains the data passed in when creating a Backtest.
Use this data to determine strategy logic.
"""
# avoid windowing every time
# if calling and on same date return
# cached value
if self.now == self._last_chk:
return self._funiverse
else:
self._last_chk = self.now
self._funiverse = self._universe.loc[:self.now]
return self._funiverse | def function[universe, parameter[self]]:
constant[
Data universe available at the current time.
Universe contains the data passed in when creating a Backtest.
Use this data to determine strategy logic.
]
if compare[name[self].now equal[==] name[self]._last_chk] begin[:]
return[name[self]._funiverse] | keyword[def] identifier[universe] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[now] == identifier[self] . identifier[_last_chk] :
keyword[return] identifier[self] . identifier[_funiverse]
keyword[else] :
identifier[self] . identifier[_last_chk] = identifier[self] . identifier[now]
identifier[self] . identifier[_funiverse] = identifier[self] . identifier[_universe] . identifier[loc] [: identifier[self] . identifier[now] ]
keyword[return] identifier[self] . identifier[_funiverse] | def universe(self):
"""
Data universe available at the current time.
Universe contains the data passed in when creating a Backtest.
Use this data to determine strategy logic.
"""
# avoid windowing every time
# if calling and on same date return
# cached value
if self.now == self._last_chk:
return self._funiverse # depends on [control=['if'], data=[]]
else:
self._last_chk = self.now
self._funiverse = self._universe.loc[:self.now]
return self._funiverse |
def save(self, *args, **kwargs):
"""
Custom save method
"""
# change status to scheduled if necessary
if self.is_scheduled and self.status is not OUTWARD_STATUS.get('scheduled'):
self.status = OUTWARD_STATUS.get('scheduled')
# call super.save()
super(Outward, self).save(*args, **kwargs) | def function[save, parameter[self]]:
constant[
Custom save method
]
if <ast.BoolOp object at 0x7da18c4cf2b0> begin[:]
name[self].status assign[=] call[name[OUTWARD_STATUS].get, parameter[constant[scheduled]]]
call[call[name[super], parameter[name[Outward], name[self]]].save, parameter[<ast.Starred object at 0x7da18c4cdea0>]] | keyword[def] identifier[save] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[is_scheduled] keyword[and] identifier[self] . identifier[status] keyword[is] keyword[not] identifier[OUTWARD_STATUS] . identifier[get] ( literal[string] ):
identifier[self] . identifier[status] = identifier[OUTWARD_STATUS] . identifier[get] ( literal[string] )
identifier[super] ( identifier[Outward] , identifier[self] ). identifier[save] (* identifier[args] ,** identifier[kwargs] ) | def save(self, *args, **kwargs):
"""
Custom save method
"""
# change status to scheduled if necessary
if self.is_scheduled and self.status is not OUTWARD_STATUS.get('scheduled'):
self.status = OUTWARD_STATUS.get('scheduled') # depends on [control=['if'], data=[]]
# call super.save()
super(Outward, self).save(*args, **kwargs) |
def export_wif(self) -> str:
"""
This interface is used to get export ECDSA private key in the form of WIF which
is a way to encoding an ECDSA private key and make it easier to copy.
:return: a WIF encode private key.
"""
data = b''.join([b'\x80', self.__private_key, b'\01'])
checksum = Digest.hash256(data[0:34])
wif = base58.b58encode(b''.join([data, checksum[0:4]]))
return wif.decode('ascii') | def function[export_wif, parameter[self]]:
constant[
This interface is used to get export ECDSA private key in the form of WIF which
is a way to encoding an ECDSA private key and make it easier to copy.
:return: a WIF encode private key.
]
variable[data] assign[=] call[constant[b''].join, parameter[list[[<ast.Constant object at 0x7da20c794e20>, <ast.Attribute object at 0x7da20c796350>, <ast.Constant object at 0x7da20c796560>]]]]
variable[checksum] assign[=] call[name[Digest].hash256, parameter[call[name[data]][<ast.Slice object at 0x7da20c796260>]]]
variable[wif] assign[=] call[name[base58].b58encode, parameter[call[constant[b''].join, parameter[list[[<ast.Name object at 0x7da20c7948b0>, <ast.Subscript object at 0x7da20c795f00>]]]]]]
return[call[name[wif].decode, parameter[constant[ascii]]]] | keyword[def] identifier[export_wif] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[data] = literal[string] . identifier[join] ([ literal[string] , identifier[self] . identifier[__private_key] , literal[string] ])
identifier[checksum] = identifier[Digest] . identifier[hash256] ( identifier[data] [ literal[int] : literal[int] ])
identifier[wif] = identifier[base58] . identifier[b58encode] ( literal[string] . identifier[join] ([ identifier[data] , identifier[checksum] [ literal[int] : literal[int] ]]))
keyword[return] identifier[wif] . identifier[decode] ( literal[string] ) | def export_wif(self) -> str:
"""
This interface is used to get export ECDSA private key in the form of WIF which
is a way to encoding an ECDSA private key and make it easier to copy.
:return: a WIF encode private key.
"""
data = b''.join([b'\x80', self.__private_key, b'\x01'])
checksum = Digest.hash256(data[0:34])
wif = base58.b58encode(b''.join([data, checksum[0:4]]))
return wif.decode('ascii') |
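
export_wif above is standard WIF encoding: a 0x80 version byte, the 32-byte key, a 0x01 compressed-key flag, then a 4-byte checksum. A standalone sketch, assuming Digest.hash256 is double SHA-256 (which the 4-byte checksum slice suggests):

import hashlib
import base58

def hash256(data: bytes) -> bytes:
    # assumed equivalent of Digest.hash256: SHA-256 applied twice
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

private_key = bytes(32)                      # toy all-zero key, illustration only
payload = b"\x80" + private_key + b"\x01"    # 34 bytes, i.e. the data[0:34] slice
wif = base58.b58encode(payload + hash256(payload)[:4])
print(wif.decode("ascii"))
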
def from_list(commands):
"""
Given a list of tuples of form (depth, text)
that represents a DFS traversal of a command tree,
returns a dictionary representing command tree.
"""
def subtrees(commands, level):
if not commands:
return
acc = []
parent, *commands = commands
for command in commands:
if command['level'] > level:
acc.append(command)
else:
yield (parent, acc)
parent = command
acc.clear()
yield (parent, acc)
def walk(commands, level=0):
return [
{
'description': key['description'],
'children': walk(subtree, level + 1),
'id': key['id']
}
for key, subtree in subtrees(commands, level)
]
return walk(commands) | def function[from_list, parameter[commands]]:
constant[
Given a list of tuples of form (depth, text)
that represents a DFS traversal of a command tree,
returns a dictionary representing command tree.
]
def function[subtrees, parameter[commands, level]]:
if <ast.UnaryOp object at 0x7da204962e60> begin[:]
return[None]
variable[acc] assign[=] list[[]]
<ast.Tuple object at 0x7da204961360> assign[=] name[commands]
for taget[name[command]] in starred[name[commands]] begin[:]
if compare[call[name[command]][constant[level]] greater[>] name[level]] begin[:]
call[name[acc].append, parameter[name[command]]]
<ast.Yield object at 0x7da1b26ac460>
def function[walk, parameter[commands, level]]:
return[<ast.ListComp object at 0x7da1b26ad060>]
return[call[name[walk], parameter[name[commands]]]] | keyword[def] identifier[from_list] ( identifier[commands] ):
literal[string]
keyword[def] identifier[subtrees] ( identifier[commands] , identifier[level] ):
keyword[if] keyword[not] identifier[commands] :
keyword[return]
identifier[acc] =[]
identifier[parent] ,* identifier[commands] = identifier[commands]
keyword[for] identifier[command] keyword[in] identifier[commands] :
keyword[if] identifier[command] [ literal[string] ]> identifier[level] :
identifier[acc] . identifier[append] ( identifier[command] )
keyword[else] :
keyword[yield] ( identifier[parent] , identifier[acc] )
identifier[parent] = identifier[command]
identifier[acc] . identifier[clear] ()
keyword[yield] ( identifier[parent] , identifier[acc] )
keyword[def] identifier[walk] ( identifier[commands] , identifier[level] = literal[int] ):
keyword[return] [
{
literal[string] : identifier[key] [ literal[string] ],
literal[string] : identifier[walk] ( identifier[subtree] , identifier[level] + literal[int] ),
literal[string] : identifier[key] [ literal[string] ]
}
keyword[for] identifier[key] , identifier[subtree] keyword[in] identifier[subtrees] ( identifier[commands] , identifier[level] )
]
keyword[return] identifier[walk] ( identifier[commands] ) | def from_list(commands):
"""
Given a list of tuples of form (depth, text)
that represents a DFS traversal of a command tree,
returns a dictionary representing command tree.
"""
def subtrees(commands, level):
if not commands:
return # depends on [control=['if'], data=[]]
acc = []
(parent, *commands) = commands
for command in commands:
if command['level'] > level:
acc.append(command) # depends on [control=['if'], data=[]]
else:
yield (parent, acc)
parent = command
acc.clear() # depends on [control=['for'], data=['command']]
yield (parent, acc)
def walk(commands, level=0):
return [{'description': key['description'], 'children': walk(subtree, level + 1), 'id': key['id']} for (key, subtree) in subtrees(commands, level)]
return walk(commands) |
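
A worked example for the tree builder above: each record's 'level' is its DFS depth, and consecutive deeper records become children of the most recent shallower one.

commands = [
    {"id": 1, "level": 0, "description": "file"},
    {"id": 2, "level": 1, "description": "open"},
    {"id": 3, "level": 1, "description": "save"},
    {"id": 4, "level": 0, "description": "edit"},
]
tree = from_list(commands)   # the function above, assumed in scope
print([node["description"] for node in tree])            # ['file', 'edit']
print([c["description"] for c in tree[0]["children"]])   # ['open', 'save']
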
def makePFcFunc(self):
'''
Makes the (linear) consumption function for this period.
Parameters
----------
none
Returns
-------
none
'''
# Calculate human wealth this period (and lower bound of m)
self.hNrmNow = (self.PermGroFac/self.Rfree)*(self.solution_next.hNrm + 1.0)
self.mNrmMin = -self.hNrmNow
# Calculate the (constant) marginal propensity to consume
PatFac = ((self.Rfree*self.DiscFacEff)**(1.0/self.CRRA))/self.Rfree
self.MPC = 1.0/(1.0 + PatFac/self.solution_next.MPCmin)
# Construct the consumption function
self.cFunc = LinearInterp([self.mNrmMin, self.mNrmMin+1.0],[0.0, self.MPC])
# Add two attributes to enable calculation of steady state market resources
self.ExIncNext = 1.0 # Perfect foresight income of 1
self.mNrmMinNow = self.mNrmMin | def function[makePFcFunc, parameter[self]]:
constant[
Makes the (linear) consumption function for this period.
Parameters
----------
none
Returns
-------
none
]
name[self].hNrmNow assign[=] binary_operation[binary_operation[name[self].PermGroFac / name[self].Rfree] * binary_operation[name[self].solution_next.hNrm + constant[1.0]]]
name[self].mNrmMin assign[=] <ast.UnaryOp object at 0x7da204962a70>
variable[PatFac] assign[=] binary_operation[binary_operation[binary_operation[name[self].Rfree * name[self].DiscFacEff] ** binary_operation[constant[1.0] / name[self].CRRA]] / name[self].Rfree]
name[self].MPC assign[=] binary_operation[constant[1.0] / binary_operation[constant[1.0] + binary_operation[name[PatFac] / name[self].solution_next.MPCmin]]]
name[self].cFunc assign[=] call[name[LinearInterp], parameter[list[[<ast.Attribute object at 0x7da204960610>, <ast.BinOp object at 0x7da204963bb0>]], list[[<ast.Constant object at 0x7da1b074f310>, <ast.Attribute object at 0x7da1b074dbd0>]]]]
name[self].ExIncNext assign[=] constant[1.0]
name[self].mNrmMinNow assign[=] name[self].mNrmMin | keyword[def] identifier[makePFcFunc] ( identifier[self] ):
literal[string]
identifier[self] . identifier[hNrmNow] =( identifier[self] . identifier[PermGroFac] / identifier[self] . identifier[Rfree] )*( identifier[self] . identifier[solution_next] . identifier[hNrm] + literal[int] )
identifier[self] . identifier[mNrmMin] =- identifier[self] . identifier[hNrmNow]
identifier[PatFac] =(( identifier[self] . identifier[Rfree] * identifier[self] . identifier[DiscFacEff] )**( literal[int] / identifier[self] . identifier[CRRA] ))/ identifier[self] . identifier[Rfree]
identifier[self] . identifier[MPC] = literal[int] /( literal[int] + identifier[PatFac] / identifier[self] . identifier[solution_next] . identifier[MPCmin] )
identifier[self] . identifier[cFunc] = identifier[LinearInterp] ([ identifier[self] . identifier[mNrmMin] , identifier[self] . identifier[mNrmMin] + literal[int] ],[ literal[int] , identifier[self] . identifier[MPC] ])
identifier[self] . identifier[ExIncNext] = literal[int]
identifier[self] . identifier[mNrmMinNow] = identifier[self] . identifier[mNrmMin] | def makePFcFunc(self):
"""
Makes the (linear) consumption function for this period.
Parameters
----------
none
Returns
-------
none
"""
# Calculate human wealth this period (and lower bound of m)
self.hNrmNow = self.PermGroFac / self.Rfree * (self.solution_next.hNrm + 1.0)
self.mNrmMin = -self.hNrmNow
# Calculate the (constant) marginal propensity to consume
PatFac = (self.Rfree * self.DiscFacEff) ** (1.0 / self.CRRA) / self.Rfree
self.MPC = 1.0 / (1.0 + PatFac / self.solution_next.MPCmin)
# Construct the consumption function
self.cFunc = LinearInterp([self.mNrmMin, self.mNrmMin + 1.0], [0.0, self.MPC])
# Add two attributes to enable calculation of steady state market resources
self.ExIncNext = 1.0 # Perfect foresight income of 1
self.mNrmMinNow = self.mNrmMin |
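
The recursion above is the usual perfect-foresight bookkeeping: human wealth h = (PermGroFac/Rfree)(h' + 1) and an MPC built from the absolute patience factor (Rfree*DiscFacEff)**(1/CRRA)/Rfree. A one-step numeric sketch with assumed parameter values (not taken from any particular calibration):

Rfree, DiscFacEff, CRRA, PermGroFac = 1.03, 0.96, 2.0, 1.01
hNrm_next, MPCmin_next = 0.0, 1.0    # terminal period: no future income, MPC = 1

hNrm = (PermGroFac / Rfree) * (hNrm_next + 1.0)        # human wealth
PatFac = (Rfree * DiscFacEff) ** (1.0 / CRRA) / Rfree  # absolute patience factor
MPC = 1.0 / (1.0 + PatFac / MPCmin_next)
print(round(hNrm, 4), round(MPC, 4))   # 0.9806 0.5088
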
def _handle_event(self, event, *args, **kw):
"""Broadcast an event to the database connections registered."""
for engine in self.engines.values():
if hasattr(engine, event):
getattr(engine, event)(*args, **kw) | def function[_handle_event, parameter[self, event]]:
constant[Broadcast an event to the database connections registered.]
for taget[name[engine]] in starred[call[name[self].engines.values, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[engine], name[event]]] begin[:]
call[call[name[getattr], parameter[name[engine], name[event]]], parameter[<ast.Starred object at 0x7da204345b40>]] | keyword[def] identifier[_handle_event] ( identifier[self] , identifier[event] ,* identifier[args] ,** identifier[kw] ):
literal[string]
keyword[for] identifier[engine] keyword[in] identifier[self] . identifier[engines] . identifier[values] ():
keyword[if] identifier[hasattr] ( identifier[engine] , identifier[event] ):
identifier[getattr] ( identifier[engine] , identifier[event] )(* identifier[args] ,** identifier[kw] ) | def _handle_event(self, event, *args, **kw):
"""Broadcast an event to the database connections registered."""
for engine in self.engines.values():
if hasattr(engine, event):
getattr(engine, event)(*args, **kw) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['engine']] |
def __is_exported(self, name, module):
"""
Returns `True` if and only if `pydoc` considers `name` to be
a public identifier for this module where `name` was defined
in the Python module `module`.
If this module has an `__all__` attribute, then `name` is
considered to be exported if and only if it is a member of
this module's `__all__` list.
If `__all__` is not set, then whether `name` is exported or
not is heuristically determined. Firstly, if `name` starts
with an underscore, it will not be considered exported.
Secondly, if `name` was defined in a module other than this
one, it will not be considered exported. In all other cases,
`name` will be considered exported.
"""
if hasattr(self.module, '__all__'):
return name in self.module.__all__
if not _is_exported(name):
return False
if module is None:
return False
if module is not None and self.module.__name__ != module.__name__:
return name in self._declared_variables
return True | def function[__is_exported, parameter[self, name, module]]:
constant[
Returns `True` if and only if `pydoc` considers `name` to be
a public identifier for this module where `name` was defined
in the Python module `module`.
If this module has an `__all__` attribute, then `name` is
considered to be exported if and only if it is a member of
this module's `__all__` list.
If `__all__` is not set, then whether `name` is exported or
not is heuristically determined. Firstly, if `name` starts
with an underscore, it will not be considered exported.
Secondly, if `name` was defined in a module other than this
one, it will not be considered exported. In all other cases,
`name` will be considered exported.
]
if call[name[hasattr], parameter[name[self].module, constant[__all__]]] begin[:]
return[compare[name[name] in name[self].module.__all__]]
if <ast.UnaryOp object at 0x7da1b149e3b0> begin[:]
return[constant[False]]
if compare[name[module] is constant[None]] begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da1b149e2f0> begin[:]
return[compare[name[name] in name[self]._declared_variables]]
return[constant[True]] | keyword[def] identifier[__is_exported] ( identifier[self] , identifier[name] , identifier[module] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] . identifier[module] , literal[string] ):
keyword[return] identifier[name] keyword[in] identifier[self] . identifier[module] . identifier[__all__]
keyword[if] keyword[not] identifier[_is_exported] ( identifier[name] ):
keyword[return] keyword[False]
keyword[if] identifier[module] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[if] identifier[module] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[module] . identifier[__name__] != identifier[module] . identifier[__name__] :
keyword[return] identifier[name] keyword[in] identifier[self] . identifier[_declared_variables]
keyword[return] keyword[True] | def __is_exported(self, name, module):
"""
Returns `True` if and only if `pydoc` considers `name` to be
a public identifier for this module where `name` was defined
in the Python module `module`.
If this module has an `__all__` attribute, then `name` is
considered to be exported if and only if it is a member of
this module's `__all__` list.
If `__all__` is not set, then whether `name` is exported or
not is heuristically determined. Firstly, if `name` starts
with an underscore, it will not be considered exported.
Secondly, if `name` was defined in a module other than this
one, it will not be considered exported. In all other cases,
`name` will be considered exported.
"""
if hasattr(self.module, '__all__'):
return name in self.module.__all__ # depends on [control=['if'], data=[]]
if not _is_exported(name):
return False # depends on [control=['if'], data=[]]
if module is None:
return False # depends on [control=['if'], data=[]]
if module is not None and self.module.__name__ != module.__name__:
return name in self._declared_variables # depends on [control=['if'], data=[]]
return True |
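
The fallback branch above defers to an _is_exported helper that the snippet does not show; in pydoc-style tooling that check is just the leading-underscore convention, so a sketch under that assumption:

def _is_exported(name):
    # assumed helper body: public iff the name has no leading underscore
    return not name.startswith("_")

for name in ("load", "_cache", "__all__"):
    print(name, _is_exported(name))   # load True, _cache False, __all__ False
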
def _save_results(options, module, core_results, fit_results):
"""
Save results of analysis as tables and figures
Parameters
----------
options : dict
Option names and values for analysis
module : str
Module that contained function used to generate core_results
core_results : dataframe, array, value, list of tuples
Results of main analysis
fit_results : list or None
Results of comparing emp analysis to models, None if not applicable
"""
logging.info("Saving all results")
# Use custom plot format
mpl.rcParams.update(misc.rcparams.ggplot_rc)
# Make run directory
os.makedirs(options['run_dir'])
# Write core results
_write_core_tables(options, module, core_results)
# Write additional results if analysis from emp
if module == 'emp':
_write_subset_index_file(options, core_results)
# Write model/data comparison if models were given
if fit_results:
models = options['models'].replace(' ','').split(';')
for i, core_result in enumerate(core_results):
_write_fitted_params(i, models, options, fit_results)
_write_test_statistics(i, models, options, fit_results)
_write_comparison_plot_table(i, models, options,
core_results, fit_results) | def function[_save_results, parameter[options, module, core_results, fit_results]]:
constant[
Save results of analysis as tables and figures
Parameters
----------
options : dict
Option names and values for analysis
module : str
Module that contained function used to generate core_results
core_results : dataframe, array, value, list of tuples
Results of main analysis
fit_results : list or None
Results of comparing emp analysis to models, None if not applicable
]
call[name[logging].info, parameter[constant[Saving all results]]]
call[name[mpl].rcParams.update, parameter[name[misc].rcparams.ggplot_rc]]
call[name[os].makedirs, parameter[call[name[options]][constant[run_dir]]]]
call[name[_write_core_tables], parameter[name[options], name[module], name[core_results]]]
if compare[name[module] equal[==] constant[emp]] begin[:]
call[name[_write_subset_index_file], parameter[name[options], name[core_results]]]
if name[fit_results] begin[:]
variable[models] assign[=] call[call[call[name[options]][constant[models]].replace, parameter[constant[ ], constant[]]].split, parameter[constant[;]]]
for taget[tuple[[<ast.Name object at 0x7da204621cf0>, <ast.Name object at 0x7da204620070>]]] in starred[call[name[enumerate], parameter[name[core_results]]]] begin[:]
call[name[_write_fitted_params], parameter[name[i], name[models], name[options], name[fit_results]]]
call[name[_write_test_statistics], parameter[name[i], name[models], name[options], name[fit_results]]]
call[name[_write_comparison_plot_table], parameter[name[i], name[models], name[options], name[core_results], name[fit_results]]] | keyword[def] identifier[_save_results] ( identifier[options] , identifier[module] , identifier[core_results] , identifier[fit_results] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] )
identifier[mpl] . identifier[rcParams] . identifier[update] ( identifier[misc] . identifier[rcparams] . identifier[ggplot_rc] )
identifier[os] . identifier[makedirs] ( identifier[options] [ literal[string] ])
identifier[_write_core_tables] ( identifier[options] , identifier[module] , identifier[core_results] )
keyword[if] identifier[module] == literal[string] :
identifier[_write_subset_index_file] ( identifier[options] , identifier[core_results] )
keyword[if] identifier[fit_results] :
identifier[models] = identifier[options] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )
keyword[for] identifier[i] , identifier[core_result] keyword[in] identifier[enumerate] ( identifier[core_results] ):
identifier[_write_fitted_params] ( identifier[i] , identifier[models] , identifier[options] , identifier[fit_results] )
identifier[_write_test_statistics] ( identifier[i] , identifier[models] , identifier[options] , identifier[fit_results] )
identifier[_write_comparison_plot_table] ( identifier[i] , identifier[models] , identifier[options] ,
identifier[core_results] , identifier[fit_results] ) | def _save_results(options, module, core_results, fit_results):
"""
Save results of analysis as tables and figures
Parameters
----------
options : dict
Option names and values for analysis
module : str
Module that contained function used to generate core_results
core_results : dataframe, array, value, list of tuples
Results of main analysis
fit_results : list or None
Results of comparing emp analysis to models, None if not applicable
"""
logging.info('Saving all results')
# Use custom plot format
mpl.rcParams.update(misc.rcparams.ggplot_rc)
# Make run directory
os.makedirs(options['run_dir'])
# Write core results
_write_core_tables(options, module, core_results)
# Write additional results if analysis from emp
if module == 'emp':
_write_subset_index_file(options, core_results) # depends on [control=['if'], data=[]]
# Write model/data comparison if models were given
if fit_results:
models = options['models'].replace(' ', '').split(';')
for (i, core_result) in enumerate(core_results):
_write_fitted_params(i, models, options, fit_results)
_write_test_statistics(i, models, options, fit_results)
_write_comparison_plot_table(i, models, options, core_results, fit_results) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
def __read_stored_routine_metadata(self):
"""
Reads the metadata of stored routines from the metadata file.
"""
if os.path.isfile(self._pystratum_metadata_filename):
with open(self._pystratum_metadata_filename, 'r') as file:
self._pystratum_metadata = json.load(file) | def function[__read_stored_routine_metadata, parameter[self]]:
constant[
Reads the metadata of stored routines from the metadata file.
]
if call[name[os].path.isfile, parameter[name[self]._pystratum_metadata_filename]] begin[:]
with call[name[open], parameter[name[self]._pystratum_metadata_filename, constant[r]]] begin[:]
name[self]._pystratum_metadata assign[=] call[name[json].load, parameter[name[file]]] | keyword[def] identifier[__read_stored_routine_metadata] ( identifier[self] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[_pystratum_metadata_filename] ):
keyword[with] identifier[open] ( identifier[self] . identifier[_pystratum_metadata_filename] , literal[string] ) keyword[as] identifier[file] :
identifier[self] . identifier[_pystratum_metadata] = identifier[json] . identifier[load] ( identifier[file] ) | def __read_stored_routine_metadata(self):
"""
Reads the metadata of stored routines from the metadata file.
"""
if os.path.isfile(self._pystratum_metadata_filename):
with open(self._pystratum_metadata_filename, 'r') as file:
self._pystratum_metadata = json.load(file) # depends on [control=['with'], data=['file']] # depends on [control=['if'], data=[]] |
async def logout(self, request):
"""
Simple handler for logout
"""
if "Authorization" not in request.headers:
msg = "Auth header is not present, can not destroy token"
raise JsonValidaitonError(msg)
response = json_response()
await forget(request, response)
return response | <ast.AsyncFunctionDef object at 0x7da1b12947c0> | keyword[async] keyword[def] identifier[logout] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[request] . identifier[headers] :
identifier[msg] = literal[string]
keyword[raise] identifier[JsonValidaitonError] ( identifier[msg] )
identifier[response] = identifier[json_response] ()
keyword[await] identifier[forget] ( identifier[request] , identifier[response] )
keyword[return] identifier[response] | async def logout(self, request):
"""
Simple handler for logout
"""
if 'Authorization' not in request.headers:
msg = 'Auth header is not present, can not destroy token'
raise JsonValidaitonError(msg) # depends on [control=['if'], data=[]]
response = json_response()
await forget(request, response)
return response |
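
A hedged, standalone rendering of the handler above using plain aiohttp pieces; JsonValidaitonError is project-local, so this sketch raises aiohttp's HTTPBadRequest instead, and the empty json_response() body is an assumption.

from aiohttp import web
from aiohttp_security import forget   # assumed source of the forget() used above

async def logout(request):
    if "Authorization" not in request.headers:
        raise web.HTTPBadRequest(text="Auth header is not present")
    response = web.json_response({})   # stand-in for the local json_response()
    await forget(request, response)    # lets the auth policy clear its ticket
    return response
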
def parse_field(setting, field_name, default):
"""
Extract result from single-value or dict-type setting like fallback_values.
"""
if isinstance(setting, dict):
return setting.get(field_name, default)
else:
return setting | def function[parse_field, parameter[setting, field_name, default]]:
constant[
Extract result from single-value or dict-type setting like fallback_values.
]
if call[name[isinstance], parameter[name[setting], name[dict]]] begin[:]
return[call[name[setting].get, parameter[name[field_name], name[default]]]] | keyword[def] identifier[parse_field] ( identifier[setting] , identifier[field_name] , identifier[default] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[setting] , identifier[dict] ):
keyword[return] identifier[setting] . identifier[get] ( identifier[field_name] , identifier[default] )
keyword[else] :
keyword[return] identifier[setting] | def parse_field(setting, field_name, default):
"""
Extract result from single-value or dict-type setting like fallback_values.
"""
if isinstance(setting, dict):
return setting.get(field_name, default) # depends on [control=['if'], data=[]]
else:
return setting |
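
parse_field above lets a setting be either one fallback applied to every field or a per-field mapping; both shapes in action:

fallback_values = {"title": "Untitled", "author": "anonymous"}
# parse_field as defined above, assumed in scope
print(parse_field(fallback_values, "title", None))   # 'Untitled'
print(parse_field(fallback_values, "isbn", "n/a"))   # 'n/a' (missing key -> default)
print(parse_field("same-for-all", "title", None))    # 'same-for-all' (scalar setting)
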
def _find_hstreaming():
"""Finds the whole path to the hadoop streaming jar.
If the environmental var HADOOP_HOME is specified, then start the search
from there.
Returns:
Full path to the hadoop streaming jar if found, else return an empty
string.
"""
global WARNED_HADOOP_HOME, HADOOP_STREAMING_PATH_CACHE
if HADOOP_STREAMING_PATH_CACHE:
return HADOOP_STREAMING_PATH_CACHE
try:
search_root = os.environ['HADOOP_HOME']
except KeyError:
search_root = '/'
cmd = 'find %s -name hadoop*streaming*.jar' % (search_root)
p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
HADOOP_STREAMING_PATH_CACHE = p.communicate()[0].split('\n')[0]
if search_root == '/' and not WARNED_HADOOP_HOME:
WARNED_HADOOP_HOME = True
hadoop_home = HADOOP_STREAMING_PATH_CACHE[:HADOOP_STREAMING_PATH_CACHE.rfind('/contrib/')]
logging.warn('Set the HADOOP_HOME environmental variable to your hadoop path to improve performance. Put the following [export HADOOP_HOME="%s"] in ~/.bashrc' % hadoop_home)
return HADOOP_STREAMING_PATH_CACHE | def function[_find_hstreaming, parameter[]]:
constant[Finds the whole path to the hadoop streaming jar.
If the environmental var HADOOP_HOME is specified, then start the search
from there.
Returns:
Full path to the hadoop streaming jar if found, else return an empty
string.
]
<ast.Global object at 0x7da1b0e38bb0>
if name[HADOOP_STREAMING_PATH_CACHE] begin[:]
return[name[HADOOP_STREAMING_PATH_CACHE]]
<ast.Try object at 0x7da1b0e38b50>
variable[cmd] assign[=] binary_operation[constant[find %s -name hadoop*streaming*.jar] <ast.Mod object at 0x7da2590d6920> name[search_root]]
variable[p] assign[=] call[name[subprocess].Popen, parameter[call[name[cmd].split, parameter[]]]]
variable[HADOOP_STREAMING_PATH_CACHE] assign[=] call[call[call[call[name[p].communicate, parameter[]]][constant[0]].split, parameter[constant[
]]]][constant[0]]
if <ast.BoolOp object at 0x7da1b0e3baf0> begin[:]
variable[WARNED_HADOOP_HOME] assign[=] constant[True]
variable[hadoop_home] assign[=] call[name[HADOOP_STREAMING_PATH_CACHE]][<ast.Slice object at 0x7da1b0ed4820>]
call[name[logging].warn, parameter[binary_operation[constant[Set the HADOOP_HOME environmental variable to your hadoop path to improve performance. Put the following [export HADOOP_HOME="%s"] in ~/.bashrc] <ast.Mod object at 0x7da2590d6920> name[hadoop_home]]]]
return[name[HADOOP_STREAMING_PATH_CACHE]] | keyword[def] identifier[_find_hstreaming] ():
literal[string]
keyword[global] identifier[WARNED_HADOOP_HOME] , identifier[HADOOP_STREAMING_PATH_CACHE]
keyword[if] identifier[HADOOP_STREAMING_PATH_CACHE] :
keyword[return] identifier[HADOOP_STREAMING_PATH_CACHE]
keyword[try] :
identifier[search_root] = identifier[os] . identifier[environ] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[search_root] = literal[string]
identifier[cmd] = literal[string] %( identifier[search_root] )
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] . identifier[split] (), identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[HADOOP_STREAMING_PATH_CACHE] = identifier[p] . identifier[communicate] ()[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[search_root] == literal[string] keyword[and] keyword[not] identifier[WARNED_HADOOP_HOME] :
identifier[WARNED_HADOOP_HOME] = keyword[True]
identifier[hadoop_home] = identifier[HADOOP_STREAMING_PATH_CACHE] [: identifier[HADOOP_STREAMING_PATH_CACHE] . identifier[rfind] ( literal[string] )]
identifier[logging] . identifier[warn] ( literal[string] % identifier[hadoop_home] )
keyword[return] identifier[HADOOP_STREAMING_PATH_CACHE] | def _find_hstreaming():
"""Finds the whole path to the hadoop streaming jar.
If the environmental var HADOOP_HOME is specified, then start the search
from there.
Returns:
Full path to the hadoop streaming jar if found, else return an empty
string.
"""
global WARNED_HADOOP_HOME, HADOOP_STREAMING_PATH_CACHE
if HADOOP_STREAMING_PATH_CACHE:
return HADOOP_STREAMING_PATH_CACHE # depends on [control=['if'], data=[]]
try:
search_root = os.environ['HADOOP_HOME'] # depends on [control=['try'], data=[]]
except KeyError:
search_root = '/' # depends on [control=['except'], data=[]]
cmd = 'find %s -name hadoop*streaming*.jar' % search_root
p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
HADOOP_STREAMING_PATH_CACHE = p.communicate()[0].split('\n')[0]
if search_root == '/' and (not WARNED_HADOOP_HOME):
WARNED_HADOOP_HOME = True
hadoop_home = HADOOP_STREAMING_PATH_CACHE[:HADOOP_STREAMING_PATH_CACHE.rfind('/contrib/')]
logging.warn('Set the HADOOP_HOME environmental variable to your hadoop path to improve performance. Put the following [export HADOOP_HOME="%s"] in ~/.bashrc' % hadoop_home) # depends on [control=['if'], data=[]]
return HADOOP_STREAMING_PATH_CACHE |
def returns(ts, **kwargs):
'''
Compute returns on the given period
    @param ts : time series to process
    @param kwargs.type: gross or net returns (defaults to 'net')
    @param delta : period between two computed returns
    @param start : with end, returns the single return over that elapsed time
    @param period : number of lines/periods to shift when computing each return
    @param end : end of the elapsed time (defaults to today)
@param cumulative: compute cumulative returns
'''
returns_type = kwargs.get('type', 'net')
cumulative = kwargs.get('cumulative', False)
if returns_type == 'net':
relative = 0
else:
relative = 1 # gross
start = kwargs.get('start', None)
end = kwargs.get('end', dt.datetime.today())
#delta = kwargs.get('delta', None)
period = kwargs.get('period', 1)
if isinstance(start, dt.datetime):
log.debug('{} / {} -1'.format(ts[end], ts[start]))
return ts[end] / ts[start] - 1 + relative
#elif isinstance(delta, pd.DateOffset) or isinstance(delta, dt.timedelta):
#FIXME timezone problem
#FIXME reIndexDF is deprecated
#ts = reIndexDF(ts, delta=delta)
#period = 1
rets_df = ts / ts.shift(period) - 1 + relative
if cumulative:
return rets_df.cumprod()
return rets_df[1:] | def function[returns, parameter[ts]]:
constant[
Compute returns on the given period
    @param ts : time series to process
    @param kwargs.type: gross or net returns (defaults to 'net')
    @param delta : period between two computed returns
    @param start : with end, returns the single return over that elapsed time
    @param period : number of lines/periods to shift when computing each return
    @param end : end of the elapsed time (defaults to today)
@param cumulative: compute cumulative returns
]
variable[returns_type] assign[=] call[name[kwargs].get, parameter[constant[type], constant[net]]]
variable[cumulative] assign[=] call[name[kwargs].get, parameter[constant[cumulative], constant[False]]]
if compare[name[returns_type] equal[==] constant[net]] begin[:]
variable[relative] assign[=] constant[0]
variable[start] assign[=] call[name[kwargs].get, parameter[constant[start], constant[None]]]
variable[end] assign[=] call[name[kwargs].get, parameter[constant[end], call[name[dt].datetime.today, parameter[]]]]
variable[period] assign[=] call[name[kwargs].get, parameter[constant[period], constant[1]]]
if call[name[isinstance], parameter[name[start], name[dt].datetime]] begin[:]
call[name[log].debug, parameter[call[constant[{} / {} -1].format, parameter[call[name[ts]][name[end]], call[name[ts]][name[start]]]]]]
return[binary_operation[binary_operation[binary_operation[call[name[ts]][name[end]] / call[name[ts]][name[start]]] - constant[1]] + name[relative]]]
variable[rets_df] assign[=] binary_operation[binary_operation[binary_operation[name[ts] / call[name[ts].shift, parameter[name[period]]]] - constant[1]] + name[relative]]
if name[cumulative] begin[:]
return[call[name[rets_df].cumprod, parameter[]]]
return[call[name[rets_df]][<ast.Slice object at 0x7da1b0d1ab60>]] | keyword[def] identifier[returns] ( identifier[ts] ,** identifier[kwargs] ):
literal[string]
identifier[returns_type] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[cumulative] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
keyword[if] identifier[returns_type] == literal[string] :
identifier[relative] = literal[int]
keyword[else] :
identifier[relative] = literal[int]
identifier[start] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[end] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[dt] . identifier[datetime] . identifier[today] ())
identifier[period] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[isinstance] ( identifier[start] , identifier[dt] . identifier[datetime] ):
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[ts] [ identifier[end] ], identifier[ts] [ identifier[start] ]))
keyword[return] identifier[ts] [ identifier[end] ]/ identifier[ts] [ identifier[start] ]- literal[int] + identifier[relative]
identifier[rets_df] = identifier[ts] / identifier[ts] . identifier[shift] ( identifier[period] )- literal[int] + identifier[relative]
keyword[if] identifier[cumulative] :
keyword[return] identifier[rets_df] . identifier[cumprod] ()
keyword[return] identifier[rets_df] [ literal[int] :] | def returns(ts, **kwargs):
"""
Compute returns on the given period
    @param ts : time series to process
    @param kwargs.type: gross or net returns (defaults to 'net')
    @param delta : period between two computed returns
    @param start : with end, returns the single return over that elapsed time
    @param period : number of lines/periods to shift when computing each return
    @param end : end of the elapsed time (defaults to today)
@param cumulative: compute cumulative returns
"""
returns_type = kwargs.get('type', 'net')
cumulative = kwargs.get('cumulative', False)
if returns_type == 'net':
relative = 0 # depends on [control=['if'], data=[]]
else:
relative = 1 # gross
start = kwargs.get('start', None)
end = kwargs.get('end', dt.datetime.today())
#delta = kwargs.get('delta', None)
period = kwargs.get('period', 1)
if isinstance(start, dt.datetime):
log.debug('{} / {} -1'.format(ts[end], ts[start]))
return ts[end] / ts[start] - 1 + relative # depends on [control=['if'], data=[]]
#elif isinstance(delta, pd.DateOffset) or isinstance(delta, dt.timedelta):
#FIXME timezone problem
#FIXME reIndexDF is deprecated
#ts = reIndexDF(ts, delta=delta)
#period = 1
rets_df = ts / ts.shift(period) - 1 + relative
if cumulative:
return rets_df.cumprod() # depends on [control=['if'], data=[]]
return rets_df[1:] |
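A usage sketch against a pandas Series; the prices are invented and the module-level dt/log aliases referenced by the record are assumed importable:

import pandas as pd

prices = pd.Series([100.0, 102.0, 101.0, 104.0])
net = returns(prices)                                 # (p_t / p_{t-1}) - 1
gross = returns(prices, type='gross')                 # p_t / p_{t-1}
cum = returns(prices, type='gross', cumulative=True)  # running product of gross returns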
def make_eventlogitem_log(log, condition='is', negate=False, preserve_case=False):
"""
Create a node for EventLogItem/log
    :return: An IndicatorItem represented as an Element node
"""
document = 'EventLogItem'
search = 'EventLogItem/log'
content_type = 'string'
content = log
ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content,
negate=negate, preserve_case=preserve_case)
return ii_node | def function[make_eventlogitem_log, parameter[log, condition, negate, preserve_case]]:
constant[
Create a node for EventLogItem/log
    :return: An IndicatorItem represented as an Element node
]
variable[document] assign[=] constant[EventLogItem]
variable[search] assign[=] constant[EventLogItem/log]
variable[content_type] assign[=] constant[string]
variable[content] assign[=] name[log]
variable[ii_node] assign[=] call[name[ioc_api].make_indicatoritem_node, parameter[name[condition], name[document], name[search], name[content_type], name[content]]]
return[name[ii_node]] | keyword[def] identifier[make_eventlogitem_log] ( identifier[log] , identifier[condition] = literal[string] , identifier[negate] = keyword[False] , identifier[preserve_case] = keyword[False] ):
literal[string]
identifier[document] = literal[string]
identifier[search] = literal[string]
identifier[content_type] = literal[string]
identifier[content] = identifier[log]
identifier[ii_node] = identifier[ioc_api] . identifier[make_indicatoritem_node] ( identifier[condition] , identifier[document] , identifier[search] , identifier[content_type] , identifier[content] ,
identifier[negate] = identifier[negate] , identifier[preserve_case] = identifier[preserve_case] )
keyword[return] identifier[ii_node] | def make_eventlogitem_log(log, condition='is', negate=False, preserve_case=False):
"""
Create a node for EventLogItem/log
    :return: An IndicatorItem represented as an Element node
"""
document = 'EventLogItem'
search = 'EventLogItem/log'
content_type = 'string'
content = log
ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content, negate=negate, preserve_case=preserve_case)
return ii_node |
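A hedged call sketch; it assumes the ioc_api module referenced above is importable:

# Matches EventLogItem/log entries containing the given text.
node = make_eventlogitem_log('Logon failure', condition='contains')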
def connect_combo_data(instance, prop, widget):
"""
Connect a callback property with a QComboBox widget based on the userData.
Parameters
----------
instance : object
The class instance that the callback property is attached to
prop : str
The name of the callback property
widget : QComboBox
The combo box to connect.
See Also
--------
connect_combo_text: connect a callback property with a QComboBox widget based on the text.
"""
def update_widget(value):
try:
idx = _find_combo_data(widget, value)
except ValueError:
if value is None:
idx = -1
else:
raise
widget.setCurrentIndex(idx)
def update_prop(idx):
if idx == -1:
setattr(instance, prop, None)
else:
setattr(instance, prop, widget.itemData(idx))
add_callback(instance, prop, update_widget)
widget.currentIndexChanged.connect(update_prop)
update_widget(getattr(instance, prop)) | def function[connect_combo_data, parameter[instance, prop, widget]]:
constant[
Connect a callback property with a QComboBox widget based on the userData.
Parameters
----------
instance : object
The class instance that the callback property is attached to
prop : str
The name of the callback property
widget : QComboBox
The combo box to connect.
See Also
--------
connect_combo_text: connect a callback property with a QComboBox widget based on the text.
]
def function[update_widget, parameter[value]]:
<ast.Try object at 0x7da18ede7af0>
call[name[widget].setCurrentIndex, parameter[name[idx]]]
def function[update_prop, parameter[idx]]:
if compare[name[idx] equal[==] <ast.UnaryOp object at 0x7da18f00cf40>] begin[:]
call[name[setattr], parameter[name[instance], name[prop], constant[None]]]
call[name[add_callback], parameter[name[instance], name[prop], name[update_widget]]]
call[name[widget].currentIndexChanged.connect, parameter[name[update_prop]]]
call[name[update_widget], parameter[call[name[getattr], parameter[name[instance], name[prop]]]]] | keyword[def] identifier[connect_combo_data] ( identifier[instance] , identifier[prop] , identifier[widget] ):
literal[string]
keyword[def] identifier[update_widget] ( identifier[value] ):
keyword[try] :
identifier[idx] = identifier[_find_combo_data] ( identifier[widget] , identifier[value] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[idx] =- literal[int]
keyword[else] :
keyword[raise]
identifier[widget] . identifier[setCurrentIndex] ( identifier[idx] )
keyword[def] identifier[update_prop] ( identifier[idx] ):
keyword[if] identifier[idx] ==- literal[int] :
identifier[setattr] ( identifier[instance] , identifier[prop] , keyword[None] )
keyword[else] :
identifier[setattr] ( identifier[instance] , identifier[prop] , identifier[widget] . identifier[itemData] ( identifier[idx] ))
identifier[add_callback] ( identifier[instance] , identifier[prop] , identifier[update_widget] )
identifier[widget] . identifier[currentIndexChanged] . identifier[connect] ( identifier[update_prop] )
identifier[update_widget] ( identifier[getattr] ( identifier[instance] , identifier[prop] )) | def connect_combo_data(instance, prop, widget):
"""
Connect a callback property with a QComboBox widget based on the userData.
Parameters
----------
instance : object
The class instance that the callback property is attached to
prop : str
The name of the callback property
widget : QComboBox
The combo box to connect.
See Also
--------
connect_combo_text: connect a callback property with a QComboBox widget based on the text.
"""
def update_widget(value):
try:
idx = _find_combo_data(widget, value) # depends on [control=['try'], data=[]]
except ValueError:
if value is None:
idx = -1 # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=[]]
widget.setCurrentIndex(idx)
def update_prop(idx):
if idx == -1:
setattr(instance, prop, None) # depends on [control=['if'], data=[]]
else:
setattr(instance, prop, widget.itemData(idx))
add_callback(instance, prop, update_widget)
widget.currentIndexChanged.connect(update_prop)
update_widget(getattr(instance, prop)) |
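A sketch of the glue pattern; State is an invented callback-property holder and the Qt import path is an assumption:

from qtpy.QtWidgets import QComboBox

combo = QComboBox()
combo.addItem('Fast', 'fast')          # display text, userData
combo.addItem('Accurate', 'accurate')

state = State(mode='fast')             # hypothetical class exposing a callback property
connect_combo_data(state, 'mode', combo)
# Selecting 'Accurate' now sets state.mode to 'accurate', and vice versa.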
def load_stream(self, stream):
"""
Deserialize ArrowRecordBatches to an Arrow table and return as a list of pandas.Series.
"""
batches = super(ArrowStreamPandasSerializer, self).load_stream(stream)
import pyarrow as pa
for batch in batches:
yield [self.arrow_to_pandas(c) for c in pa.Table.from_batches([batch]).itercolumns()] | def function[load_stream, parameter[self, stream]]:
constant[
Deserialize ArrowRecordBatches to an Arrow table and return as a list of pandas.Series.
]
variable[batches] assign[=] call[call[name[super], parameter[name[ArrowStreamPandasSerializer], name[self]]].load_stream, parameter[name[stream]]]
import module[pyarrow] as alias[pa]
for taget[name[batch]] in starred[name[batches]] begin[:]
<ast.Yield object at 0x7da20e954ee0> | keyword[def] identifier[load_stream] ( identifier[self] , identifier[stream] ):
literal[string]
identifier[batches] = identifier[super] ( identifier[ArrowStreamPandasSerializer] , identifier[self] ). identifier[load_stream] ( identifier[stream] )
keyword[import] identifier[pyarrow] keyword[as] identifier[pa]
keyword[for] identifier[batch] keyword[in] identifier[batches] :
keyword[yield] [ identifier[self] . identifier[arrow_to_pandas] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[pa] . identifier[Table] . identifier[from_batches] ([ identifier[batch] ]). identifier[itercolumns] ()] | def load_stream(self, stream):
"""
Deserialize ArrowRecordBatches to an Arrow table and return as a list of pandas.Series.
"""
batches = super(ArrowStreamPandasSerializer, self).load_stream(stream)
import pyarrow as pa
for batch in batches:
yield [self.arrow_to_pandas(c) for c in pa.Table.from_batches([batch]).itercolumns()] # depends on [control=['for'], data=['batch']] |
def initialize_schema(connection):
"""Initializes the TensorBoard sqlite schema using the given connection.
Args:
connection: A sqlite DB connection.
"""
cursor = connection.cursor()
cursor.execute("PRAGMA application_id={}".format(_TENSORBOARD_APPLICATION_ID))
cursor.execute("PRAGMA user_version={}".format(_TENSORBOARD_USER_VERSION))
with connection:
for statement in _SCHEMA_STATEMENTS:
lines = statement.strip('\n').split('\n')
message = lines[0] + ('...' if len(lines) > 1 else '')
logger.debug('Running DB init statement: %s', message)
cursor.execute(statement) | def function[initialize_schema, parameter[connection]]:
constant[Initializes the TensorBoard sqlite schema using the given connection.
Args:
connection: A sqlite DB connection.
]
variable[cursor] assign[=] call[name[connection].cursor, parameter[]]
call[name[cursor].execute, parameter[call[constant[PRAGMA application_id={}].format, parameter[name[_TENSORBOARD_APPLICATION_ID]]]]]
call[name[cursor].execute, parameter[call[constant[PRAGMA user_version={}].format, parameter[name[_TENSORBOARD_USER_VERSION]]]]]
with name[connection] begin[:]
for taget[name[statement]] in starred[name[_SCHEMA_STATEMENTS]] begin[:]
variable[lines] assign[=] call[call[name[statement].strip, parameter[constant[
]]].split, parameter[constant[
]]]
variable[message] assign[=] binary_operation[call[name[lines]][constant[0]] + <ast.IfExp object at 0x7da20e9b0820>]
call[name[logger].debug, parameter[constant[Running DB init statement: %s], name[message]]]
call[name[cursor].execute, parameter[name[statement]]] | keyword[def] identifier[initialize_schema] ( identifier[connection] ):
literal[string]
identifier[cursor] = identifier[connection] . identifier[cursor] ()
identifier[cursor] . identifier[execute] ( literal[string] . identifier[format] ( identifier[_TENSORBOARD_APPLICATION_ID] ))
identifier[cursor] . identifier[execute] ( literal[string] . identifier[format] ( identifier[_TENSORBOARD_USER_VERSION] ))
keyword[with] identifier[connection] :
keyword[for] identifier[statement] keyword[in] identifier[_SCHEMA_STATEMENTS] :
identifier[lines] = identifier[statement] . identifier[strip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[message] = identifier[lines] [ literal[int] ]+( literal[string] keyword[if] identifier[len] ( identifier[lines] )> literal[int] keyword[else] literal[string] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[message] )
identifier[cursor] . identifier[execute] ( identifier[statement] ) | def initialize_schema(connection):
"""Initializes the TensorBoard sqlite schema using the given connection.
Args:
connection: A sqlite DB connection.
"""
cursor = connection.cursor()
cursor.execute('PRAGMA application_id={}'.format(_TENSORBOARD_APPLICATION_ID))
cursor.execute('PRAGMA user_version={}'.format(_TENSORBOARD_USER_VERSION))
with connection:
for statement in _SCHEMA_STATEMENTS:
lines = statement.strip('\n').split('\n')
message = lines[0] + ('...' if len(lines) > 1 else '')
logger.debug('Running DB init statement: %s', message)
cursor.execute(statement) # depends on [control=['for'], data=['statement']] # depends on [control=['with'], data=[]] |
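Any DB-API connection works; a sketch with the standard sqlite3 module (the schema globals are assumed to live alongside the function):

import sqlite3

connection = sqlite3.connect('tensorboard.db')
initialize_schema(connection)   # sets the PRAGMAs, then runs each schema statement
connection.close()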
def predict_noiseless(self, Xnew, full_cov=False, Y_metadata=None, kern=None):
"""
Convenience function to predict the underlying function of the GP (often
referred to as f) without adding the likelihood variance on the
prediction function.
This is most likely what you want to use for your predictions.
:param Xnew: The points at which to make a prediction
:type Xnew: np.ndarray (Nnew x self.input_dim)
:param full_cov: whether to return the full covariance matrix, or just
the diagonal
:type full_cov: bool
:param Y_metadata: metadata about the predicting point to pass to the likelihood
:param kern: The kernel to use for prediction (defaults to the model
kern). this is useful for examining e.g. subprocesses.
:returns: (mean, var):
mean: posterior mean, a Numpy array, Nnew x self.input_dim
var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise
If full_cov and self.input_dim > 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew.
This is to allow for different normalizations of the output dimensions.
        Note: If you want the predictive quantiles (e.g. 95% confidence interval) use :py:func:`~GPy.core.gp.GP.predict_quantiles`.
"""
return self.predict(Xnew, full_cov, Y_metadata, kern, None, False) | def function[predict_noiseless, parameter[self, Xnew, full_cov, Y_metadata, kern]]:
constant[
Convenience function to predict the underlying function of the GP (often
referred to as f) without adding the likelihood variance on the
prediction function.
This is most likely what you want to use for your predictions.
:param Xnew: The points at which to make a prediction
:type Xnew: np.ndarray (Nnew x self.input_dim)
:param full_cov: whether to return the full covariance matrix, or just
the diagonal
:type full_cov: bool
:param Y_metadata: metadata about the predicting point to pass to the likelihood
:param kern: The kernel to use for prediction (defaults to the model
kern). this is useful for examining e.g. subprocesses.
:returns: (mean, var):
mean: posterior mean, a Numpy array, Nnew x self.input_dim
var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise
If full_cov and self.input_dim > 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew.
This is to allow for different normalizations of the output dimensions.
        Note: If you want the predictive quantiles (e.g. 95% confidence interval) use :py:func:`~GPy.core.gp.GP.predict_quantiles`.
]
return[call[name[self].predict, parameter[name[Xnew], name[full_cov], name[Y_metadata], name[kern], constant[None], constant[False]]]] | keyword[def] identifier[predict_noiseless] ( identifier[self] , identifier[Xnew] , identifier[full_cov] = keyword[False] , identifier[Y_metadata] = keyword[None] , identifier[kern] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[predict] ( identifier[Xnew] , identifier[full_cov] , identifier[Y_metadata] , identifier[kern] , keyword[None] , keyword[False] ) | def predict_noiseless(self, Xnew, full_cov=False, Y_metadata=None, kern=None):
"""
Convenience function to predict the underlying function of the GP (often
referred to as f) without adding the likelihood variance on the
prediction function.
This is most likely what you want to use for your predictions.
:param Xnew: The points at which to make a prediction
:type Xnew: np.ndarray (Nnew x self.input_dim)
:param full_cov: whether to return the full covariance matrix, or just
the diagonal
:type full_cov: bool
:param Y_metadata: metadata about the predicting point to pass to the likelihood
:param kern: The kernel to use for prediction (defaults to the model
kern). this is useful for examining e.g. subprocesses.
:returns: (mean, var):
mean: posterior mean, a Numpy array, Nnew x self.input_dim
var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise
If full_cov and self.input_dim > 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew.
This is to allow for different normalizations of the output dimensions.
        Note: If you want the predictive quantiles (e.g. 95% confidence interval) use :py:func:`~GPy.core.gp.GP.predict_quantiles`.
"""
return self.predict(Xnew, full_cov, Y_metadata, kern, None, False) |
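A usage sketch with a fitted GPy regression model (training data invented):

import numpy as np
import GPy

X = np.random.rand(20, 1)
Y = np.sin(6 * X) + 0.05 * np.random.randn(20, 1)
model = GPy.models.GPRegression(X, Y)
model.optimize()

Xnew = np.linspace(0, 1, 50)[:, None]
mean, var = model.predict_noiseless(Xnew)   # latent f; no likelihood variance added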
def _raise_error_if_column_exists(dataset, column_name = 'dataset',
dataset_variable_name = 'dataset',
column_name_error_message_name = 'column_name'):
"""
    Check that a column exists in an SFrame, raising a ToolkitError with a descriptive message if it does not.
"""
err_msg = 'The SFrame {0} must contain the column {1}.'.format(
dataset_variable_name,
column_name_error_message_name)
if column_name not in dataset.column_names():
raise ToolkitError(str(err_msg)) | def function[_raise_error_if_column_exists, parameter[dataset, column_name, dataset_variable_name, column_name_error_message_name]]:
constant[
    Check that a column exists in an SFrame, raising a ToolkitError with a descriptive message if it does not.
]
variable[err_msg] assign[=] call[constant[The SFrame {0} must contain the column {1}.].format, parameter[name[dataset_variable_name], name[column_name_error_message_name]]]
if compare[name[column_name] <ast.NotIn object at 0x7da2590d7190> call[name[dataset].column_names, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b1f09b70> | keyword[def] identifier[_raise_error_if_column_exists] ( identifier[dataset] , identifier[column_name] = literal[string] ,
identifier[dataset_variable_name] = literal[string] ,
identifier[column_name_error_message_name] = literal[string] ):
literal[string]
identifier[err_msg] = literal[string] . identifier[format] (
identifier[dataset_variable_name] ,
identifier[column_name_error_message_name] )
keyword[if] identifier[column_name] keyword[not] keyword[in] identifier[dataset] . identifier[column_names] ():
keyword[raise] identifier[ToolkitError] ( identifier[str] ( identifier[err_msg] )) | def _raise_error_if_column_exists(dataset, column_name='dataset', dataset_variable_name='dataset', column_name_error_message_name='column_name'):
"""
    Check that a column exists in an SFrame, raising a ToolkitError with a descriptive message if it does not.
"""
err_msg = 'The SFrame {0} must contain the column {1}.'.format(dataset_variable_name, column_name_error_message_name)
if column_name not in dataset.column_names():
raise ToolkitError(str(err_msg)) # depends on [control=['if'], data=[]] |
def pbkdf2_bin(
data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None
):
"""Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` times and produces a
key of `keylen` bytes. By default, SHA-256 is used as hash function;
a different hashlib `hashfunc` can be provided.
.. versionadded:: 0.9
:param data: the data to derive.
:param salt: the salt for the derivation.
:param iterations: the number of iterations.
:param keylen: the length of the resulting key. If not provided
the digest size will be used.
:param hashfunc: the hash function to use. This can either be the
string name of a known hash function or a function
from the hashlib module. Defaults to sha256.
"""
if not hashfunc:
hashfunc = "sha256"
data = to_bytes(data)
salt = to_bytes(salt)
if callable(hashfunc):
_test_hash = hashfunc()
hash_name = getattr(_test_hash, "name", None)
else:
hash_name = hashfunc
return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen) | def function[pbkdf2_bin, parameter[data, salt, iterations, keylen, hashfunc]]:
constant[Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` times and produces a
key of `keylen` bytes. By default, SHA-256 is used as hash function;
a different hashlib `hashfunc` can be provided.
.. versionadded:: 0.9
:param data: the data to derive.
:param salt: the salt for the derivation.
:param iterations: the number of iterations.
:param keylen: the length of the resulting key. If not provided
the digest size will be used.
:param hashfunc: the hash function to use. This can either be the
string name of a known hash function or a function
from the hashlib module. Defaults to sha256.
]
if <ast.UnaryOp object at 0x7da18f58fdf0> begin[:]
variable[hashfunc] assign[=] constant[sha256]
variable[data] assign[=] call[name[to_bytes], parameter[name[data]]]
variable[salt] assign[=] call[name[to_bytes], parameter[name[salt]]]
if call[name[callable], parameter[name[hashfunc]]] begin[:]
variable[_test_hash] assign[=] call[name[hashfunc], parameter[]]
variable[hash_name] assign[=] call[name[getattr], parameter[name[_test_hash], constant[name], constant[None]]]
return[call[name[hashlib].pbkdf2_hmac, parameter[name[hash_name], name[data], name[salt], name[iterations], name[keylen]]]] | keyword[def] identifier[pbkdf2_bin] (
identifier[data] , identifier[salt] , identifier[iterations] = identifier[DEFAULT_PBKDF2_ITERATIONS] , identifier[keylen] = keyword[None] , identifier[hashfunc] = keyword[None]
):
literal[string]
keyword[if] keyword[not] identifier[hashfunc] :
identifier[hashfunc] = literal[string]
identifier[data] = identifier[to_bytes] ( identifier[data] )
identifier[salt] = identifier[to_bytes] ( identifier[salt] )
keyword[if] identifier[callable] ( identifier[hashfunc] ):
identifier[_test_hash] = identifier[hashfunc] ()
identifier[hash_name] = identifier[getattr] ( identifier[_test_hash] , literal[string] , keyword[None] )
keyword[else] :
identifier[hash_name] = identifier[hashfunc]
keyword[return] identifier[hashlib] . identifier[pbkdf2_hmac] ( identifier[hash_name] , identifier[data] , identifier[salt] , identifier[iterations] , identifier[keylen] ) | def pbkdf2_bin(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None):
"""Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` times and produces a
key of `keylen` bytes. By default, SHA-256 is used as hash function;
a different hashlib `hashfunc` can be provided.
.. versionadded:: 0.9
:param data: the data to derive.
:param salt: the salt for the derivation.
:param iterations: the number of iterations.
:param keylen: the length of the resulting key. If not provided
the digest size will be used.
:param hashfunc: the hash function to use. This can either be the
string name of a known hash function or a function
from the hashlib module. Defaults to sha256.
"""
if not hashfunc:
hashfunc = 'sha256' # depends on [control=['if'], data=[]]
data = to_bytes(data)
salt = to_bytes(salt)
if callable(hashfunc):
_test_hash = hashfunc()
hash_name = getattr(_test_hash, 'name', None) # depends on [control=['if'], data=[]]
else:
hash_name = hashfunc
return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen) |
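Both hashfunc forms in one sketch; to_bytes and the iteration default are assumed to come from the same module as the function:

import hashlib

d1 = pbkdf2_bin('secret', 'salt')                          # hash name defaults to sha256
d2 = pbkdf2_bin('secret', 'salt', hashfunc=hashlib.sha512)
assert len(d2) == hashlib.sha512().digest_size             # keylen=None -> digest size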
def create_migration(self, app, fixture_path):
"""
Create a data migration for app that uses fixture_path.
"""
self.monkey_patch_migration_template(app, fixture_path)
out = StringIO()
management.call_command('makemigrations', app.label, empty=True, stdout=out)
self.restore_migration_template()
self.stdout.write(out.getvalue()) | def function[create_migration, parameter[self, app, fixture_path]]:
constant[
Create a data migration for app that uses fixture_path.
]
call[name[self].monkey_patch_migration_template, parameter[name[app], name[fixture_path]]]
variable[out] assign[=] call[name[StringIO], parameter[]]
call[name[management].call_command, parameter[constant[makemigrations], name[app].label]]
call[name[self].restore_migration_template, parameter[]]
call[name[self].stdout.write, parameter[call[name[out].getvalue, parameter[]]]] | keyword[def] identifier[create_migration] ( identifier[self] , identifier[app] , identifier[fixture_path] ):
literal[string]
identifier[self] . identifier[monkey_patch_migration_template] ( identifier[app] , identifier[fixture_path] )
identifier[out] = identifier[StringIO] ()
identifier[management] . identifier[call_command] ( literal[string] , identifier[app] . identifier[label] , identifier[empty] = keyword[True] , identifier[stdout] = identifier[out] )
identifier[self] . identifier[restore_migration_template] ()
identifier[self] . identifier[stdout] . identifier[write] ( identifier[out] . identifier[getvalue] ()) | def create_migration(self, app, fixture_path):
"""
Create a data migration for app that uses fixture_path.
"""
self.monkey_patch_migration_template(app, fixture_path)
out = StringIO()
management.call_command('makemigrations', app.label, empty=True, stdout=out)
self.restore_migration_template()
self.stdout.write(out.getvalue()) |
def _extract_ld_data(data, data_format=None, **kwargs):
"""Extract the given :attr:`data` into a
:class:`~.ExtractedLinkedDataResult` with the resulting data
stripped of any Linked Data specifics. Any missing Linked Data
properties are returned as ``None`` in the resulting
:class:`~.ExtractLinkedDataResult`.
Does not modify the given :attr:`data`.
"""
if not data_format:
data_format = _get_format_from_data(data)
extract_ld_data_fn = _data_format_resolver(data_format, {
'jsonld': _extract_ld_data_from_jsonld,
'json': _extract_ld_data_from_json,
'ipld': _extract_ld_data_from_ipld,
})
return extract_ld_data_fn(data, **kwargs) | def function[_extract_ld_data, parameter[data, data_format]]:
constant[Extract the given :attr:`data` into a
:class:`~.ExtractedLinkedDataResult` with the resulting data
stripped of any Linked Data specifics. Any missing Linked Data
properties are returned as ``None`` in the resulting
:class:`~.ExtractLinkedDataResult`.
Does not modify the given :attr:`data`.
]
if <ast.UnaryOp object at 0x7da18f810790> begin[:]
variable[data_format] assign[=] call[name[_get_format_from_data], parameter[name[data]]]
variable[extract_ld_data_fn] assign[=] call[name[_data_format_resolver], parameter[name[data_format], dictionary[[<ast.Constant object at 0x7da18dc07100>, <ast.Constant object at 0x7da18dc041f0>, <ast.Constant object at 0x7da18dc05750>], [<ast.Name object at 0x7da18dc048b0>, <ast.Name object at 0x7da18dc04b20>, <ast.Name object at 0x7da18dc05c30>]]]]
return[call[name[extract_ld_data_fn], parameter[name[data]]]] | keyword[def] identifier[_extract_ld_data] ( identifier[data] , identifier[data_format] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[data_format] :
identifier[data_format] = identifier[_get_format_from_data] ( identifier[data] )
identifier[extract_ld_data_fn] = identifier[_data_format_resolver] ( identifier[data_format] ,{
literal[string] : identifier[_extract_ld_data_from_jsonld] ,
literal[string] : identifier[_extract_ld_data_from_json] ,
literal[string] : identifier[_extract_ld_data_from_ipld] ,
})
keyword[return] identifier[extract_ld_data_fn] ( identifier[data] ,** identifier[kwargs] ) | def _extract_ld_data(data, data_format=None, **kwargs):
"""Extract the given :attr:`data` into a
:class:`~.ExtractedLinkedDataResult` with the resulting data
stripped of any Linked Data specifics. Any missing Linked Data
properties are returned as ``None`` in the resulting
:class:`~.ExtractLinkedDataResult`.
Does not modify the given :attr:`data`.
"""
if not data_format:
data_format = _get_format_from_data(data) # depends on [control=['if'], data=[]]
extract_ld_data_fn = _data_format_resolver(data_format, {'jsonld': _extract_ld_data_from_jsonld, 'json': _extract_ld_data_from_json, 'ipld': _extract_ld_data_from_ipld})
return extract_ld_data_fn(data, **kwargs) |
def get_declared_items(self):
""" Override to do it manually
"""
for k, v in super(AndroidListView, self).get_declared_items():
if k == 'layout':
yield k, v
break | def function[get_declared_items, parameter[self]]:
constant[ Override to do it manually
]
for taget[tuple[[<ast.Name object at 0x7da1b1b9dff0>, <ast.Name object at 0x7da1b1b9ee90>]]] in starred[call[call[name[super], parameter[name[AndroidListView], name[self]]].get_declared_items, parameter[]]] begin[:]
if compare[name[k] equal[==] constant[layout]] begin[:]
<ast.Yield object at 0x7da1b1b9e560>
break | keyword[def] identifier[get_declared_items] ( identifier[self] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[super] ( identifier[AndroidListView] , identifier[self] ). identifier[get_declared_items] ():
keyword[if] identifier[k] == literal[string] :
keyword[yield] identifier[k] , identifier[v]
keyword[break] | def get_declared_items(self):
""" Override to do it manually
"""
for (k, v) in super(AndroidListView, self).get_declared_items():
if k == 'layout':
yield (k, v)
break # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]] |
def _2ndderiv_xyz(self,x,y,z,i,j):
"""General 2nd derivative of the potential as a function of (x,y,z)
in the aligned coordinate frame"""
return 4.*numpy.pi*self._b*self._c\
*_2ndDerivInt(x,y,z,
lambda m: self._mdens(m),
lambda m: self._mdens_deriv(m),
self._b2,self._c2,i,j,glx=self._glx,glw=self._glw) | def function[_2ndderiv_xyz, parameter[self, x, y, z, i, j]]:
constant[General 2nd derivative of the potential as a function of (x,y,z)
in the aligned coordinate frame]
return[binary_operation[binary_operation[binary_operation[binary_operation[constant[4.0] * name[numpy].pi] * name[self]._b] * name[self]._c] * call[name[_2ndDerivInt], parameter[name[x], name[y], name[z], <ast.Lambda object at 0x7da1b0e8f670>, <ast.Lambda object at 0x7da1b0e8eef0>, name[self]._b2, name[self]._c2, name[i], name[j]]]]] | keyword[def] identifier[_2ndderiv_xyz] ( identifier[self] , identifier[x] , identifier[y] , identifier[z] , identifier[i] , identifier[j] ):
literal[string]
keyword[return] literal[int] * identifier[numpy] . identifier[pi] * identifier[self] . identifier[_b] * identifier[self] . identifier[_c] * identifier[_2ndDerivInt] ( identifier[x] , identifier[y] , identifier[z] ,
keyword[lambda] identifier[m] : identifier[self] . identifier[_mdens] ( identifier[m] ),
keyword[lambda] identifier[m] : identifier[self] . identifier[_mdens_deriv] ( identifier[m] ),
identifier[self] . identifier[_b2] , identifier[self] . identifier[_c2] , identifier[i] , identifier[j] , identifier[glx] = identifier[self] . identifier[_glx] , identifier[glw] = identifier[self] . identifier[_glw] ) | def _2ndderiv_xyz(self, x, y, z, i, j):
"""General 2nd derivative of the potential as a function of (x,y,z)
in the aligned coordinate frame"""
return 4.0 * numpy.pi * self._b * self._c * _2ndDerivInt(x, y, z, lambda m: self._mdens(m), lambda m: self._mdens_deriv(m), self._b2, self._c2, i, j, glx=self._glx, glw=self._glw) |
def find_elements_by_partial_link_text(self, link_text):
"""
Finds elements by a partial match of their link text.
:Args:
- link_text: The text of the element to partial match on.
:Returns:
- list of webelement - a list with elements if any was found. an
empty list if not
:Usage:
::
elements = driver.find_elements_by_partial_link_text('Sign')
"""
return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text) | def function[find_elements_by_partial_link_text, parameter[self, link_text]]:
constant[
Finds elements by a partial match of their link text.
:Args:
- link_text: The text of the element to partial match on.
:Returns:
- list of webelement - a list with elements if any was found. an
empty list if not
:Usage:
::
elements = driver.find_elements_by_partial_link_text('Sign')
]
return[call[name[self].find_elements, parameter[]]] | keyword[def] identifier[find_elements_by_partial_link_text] ( identifier[self] , identifier[link_text] ):
literal[string]
keyword[return] identifier[self] . identifier[find_elements] ( identifier[by] = identifier[By] . identifier[PARTIAL_LINK_TEXT] , identifier[value] = identifier[link_text] ) | def find_elements_by_partial_link_text(self, link_text):
"""
Finds elements by a partial match of their link text.
:Args:
- link_text: The text of the element to partial match on.
:Returns:
- list of webelement - a list with elements if any was found. an
empty list if not
:Usage:
::
elements = driver.find_elements_by_partial_link_text('Sign')
"""
return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text) |
def containerIsRunning(name_or_id):
'''Check if container with the given name or ID (str) is running. No side
effects. Idempotent. Returns True if running, False if not.'''
require_str("name_or_id", name_or_id)
try:
container = getContainer(name_or_id)
# Refer to the latest status list here: https://docs.docker.com/engine/
# api/v1.33/#operation/ContainerList
if container:
if container.status == 'created':
return False
elif container.status == 'restarting':
return True
elif container.status == 'running':
return True
elif container.status == 'removing':
return False
elif container.status == 'paused':
return False
elif container.status == 'exited':
return False
elif container.status == 'dead':
return False
else:
return False
except NotFound as exc:
return False
return False | def function[containerIsRunning, parameter[name_or_id]]:
constant[Check if container with the given name or ID (str) is running. No side
effects. Idempotent. Returns True if running, False if not.]
call[name[require_str], parameter[constant[name_or_id], name[name_or_id]]]
<ast.Try object at 0x7da18f09f4f0>
return[constant[False]] | keyword[def] identifier[containerIsRunning] ( identifier[name_or_id] ):
literal[string]
identifier[require_str] ( literal[string] , identifier[name_or_id] )
keyword[try] :
identifier[container] = identifier[getContainer] ( identifier[name_or_id] )
keyword[if] identifier[container] :
keyword[if] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[True]
keyword[elif] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[True]
keyword[elif] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[False]
keyword[elif] identifier[container] . identifier[status] == literal[string] :
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[False]
keyword[except] identifier[NotFound] keyword[as] identifier[exc] :
keyword[return] keyword[False]
keyword[return] keyword[False] | def containerIsRunning(name_or_id):
"""Check if container with the given name or ID (str) is running. No side
effects. Idempotent. Returns True if running, False if not."""
require_str('name_or_id', name_or_id)
try:
container = getContainer(name_or_id)
# Refer to the latest status list here: https://docs.docker.com/engine/
# api/v1.33/#operation/ContainerList
if container:
if container.status == 'created':
return False # depends on [control=['if'], data=[]]
elif container.status == 'restarting':
return True # depends on [control=['if'], data=[]]
elif container.status == 'running':
return True # depends on [control=['if'], data=[]]
elif container.status == 'removing':
return False # depends on [control=['if'], data=[]]
elif container.status == 'paused':
return False # depends on [control=['if'], data=[]]
elif container.status == 'exited':
return False # depends on [control=['if'], data=[]]
elif container.status == 'dead':
return False # depends on [control=['if'], data=[]]
else:
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except NotFound as exc:
return False # depends on [control=['except'], data=[]]
return False |
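For comparison, the docker SDK can answer the same question directly; a sketch assuming the docker package and a local daemon:

import docker

client = docker.from_env()
# containers.list() returns only running containers by default.
running = any(c.name == 'redis-cache' for c in client.containers.list())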
def next(self):
"""Return the next available message
Blocks indefinitely unless consumer_timeout_ms > 0
Returns:
a single KafkaMessage from the message iterator
Raises:
ConsumerTimeout after consumer_timeout_ms and no message
Note:
This is also the method called internally during iteration
"""
self._set_consumer_timeout_start()
while True:
try:
return six.next(self._get_message_iterator())
# Handle batch completion
except StopIteration:
self._reset_message_iterator()
self._check_consumer_timeout() | def function[next, parameter[self]]:
constant[Return the next available message
Blocks indefinitely unless consumer_timeout_ms > 0
Returns:
a single KafkaMessage from the message iterator
Raises:
ConsumerTimeout after consumer_timeout_ms and no message
Note:
This is also the method called internally during iteration
]
call[name[self]._set_consumer_timeout_start, parameter[]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b19b5bd0>
call[name[self]._check_consumer_timeout, parameter[]] | keyword[def] identifier[next] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_set_consumer_timeout_start] ()
keyword[while] keyword[True] :
keyword[try] :
keyword[return] identifier[six] . identifier[next] ( identifier[self] . identifier[_get_message_iterator] ())
keyword[except] identifier[StopIteration] :
identifier[self] . identifier[_reset_message_iterator] ()
identifier[self] . identifier[_check_consumer_timeout] () | def next(self):
"""Return the next available message
Blocks indefinitely unless consumer_timeout_ms > 0
Returns:
a single KafkaMessage from the message iterator
Raises:
ConsumerTimeout after consumer_timeout_ms and no message
Note:
This is also the method called internally during iteration
"""
self._set_consumer_timeout_start()
while True:
try:
return six.next(self._get_message_iterator()) # depends on [control=['try'], data=[]]
# Handle batch completion
except StopIteration:
self._reset_message_iterator() # depends on [control=['except'], data=[]]
self._check_consumer_timeout() # depends on [control=['while'], data=[]] |
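Because next() is also the iteration hook, the usual consumption pattern is a for loop; consumer, process, and the ConsumerTimeout import are assumptions here:

try:
    for message in consumer:     # each step calls next() above
        process(message)         # hypothetical application callback
except ConsumerTimeout:
    pass                         # no message arrived within consumer_timeout_ms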
def load_field_config(self, file_id):
"""
Loads the configuration fields file for the id.
:param file_id: the id for the field
:return: the fields configuration
"""
if file_id not in self._field_configs:
self._field_configs[file_id] = self._reader.read_yaml_file(
'field_config_%s.yml' % file_id)
return self._field_configs[file_id] | def function[load_field_config, parameter[self, file_id]]:
constant[
Loads the configuration fields file for the id.
:param file_id: the id for the field
:return: the fields configuration
]
if compare[name[file_id] <ast.NotIn object at 0x7da2590d7190> name[self]._field_configs] begin[:]
call[name[self]._field_configs][name[file_id]] assign[=] call[name[self]._reader.read_yaml_file, parameter[binary_operation[constant[field_config_%s.yml] <ast.Mod object at 0x7da2590d6920> name[file_id]]]]
return[call[name[self]._field_configs][name[file_id]]] | keyword[def] identifier[load_field_config] ( identifier[self] , identifier[file_id] ):
literal[string]
keyword[if] identifier[file_id] keyword[not] keyword[in] identifier[self] . identifier[_field_configs] :
identifier[self] . identifier[_field_configs] [ identifier[file_id] ]= identifier[self] . identifier[_reader] . identifier[read_yaml_file] (
literal[string] % identifier[file_id] )
keyword[return] identifier[self] . identifier[_field_configs] [ identifier[file_id] ] | def load_field_config(self, file_id):
"""
Loads the configuration fields file for the id.
:param file_id: the id for the field
:return: the fields configuration
"""
if file_id not in self._field_configs:
self._field_configs[file_id] = self._reader.read_yaml_file('field_config_%s.yml' % file_id) # depends on [control=['if'], data=['file_id']]
return self._field_configs[file_id] |
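The method memoizes per file id; a sketch of the cache behaviour (loader is an invented instance of the enclosing class):

cfg = loader.load_field_config('address')     # reads field_config_address.yml once
again = loader.load_field_config('address')   # second call hits _field_configs
assert cfg is again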
def write(self, buf):
"""Writes data to the pyboard over the serial port."""
self.check_pyb()
try:
return self.pyb.serial.write(buf)
except (serial.serialutil.SerialException, BrokenPipeError, TypeError):
# Write failed - assume that we got disconnected
self.close()
raise DeviceError('{} closed'.format(self.dev_name_short)) | def function[write, parameter[self, buf]]:
constant[Writes data to the pyboard over the serial port.]
call[name[self].check_pyb, parameter[]]
<ast.Try object at 0x7da1b17351e0> | keyword[def] identifier[write] ( identifier[self] , identifier[buf] ):
literal[string]
identifier[self] . identifier[check_pyb] ()
keyword[try] :
keyword[return] identifier[self] . identifier[pyb] . identifier[serial] . identifier[write] ( identifier[buf] )
keyword[except] ( identifier[serial] . identifier[serialutil] . identifier[SerialException] , identifier[BrokenPipeError] , identifier[TypeError] ):
identifier[self] . identifier[close] ()
keyword[raise] identifier[DeviceError] ( literal[string] . identifier[format] ( identifier[self] . identifier[dev_name_short] )) | def write(self, buf):
"""Writes data to the pyboard over the serial port."""
self.check_pyb()
try:
return self.pyb.serial.write(buf) # depends on [control=['try'], data=[]]
except (serial.serialutil.SerialException, BrokenPipeError, TypeError):
# Write failed - assume that we got disconnected
self.close()
raise DeviceError('{} closed'.format(self.dev_name_short)) # depends on [control=['except'], data=[]] |
def convert_activation(node, **kwargs):
"""Map MXNet's Activation operator attributes to onnx's Tanh/Relu operator
and return the created node.
"""
name, input_nodes, attrs = get_inputs(node, kwargs)
act_type = attrs["act_type"]
    # Creating a dictionary here; most names would map via mxnet_name.title(),
    # but softrelu maps to Softplus, so an explicit table is used.
act_types = {
"tanh": "Tanh",
"relu": "Relu",
"sigmoid": "Sigmoid",
"softrelu": "Softplus",
"softsign": "Softsign"
}
act_name = act_types.get(act_type)
if act_name:
node = onnx.helper.make_node(
act_name,
input_nodes,
[name],
name=name
)
else:
raise AttributeError(
"Activation %s not implemented or recognized in the converter" % act_type
)
return [node] | def function[convert_activation, parameter[node]]:
constant[Map MXNet's Activation operator attributes to onnx's Tanh/Relu operator
and return the created node.
]
<ast.Tuple object at 0x7da1b1e401f0> assign[=] call[name[get_inputs], parameter[name[node], name[kwargs]]]
variable[act_type] assign[=] call[name[attrs]][constant[act_type]]
variable[act_types] assign[=] dictionary[[<ast.Constant object at 0x7da1b1e43c70>, <ast.Constant object at 0x7da1b1e41480>, <ast.Constant object at 0x7da1b1e405b0>, <ast.Constant object at 0x7da1b1e41210>, <ast.Constant object at 0x7da1b1e40610>], [<ast.Constant object at 0x7da1b1e40700>, <ast.Constant object at 0x7da1b1e40550>, <ast.Constant object at 0x7da1b1e40040>, <ast.Constant object at 0x7da1b1e40e80>, <ast.Constant object at 0x7da1b1e40b80>]]
variable[act_name] assign[=] call[name[act_types].get, parameter[name[act_type]]]
if name[act_name] begin[:]
variable[node] assign[=] call[name[onnx].helper.make_node, parameter[name[act_name], name[input_nodes], list[[<ast.Name object at 0x7da1b208a650>]]]]
return[list[[<ast.Name object at 0x7da1b20883a0>]]] | keyword[def] identifier[convert_activation] ( identifier[node] ,** identifier[kwargs] ):
literal[string]
identifier[name] , identifier[input_nodes] , identifier[attrs] = identifier[get_inputs] ( identifier[node] , identifier[kwargs] )
identifier[act_type] = identifier[attrs] [ literal[string] ]
identifier[act_types] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[act_name] = identifier[act_types] . identifier[get] ( identifier[act_type] )
keyword[if] identifier[act_name] :
identifier[node] = identifier[onnx] . identifier[helper] . identifier[make_node] (
identifier[act_name] ,
identifier[input_nodes] ,
[ identifier[name] ],
identifier[name] = identifier[name]
)
keyword[else] :
keyword[raise] identifier[AttributeError] (
literal[string] % identifier[act_type]
)
keyword[return] [ identifier[node] ] | def convert_activation(node, **kwargs):
"""Map MXNet's Activation operator attributes to onnx's Tanh/Relu operator
and return the created node.
"""
(name, input_nodes, attrs) = get_inputs(node, kwargs)
act_type = attrs['act_type']
    # Creating a dictionary here; most names would map via mxnet_name.title(),
    # but softrelu maps to Softplus, so an explicit table is used.
act_types = {'tanh': 'Tanh', 'relu': 'Relu', 'sigmoid': 'Sigmoid', 'softrelu': 'Softplus', 'softsign': 'Softsign'}
act_name = act_types.get(act_type)
if act_name:
node = onnx.helper.make_node(act_name, input_nodes, [name], name=name) # depends on [control=['if'], data=[]]
else:
raise AttributeError('Activation %s not implemented or recognized in the converter' % act_type)
return [node] |
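The lookup table is the whole mapping contract; a sketch of just that contract, independent of the MXNet node plumbing:

act_types = {'tanh': 'Tanh', 'relu': 'Relu', 'sigmoid': 'Sigmoid',
             'softrelu': 'Softplus', 'softsign': 'Softsign'}
assert act_types.get('softrelu') == 'Softplus'   # the non-titlecase case
assert act_types.get('gelu') is None             # unmapped -> AttributeError in the converter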
def add_bundle(self, prov_bundle, identifier):
"""
Verbose method of adding a bundle.
Can also be done as:
>>> api = Api()
>>> document = api.document.get(148)
>>> document.bundles['identifier'] = prov_bundle
:param prov_bundle: The bundle to be added
:param str identifier: URI or QName for this bundle
:type prov_bundle: :py:class:`prov.model.ProvDocument` or :py:class:`str`
"""
if self.abstract:
raise AbstractDocumentException()
self._api.add_bundle(self.id, prov_bundle.serialize(), identifier) | def function[add_bundle, parameter[self, prov_bundle, identifier]]:
constant[
Verbose method of adding a bundle.
Can also be done as:
>>> api = Api()
>>> document = api.document.get(148)
>>> document.bundles['identifier'] = prov_bundle
:param prov_bundle: The bundle to be added
:param str identifier: URI or QName for this bundle
:type prov_bundle: :py:class:`prov.model.ProvDocument` or :py:class:`str`
]
if name[self].abstract begin[:]
<ast.Raise object at 0x7da20e9b14e0>
call[name[self]._api.add_bundle, parameter[name[self].id, call[name[prov_bundle].serialize, parameter[]], name[identifier]]] | keyword[def] identifier[add_bundle] ( identifier[self] , identifier[prov_bundle] , identifier[identifier] ):
literal[string]
keyword[if] identifier[self] . identifier[abstract] :
keyword[raise] identifier[AbstractDocumentException] ()
identifier[self] . identifier[_api] . identifier[add_bundle] ( identifier[self] . identifier[id] , identifier[prov_bundle] . identifier[serialize] (), identifier[identifier] ) | def add_bundle(self, prov_bundle, identifier):
"""
Verbose method of adding a bundle.
Can also be done as:
>>> api = Api()
>>> document = api.document.get(148)
>>> document.bundles['identifier'] = prov_bundle
:param prov_bundle: The bundle to be added
:param str identifier: URI or QName for this bundle
:type prov_bundle: :py:class:`prov.model.ProvDocument` or :py:class:`str`
"""
if self.abstract:
raise AbstractDocumentException() # depends on [control=['if'], data=[]]
self._api.add_bundle(self.id, prov_bundle.serialize(), identifier) |
def _store_variable(self, j, key, m, value):
"""Store a copy of the variable in the history
"""
if hasattr(value, 'copy'):
v = value.copy()
else:
v = value
self.history[j][key][m].append(v) | def function[_store_variable, parameter[self, j, key, m, value]]:
constant[Store a copy of the variable in the history
]
if call[name[hasattr], parameter[name[value], constant[copy]]] begin[:]
variable[v] assign[=] call[name[value].copy, parameter[]]
call[call[call[call[name[self].history][name[j]]][name[key]]][name[m]].append, parameter[name[v]]] | keyword[def] identifier[_store_variable] ( identifier[self] , identifier[j] , identifier[key] , identifier[m] , identifier[value] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
identifier[v] = identifier[value] . identifier[copy] ()
keyword[else] :
identifier[v] = identifier[value]
identifier[self] . identifier[history] [ identifier[j] ][ identifier[key] ][ identifier[m] ]. identifier[append] ( identifier[v] ) | def _store_variable(self, j, key, m, value):
"""Store a copy of the variable in the history
"""
if hasattr(value, 'copy'):
v = value.copy() # depends on [control=['if'], data=[]]
else:
v = value
self.history[j][key][m].append(v) |
def updateSocialTone(user, socialTone, maintainHistory):
"""
updateSocialTone updates the user with the social tones interpreted based on
the specified thresholds
@param user a json object representing user information (tone) to be used in
conversing with the Conversation Service
    @param socialTone a json object containing the social tones in the payload
    returned by the Tone Analyzer
    @param maintainHistory when True, the interpreted tone objects are also
    appended so a history can be kept
    """
currentSocial = []
currentSocialObject = []
# Process each social tone and determine if it is high or low
for tone in socialTone['tones']:
if tone['score'] >= SOCIAL_HIGH_SCORE_THRESHOLD:
currentSocial.append(tone['tone_name'].lower() + '_high')
currentSocialObject.append({
'tone_name': tone['tone_name'].lower(),
'score': tone['score'],
'interpretation': 'likely high'
})
elif tone['score'] <= SOCIAL_LOW_SCORE_THRESHOLD:
currentSocial.append(tone['tone_name'].lower() + '_low')
currentSocialObject.append({
'tone_name': tone['tone_name'].lower(),
'score': tone['score'],
'interpretation': 'likely low'
})
else:
currentSocialObject.append({
'tone_name': tone['tone_name'].lower(),
'score': tone['score'],
'interpretation': 'likely medium'
})
# update user social tone
user['tone']['social']['current'] = currentSocial
if maintainHistory:
if not user['tone']['social']['current']:
user['tone']['social']['current'] = []
user['tone']['social']['current'].append(currentSocialObject) | def function[updateSocialTone, parameter[user, socialTone, maintainHistory]]:
constant[
updateSocialTone updates the user with the social tones interpreted based on
the specified thresholds
@param user a json object representing user information (tone) to be used in
conversing with the Conversation Service
@param socialTone a json object containing the social tones in the payload
returned by the Tone Analyzer
]
variable[currentSocial] assign[=] list[[]]
variable[currentSocialObject] assign[=] list[[]]
for taget[name[tone]] in starred[call[name[socialTone]][constant[tones]]] begin[:]
if compare[call[name[tone]][constant[score]] greater_or_equal[>=] name[SOCIAL_HIGH_SCORE_THRESHOLD]] begin[:]
call[name[currentSocial].append, parameter[binary_operation[call[call[name[tone]][constant[tone_name]].lower, parameter[]] + constant[_high]]]]
call[name[currentSocialObject].append, parameter[dictionary[[<ast.Constant object at 0x7da18fe930a0>, <ast.Constant object at 0x7da18fe903d0>, <ast.Constant object at 0x7da18fe92350>], [<ast.Call object at 0x7da18fe91450>, <ast.Subscript object at 0x7da18fe935e0>, <ast.Constant object at 0x7da18fe932b0>]]]]
call[call[call[name[user]][constant[tone]]][constant[social]]][constant[current]] assign[=] name[currentSocial]
if name[maintainHistory] begin[:]
if <ast.UnaryOp object at 0x7da1b2346c50> begin[:]
call[call[call[name[user]][constant[tone]]][constant[social]]][constant[current]] assign[=] list[[]]
call[call[call[call[name[user]][constant[tone]]][constant[social]]][constant[current]].append, parameter[name[currentSocialObject]]] | keyword[def] identifier[updateSocialTone] ( identifier[user] , identifier[socialTone] , identifier[maintainHistory] ):
literal[string]
identifier[currentSocial] =[]
identifier[currentSocialObject] =[]
keyword[for] identifier[tone] keyword[in] identifier[socialTone] [ literal[string] ]:
keyword[if] identifier[tone] [ literal[string] ]>= identifier[SOCIAL_HIGH_SCORE_THRESHOLD] :
identifier[currentSocial] . identifier[append] ( identifier[tone] [ literal[string] ]. identifier[lower] ()+ literal[string] )
identifier[currentSocialObject] . identifier[append] ({
literal[string] : identifier[tone] [ literal[string] ]. identifier[lower] (),
literal[string] : identifier[tone] [ literal[string] ],
literal[string] : literal[string]
})
keyword[elif] identifier[tone] [ literal[string] ]<= identifier[SOCIAL_LOW_SCORE_THRESHOLD] :
identifier[currentSocial] . identifier[append] ( identifier[tone] [ literal[string] ]. identifier[lower] ()+ literal[string] )
identifier[currentSocialObject] . identifier[append] ({
literal[string] : identifier[tone] [ literal[string] ]. identifier[lower] (),
literal[string] : identifier[tone] [ literal[string] ],
literal[string] : literal[string]
})
keyword[else] :
identifier[currentSocialObject] . identifier[append] ({
literal[string] : identifier[tone] [ literal[string] ]. identifier[lower] (),
literal[string] : identifier[tone] [ literal[string] ],
literal[string] : literal[string]
})
identifier[user] [ literal[string] ][ literal[string] ][ literal[string] ]= identifier[currentSocial]
keyword[if] identifier[maintainHistory] :
keyword[if] keyword[not] identifier[user] [ literal[string] ][ literal[string] ][ literal[string] ]:
identifier[user] [ literal[string] ][ literal[string] ][ literal[string] ]=[]
identifier[user] [ literal[string] ][ literal[string] ][ literal[string] ]. identifier[append] ( identifier[currentSocialObject] ) | def updateSocialTone(user, socialTone, maintainHistory):
"""
updateSocialTone updates the user with the social tones interpreted based on
the specified thresholds
@param user a json object representing user information (tone) to be used in
conversing with the Conversation Service
@param socialTone a json object containing the social tones in the payload
returned by the Tone Analyzer
"""
currentSocial = []
currentSocialObject = []
# Process each social tone and determine if it is high or low
for tone in socialTone['tones']:
if tone['score'] >= SOCIAL_HIGH_SCORE_THRESHOLD:
currentSocial.append(tone['tone_name'].lower() + '_high')
currentSocialObject.append({'tone_name': tone['tone_name'].lower(), 'score': tone['score'], 'interpretation': 'likely high'}) # depends on [control=['if'], data=[]]
elif tone['score'] <= SOCIAL_LOW_SCORE_THRESHOLD:
currentSocial.append(tone['tone_name'].lower() + '_low')
currentSocialObject.append({'tone_name': tone['tone_name'].lower(), 'score': tone['score'], 'interpretation': 'likely low'}) # depends on [control=['if'], data=[]]
else:
currentSocialObject.append({'tone_name': tone['tone_name'].lower(), 'score': tone['score'], 'interpretation': 'likely medium'}) # depends on [control=['for'], data=['tone']]
# update user social tone
user['tone']['social']['current'] = currentSocial
if maintainHistory:
if not user['tone']['social']['current']:
user['tone']['social']['current'] = [] # depends on [control=['if'], data=[]]
user['tone']['social']['current'].append(currentSocialObject) # depends on [control=['if'], data=[]] |
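
An illustrative run of the thresholding logic above. The real SOCIAL_HIGH_SCORE_THRESHOLD and SOCIAL_LOW_SCORE_THRESHOLD constants are defined elsewhere in the module; 0.75 and 0.25 here are assumed values chosen only for demonstration, as is the sample payload.

# Assumed illustrative thresholds; the actual module constants may differ.
SOCIAL_HIGH_SCORE_THRESHOLD = 0.75
SOCIAL_LOW_SCORE_THRESHOLD = 0.25

social_tone = {'tones': [
    {'tone_name': 'Openness', 'score': 0.91},
    {'tone_name': 'Agreeableness', 'score': 0.12},
    {'tone_name': 'Extraversion', 'score': 0.50},
]}
user = {'tone': {'social': {'current': None}}}
updateSocialTone(user, social_tone, maintainHistory=False)
print(user['tone']['social']['current'])
# ['openness_high', 'agreeableness_low'] -- medium scores get an object but no label
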
def _set_remap(self, v, load=False):
"""
Setter method for remap, mapped from YANG variable /cee_map/remap (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_remap is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remap() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=remap.remap, is_container='container', presence=False, yang_name="remap", rest_name="remap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u' Configure Class of Service (CoS) to be \n remapped', u'callpoint': u'qos_cee_remap', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-cee-map', defining_module='brocade-cee-map', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """remap must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=remap.remap, is_container='container', presence=False, yang_name="remap", rest_name="remap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u' Configure Class of Service (CoS) to be \n remapped', u'callpoint': u'qos_cee_remap', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-cee-map', defining_module='brocade-cee-map', yang_type='container', is_config=True)""",
})
self.__remap = t
if hasattr(self, '_set'):
self._set() | def function[_set_remap, parameter[self, v, load]]:
constant[
Setter method for remap, mapped from YANG variable /cee_map/remap (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_remap is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remap() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da207f00580>
name[self].__remap assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_remap] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[remap] . identifier[remap] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__remap] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_remap(self, v, load=False):
"""
Setter method for remap, mapped from YANG variable /cee_map/remap (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_remap is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_remap() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=remap.remap, is_container='container', presence=False, yang_name='remap', rest_name='remap', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u' Configure Class of Service (CoS) to be \n remapped', u'callpoint': u'qos_cee_remap', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-cee-map', defining_module='brocade-cee-map', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'remap must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=remap.remap, is_container=\'container\', presence=False, yang_name="remap", rest_name="remap", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\' Configure Class of Service (CoS) to be \n remapped\', u\'callpoint\': u\'qos_cee_remap\', u\'display-when\': u\'/vcsmode/vcs-mode = "true"\'}}, namespace=\'urn:brocade.com:mgmt:brocade-cee-map\', defining_module=\'brocade-cee-map\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__remap = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |

def url_for(self, operation, _external=True, **kwargs):
"""
Construct a URL for an operation against a resource.
:param kwargs: additional arguments for URL path expansion,
which are passed to flask.url_for.
In particular, _external=True produces an absolute URL.
"""
return url_for(self.endpoint_for(operation), _external=_external, **kwargs) | def function[url_for, parameter[self, operation, _external]]:
constant[
Construct a URL for an operation against a resource.
:param kwargs: additional arguments for URL path expansion,
which are passed to flask.url_for.
In particular, _external=True produces an absolute URL.
]
return[call[name[url_for], parameter[call[name[self].endpoint_for, parameter[name[operation]]]]]] | keyword[def] identifier[url_for] ( identifier[self] , identifier[operation] , identifier[_external] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[url_for] ( identifier[self] . identifier[endpoint_for] ( identifier[operation] ), identifier[_external] = identifier[_external] ,** identifier[kwargs] ) | def url_for(self, operation, _external=True, **kwargs):
"""
Construct a URL for an operation against a resource.
:param kwargs: additional arguments for URL path expansion,
which are passed to flask.url_for.
In particular, _external=True produces an absolute URL.
"""
return url_for(self.endpoint_for(operation), _external=_external, **kwargs) |
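
A sketch of the underlying flask.url_for behavior this wrapper delegates to. The route and endpoint names are made up for illustration; only the Flask calls themselves (app.route, test_request_context, url_for) are real API.

from flask import Flask, url_for

app = Flask(__name__)

@app.route('/resources/<int:resource_id>', endpoint='resource_retrieve')
def retrieve(resource_id):
    return 'ok'

with app.test_request_context(base_url='http://api.example.com'):
    print(url_for('resource_retrieve', resource_id=7))
    # /resources/7
    print(url_for('resource_retrieve', resource_id=7, _external=True))
    # http://api.example.com/resources/7
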
def convert_ini(config_dict):
"""Convert _config_dict_ into a list of INI formatted strings.
Args:
config_dict (dict): Configuration dictionary to be flattened.
Returns:
(list) Lines to be written to a file in the format of KEY1_KEY2=value.
"""
config_lines = []
for env, configs in sorted(config_dict.items()):
for resource, app_properties in sorted(configs.items()):
try:
for app_property, value in sorted(app_properties.items()):
variable = '{env}_{resource}_{app_property}'.format(
env=env, resource=resource, app_property=app_property).upper()
if isinstance(value, (dict, DeepChainMap)):
safe_value = "'{0}'".format(json.dumps(dict(value)))
else:
safe_value = json.dumps(value)
line = "{variable}={value}".format(variable=variable, value=safe_value)
LOG.debug('INI line: %s', line)
config_lines.append(line)
except AttributeError:
resource = resource.upper()
app_properties = "'{}'".format(json.dumps(app_properties))
line = '{0}={1}'.format(resource, app_properties)
LOG.debug('INI line: %s', line)
config_lines.append(line)
return config_lines | def function[convert_ini, parameter[config_dict]]:
constant[Convert _config_dict_ into a list of INI formatted strings.
Args:
config_dict (dict): Configuration dictionary to be flattened.
Returns:
(list) Lines to be written to a file in the format of KEY1_KEY2=value.
]
variable[config_lines] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e6a10>, <ast.Name object at 0x7da20c6e7af0>]]] in starred[call[name[sorted], parameter[call[name[config_dict].items, parameter[]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18fe93be0>, <ast.Name object at 0x7da18fe90220>]]] in starred[call[name[sorted], parameter[call[name[configs].items, parameter[]]]]] begin[:]
<ast.Try object at 0x7da18fe90d30>
return[name[config_lines]] | keyword[def] identifier[convert_ini] ( identifier[config_dict] ):
literal[string]
identifier[config_lines] =[]
keyword[for] identifier[env] , identifier[configs] keyword[in] identifier[sorted] ( identifier[config_dict] . identifier[items] ()):
keyword[for] identifier[resource] , identifier[app_properties] keyword[in] identifier[sorted] ( identifier[configs] . identifier[items] ()):
keyword[try] :
keyword[for] identifier[app_property] , identifier[value] keyword[in] identifier[sorted] ( identifier[app_properties] . identifier[items] ()):
identifier[variable] = literal[string] . identifier[format] (
identifier[env] = identifier[env] , identifier[resource] = identifier[resource] , identifier[app_property] = identifier[app_property] ). identifier[upper] ()
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[dict] , identifier[DeepChainMap] )):
identifier[safe_value] = literal[string] . identifier[format] ( identifier[json] . identifier[dumps] ( identifier[dict] ( identifier[value] )))
keyword[else] :
identifier[safe_value] = identifier[json] . identifier[dumps] ( identifier[value] )
identifier[line] = literal[string] . identifier[format] ( identifier[variable] = identifier[variable] , identifier[value] = identifier[safe_value] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[line] )
identifier[config_lines] . identifier[append] ( identifier[line] )
keyword[except] identifier[AttributeError] :
identifier[resource] = identifier[resource] . identifier[upper] ()
identifier[app_properties] = literal[string] . identifier[format] ( identifier[json] . identifier[dumps] ( identifier[app_properties] ))
identifier[line] = literal[string] . identifier[format] ( identifier[resource] , identifier[app_properties] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[line] )
identifier[config_lines] . identifier[append] ( identifier[line] )
keyword[return] identifier[config_lines] | def convert_ini(config_dict):
"""Convert _config_dict_ into a list of INI formatted strings.
Args:
config_dict (dict): Configuration dictionary to be flattened.
Returns:
(list) Lines to be written to a file in the format of KEY1_KEY2=value.
"""
config_lines = []
for (env, configs) in sorted(config_dict.items()):
for (resource, app_properties) in sorted(configs.items()):
try:
for (app_property, value) in sorted(app_properties.items()):
variable = '{env}_{resource}_{app_property}'.format(env=env, resource=resource, app_property=app_property).upper()
if isinstance(value, (dict, DeepChainMap)):
safe_value = "'{0}'".format(json.dumps(dict(value))) # depends on [control=['if'], data=[]]
else:
safe_value = json.dumps(value)
line = '{variable}={value}'.format(variable=variable, value=safe_value)
LOG.debug('INI line: %s', line)
config_lines.append(line) # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
resource = resource.upper()
app_properties = "'{}'".format(json.dumps(app_properties))
line = '{0}={1}'.format(resource, app_properties)
LOG.debug('INI line: %s', line)
config_lines.append(line) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return config_lines |
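
A worked flattening example, assuming the surrounding module's json import and LOG logger are in place. Keys are uppercased and joined with underscores; scalar values pass through json.dumps, so booleans become lowercase true/false and strings keep their quotes.

config = {
    'dev': {
        'app': {'name': 'demo', 'debug': True},
        'datasource': {'url': 'jdbc:h2:mem:test'},
    },
}
for line in convert_ini(config):
    print(line)
# DEV_APP_DEBUG=true
# DEV_APP_NAME="demo"
# DEV_DATASOURCE_URL="jdbc:h2:mem:test"
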
def has_child_catalogs(self, catalog_id):
"""Tests if a catalog has any children.
arg: catalog_id (osid.id.Id): a ``catalog_id``
return: (boolean) - ``true`` if the ``catalog_id`` has children,
``false`` otherwise
raise: NotFound - ``catalog_id`` is not found
raise: NullArgument - ``catalog_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.has_child_bins
if self._catalog_session is not None:
return self._catalog_session.has_child_catalogs(catalog_id=catalog_id)
return self._hierarchy_session.has_children(id_=catalog_id) | def function[has_child_catalogs, parameter[self, catalog_id]]:
constant[Tests if a catalog has any children.
arg: catalog_id (osid.id.Id): a ``catalog_id``
return: (boolean) - ``true`` if the ``catalog_id`` has children,
``false`` otherwise
raise: NotFound - ``catalog_id`` is not found
raise: NullArgument - ``catalog_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.has_child_catalogs, parameter[]]]
return[call[name[self]._hierarchy_session.has_children, parameter[]]] | keyword[def] identifier[has_child_catalogs] ( identifier[self] , identifier[catalog_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[has_child_catalogs] ( identifier[catalog_id] = identifier[catalog_id] )
keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[has_children] ( identifier[id_] = identifier[catalog_id] ) | def has_child_catalogs(self, catalog_id):
"""Tests if a catalog has any children.
arg: catalog_id (osid.id.Id): a ``catalog_id``
return: (boolean) - ``true`` if the ``catalog_id`` has children,
``false`` otherwise
raise: NotFound - ``catalog_id`` is not found
raise: NullArgument - ``catalog_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.has_child_bins
if self._catalog_session is not None:
return self._catalog_session.has_child_catalogs(catalog_id=catalog_id) # depends on [control=['if'], data=[]]
return self._hierarchy_session.has_children(id_=catalog_id) |
def i2repr(self, pkt, x):
"""Convert internal value to a nice representation"""
if len(hex(self.i2m(pkt, x))) < 7: # short address
return hex(self.i2m(pkt, x))
else: # long address
x = "%016x" % self.i2m(pkt, x)
return ":".join(["%s%s" % (x[i], x[i + 1]) for i in range(0, len(x), 2)]) | def function[i2repr, parameter[self, pkt, x]]:
constant[Convert internal value to a nice representation]
if compare[call[name[len], parameter[call[name[hex], parameter[call[name[self].i2m, parameter[name[pkt], name[x]]]]]]] less[<] constant[7]] begin[:]
return[call[name[hex], parameter[call[name[self].i2m, parameter[name[pkt], name[x]]]]]] | keyword[def] identifier[i2repr] ( identifier[self] , identifier[pkt] , identifier[x] ):
literal[string]
keyword[if] identifier[len] ( identifier[hex] ( identifier[self] . identifier[i2m] ( identifier[pkt] , identifier[x] )))< literal[int] :
keyword[return] identifier[hex] ( identifier[self] . identifier[i2m] ( identifier[pkt] , identifier[x] ))
keyword[else] :
identifier[x] = literal[string] % identifier[self] . identifier[i2m] ( identifier[pkt] , identifier[x] )
keyword[return] literal[string] . identifier[join] ([ literal[string] %( identifier[x] [ identifier[i] ], identifier[x] [ identifier[i] + literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[x] ), literal[int] )]) | def i2repr(self, pkt, x):
"""Convert internal value to a nice representation"""
if len(hex(self.i2m(pkt, x))) < 7: # short address
return hex(self.i2m(pkt, x)) # depends on [control=['if'], data=[]]
else: # long address
x = '%016x' % self.i2m(pkt, x)
return ':'.join(['%s%s' % (x[i], x[i + 1]) for i in range(0, len(x), 2)]) |
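
A standalone illustration of the formatting rule above, with the i2m conversion stripped out: a 16-bit short address (hex string under 7 characters) prints as plain hex, while a 64-bit long address is zero-padded to 16 hex digits and joined as colon-separated byte pairs.

def format_addr(x):
    if len(hex(x)) < 7:          # e.g. hex(0xffff) == '0xffff', 6 chars
        return hex(x)
    s = "%016x" % x
    return ":".join(s[i:i + 2] for i in range(0, len(s), 2))

print(format_addr(0x1234))              # 0x1234
print(format_addr(0x0011223344556677))  # 00:11:22:33:44:55:66:77
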
def sunrise_sunset(moment, latitude, longitude):
r'''Calculates the times at which the sun is at sunset; sunrise; and
halfway between sunrise and sunset (transit).
Uses the Reda and Andreas (2004) model described in [1]_,
originally incorporated into the excellent
`pvlib library <https://github.com/pvlib/pvlib-python>`_
Parameters
----------
moment : datetime
Date for the calculation; needs to contain only the year, month, and
day, [-]
latitude : float
Latitude, between -90 and 90 [degrees]
longitude : float
Longitude, between -180 and 180, [degrees]
Returns
-------
sunrise : datetime
The time at the specified day when the sun rises **IN UTC**, [-]
sunset : datetime
The time at the specified day when the sun sets **IN UTC**, [-]
transit : datetime
The time at the specified day when the sun is at solar noon - halfway
between sunrise and sunset **IN UTC**, [-]
Examples
--------
>>> sunrise, sunset, transit = sunrise_sunset(datetime(2018, 4, 17),
... 51.0486, -114.07)
>>> sunrise
datetime.datetime(2018, 4, 17, 12, 36, 55, 782660)
>>> sunset
datetime.datetime(2018, 4, 18, 2, 34, 4, 249326)
>>> transit
datetime.datetime(2018, 4, 17, 19, 35, 46, 686265)
Notes
-----
This function takes on the order of 2 ms per calculation.
The reason the function cannot return the time corrected to the local
timezone is that the function does not know the timezone at the specified
lat/long.
References
----------
.. [1] Reda, Ibrahim, and Afshin Andreas. "Solar Position Algorithm for
Solar Radiation Applications." Solar Energy 76, no. 5 (January 1, 2004):
577-89. https://doi.org/10.1016/j.solener.2003.12.003.
'''
from fluids.optional import spa
delta_t = spa.calculate_deltat(moment.year, moment.month)
# Strip the part of the day
moment = datetime(moment.year, moment.month, moment.day)
import calendar
unixtime = calendar.timegm(moment.timetuple())
unixtime = unixtime - unixtime % (86400) # Remove the remainder of the value, rounding it to the day it is
transit, sunrise, sunset = spa.transit_sunrise_sunset(np.array([unixtime]), lat=latitude, lon=longitude, delta_t=delta_t, numthreads=1)
transit = datetime.utcfromtimestamp(float(transit))
sunrise = datetime.utcfromtimestamp(float(sunrise))
sunset = datetime.utcfromtimestamp(float(sunset))
return sunrise, sunset, transit | def function[sunrise_sunset, parameter[moment, latitude, longitude]]:
constant[Calculates the times at which the sun is at sunset; sunrise; and
halfway between sunrise and sunset (transit).
Uses the Reda and Andreas (2004) model described in [1]_,
originally incorporated into the excellent
`pvlib library <https://github.com/pvlib/pvlib-python>`_
Parameters
----------
moment : datetime
Date for the calculation; needs to contain only the year, month, and
day, [-]
latitude : float
Latitude, between -90 and 90 [degrees]
longitude : float
Longitude, between -180 and 180, [degrees]
Returns
-------
sunrise : datetime
The time at the specified day when the sun rises **IN UTC**, [-]
sunset : datetime
The time at the specified day when the sun sets **IN UTC**, [-]
transit : datetime
The time at the specified day when the sun is at solar noon - halfway
between sunrise and sunset **IN UTC**, [-]
Examples
--------
>>> sunrise, sunset, transit = sunrise_sunset(datetime(2018, 4, 17),
... 51.0486, -114.07)
>>> sunrise
datetime.datetime(2018, 4, 17, 12, 36, 55, 782660)
>>> sunset
datetime.datetime(2018, 4, 18, 2, 34, 4, 249326)
>>> transit
datetime.datetime(2018, 4, 17, 19, 35, 46, 686265)
Notes
-----
This function takes on the order of 2 ms per calculation.
The reason the function cannot return the time corrected to the local
timezone is that the function does not know the timezone at the specified
lat/long.
References
----------
.. [1] Reda, Ibrahim, and Afshin Andreas. "Solar Position Algorithm for
Solar Radiation Applications." Solar Energy 76, no. 5 (January 1, 2004):
577-89. https://doi.org/10.1016/j.solener.2003.12.003.
]
from relative_module[fluids.optional] import module[spa]
variable[delta_t] assign[=] call[name[spa].calculate_deltat, parameter[name[moment].year, name[moment].month]]
variable[moment] assign[=] call[name[datetime], parameter[name[moment].year, name[moment].month, name[moment].day]]
import module[calendar]
variable[unixtime] assign[=] call[name[calendar].timegm, parameter[call[name[moment].timetuple, parameter[]]]]
variable[unixtime] assign[=] binary_operation[name[unixtime] - binary_operation[name[unixtime] <ast.Mod object at 0x7da2590d6920> constant[86400]]]
<ast.Tuple object at 0x7da18bc71fc0> assign[=] call[name[spa].transit_sunrise_sunset, parameter[call[name[np].array, parameter[list[[<ast.Name object at 0x7da18bc72bc0>]]]]]]
variable[transit] assign[=] call[name[datetime].utcfromtimestamp, parameter[call[name[float], parameter[name[transit]]]]]
variable[sunrise] assign[=] call[name[datetime].utcfromtimestamp, parameter[call[name[float], parameter[name[sunrise]]]]]
variable[sunset] assign[=] call[name[datetime].utcfromtimestamp, parameter[call[name[float], parameter[name[sunset]]]]]
return[tuple[[<ast.Name object at 0x7da18bc70490>, <ast.Name object at 0x7da18bc70310>, <ast.Name object at 0x7da18bc737f0>]]] | keyword[def] identifier[sunrise_sunset] ( identifier[moment] , identifier[latitude] , identifier[longitude] ):
literal[string]
keyword[from] identifier[fluids] . identifier[optional] keyword[import] identifier[spa]
identifier[delta_t] = identifier[spa] . identifier[calculate_deltat] ( identifier[moment] . identifier[year] , identifier[moment] . identifier[month] )
identifier[moment] = identifier[datetime] ( identifier[moment] . identifier[year] , identifier[moment] . identifier[month] , identifier[moment] . identifier[day] )
keyword[import] identifier[calendar]
identifier[unixtime] = identifier[calendar] . identifier[timegm] ( identifier[moment] . identifier[timetuple] ())
identifier[unixtime] = identifier[unixtime] - identifier[unixtime] %( literal[int] )
identifier[transit] , identifier[sunrise] , identifier[sunset] = identifier[spa] . identifier[transit_sunrise_sunset] ( identifier[np] . identifier[array] ([ identifier[unixtime] ]), identifier[lat] = identifier[latitude] , identifier[lon] = identifier[longitude] , identifier[delta_t] = identifier[delta_t] , identifier[numthreads] = literal[int] )
identifier[transit] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[float] ( identifier[transit] ))
identifier[sunrise] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[float] ( identifier[sunrise] ))
identifier[sunset] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[float] ( identifier[sunset] ))
keyword[return] identifier[sunrise] , identifier[sunset] , identifier[transit] | def sunrise_sunset(moment, latitude, longitude):
"""Calculates the times at which the sun is at sunset; sunrise; and
halfway between sunrise and sunset (transit).
Uses the Reda and Andreas (2004) model described in [1]_,
originally incorporated into the excellent
`pvlib library <https://github.com/pvlib/pvlib-python>`_
Parameters
----------
moment : datetime
Date for the calculation; needs to contain only the year, month, and
day, [-]
latitude : float
Latitude, between -90 and 90 [degrees]
longitude : float
Longitude, between -180 and 180, [degrees]
Returns
-------
sunrise : datetime
The time at the specified day when the sun rises **IN UTC**, [-]
sunset : datetime
The time at the specified day when the sun sets **IN UTC**, [-]
transit : datetime
The time at the specified day when the sun is at solar noon - halfway
between sunrise and sunset **IN UTC**, [-]
Examples
--------
>>> sunrise, sunset, transit = sunrise_sunset(datetime(2018, 4, 17),
... 51.0486, -114.07)
>>> sunrise
datetime.datetime(2018, 4, 17, 12, 36, 55, 782660)
>>> sunset
datetime.datetime(2018, 4, 18, 2, 34, 4, 249326)
>>> transit
datetime.datetime(2018, 4, 17, 19, 35, 46, 686265)
Notes
-----
This function takes on the order of 2 ms per calculation.
The reason the function cannot return the time corrected to the local
timezone is that the function does not know the timezone at the specified
lat/long.
References
----------
.. [1] Reda, Ibrahim, and Afshin Andreas. "Solar Position Algorithm for
Solar Radiation Applications." Solar Energy 76, no. 5 (January 1, 2004):
577-89. https://doi.org/10.1016/j.solener.2003.12.003.
"""
from fluids.optional import spa
delta_t = spa.calculate_deltat(moment.year, moment.month)
# Strip the part of the day
moment = datetime(moment.year, moment.month, moment.day)
import calendar
unixtime = calendar.timegm(moment.timetuple())
unixtime = unixtime - unixtime % 86400 # Remove the remainder of the value, rounding it to the day it is
(transit, sunrise, sunset) = spa.transit_sunrise_sunset(np.array([unixtime]), lat=latitude, lon=longitude, delta_t=delta_t, numthreads=1)
transit = datetime.utcfromtimestamp(float(transit))
sunrise = datetime.utcfromtimestamp(float(sunrise))
sunset = datetime.utcfromtimestamp(float(sunset))
return (sunrise, sunset, transit) |
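
Since the function returns UTC times (lat/long alone does not identify a timezone, as the Notes explain), a caller who knows the zone can convert afterwards. A sketch assuming Python 3.9+ for zoneinfo and that fluids is installed; the docstring's Calgary example falls in America/Edmonton.

from datetime import datetime, timezone
from zoneinfo import ZoneInfo

sunrise, sunset, transit = sunrise_sunset(datetime(2018, 4, 17), 51.0486, -114.07)
local = ZoneInfo('America/Edmonton')
local_sunrise = sunrise.replace(tzinfo=timezone.utc).astimezone(local)
print(local_sunrise)  # 2018-04-17 06:36:55.782660-06:00
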
def findDuplicates(tfam):
"""Finds the duplicates in a TFAM.
:param tfam: representation of a ``tfam`` file.
:type tfam: list
:returns: two :py:class:`dict`, containing the positions of unique and
duplicated samples.
"""
uSamples = {}
dSamples = defaultdict(list)
for i, row in enumerate(tfam):
sampleID = tuple(row[:2])
if sampleID not in uSamples:
# This is the first time we see this sample
uSamples[sampleID] = i
else:
# We have seen this sample at least once...
if sampleID not in dSamples:
# This is the second time we see this sample...
dSamples[sampleID].extend([uSamples[sampleID], i])
else:
# We have seen this sample multiple times
dSamples[sampleID].append(i)
# Removing the duplicates from the unique samples
for sampleID in dSamples.iterkeys():
if sampleID in uSamples:
del uSamples[sampleID]
return uSamples, dSamples | def function[findDuplicates, parameter[tfam]]:
constant[Finds the duplicates in a TFAM.
:param tfam: representation of a ``tfam`` file.
:type tfam: list
:returns: two :py:class:`dict`, containing the positions of unique and
duplicated samples.
]
variable[uSamples] assign[=] dictionary[[], []]
variable[dSamples] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da1b0a4ca00>, <ast.Name object at 0x7da1b0a4c2b0>]]] in starred[call[name[enumerate], parameter[name[tfam]]]] begin[:]
variable[sampleID] assign[=] call[name[tuple], parameter[call[name[row]][<ast.Slice object at 0x7da1b0a4d0c0>]]]
if compare[name[sampleID] <ast.NotIn object at 0x7da2590d7190> name[uSamples]] begin[:]
call[name[uSamples]][name[sampleID]] assign[=] name[i]
for taget[name[sampleID]] in starred[call[name[dSamples].iterkeys, parameter[]]] begin[:]
if compare[name[sampleID] in name[uSamples]] begin[:]
<ast.Delete object at 0x7da1b0adaad0>
return[tuple[[<ast.Name object at 0x7da1b0ad8160>, <ast.Name object at 0x7da1b0adbbb0>]]] | keyword[def] identifier[findDuplicates] ( identifier[tfam] ):
literal[string]
identifier[uSamples] ={}
identifier[dSamples] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[tfam] ):
identifier[sampleID] = identifier[tuple] ( identifier[row] [: literal[int] ])
keyword[if] identifier[sampleID] keyword[not] keyword[in] identifier[uSamples] :
identifier[uSamples] [ identifier[sampleID] ]= identifier[i]
keyword[else] :
keyword[if] identifier[sampleID] keyword[not] keyword[in] identifier[dSamples] :
identifier[dSamples] [ identifier[sampleID] ]. identifier[extend] ([ identifier[uSamples] [ identifier[sampleID] ], identifier[i] ])
keyword[else] :
identifier[dSamples] [ identifier[sampleID] ]. identifier[append] ( identifier[i] )
keyword[for] identifier[sampleID] keyword[in] identifier[dSamples] . identifier[iterkeys] ():
keyword[if] identifier[sampleID] keyword[in] identifier[uSamples] :
keyword[del] identifier[uSamples] [ identifier[sampleID] ]
keyword[return] identifier[uSamples] , identifier[dSamples] | def findDuplicates(tfam):
"""Finds the duplicates in a TFAM.
:param tfam: representation of a ``tfam`` file.
:type tfam: list
:returns: two :py:class:`dict`, containing the positions of unique and
duplicated samples.
"""
uSamples = {}
dSamples = defaultdict(list)
for (i, row) in enumerate(tfam):
sampleID = tuple(row[:2])
if sampleID not in uSamples:
# This is the first time we see this sample
uSamples[sampleID] = i # depends on [control=['if'], data=['sampleID', 'uSamples']]
# We have seen this sample at least once...
elif sampleID not in dSamples:
# This is the second time we see this sample...
dSamples[sampleID].extend([uSamples[sampleID], i]) # depends on [control=['if'], data=['sampleID', 'dSamples']]
else:
# We have seen this sample multiple times
dSamples[sampleID].append(i) # depends on [control=['for'], data=[]]
# Removing the duplicates from the unique samples
for sampleID in dSamples.iterkeys():
if sampleID in uSamples:
del uSamples[sampleID] # depends on [control=['if'], data=['sampleID', 'uSamples']] # depends on [control=['for'], data=['sampleID']]
return (uSamples, dSamples) |
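
A small worked example of the same unique/duplicate split, rewritten for Python 3 (the original uses the Python 2 dict.iterkeys()). The sample ID is the first two TFAM columns, here invented for illustration.

from collections import defaultdict

tfam = [['fam1', 'ind1'], ['fam1', 'ind2'], ['fam1', 'ind1']]
unique, dups = {}, defaultdict(list)
for i, row in enumerate(tfam):
    sid = tuple(row[:2])
    if sid not in unique:
        unique[sid] = i                        # first sighting
    elif sid not in dups:
        dups[sid].extend([unique[sid], i])     # second sighting: record both
    else:
        dups[sid].append(i)                    # further sightings
for sid in list(dups):
    unique.pop(sid, None)                      # duplicates are not unique
print(unique)      # {('fam1', 'ind2'): 1}
print(dict(dups))  # {('fam1', 'ind1'): [0, 2]}
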
def getElementById(self, _id, root='root', useIndex=True):
'''
getElementById - Searches and returns the first (should only be one) element with the given ID.
@param _id <str> - A string of the id attribute.
@param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. If string 'root', the root of the parsed tree will be used.
@param useIndex <bool> If useIndex is True and ids are indexed [see constructor], only the index will be used. Otherwise a full search is performed.
'''
(root, isFromRoot) = self._handleRootArg(root)
if self.useIndex is True and self.indexIDs is True:
element = self._idMap.get(_id, None)
if isFromRoot is False and element is not None:
if self._hasTagInParentLine(element, root) is False:
element = None
return element
return AdvancedHTMLParser.getElementById(self, _id, root) | def function[getElementById, parameter[self, _id, root, useIndex]]:
constant[
getElementById - Searches and returns the first (should only be one) element with the given ID.
@param _id <str> - A string of the id attribute.
@param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. If string 'root', the root of the parsed tree will be used.
@param useIndex <bool> If useIndex is True and ids are indexed [see constructor], only the index will be used. Otherwise a full search is performed.
]
<ast.Tuple object at 0x7da1b10d7a90> assign[=] call[name[self]._handleRootArg, parameter[name[root]]]
if <ast.BoolOp object at 0x7da20c76eec0> begin[:]
variable[element] assign[=] call[name[self]._idMap.get, parameter[name[_id], constant[None]]]
if <ast.BoolOp object at 0x7da18f58dd50> begin[:]
if compare[call[name[self]._hasTagInParentLine, parameter[name[element], name[root]]] is constant[False]] begin[:]
variable[element] assign[=] constant[None]
return[name[element]]
return[call[name[AdvancedHTMLParser].getElementById, parameter[name[self], name[_id], name[root]]]] | keyword[def] identifier[getElementById] ( identifier[self] , identifier[_id] , identifier[root] = literal[string] , identifier[useIndex] = keyword[True] ):
literal[string]
( identifier[root] , identifier[isFromRoot] )= identifier[self] . identifier[_handleRootArg] ( identifier[root] )
keyword[if] identifier[self] . identifier[useIndex] keyword[is] keyword[True] keyword[and] identifier[self] . identifier[indexIDs] keyword[is] keyword[True] :
identifier[element] = identifier[self] . identifier[_idMap] . identifier[get] ( identifier[_id] , keyword[None] )
keyword[if] identifier[isFromRoot] keyword[is] keyword[False] keyword[and] identifier[element] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[_hasTagInParentLine] ( identifier[element] , identifier[root] ) keyword[is] keyword[False] :
identifier[element] = keyword[None]
keyword[return] identifier[element]
keyword[return] identifier[AdvancedHTMLParser] . identifier[getElementById] ( identifier[self] , identifier[_id] , identifier[root] ) | def getElementById(self, _id, root='root', useIndex=True):
"""
getElementById - Searches and returns the first (should only be one) element with the given ID.
@param _id <str> - A string of the id attribute.
@param root <AdvancedTag/'root'> - Search starting at a specific node, if provided. If string 'root', the root of the parsed tree will be used.
@param useIndex <bool> If useIndex is True and ids are indexed [see constructor], only the index will be used. Otherwise a full search is performed.
"""
(root, isFromRoot) = self._handleRootArg(root)
if self.useIndex is True and self.indexIDs is True:
element = self._idMap.get(_id, None)
if isFromRoot is False and element is not None:
if self._hasTagInParentLine(element, root) is False:
element = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return element # depends on [control=['if'], data=[]]
return AdvancedHTMLParser.getElementById(self, _id, root) |
def get_bios_firmware_version(snmp_client):
"""Get bios firmware version of the node.
:param snmp_client: an SNMP client object.
:raises: SNMPFailure if SNMP operation failed.
:returns: a string of bios firmware version.
"""
try:
bios_firmware_version = snmp_client.get(BIOS_FW_VERSION_OID)
return six.text_type(bios_firmware_version)
except SNMPFailure as e:
raise SNMPBIOSFirmwareFailure(
SNMP_FAILURE_MSG % ("GET BIOS FIRMWARE VERSION", e)) | def function[get_bios_firmware_version, parameter[snmp_client]]:
constant[Get bios firmware version of the node.
:param snmp_client: an SNMP client object.
:raises: SNMPFailure if SNMP operation failed.
:returns: a string of bios firmware version.
]
<ast.Try object at 0x7da1b1a76f80> | keyword[def] identifier[get_bios_firmware_version] ( identifier[snmp_client] ):
literal[string]
keyword[try] :
identifier[bios_firmware_version] = identifier[snmp_client] . identifier[get] ( identifier[BIOS_FW_VERSION_OID] )
keyword[return] identifier[six] . identifier[text_type] ( identifier[bios_firmware_version] )
keyword[except] identifier[SNMPFailure] keyword[as] identifier[e] :
keyword[raise] identifier[SNMPBIOSFirmwareFailure] (
identifier[SNMP_FAILURE_MSG] %( literal[string] , identifier[e] )) | def get_bios_firmware_version(snmp_client):
"""Get bios firmware version of the node.
:param snmp_client: an SNMP client object.
:raises: SNMPFailure if SNMP operation failed.
:returns: a string of bios firmware version.
"""
try:
bios_firmware_version = snmp_client.get(BIOS_FW_VERSION_OID)
return six.text_type(bios_firmware_version) # depends on [control=['try'], data=[]]
except SNMPFailure as e:
raise SNMPBIOSFirmwareFailure(SNMP_FAILURE_MSG % ('GET BIOS FIRMWARE VERSION', e)) # depends on [control=['except'], data=['e']] |
def get_rel_sciobj_file_path(pid):
"""Get the relative local path to the file holding an object's bytes.
- The path is relative to settings.OBJECT_STORE_PATH
- There is a one-to-one mapping between pid and path
- The path is based on a SHA1 hash. It's now possible to craft SHA1 collisions, but
it's so unlikely that we ignore it for now
- The path may or may not exist (yet).
"""
hash_str = hashlib.sha1(pid.encode('utf-8')).hexdigest()
return os.path.join(hash_str[:2], hash_str[2:4], hash_str) | def function[get_rel_sciobj_file_path, parameter[pid]]:
constant[Get the relative local path to the file holding an object's bytes.
- The path is relative to settings.OBJECT_STORE_PATH
- There is a one-to-one mapping between pid and path
- The path is based on a SHA1 hash. It's now possible to craft SHA1 collisions, but
it's so unlikely that we ignore it for now
- The path may or may not exist (yet).
]
variable[hash_str] assign[=] call[call[name[hashlib].sha1, parameter[call[name[pid].encode, parameter[constant[utf-8]]]]].hexdigest, parameter[]]
return[call[name[os].path.join, parameter[call[name[hash_str]][<ast.Slice object at 0x7da1b1939180>], call[name[hash_str]][<ast.Slice object at 0x7da1b1938d90>], name[hash_str]]]] | keyword[def] identifier[get_rel_sciobj_file_path] ( identifier[pid] ):
literal[string]
identifier[hash_str] = identifier[hashlib] . identifier[sha1] ( identifier[pid] . identifier[encode] ( literal[string] )). identifier[hexdigest] ()
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[hash_str] [: literal[int] ], identifier[hash_str] [ literal[int] : literal[int] ], identifier[hash_str] ) | def get_rel_sciobj_file_path(pid):
"""Get the relative local path to the file holding an object's bytes.
- The path is relative to settings.OBJECT_STORE_PATH
- There is a one-to-one mapping between pid and path
- The path is based on a SHA1 hash. It's now possible to craft SHA1 collisions, but
it's so unlikely that we ignore it for now
- The path may or may not exist (yet).
"""
hash_str = hashlib.sha1(pid.encode('utf-8')).hexdigest()
return os.path.join(hash_str[:2], hash_str[2:4], hash_str) |
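
A worked example of the SHA1-based fan-out: the first two hex pairs of the digest become two directory levels, and the full digest is the file name, giving a one-to-one pid-to-path mapping. The pid below is hypothetical.

import hashlib
import os

pid = 'urn:uuid:1234'  # hypothetical identifier
h = hashlib.sha1(pid.encode('utf-8')).hexdigest()
print(h[:2], h[2:4], h)                # the three path components
print(os.path.join(h[:2], h[2:4], h))  # same layout the function returns
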
def _enforce_ttl_key(self, key):
'''
Enforce the TTL for a specific key, deleting it if it is past its TTL
'''
if key not in self._key_cache_time or self._ttl == 0:
return
if time.time() - self._key_cache_time[key] > self._ttl:
del self._key_cache_time[key]
self._dict.__delitem__(key) | def function[_enforce_ttl_key, parameter[self, key]]:
constant[
Enforce the TTL for a specific key, deleting it if it is past its TTL
]
if <ast.BoolOp object at 0x7da18c4cf130> begin[:]
return[None]
if compare[binary_operation[call[name[time].time, parameter[]] - call[name[self]._key_cache_time][name[key]]] greater[>] name[self]._ttl] begin[:]
<ast.Delete object at 0x7da18c4cdde0>
call[name[self]._dict.__delitem__, parameter[name[key]]] | keyword[def] identifier[_enforce_ttl_key] ( identifier[self] , identifier[key] ):
literal[string]
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_key_cache_time] keyword[or] identifier[self] . identifier[_ttl] == literal[int] :
keyword[return]
keyword[if] identifier[time] . identifier[time] ()- identifier[self] . identifier[_key_cache_time] [ identifier[key] ]> identifier[self] . identifier[_ttl] :
keyword[del] identifier[self] . identifier[_key_cache_time] [ identifier[key] ]
identifier[self] . identifier[_dict] . identifier[__delitem__] ( identifier[key] ) | def _enforce_ttl_key(self, key):
"""
Enforce the TTL for a specific key, deleting it if it is past its TTL
"""
if key not in self._key_cache_time or self._ttl == 0:
return # depends on [control=['if'], data=[]]
if time.time() - self._key_cache_time[key] > self._ttl:
del self._key_cache_time[key]
self._dict.__delitem__(key) # depends on [control=['if'], data=[]] |
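
A hypothetical minimal host showing the structures the method above assumes: _dict holds the values, _key_cache_time maps each key to its insertion time, and _ttl == 0 disables expiry. The class is a sketch, not the source's actual cache.

import time

class TTLCache:
    def __init__(self, ttl):
        self._ttl = ttl               # ttl == 0 disables expiry
        self._dict = {}               # backing store
        self._key_cache_time = {}     # key -> insertion timestamp

    def __setitem__(self, key, value):
        self._dict[key] = value
        self._key_cache_time[key] = time.time()

    def _enforce_ttl_key(self, key):
        if key not in self._key_cache_time or self._ttl == 0:
            return
        if time.time() - self._key_cache_time[key] > self._ttl:
            del self._key_cache_time[key]
            del self._dict[key]

    def get(self, key):
        self._enforce_ttl_key(key)
        return self._dict.get(key)

cache = TTLCache(ttl=0.05)
cache['a'] = 1
time.sleep(0.1)
print(cache.get('a'))  # None -- the entry expired and was evicted
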
def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = _dict_getitem(self, key)
yield val[0], ', '.join(val[1:]) | def function[itermerged, parameter[self]]:
constant[Iterate over all headers, merging duplicate ones together.]
for taget[name[key]] in starred[name[self]] begin[:]
variable[val] assign[=] call[name[_dict_getitem], parameter[name[self], name[key]]]
<ast.Yield object at 0x7da2054a4f40> | keyword[def] identifier[itermerged] ( identifier[self] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[self] :
identifier[val] = identifier[_dict_getitem] ( identifier[self] , identifier[key] )
keyword[yield] identifier[val] [ literal[int] ], literal[string] . identifier[join] ( identifier[val] [ literal[int] :]) | def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = _dict_getitem(self, key)
yield (val[0], ', '.join(val[1:])) # depends on [control=['for'], data=['key']] |
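
itermerged above assumes each stored bucket is a list of the form [original_key, value1, value2, ...] (the layout _dict_getitem returns), so duplicates merge with ', '. A minimal sketch of that assumed layout and the merge:

buckets = {
    'set-cookie': ['Set-Cookie', 'a=1', 'b=2'],
    'content-type': ['Content-Type', 'text/html'],
}
for val in buckets.values():
    print('%s: %s' % (val[0], ', '.join(val[1:])))
# Set-Cookie: a=1, b=2
# Content-Type: text/html
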
def get_Cpt_params(A, Cnodes, AggOp, T):
"""Return C and F pts.
Helper function that returns a dictionary of sparse matrices and arrays
which allow us to easily operate on Cpts and Fpts separately.
Parameters
----------
A : {csr_matrix, bsr_matrix}
Operator
Cnodes : {array}
Array of all root node indices. This is an array of nodal indices,
not degree-of-freedom indices. If the blocksize of T is 1, then
nodal indices and degree-of-freedom indices coincide.
AggOp : {csr_matrix}
Aggregation operator corresponding to A
T : {bsr_matrix}
Tentative prolongator based on AggOp
Returns
-------
Dictionary containing these parameters:
P_I : {bsr_matrix}
Interpolation operator that carries out only simple injection from the
coarse grid to fine grid Cpts nodes
I_F : {bsr_matrix}
Identity operator on Fpts, i.e., the action of this matrix zeros
out entries in a vector at all Cpts, leaving Fpts untouched
I_C : {bsr_matrix}
Identity operator on Cpts nodes, i.e., the action of this matrix zeros
out entries in a vector at all Fpts, leaving Cpts untouched
Cpts : {array}
An array of all root node dofs, corresponding to the F/C splitting
Fpts : {array}
An array of all non root node dofs, corresponding to the F/C splitting
Examples
--------
>>> from numpy import array
>>> from pyamg.util.utils import get_Cpt_params
>>> from pyamg.gallery import poisson
>>> from scipy.sparse import csr_matrix, bsr_matrix
>>> A = poisson((10,), format='csr')
>>> Cpts = array([3, 7])
>>> AggOp = ([[ 1., 0.], [ 1., 0.],
... [ 1., 0.], [ 1., 0.],
... [ 1., 0.], [ 0., 1.],
... [ 0., 1.], [ 0., 1.],
... [ 0., 1.], [ 0., 1.]])
>>> AggOp = csr_matrix(AggOp)
>>> T = AggOp.copy().tobsr()
>>> params = get_Cpt_params(A, Cpts, AggOp, T)
>>> params['P_I'].todense()
matrix([[ 0., 0.],
[ 0., 0.],
[ 0., 0.],
[ 1., 0.],
[ 0., 0.],
[ 0., 0.],
[ 0., 0.],
[ 0., 1.],
[ 0., 0.],
[ 0., 0.]])
Notes
-----
The principal calling routine is
aggregation.smooth.energy_prolongation_smoother,
which uses the Cpt_param dictionary for root-node style
prolongation smoothing
"""
if not isspmatrix_bsr(A) and not isspmatrix_csr(A):
raise TypeError('Expected BSR or CSR matrix A')
if not isspmatrix_csr(AggOp):
raise TypeError('Expected CSR matrix AggOp')
if not isspmatrix_bsr(T):
raise TypeError('Expected BSR matrix T')
if T.blocksize[0] != T.blocksize[1]:
raise TypeError('Expected square blocksize for BSR matrix T')
if A.shape[0] != A.shape[1]:
raise TypeError('Expected square matrix A')
if T.shape[0] != A.shape[0]:
raise TypeError('Expected compatible dimensions for T and A,\
T.shape[0] = A.shape[0]')
if Cnodes.shape[0] != AggOp.shape[1]:
if AggOp.shape[1] > 1:
raise TypeError('Number of columns in AggOp must equal number\
of Cnodes')
if isspmatrix_bsr(A) and A.blocksize[0] > 1:
# Expand the list of Cpt nodes to a list of Cpt dofs
blocksize = A.blocksize[0]
Cpts = np.repeat(blocksize*Cnodes, blocksize)
for k in range(1, blocksize):
Cpts[list(range(k, Cpts.shape[0], blocksize))] += k
else:
blocksize = 1
Cpts = Cnodes
Cpts = np.array(Cpts, dtype=int)
# More input checking
if Cpts.shape[0] != T.shape[1]:
if T.shape[1] > blocksize:
raise ValueError('Expected number of Cpts to match T.shape[1]')
if blocksize != T.blocksize[0]:
raise ValueError('Expected identical blocksize in A and T')
if AggOp.shape[0] != int(T.shape[0]/blocksize):
raise ValueError('Number of rows in AggOp must equal number of\
fine-grid nodes')
# Create two maps, one for F points and one for C points
ncoarse = T.shape[1]
I_C = eye(A.shape[0], A.shape[1], format='csr')
I_F = I_C.copy()
I_F.data[Cpts] = 0.0
I_F.eliminate_zeros()
I_C = I_C - I_F
I_C.eliminate_zeros()
# Find Fpts, the complement of Cpts
Fpts = I_F.indices.copy()
# P_I only injects from Cpts on the coarse grid to the fine grid, but
# because of its later uses, it must have the CSC indices ordered as
# in Cpts
if I_C.nnz > 0:
indices = Cpts.copy()
indptr = np.arange(indices.shape[0]+1)
else:
indices = np.zeros((0,), dtype=T.indices.dtype)
indptr = np.zeros((ncoarse+1,), dtype=T.indptr.dtype)
P_I = csc_matrix((I_C.data.copy(), indices, indptr),
shape=(I_C.shape[0], ncoarse))
P_I = P_I.tobsr(T.blocksize)
# Use same blocksize as A
if isspmatrix_bsr(A):
I_C = I_C.tobsr(A.blocksize)
I_F = I_F.tobsr(A.blocksize)
else:
I_C = I_C.tobsr(blocksize=(1, 1))
I_F = I_F.tobsr(blocksize=(1, 1))
return {'P_I': P_I, 'I_F': I_F, 'I_C': I_C, 'Cpts': Cpts, 'Fpts': Fpts} | def function[get_Cpt_params, parameter[A, Cnodes, AggOp, T]]:
constant[Return C and F pts.
Helper function that returns a dictionary of sparse matrices and arrays
which allow us to easily operate on Cpts and Fpts separately.
Parameters
----------
A : {csr_matrix, bsr_matrix}
Operator
Cnodes : {array}
Array of all root node indices. This is an array of nodal indices,
not degree-of-freedom indices. If the blocksize of T is 1, then
nodal indices and degree-of-freedom indices coincide.
AggOp : {csr_matrix}
Aggregation operator corresponding to A
T : {bsr_matrix}
Tentative prolongator based on AggOp
Returns
-------
Dictionary containing these parameters:
P_I : {bsr_matrix}
Interpolation operator that carries out only simple injection from the
coarse grid to fine grid Cpts nodes
I_F : {bsr_matrix}
Identity operator on Fpts, i.e., the action of this matrix zeros
out entries in a vector at all Cpts, leaving Fpts untouched
I_C : {bsr_matrix}
Identity operator on Cpts nodes, i.e., the action of this matrix zeros
out entries in a vector at all Fpts, leaving Cpts untouched
Cpts : {array}
An array of all root node dofs, corresponding to the F/C splitting
Fpts : {array}
An array of all non root node dofs, corresponding to the F/C splitting
Examples
--------
>>> from numpy import array
>>> from pyamg.util.utils import get_Cpt_params
>>> from pyamg.gallery import poisson
>>> from scipy.sparse import csr_matrix, bsr_matrix
>>> A = poisson((10,), format='csr')
>>> Cpts = array([3, 7])
>>> AggOp = ([[ 1., 0.], [ 1., 0.],
... [ 1., 0.], [ 1., 0.],
... [ 1., 0.], [ 0., 1.],
... [ 0., 1.], [ 0., 1.],
... [ 0., 1.], [ 0., 1.]])
>>> AggOp = csr_matrix(AggOp)
>>> T = AggOp.copy().tobsr()
>>> params = get_Cpt_params(A, Cpts, AggOp, T)
>>> params['P_I'].todense()
matrix([[ 0., 0.],
[ 0., 0.],
[ 0., 0.],
[ 1., 0.],
[ 0., 0.],
[ 0., 0.],
[ 0., 0.],
[ 0., 1.],
[ 0., 0.],
[ 0., 0.]])
Notes
-----
The principal calling routine is
aggregation.smooth.energy_prolongation_smoother,
which uses the Cpt_param dictionary for root-node style
prolongation smoothing
]
if <ast.BoolOp object at 0x7da18dc9bbe0> begin[:]
<ast.Raise object at 0x7da18dc99f60>
if <ast.UnaryOp object at 0x7da18dc98e80> begin[:]
<ast.Raise object at 0x7da18dc98940>
if <ast.UnaryOp object at 0x7da18dc9bb80> begin[:]
<ast.Raise object at 0x7da18dc98130>
if compare[call[name[T].blocksize][constant[0]] not_equal[!=] call[name[T].blocksize][constant[1]]] begin[:]
<ast.Raise object at 0x7da18dc99ae0>
if compare[call[name[A].shape][constant[0]] not_equal[!=] call[name[A].shape][constant[1]]] begin[:]
<ast.Raise object at 0x7da18dc9bf10>
if compare[call[name[T].shape][constant[0]] not_equal[!=] call[name[A].shape][constant[0]]] begin[:]
<ast.Raise object at 0x7da18dc983d0>
if compare[call[name[Cnodes].shape][constant[0]] not_equal[!=] call[name[AggOp].shape][constant[1]]] begin[:]
if compare[call[name[AggOp].shape][constant[1]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da18dc9b8e0>
if <ast.BoolOp object at 0x7da18dc984f0> begin[:]
variable[blocksize] assign[=] call[name[A].blocksize][constant[0]]
variable[Cpts] assign[=] call[name[np].repeat, parameter[binary_operation[name[blocksize] * name[Cnodes]], name[blocksize]]]
for taget[name[k]] in starred[call[name[range], parameter[constant[1], name[blocksize]]]] begin[:]
<ast.AugAssign object at 0x7da18fe93520>
variable[Cpts] assign[=] call[name[np].array, parameter[name[Cpts]]]
if compare[call[name[Cpts].shape][constant[0]] not_equal[!=] call[name[T].shape][constant[1]]] begin[:]
if compare[call[name[T].shape][constant[1]] greater[>] name[blocksize]] begin[:]
<ast.Raise object at 0x7da18fe91c90>
if compare[name[blocksize] not_equal[!=] call[name[T].blocksize][constant[0]]] begin[:]
<ast.Raise object at 0x7da18fe92440>
if compare[call[name[AggOp].shape][constant[0]] not_equal[!=] call[name[int], parameter[binary_operation[call[name[T].shape][constant[0]] / name[blocksize]]]]] begin[:]
<ast.Raise object at 0x7da18fe92f50>
variable[ncoarse] assign[=] call[name[T].shape][constant[1]]
variable[I_C] assign[=] call[name[eye], parameter[call[name[A].shape][constant[0]], call[name[A].shape][constant[1]]]]
variable[I_F] assign[=] call[name[I_C].copy, parameter[]]
call[name[I_F].data][name[Cpts]] assign[=] constant[0.0]
call[name[I_F].eliminate_zeros, parameter[]]
variable[I_C] assign[=] binary_operation[name[I_C] - name[I_F]]
call[name[I_C].eliminate_zeros, parameter[]]
variable[Fpts] assign[=] call[name[I_F].indices.copy, parameter[]]
if compare[name[I_C].nnz greater[>] constant[0]] begin[:]
variable[indices] assign[=] call[name[Cpts].copy, parameter[]]
variable[indptr] assign[=] call[name[np].arange, parameter[binary_operation[call[name[indices].shape][constant[0]] + constant[1]]]]
variable[P_I] assign[=] call[name[csc_matrix], parameter[tuple[[<ast.Call object at 0x7da18fe90970>, <ast.Name object at 0x7da18fe935e0>, <ast.Name object at 0x7da18fe90550>]]]]
variable[P_I] assign[=] call[name[P_I].tobsr, parameter[name[T].blocksize]]
if call[name[isspmatrix_bsr], parameter[name[A]]] begin[:]
variable[I_C] assign[=] call[name[I_C].tobsr, parameter[name[A].blocksize]]
variable[I_F] assign[=] call[name[I_F].tobsr, parameter[name[A].blocksize]]
return[dictionary[[<ast.Constant object at 0x7da18fe91510>, <ast.Constant object at 0x7da18fe92710>, <ast.Constant object at 0x7da18fe93550>, <ast.Constant object at 0x7da18fe90fd0>, <ast.Constant object at 0x7da18fe90b20>], [<ast.Name object at 0x7da18fe91a80>, <ast.Name object at 0x7da18fe90eb0>, <ast.Name object at 0x7da18fe93400>, <ast.Name object at 0x7da18fe92cb0>, <ast.Name object at 0x7da18fe933d0>]]] | keyword[def] identifier[get_Cpt_params] ( identifier[A] , identifier[Cnodes] , identifier[AggOp] , identifier[T] ):
literal[string]
keyword[if] keyword[not] identifier[isspmatrix_bsr] ( identifier[A] ) keyword[and] keyword[not] identifier[isspmatrix_csr] ( identifier[A] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isspmatrix_csr] ( identifier[AggOp] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isspmatrix_bsr] ( identifier[T] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[T] . identifier[blocksize] [ literal[int] ]!= identifier[T] . identifier[blocksize] [ literal[int] ]:
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[A] . identifier[shape] [ literal[int] ]!= identifier[A] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[T] . identifier[shape] [ literal[int] ]!= identifier[A] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[Cnodes] . identifier[shape] [ literal[int] ]!= identifier[AggOp] . identifier[shape] [ literal[int] ]:
keyword[if] identifier[AggOp] . identifier[shape] [ literal[int] ]> literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[isspmatrix_bsr] ( identifier[A] ) keyword[and] identifier[A] . identifier[blocksize] [ literal[int] ]> literal[int] :
identifier[blocksize] = identifier[A] . identifier[blocksize] [ literal[int] ]
identifier[Cpts] = identifier[np] . identifier[repeat] ( identifier[blocksize] * identifier[Cnodes] , identifier[blocksize] )
keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , identifier[blocksize] ):
identifier[Cpts] [ identifier[list] ( identifier[range] ( identifier[k] , identifier[Cpts] . identifier[shape] [ literal[int] ], identifier[blocksize] ))]+= identifier[k]
keyword[else] :
identifier[blocksize] = literal[int]
identifier[Cpts] = identifier[Cnodes]
identifier[Cpts] = identifier[np] . identifier[array] ( identifier[Cpts] , identifier[dtype] = identifier[int] )
keyword[if] identifier[Cpts] . identifier[shape] [ literal[int] ]!= identifier[T] . identifier[shape] [ literal[int] ]:
keyword[if] identifier[T] . identifier[shape] [ literal[int] ]> identifier[blocksize] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[blocksize] != identifier[T] . identifier[blocksize] [ literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[AggOp] . identifier[shape] [ literal[int] ]!= identifier[int] ( identifier[T] . identifier[shape] [ literal[int] ]/ identifier[blocksize] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[ncoarse] = identifier[T] . identifier[shape] [ literal[int] ]
identifier[I_C] = identifier[eye] ( identifier[A] . identifier[shape] [ literal[int] ], identifier[A] . identifier[shape] [ literal[int] ], identifier[format] = literal[string] )
identifier[I_F] = identifier[I_C] . identifier[copy] ()
identifier[I_F] . identifier[data] [ identifier[Cpts] ]= literal[int]
identifier[I_F] . identifier[eliminate_zeros] ()
identifier[I_C] = identifier[I_C] - identifier[I_F]
identifier[I_C] . identifier[eliminate_zeros] ()
identifier[Fpts] = identifier[I_F] . identifier[indices] . identifier[copy] ()
keyword[if] identifier[I_C] . identifier[nnz] > literal[int] :
identifier[indices] = identifier[Cpts] . identifier[copy] ()
identifier[indptr] = identifier[np] . identifier[arange] ( identifier[indices] . identifier[shape] [ literal[int] ]+ literal[int] )
keyword[else] :
identifier[indices] = identifier[np] . identifier[zeros] (( literal[int] ,), identifier[dtype] = identifier[T] . identifier[indices] . identifier[dtype] )
identifier[indptr] = identifier[np] . identifier[zeros] (( identifier[ncoarse] + literal[int] ,), identifier[dtype] = identifier[T] . identifier[indptr] . identifier[dtype] )
identifier[P_I] = identifier[csc_matrix] (( identifier[I_C] . identifier[data] . identifier[copy] (), identifier[indices] , identifier[indptr] ),
identifier[shape] =( identifier[I_C] . identifier[shape] [ literal[int] ], identifier[ncoarse] ))
identifier[P_I] = identifier[P_I] . identifier[tobsr] ( identifier[T] . identifier[blocksize] )
keyword[if] identifier[isspmatrix_bsr] ( identifier[A] ):
identifier[I_C] = identifier[I_C] . identifier[tobsr] ( identifier[A] . identifier[blocksize] )
identifier[I_F] = identifier[I_F] . identifier[tobsr] ( identifier[A] . identifier[blocksize] )
keyword[else] :
identifier[I_C] = identifier[I_C] . identifier[tobsr] ( identifier[blocksize] =( literal[int] , literal[int] ))
identifier[I_F] = identifier[I_F] . identifier[tobsr] ( identifier[blocksize] =( literal[int] , literal[int] ))
keyword[return] { literal[string] : identifier[P_I] , literal[string] : identifier[I_F] , literal[string] : identifier[I_C] , literal[string] : identifier[Cpts] , literal[string] : identifier[Fpts] } | def get_Cpt_params(A, Cnodes, AggOp, T):
"""Return C and F pts.
Helper function that returns a dictionary of sparse matrices and arrays
which allow us to easily operate on Cpts and Fpts separately.
Parameters
----------
A : {csr_matrix, bsr_matrix}
Operator
Cnodes : {array}
Array of all root node indices. This is an array of nodal indices,
not degree-of-freedom indices. If the blocksize of T is 1, then
nodal indices and degree-of-freedom indices coincide.
AggOp : {csr_matrix}
Aggregation operator corresponding to A
T : {bsr_matrix}
Tentative prolongator based on AggOp
Returns
-------
Dictionary containing these parameters:
P_I : {bsr_matrix}
Interpolation operator that carries out only simple injection from the
coarse grid to fine grid Cpts nodes
I_F : {bsr_matrix}
Identity operator on Fpts, i.e., the action of this matrix zeros
out entries in a vector at all Cpts, leaving Fpts untouched
I_C : {bsr_matrix}
Identity operator on Cpts nodes, i.e., the action of this matrix zeros
out entries in a vector at all Fpts, leaving Cpts untouched
Cpts : {array}
An array of all root node dofs, corresponding to the F/C splitting
Fpts : {array}
An array of all non root node dofs, corresponding to the F/C splitting
Examples
--------
>>> from numpy import array
>>> from pyamg.util.utils import get_Cpt_params
>>> from pyamg.gallery import poisson
>>> from scipy.sparse import csr_matrix, bsr_matrix
>>> A = poisson((10,), format='csr')
>>> Cpts = array([3, 7])
>>> AggOp = ([[ 1., 0.], [ 1., 0.],
... [ 1., 0.], [ 1., 0.],
... [ 1., 0.], [ 0., 1.],
... [ 0., 1.], [ 0., 1.],
... [ 0., 1.], [ 0., 1.]])
>>> AggOp = csr_matrix(AggOp)
>>> T = AggOp.copy().tobsr()
>>> params = get_Cpt_params(A, Cpts, AggOp, T)
>>> params['P_I'].todense()
matrix([[ 0., 0.],
[ 0., 0.],
[ 0., 0.],
[ 1., 0.],
[ 0., 0.],
[ 0., 0.],
[ 0., 0.],
[ 0., 1.],
[ 0., 0.],
[ 0., 0.]])
Notes
-----
The principal calling routine is
aggregation.smooth.energy_prolongation_smoother,
which uses the Cpt_param dictionary for root-node style
prolongation smoothing
"""
if not isspmatrix_bsr(A) and (not isspmatrix_csr(A)):
raise TypeError('Expected BSR or CSR matrix A') # depends on [control=['if'], data=[]]
if not isspmatrix_csr(AggOp):
raise TypeError('Expected CSR matrix AggOp') # depends on [control=['if'], data=[]]
if not isspmatrix_bsr(T):
raise TypeError('Expected BSR matrix T') # depends on [control=['if'], data=[]]
if T.blocksize[0] != T.blocksize[1]:
raise TypeError('Expected square blocksize for BSR matrix T') # depends on [control=['if'], data=[]]
if A.shape[0] != A.shape[1]:
raise TypeError('Expected square matrix A') # depends on [control=['if'], data=[]]
if T.shape[0] != A.shape[0]:
raise TypeError('Expected compatible dimensions for T and A, T.shape[0] = A.shape[0]') # depends on [control=['if'], data=[]]
if Cnodes.shape[0] != AggOp.shape[1]:
if AggOp.shape[1] > 1:
raise TypeError('Number of columns in AggOp must equal number of Cnodes') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if isspmatrix_bsr(A) and A.blocksize[0] > 1:
# Expand the list of Cpt nodes to a list of Cpt dofs
blocksize = A.blocksize[0]
Cpts = np.repeat(blocksize * Cnodes, blocksize)
for k in range(1, blocksize):
Cpts[list(range(k, Cpts.shape[0], blocksize))] += k # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
else:
blocksize = 1
Cpts = Cnodes
Cpts = np.array(Cpts, dtype=int)
# More input checking
if Cpts.shape[0] != T.shape[1]:
if T.shape[1] > blocksize:
raise ValueError('Expected number of Cpts to match T.shape[1]') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if blocksize != T.blocksize[0]:
raise ValueError('Expected identical blocksize in A and T') # depends on [control=['if'], data=[]]
if AggOp.shape[0] != int(T.shape[0] / blocksize):
raise ValueError('Number of rows in AggOp must equal number of fine-grid nodes') # depends on [control=['if'], data=[]]
# Create two maps, one for F points and one for C points
ncoarse = T.shape[1]
I_C = eye(A.shape[0], A.shape[1], format='csr')
I_F = I_C.copy()
I_F.data[Cpts] = 0.0
I_F.eliminate_zeros()
I_C = I_C - I_F
I_C.eliminate_zeros()
# Find Fpts, the complement of Cpts
Fpts = I_F.indices.copy()
# P_I only injects from Cpts on the coarse grid to the fine grid, but
    # because of its later uses, it must have the CSC indices ordered as
# in Cpts
if I_C.nnz > 0:
indices = Cpts.copy()
indptr = np.arange(indices.shape[0] + 1) # depends on [control=['if'], data=[]]
else:
indices = np.zeros((0,), dtype=T.indices.dtype)
indptr = np.zeros((ncoarse + 1,), dtype=T.indptr.dtype)
P_I = csc_matrix((I_C.data.copy(), indices, indptr), shape=(I_C.shape[0], ncoarse))
P_I = P_I.tobsr(T.blocksize)
# Use same blocksize as A
if isspmatrix_bsr(A):
I_C = I_C.tobsr(A.blocksize)
I_F = I_F.tobsr(A.blocksize) # depends on [control=['if'], data=[]]
else:
I_C = I_C.tobsr(blocksize=(1, 1))
I_F = I_F.tobsr(blocksize=(1, 1))
return {'P_I': P_I, 'I_F': I_F, 'I_C': I_C, 'Cpts': Cpts, 'Fpts': Fpts} |
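
A quick sanity check of the returned operators — a minimal sketch assuming pyamg and scipy are importable, reusing the Poisson setup from the docstring example:

import numpy as np
from scipy.sparse import csr_matrix
from pyamg.gallery import poisson
from pyamg.util.utils import get_Cpt_params

A = poisson((10,), format='csr')
Cpts = np.array([3, 7])
AggOp = csr_matrix(np.kron(np.eye(2), np.ones((5, 1))))  # same split as the docstring
T = AggOp.copy().tobsr(blocksize=(1, 1))
params = get_Cpt_params(A, Cpts, AggOp, T)

# I_C and I_F partition the identity: one keeps Cpt entries, the other Fpts.
assert np.allclose((params['I_C'] + params['I_F']).toarray(), np.eye(10))
# P_I injects coarse values onto the Cpt dofs and leaves all Fpts at zero.
v = np.array([0.5, -2.0])
assert np.allclose((params['P_I'] @ v)[params['Cpts']], v)
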
def gfortran_search_path(library_dirs):
"""Get the library directory paths for ``gfortran``.
Looks for ``libraries: =`` in the output of ``gfortran -print-search-dirs``
and then parses the paths. If this fails for any reason, this method will
print an error and return ``library_dirs``.
Args:
library_dirs (List[str]): Existing library directories.
Returns:
List[str]: The library directories for ``gfortran``.
"""
cmd = ("gfortran", "-print-search-dirs")
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
return_code = process.wait()
# Bail out if the command failed.
if return_code != 0:
return library_dirs
cmd_output = process.stdout.read().decode("utf-8")
# Find single line starting with ``libraries: ``.
search_lines = cmd_output.strip().split("\n")
library_lines = [
line[len(FORTRAN_LIBRARY_PREFIX) :]
for line in search_lines
if line.startswith(FORTRAN_LIBRARY_PREFIX)
]
if len(library_lines) != 1:
msg = GFORTRAN_MISSING_LIBS.format(cmd_output)
print(msg, file=sys.stderr)
return library_dirs
# Go through each library in the ``libraries: = ...`` line.
library_line = library_lines[0]
accepted = set(library_dirs)
for part in library_line.split(os.pathsep):
full_path = os.path.abspath(part.strip())
if os.path.isdir(full_path):
accepted.add(full_path)
else:
# Ignore anything that isn't a directory.
msg = GFORTRAN_BAD_PATH.format(full_path)
print(msg, file=sys.stderr)
return sorted(accepted) | def function[gfortran_search_path, parameter[library_dirs]]:
constant[Get the library directory paths for ``gfortran``.
Looks for ``libraries: =`` in the output of ``gfortran -print-search-dirs``
and then parses the paths. If this fails for any reason, this method will
print an error and return ``library_dirs``.
Args:
library_dirs (List[str]): Existing library directories.
Returns:
List[str]: The library directories for ``gfortran``.
]
variable[cmd] assign[=] tuple[[<ast.Constant object at 0x7da18bc71d50>, <ast.Constant object at 0x7da18bc70f10>]]
variable[process] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]]
variable[return_code] assign[=] call[name[process].wait, parameter[]]
if compare[name[return_code] not_equal[!=] constant[0]] begin[:]
return[name[library_dirs]]
variable[cmd_output] assign[=] call[call[name[process].stdout.read, parameter[]].decode, parameter[constant[utf-8]]]
variable[search_lines] assign[=] call[call[name[cmd_output].strip, parameter[]].split, parameter[constant[
]]]
variable[library_lines] assign[=] <ast.ListComp object at 0x7da18bc716c0>
if compare[call[name[len], parameter[name[library_lines]]] not_equal[!=] constant[1]] begin[:]
variable[msg] assign[=] call[name[GFORTRAN_MISSING_LIBS].format, parameter[name[cmd_output]]]
call[name[print], parameter[name[msg]]]
return[name[library_dirs]]
variable[library_line] assign[=] call[name[library_lines]][constant[0]]
variable[accepted] assign[=] call[name[set], parameter[name[library_dirs]]]
for taget[name[part]] in starred[call[name[library_line].split, parameter[name[os].pathsep]]] begin[:]
variable[full_path] assign[=] call[name[os].path.abspath, parameter[call[name[part].strip, parameter[]]]]
if call[name[os].path.isdir, parameter[name[full_path]]] begin[:]
call[name[accepted].add, parameter[name[full_path]]]
return[call[name[sorted], parameter[name[accepted]]]] | keyword[def] identifier[gfortran_search_path] ( identifier[library_dirs] ):
literal[string]
identifier[cmd] =( literal[string] , literal[string] )
identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] )
identifier[return_code] = identifier[process] . identifier[wait] ()
keyword[if] identifier[return_code] != literal[int] :
keyword[return] identifier[library_dirs]
identifier[cmd_output] = identifier[process] . identifier[stdout] . identifier[read] (). identifier[decode] ( literal[string] )
identifier[search_lines] = identifier[cmd_output] . identifier[strip] (). identifier[split] ( literal[string] )
identifier[library_lines] =[
identifier[line] [ identifier[len] ( identifier[FORTRAN_LIBRARY_PREFIX] ):]
keyword[for] identifier[line] keyword[in] identifier[search_lines]
keyword[if] identifier[line] . identifier[startswith] ( identifier[FORTRAN_LIBRARY_PREFIX] )
]
keyword[if] identifier[len] ( identifier[library_lines] )!= literal[int] :
identifier[msg] = identifier[GFORTRAN_MISSING_LIBS] . identifier[format] ( identifier[cmd_output] )
identifier[print] ( identifier[msg] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return] identifier[library_dirs]
identifier[library_line] = identifier[library_lines] [ literal[int] ]
identifier[accepted] = identifier[set] ( identifier[library_dirs] )
keyword[for] identifier[part] keyword[in] identifier[library_line] . identifier[split] ( identifier[os] . identifier[pathsep] ):
identifier[full_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[part] . identifier[strip] ())
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[full_path] ):
identifier[accepted] . identifier[add] ( identifier[full_path] )
keyword[else] :
identifier[msg] = identifier[GFORTRAN_BAD_PATH] . identifier[format] ( identifier[full_path] )
identifier[print] ( identifier[msg] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return] identifier[sorted] ( identifier[accepted] ) | def gfortran_search_path(library_dirs):
"""Get the library directory paths for ``gfortran``.
Looks for ``libraries: =`` in the output of ``gfortran -print-search-dirs``
and then parses the paths. If this fails for any reason, this method will
print an error and return ``library_dirs``.
Args:
library_dirs (List[str]): Existing library directories.
Returns:
List[str]: The library directories for ``gfortran``.
"""
cmd = ('gfortran', '-print-search-dirs')
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
return_code = process.wait()
# Bail out if the command failed.
if return_code != 0:
return library_dirs # depends on [control=['if'], data=[]]
cmd_output = process.stdout.read().decode('utf-8')
# Find single line starting with ``libraries: ``.
search_lines = cmd_output.strip().split('\n')
library_lines = [line[len(FORTRAN_LIBRARY_PREFIX):] for line in search_lines if line.startswith(FORTRAN_LIBRARY_PREFIX)]
if len(library_lines) != 1:
msg = GFORTRAN_MISSING_LIBS.format(cmd_output)
print(msg, file=sys.stderr)
return library_dirs # depends on [control=['if'], data=[]]
# Go through each library in the ``libraries: = ...`` line.
library_line = library_lines[0]
accepted = set(library_dirs)
for part in library_line.split(os.pathsep):
full_path = os.path.abspath(part.strip())
if os.path.isdir(full_path):
accepted.add(full_path) # depends on [control=['if'], data=[]]
else:
# Ignore anything that isn't a directory.
msg = GFORTRAN_BAD_PATH.format(full_path)
print(msg, file=sys.stderr) # depends on [control=['for'], data=['part']]
return sorted(accepted) |
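
For context, a hedged sketch of the parsing step in isolation. The prefix constant and the sample line are assumptions modeled on typical POSIX `gfortran -print-search-dirs` output:

import os

FORTRAN_LIBRARY_PREFIX = "libraries: ="  # assumed value of the module constant

# One illustrative output line; on POSIX os.pathsep is ':'.
line = "libraries: =/usr/lib/gcc/x86_64-linux-gnu/9:/usr/lib/gcc/x86_64-linux-gnu"
if line.startswith(FORTRAN_LIBRARY_PREFIX):
    parts = line[len(FORTRAN_LIBRARY_PREFIX):].split(os.pathsep)
    candidates = (os.path.abspath(p.strip()) for p in parts)
    # Keep only entries that resolve to real directories, as the function does.
    print(sorted(path for path in candidates if os.path.isdir(path)))
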
def cli(obj, origin, tags, timeout, customer, delete):
"""Send or delete a heartbeat."""
client = obj['client']
if delete:
client.delete_heartbeat(delete)
else:
try:
heartbeat = client.heartbeat(origin=origin, tags=tags, timeout=timeout, customer=customer)
except Exception as e:
click.echo('ERROR: {}'.format(e))
sys.exit(1)
click.echo(heartbeat.id) | def function[cli, parameter[obj, origin, tags, timeout, customer, delete]]:
constant[Send or delete a heartbeat.]
variable[client] assign[=] call[name[obj]][constant[client]]
if name[delete] begin[:]
call[name[client].delete_heartbeat, parameter[name[delete]]] | keyword[def] identifier[cli] ( identifier[obj] , identifier[origin] , identifier[tags] , identifier[timeout] , identifier[customer] , identifier[delete] ):
literal[string]
identifier[client] = identifier[obj] [ literal[string] ]
keyword[if] identifier[delete] :
identifier[client] . identifier[delete_heartbeat] ( identifier[delete] )
keyword[else] :
keyword[try] :
identifier[heartbeat] = identifier[client] . identifier[heartbeat] ( identifier[origin] = identifier[origin] , identifier[tags] = identifier[tags] , identifier[timeout] = identifier[timeout] , identifier[customer] = identifier[customer] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[sys] . identifier[exit] ( literal[int] )
identifier[click] . identifier[echo] ( identifier[heartbeat] . identifier[id] ) | def cli(obj, origin, tags, timeout, customer, delete):
"""Send or delete a heartbeat."""
client = obj['client']
if delete:
client.delete_heartbeat(delete) # depends on [control=['if'], data=[]]
else:
try:
heartbeat = client.heartbeat(origin=origin, tags=tags, timeout=timeout, customer=customer) # depends on [control=['try'], data=[]]
except Exception as e:
click.echo('ERROR: {}'.format(e))
sys.exit(1) # depends on [control=['except'], data=['e']]
click.echo(heartbeat.id) |
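
One way to exercise the command in tests — a hypothetical sketch assuming click is installed and that the option names mirror the parameter names; the stub client only mimics the two methods used above:

from click.testing import CliRunner

class _StubClient:
    # Minimal stand-in for the real API client (method names taken from above).
    def heartbeat(self, origin, tags, timeout, customer):
        return type('Heartbeat', (), {'id': 'hb-123'})()
    def delete_heartbeat(self, heartbeat_id):
        pass

runner = CliRunner()
result = runner.invoke(cli, ['--origin', 'web01', '--timeout', '120'],
                       obj={'client': _StubClient()})
print(result.output)  # the new heartbeat id, e.g. 'hb-123'
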
def withdraw(self, currency, quantity, address, paymentid=None):
"""
Used to withdraw funds from your account
Endpoint:
1.1 /account/withdraw
2.0 /key/balance/withdrawcurrency
    :param currency: String literal for the currency (i.e. BTC)
:type currency: str
:param quantity: The quantity of coins to withdraw
:type quantity: float
:param address: The address where to send the funds.
:type address: str
:param paymentid: Optional argument for memos, tags, or other supplemental information for cryptos such as XRP.
:type paymentid: str
:return:
    :rtype: dict
"""
options = {
'currency': currency,
'quantity': quantity,
'address': address
}
if paymentid:
options['paymentid'] = paymentid
return self._api_query(path_dict={
API_V1_1: '/account/withdraw',
API_V2_0: '/key/balance/withdrawcurrency'
}, options=options, protection=PROTECTION_PRV) | def function[withdraw, parameter[self, currency, quantity, address, paymentid]]:
constant[
Used to withdraw funds from your account
Endpoint:
1.1 /account/withdraw
2.0 /key/balance/withdrawcurrency
:param currency: String literal for the currency (ie. BTC)
:type currency: str
:param quantity: The quantity of coins to withdraw
:type quantity: float
:param address: The address where to send the funds.
:type address: str
:param paymentid: Optional argument for memos, tags, or other supplemental information for cryptos such as XRP.
:type paymentid: str
:return:
:rtype : dict
]
variable[options] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b2890>, <ast.Constant object at 0x7da20e9b0d30>, <ast.Constant object at 0x7da20e9b0040>], [<ast.Name object at 0x7da20e9b0790>, <ast.Name object at 0x7da20e9b14e0>, <ast.Name object at 0x7da20e9b0970>]]
if name[paymentid] begin[:]
call[name[options]][constant[paymentid]] assign[=] name[paymentid]
return[call[name[self]._api_query, parameter[]]] | keyword[def] identifier[withdraw] ( identifier[self] , identifier[currency] , identifier[quantity] , identifier[address] , identifier[paymentid] = keyword[None] ):
literal[string]
identifier[options] ={
literal[string] : identifier[currency] ,
literal[string] : identifier[quantity] ,
literal[string] : identifier[address]
}
keyword[if] identifier[paymentid] :
identifier[options] [ literal[string] ]= identifier[paymentid]
keyword[return] identifier[self] . identifier[_api_query] ( identifier[path_dict] ={
identifier[API_V1_1] : literal[string] ,
identifier[API_V2_0] : literal[string]
}, identifier[options] = identifier[options] , identifier[protection] = identifier[PROTECTION_PRV] ) | def withdraw(self, currency, quantity, address, paymentid=None):
"""
Used to withdraw funds from your account
Endpoint:
1.1 /account/withdraw
2.0 /key/balance/withdrawcurrency
    :param currency: String literal for the currency (i.e. BTC)
:type currency: str
:param quantity: The quantity of coins to withdraw
:type quantity: float
:param address: The address where to send the funds.
:type address: str
:param paymentid: Optional argument for memos, tags, or other supplemental information for cryptos such as XRP.
:type paymentid: str
:return:
    :rtype: dict
"""
options = {'currency': currency, 'quantity': quantity, 'address': address}
if paymentid:
options['paymentid'] = paymentid # depends on [control=['if'], data=[]]
return self._api_query(path_dict={API_V1_1: '/account/withdraw', API_V2_0: '/key/balance/withdrawcurrency'}, options=options, protection=PROTECTION_PRV) |
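
Tracing the payload assembled above by hand for an XRP withdrawal — the address and paymentid values are placeholders, not real destinations:

options = {'currency': 'XRP',
           'quantity': 25.0,
           'address': 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh'}
paymentid = '12345'
if paymentid:
    options['paymentid'] = paymentid
print(options)  # this dict is signed and sent to the withdraw endpoint
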
def restrict_chat_member(self, *args, **kwargs):
"""See :func:`restrict_chat_member`"""
return restrict_chat_member(*args, **self._merge_overrides(**kwargs)).run() | def function[restrict_chat_member, parameter[self]]:
constant[See :func:`restrict_chat_member`]
return[call[call[name[restrict_chat_member], parameter[<ast.Starred object at 0x7da1b0e9c040>]].run, parameter[]]] | keyword[def] identifier[restrict_chat_member] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[restrict_chat_member] (* identifier[args] ,** identifier[self] . identifier[_merge_overrides] (** identifier[kwargs] )). identifier[run] () | def restrict_chat_member(self, *args, **kwargs):
"""See :func:`restrict_chat_member`"""
return restrict_chat_member(*args, **self._merge_overrides(**kwargs)).run() |
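
A runnable sketch of the delegation pattern: instance-level defaults are merged into each call before dispatching to the module-level function. All names below are assumptions for illustration:

class _Chat:
    def __init__(self, **defaults):
        self._defaults = defaults
    def _merge_overrides(self, **kwargs):
        # Call-site kwargs win over the stored defaults.
        merged = dict(self._defaults)
        merged.update(kwargs)
        return merged

chat = _Chat(chat_id=1234)
print(chat._merge_overrides(user_id=42))  # {'chat_id': 1234, 'user_id': 42}
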
def matchingFilePaths(targetfilename, directory, targetFileExtension=None,
selector=None):
"""Search for files in all subfolders of specified directory, return
filepaths of all matching instances.
:param targetfilename: filename to search for, only the string before the
last "." is used for filename matching. Ignored if a selector function
is specified.
:param directory: search directory, including all subdirectories
:param targetFileExtension: string after the last "." in the filename, has
        to be identical if specified. Any "." in targetFileExtension is ignored,
        thus ".txt" is treated the same as "txt".
    :param selector: a function which is called with each candidate filename
        (without its extension) and has to return True (include the file) or
        False (discard it). If no selector is specified, equality to
        targetfilename is used.
:returns: list of matching file paths (str)
"""
targetFilePaths = list()
targetfilename = os.path.splitext(targetfilename)[0]
    matchExtensions = targetFileExtension is not None
    if matchExtensions:
        targetFileExtension = targetFileExtension.replace('.', '')
if selector is None:
selector = functools.partial(operator.eq, targetfilename)
for dirpath, dirnames, filenames in os.walk(directory):
for filename in filenames:
filenameNoextension = os.path.splitext(filename)[0]
if selector(filenameNoextension):
if matchExtensions:
if not filename.endswith('.' + targetFileExtension):
continue
targetFilePaths.append(joinpath(dirpath, filename))
return targetFilePaths | def function[matchingFilePaths, parameter[targetfilename, directory, targetFileExtension, selector]]:
constant[Search for files in all subfolders of specified directory, return
filepaths of all matching instances.
:param targetfilename: filename to search for, only the string before the
last "." is used for filename matching. Ignored if a selector function
is specified.
:param directory: search directory, including all subdirectories
:param targetFileExtension: string after the last "." in the filename, has
to be identical if specified. "." in targetFileExtension are ignored,
thus ".txt" is treated equal to "txt".
:param selector: a function which is called with the value of targetfilename
and has to return True (include value) or False (discard value). If no
selector is specified, equality to targetfilename is used.
:returns: list of matching file paths (str)
]
variable[targetFilePaths] assign[=] call[name[list], parameter[]]
variable[targetfilename] assign[=] call[call[name[os].path.splitext, parameter[name[targetfilename]]]][constant[0]]
variable[targetFileExtension] assign[=] call[name[targetFileExtension].replace, parameter[constant[.], constant[]]]
variable[matchExtensions] assign[=] <ast.IfExp object at 0x7da1b28adf00>
if compare[name[selector] is constant[None]] begin[:]
variable[selector] assign[=] call[name[functools].partial, parameter[name[operator].eq, name[targetfilename]]]
for taget[tuple[[<ast.Name object at 0x7da1b28ae650>, <ast.Name object at 0x7da1b28aeb00>, <ast.Name object at 0x7da1b28ae4a0>]]] in starred[call[name[os].walk, parameter[name[directory]]]] begin[:]
for taget[name[filename]] in starred[name[filenames]] begin[:]
variable[filenameNoextension] assign[=] call[call[name[os].path.splitext, parameter[name[filename]]]][constant[0]]
if call[name[selector], parameter[name[filenameNoextension]]] begin[:]
if name[matchExtensions] begin[:]
if <ast.UnaryOp object at 0x7da1b28ae380> begin[:]
continue
call[name[targetFilePaths].append, parameter[call[name[joinpath], parameter[name[dirpath], name[filename]]]]]
return[name[targetFilePaths]] | keyword[def] identifier[matchingFilePaths] ( identifier[targetfilename] , identifier[directory] , identifier[targetFileExtension] = keyword[None] ,
identifier[selector] = keyword[None] ):
literal[string]
identifier[targetFilePaths] = identifier[list] ()
identifier[targetfilename] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[targetfilename] )[ literal[int] ]
identifier[targetFileExtension] = identifier[targetFileExtension] . identifier[replace] ( literal[string] , literal[string] )
identifier[matchExtensions] = keyword[False] keyword[if] identifier[targetFileExtension] keyword[is] keyword[None] keyword[else] keyword[True]
keyword[if] identifier[selector] keyword[is] keyword[None] :
identifier[selector] = identifier[functools] . identifier[partial] ( identifier[operator] . identifier[eq] , identifier[targetfilename] )
keyword[for] identifier[dirpath] , identifier[dirnames] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( identifier[directory] ):
keyword[for] identifier[filename] keyword[in] identifier[filenames] :
identifier[filenameNoextension] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] )[ literal[int] ]
keyword[if] identifier[selector] ( identifier[filenameNoextension] ):
keyword[if] identifier[matchExtensions] :
keyword[if] keyword[not] identifier[filename] . identifier[endswith] ( literal[string] + identifier[targetFileExtension] ):
keyword[continue]
identifier[targetFilePaths] . identifier[append] ( identifier[joinpath] ( identifier[dirpath] , identifier[filename] ))
keyword[return] identifier[targetFilePaths] | def matchingFilePaths(targetfilename, directory, targetFileExtension=None, selector=None):
"""Search for files in all subfolders of specified directory, return
filepaths of all matching instances.
:param targetfilename: filename to search for, only the string before the
last "." is used for filename matching. Ignored if a selector function
is specified.
:param directory: search directory, including all subdirectories
:param targetFileExtension: string after the last "." in the filename, has
        to be identical if specified. Any "." in targetFileExtension is ignored,
        thus ".txt" is treated the same as "txt".
    :param selector: a function which is called with each candidate filename
        (without its extension) and has to return True (include the file) or
        False (discard it). If no selector is specified, equality to
        targetfilename is used.
:returns: list of matching file paths (str)
"""
targetFilePaths = list()
targetfilename = os.path.splitext(targetfilename)[0]
    matchExtensions = targetFileExtension is not None
    if matchExtensions:
        targetFileExtension = targetFileExtension.replace('.', '') # depends on [control=['if'], data=[]]
if selector is None:
selector = functools.partial(operator.eq, targetfilename) # depends on [control=['if'], data=['selector']]
for (dirpath, dirnames, filenames) in os.walk(directory):
for filename in filenames:
filenameNoextension = os.path.splitext(filename)[0]
if selector(filenameNoextension):
if matchExtensions:
if not filename.endswith('.' + targetFileExtension):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
targetFilePaths.append(joinpath(dirpath, filename)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]]
return targetFilePaths |
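
Hypothetical usage, assuming matchingFilePaths is in scope; both calls walk the tree recursively, and targetFileExtension may be omitted when a selector is given:

reports = matchingFilePaths('report', '/tmp/data', targetFileExtension='txt')
samples = matchingFilePaths('', '/tmp/data',
                            selector=lambda name: name.startswith('sample_'))
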
def parse_data_line(self, sline):
"""
    Parse the data line. If an AS is selected, it can distinguish between data rows and information rows.
    :param sline: a split data line to parse
    :returns: the number of rows to jump before parsing the next data line, or the error code -1
"""
    # if there are fewer values found than headers, it's an error
if len(sline) != len(self._columns):
self.err("One data line has the wrong number of items")
return 0
if self.analysiskey:
        # If an AS is selected, it saves all the columns from the same row under the selected AS.
rawdict = {}
for idx, result in enumerate(sline):
# All data is interpreted as different fields from the same AS
rawdict[self._columns[idx]] = result
rid = rawdict.get('Sample ID No')
if not rid:
self.err("No Sample ID defined",
numline=self._numline)
return -1
rawdict['DefaultResult'] = self.defaultresult \
if self.defaultresult in self._columns \
else self.err("Default Result Key " + self.defaultresult + " not found")
rawdict['DateTime'] = csvDate2BikaDate(rawdict['Analysis Date'], rawdict['Analysis Time'])
self._addRawResult(rid, {self.analysiskey: rawdict}, False)
else:
        # If no AS is selected, it saves all data under the same analysed sample
        # (Sample ID No) and ignores the less important fields of the line.
headerdict = {}
datadict = {}
for idx, result in enumerate(sline):
if self._columns[idx] not in self._excludedcolumns:
datadict[self._columns[idx]] = {'Result': result, 'DefaultData': 'Result'}
else:
headerdict[self._columns[idx]] = result
rid = headerdict['Sample ID No']
datadict['DateTime'] = csvDate2BikaDate(headerdict['Analysis Date'], headerdict['Analysis Time'])
self._addRawResult(rid, datadict, False)
self._header = headerdict
return 0 | def function[parse_data_line, parameter[self, sline]]:
constant[
Parse the data line. If an AS was selected it can distinguish between data rows and information rows.
:param sline: a split data line to parse
:returns: the number of rows to jump and parse the next data line or return the code error -1
]
if compare[call[name[len], parameter[name[sline]]] not_equal[!=] call[name[len], parameter[name[self]._columns]]] begin[:]
call[name[self].err, parameter[constant[One data line has the wrong number of items]]]
return[constant[0]]
if name[self].analysiskey begin[:]
variable[rawdict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1d3aa10>, <ast.Name object at 0x7da1b1d39750>]]] in starred[call[name[enumerate], parameter[name[sline]]]] begin[:]
call[name[rawdict]][call[name[self]._columns][name[idx]]] assign[=] name[result]
variable[rid] assign[=] call[name[rawdict].get, parameter[constant[Sample ID No]]]
if <ast.UnaryOp object at 0x7da1b1d38490> begin[:]
call[name[self].err, parameter[constant[No Sample ID defined]]]
return[<ast.UnaryOp object at 0x7da1b231d8d0>]
call[name[rawdict]][constant[DefaultResult]] assign[=] <ast.IfExp object at 0x7da1b231f280>
call[name[rawdict]][constant[DateTime]] assign[=] call[name[csvDate2BikaDate], parameter[call[name[rawdict]][constant[Analysis Date]], call[name[rawdict]][constant[Analysis Time]]]]
call[name[self]._addRawResult, parameter[name[rid], dictionary[[<ast.Attribute object at 0x7da1b231d060>], [<ast.Name object at 0x7da1b231d810>]], constant[False]]] | keyword[def] identifier[parse_data_line] ( identifier[self] , identifier[sline] ):
literal[string]
keyword[if] identifier[len] ( identifier[sline] )!= identifier[len] ( identifier[self] . identifier[_columns] ):
identifier[self] . identifier[err] ( literal[string] )
keyword[return] literal[int]
keyword[if] identifier[self] . identifier[analysiskey] :
identifier[rawdict] ={}
keyword[for] identifier[idx] , identifier[result] keyword[in] identifier[enumerate] ( identifier[sline] ):
identifier[rawdict] [ identifier[self] . identifier[_columns] [ identifier[idx] ]]= identifier[result]
identifier[rid] = identifier[rawdict] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[rid] :
identifier[self] . identifier[err] ( literal[string] ,
identifier[numline] = identifier[self] . identifier[_numline] )
keyword[return] - literal[int]
identifier[rawdict] [ literal[string] ]= identifier[self] . identifier[defaultresult] keyword[if] identifier[self] . identifier[defaultresult] keyword[in] identifier[self] . identifier[_columns] keyword[else] identifier[self] . identifier[err] ( literal[string] + identifier[self] . identifier[defaultresult] + literal[string] )
identifier[rawdict] [ literal[string] ]= identifier[csvDate2BikaDate] ( identifier[rawdict] [ literal[string] ], identifier[rawdict] [ literal[string] ])
identifier[self] . identifier[_addRawResult] ( identifier[rid] ,{ identifier[self] . identifier[analysiskey] : identifier[rawdict] }, keyword[False] )
keyword[else] :
identifier[headerdict] ={}
identifier[datadict] ={}
keyword[for] identifier[idx] , identifier[result] keyword[in] identifier[enumerate] ( identifier[sline] ):
keyword[if] identifier[self] . identifier[_columns] [ identifier[idx] ] keyword[not] keyword[in] identifier[self] . identifier[_excludedcolumns] :
identifier[datadict] [ identifier[self] . identifier[_columns] [ identifier[idx] ]]={ literal[string] : identifier[result] , literal[string] : literal[string] }
keyword[else] :
identifier[headerdict] [ identifier[self] . identifier[_columns] [ identifier[idx] ]]= identifier[result]
identifier[rid] = identifier[headerdict] [ literal[string] ]
identifier[datadict] [ literal[string] ]= identifier[csvDate2BikaDate] ( identifier[headerdict] [ literal[string] ], identifier[headerdict] [ literal[string] ])
identifier[self] . identifier[_addRawResult] ( identifier[rid] , identifier[datadict] , keyword[False] )
identifier[self] . identifier[_header] = identifier[headerdict]
keyword[return] literal[int] | def parse_data_line(self, sline):
"""
    Parse the data line. If an AS is selected, it can distinguish between data rows and information rows.
    :param sline: a split data line to parse
    :returns: the number of rows to jump before parsing the next data line, or the error code -1
"""
    # if there are fewer values found than headers, it's an error
if len(sline) != len(self._columns):
self.err('One data line has the wrong number of items')
return 0 # depends on [control=['if'], data=[]]
if self.analysiskey:
        # If an AS is selected, it saves all the columns from the same row under the selected AS.
rawdict = {}
for (idx, result) in enumerate(sline):
# All data is interpreted as different fields from the same AS
rawdict[self._columns[idx]] = result # depends on [control=['for'], data=[]]
rid = rawdict.get('Sample ID No')
if not rid:
self.err('No Sample ID defined', numline=self._numline)
return -1 # depends on [control=['if'], data=[]]
rawdict['DefaultResult'] = self.defaultresult if self.defaultresult in self._columns else self.err('Default Result Key ' + self.defaultresult + ' not found')
rawdict['DateTime'] = csvDate2BikaDate(rawdict['Analysis Date'], rawdict['Analysis Time'])
self._addRawResult(rid, {self.analysiskey: rawdict}, False) # depends on [control=['if'], data=[]]
else:
        # If no AS is selected, it saves all data under the same analysed sample
        # (Sample ID No) and ignores the less important fields of the line.
headerdict = {}
datadict = {}
for (idx, result) in enumerate(sline):
if self._columns[idx] not in self._excludedcolumns:
datadict[self._columns[idx]] = {'Result': result, 'DefaultData': 'Result'} # depends on [control=['if'], data=[]]
else:
headerdict[self._columns[idx]] = result # depends on [control=['for'], data=[]]
rid = headerdict['Sample ID No']
datadict['DateTime'] = csvDate2BikaDate(headerdict['Analysis Date'], headerdict['Analysis Time'])
self._addRawResult(rid, datadict, False)
self._header = headerdict
return 0 |
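
A sketch of the two result shapes, with made-up column names and values:

columns = ['Sample ID No', 'Analysis Date', 'Analysis Time', 'Glucose']
sline = ['S-001', '2019-01-15', '10:30:00', '5.2']
rawdict = dict(zip(columns, sline))
print(rawdict['Sample ID No'])  # 'S-001' keys the raw result
# With an AS selected, the whole row is stored under that AS:
#   {analysiskey: {'Sample ID No': 'S-001', ..., 'DefaultResult': ..., 'DateTime': ...}}
# Without one, each non-excluded column becomes its own result entry:
#   {'Glucose': {'Result': '5.2', 'DefaultData': 'Result'}, 'DateTime': ...}
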
def sar(self, count):
"""Performs an arithmetic right-shift of a BinWord by the given number
of bits. Bits shifted out of the word are lost. The word is
filled on the left with copies of the top bit.
The shift count can be an arbitrary non-negative number, including
counts larger than the word (a word filled with copies of the sign bit
is returned in this case).
"""
count = operator.index(count)
if count < 0:
raise ValueError('negative shift')
if count > self._width:
count = self._width
return BinWord(self._width, self.to_sint() >> count, trunc=True) | def function[sar, parameter[self, count]]:
constant[Performs an arithmetic right-shift of a BinWord by the given number
of bits. Bits shifted out of the word are lost. The word is
filled on the left with copies of the top bit.
The shift count can be an arbitrary non-negative number, including
counts larger than the word (a word filled with copies of the sign bit
is returned in this case).
]
variable[count] assign[=] call[name[operator].index, parameter[name[count]]]
if compare[name[count] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da18fe90670>
if compare[name[count] greater[>] name[self]._width] begin[:]
variable[count] assign[=] name[self]._width
return[call[name[BinWord], parameter[name[self]._width, binary_operation[call[name[self].to_sint, parameter[]] <ast.RShift object at 0x7da2590d6a40> name[count]]]]] | keyword[def] identifier[sar] ( identifier[self] , identifier[count] ):
literal[string]
identifier[count] = identifier[operator] . identifier[index] ( identifier[count] )
keyword[if] identifier[count] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[count] > identifier[self] . identifier[_width] :
identifier[count] = identifier[self] . identifier[_width]
keyword[return] identifier[BinWord] ( identifier[self] . identifier[_width] , identifier[self] . identifier[to_sint] ()>> identifier[count] , identifier[trunc] = keyword[True] ) | def sar(self, count):
"""Performs an arithmetic right-shift of a BinWord by the given number
of bits. Bits shifted out of the word are lost. The word is
filled on the left with copies of the top bit.
The shift count can be an arbitrary non-negative number, including
counts larger than the word (a word filled with copies of the sign bit
is returned in this case).
"""
count = operator.index(count)
if count < 0:
raise ValueError('negative shift') # depends on [control=['if'], data=[]]
if count > self._width:
count = self._width # depends on [control=['if'], data=['count']]
return BinWord(self._width, self.to_sint() >> count, trunc=True) |
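
A worked example, assuming BinWord(width, value) construction and a to_uint() accessor (the accessor name is an assumption):

w = BinWord(4, 0b1100)                 # -4 as a signed 4-bit word
assert w.sar(1).to_uint() == 0b1110    # -4 >> 1 == -2; the sign bit is replicated
assert w.sar(99).to_uint() == 0b1111   # count clamps to the width: all sign bits
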
def error_response(self, e):
"""Make response for an IIIFError e.
Also add compliance header.
"""
self.add_compliance_header()
return self.make_response(*e.image_server_response(self.api_version)) | def function[error_response, parameter[self, e]]:
constant[Make response for an IIIFError e.
Also add compliance header.
]
call[name[self].add_compliance_header, parameter[]]
return[call[name[self].make_response, parameter[<ast.Starred object at 0x7da1b04752a0>]]] | keyword[def] identifier[error_response] ( identifier[self] , identifier[e] ):
literal[string]
identifier[self] . identifier[add_compliance_header] ()
keyword[return] identifier[self] . identifier[make_response] (* identifier[e] . identifier[image_server_response] ( identifier[self] . identifier[api_version] )) | def error_response(self, e):
"""Make response for an IIIFError e.
Also add compliance header.
"""
self.add_compliance_header()
return self.make_response(*e.image_server_response(self.api_version)) |
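
The splat pattern in isolation: whatever tuple the error object builds is expanded into make_response's positional arguments. The tuple shape below is an assumption for illustration:

def make_response(body, status, headers):
    # Local stand-in for the framework's make_response.
    return body, status, headers

server_response = ('<error>bad region</error>', 400, {'Content-Type': 'text/xml'})
print(make_response(*server_response))
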
def update(self, other):
"""
Update the collection with items from *other*. Accepts other
:class:`GeoDB` instances, dictionaries mapping places to
``{'latitude': latitude, 'longitude': longitude}`` dicts,
or sequences of ``(place, latitude, longitude)`` tuples.
"""
# other is another Sorted Set
def update_sortedset_trans(pipe):
items = other._data(pipe=pipe) if use_redis else other._data()
pipe.multi()
for member, score in items:
pipe.zadd(self.key, {self._pickle(member): float(score)})
# other is dict-like
def update_mapping_trans(pipe):
items = other.items(pipe=pipe) if use_redis else other.items()
pipe.multi()
for place, value in items:
self.set_location(
place, value['latitude'], value['longitude'], pipe=pipe
)
# other is a list of tuples
def update_tuples_trans(pipe):
items = (
other.__iter__(pipe=pipe) if use_redis else other.__iter__()
)
pipe.multi()
for place, latitude, longitude in items:
self.set_location(place, latitude, longitude, pipe=pipe)
watches = []
if self._same_redis(other, RedisCollection):
use_redis = True
watches.append(other.key)
else:
use_redis = False
if isinstance(other, SortedSetBase):
func = update_sortedset_trans
elif hasattr(other, 'items'):
func = update_mapping_trans
elif hasattr(other, '__iter__'):
func = update_tuples_trans
self._transaction(func, *watches) | def function[update, parameter[self, other]]:
constant[
Update the collection with items from *other*. Accepts other
:class:`GeoDB` instances, dictionaries mapping places to
``{'latitude': latitude, 'longitude': longitude}`` dicts,
or sequences of ``(place, latitude, longitude)`` tuples.
]
def function[update_sortedset_trans, parameter[pipe]]:
variable[items] assign[=] <ast.IfExp object at 0x7da20c6c7190>
call[name[pipe].multi, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6c64d0>, <ast.Name object at 0x7da20c6c7b50>]]] in starred[name[items]] begin[:]
call[name[pipe].zadd, parameter[name[self].key, dictionary[[<ast.Call object at 0x7da20c6c47c0>], [<ast.Call object at 0x7da20c6c4b20>]]]]
def function[update_mapping_trans, parameter[pipe]]:
variable[items] assign[=] <ast.IfExp object at 0x7da18f811120>
call[name[pipe].multi, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f812e00>, <ast.Name object at 0x7da18f810c70>]]] in starred[name[items]] begin[:]
call[name[self].set_location, parameter[name[place], call[name[value]][constant[latitude]], call[name[value]][constant[longitude]]]]
def function[update_tuples_trans, parameter[pipe]]:
variable[items] assign[=] <ast.IfExp object at 0x7da20c6c44f0>
call[name[pipe].multi, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6c79d0>, <ast.Name object at 0x7da20c6c5d80>, <ast.Name object at 0x7da20c6c5060>]]] in starred[name[items]] begin[:]
call[name[self].set_location, parameter[name[place], name[latitude], name[longitude]]]
variable[watches] assign[=] list[[]]
if call[name[self]._same_redis, parameter[name[other], name[RedisCollection]]] begin[:]
variable[use_redis] assign[=] constant[True]
call[name[watches].append, parameter[name[other].key]]
if call[name[isinstance], parameter[name[other], name[SortedSetBase]]] begin[:]
variable[func] assign[=] name[update_sortedset_trans]
call[name[self]._transaction, parameter[name[func], <ast.Starred object at 0x7da20c6c5780>]] | keyword[def] identifier[update] ( identifier[self] , identifier[other] ):
literal[string]
keyword[def] identifier[update_sortedset_trans] ( identifier[pipe] ):
identifier[items] = identifier[other] . identifier[_data] ( identifier[pipe] = identifier[pipe] ) keyword[if] identifier[use_redis] keyword[else] identifier[other] . identifier[_data] ()
identifier[pipe] . identifier[multi] ()
keyword[for] identifier[member] , identifier[score] keyword[in] identifier[items] :
identifier[pipe] . identifier[zadd] ( identifier[self] . identifier[key] ,{ identifier[self] . identifier[_pickle] ( identifier[member] ): identifier[float] ( identifier[score] )})
keyword[def] identifier[update_mapping_trans] ( identifier[pipe] ):
identifier[items] = identifier[other] . identifier[items] ( identifier[pipe] = identifier[pipe] ) keyword[if] identifier[use_redis] keyword[else] identifier[other] . identifier[items] ()
identifier[pipe] . identifier[multi] ()
keyword[for] identifier[place] , identifier[value] keyword[in] identifier[items] :
identifier[self] . identifier[set_location] (
identifier[place] , identifier[value] [ literal[string] ], identifier[value] [ literal[string] ], identifier[pipe] = identifier[pipe]
)
keyword[def] identifier[update_tuples_trans] ( identifier[pipe] ):
identifier[items] =(
identifier[other] . identifier[__iter__] ( identifier[pipe] = identifier[pipe] ) keyword[if] identifier[use_redis] keyword[else] identifier[other] . identifier[__iter__] ()
)
identifier[pipe] . identifier[multi] ()
keyword[for] identifier[place] , identifier[latitude] , identifier[longitude] keyword[in] identifier[items] :
identifier[self] . identifier[set_location] ( identifier[place] , identifier[latitude] , identifier[longitude] , identifier[pipe] = identifier[pipe] )
identifier[watches] =[]
keyword[if] identifier[self] . identifier[_same_redis] ( identifier[other] , identifier[RedisCollection] ):
identifier[use_redis] = keyword[True]
identifier[watches] . identifier[append] ( identifier[other] . identifier[key] )
keyword[else] :
identifier[use_redis] = keyword[False]
keyword[if] identifier[isinstance] ( identifier[other] , identifier[SortedSetBase] ):
identifier[func] = identifier[update_sortedset_trans]
keyword[elif] identifier[hasattr] ( identifier[other] , literal[string] ):
identifier[func] = identifier[update_mapping_trans]
keyword[elif] identifier[hasattr] ( identifier[other] , literal[string] ):
identifier[func] = identifier[update_tuples_trans]
identifier[self] . identifier[_transaction] ( identifier[func] ,* identifier[watches] ) | def update(self, other):
"""
Update the collection with items from *other*. Accepts other
:class:`GeoDB` instances, dictionaries mapping places to
``{'latitude': latitude, 'longitude': longitude}`` dicts,
or sequences of ``(place, latitude, longitude)`` tuples.
"""
# other is another Sorted Set
def update_sortedset_trans(pipe):
items = other._data(pipe=pipe) if use_redis else other._data()
pipe.multi()
for (member, score) in items:
pipe.zadd(self.key, {self._pickle(member): float(score)}) # depends on [control=['for'], data=[]]
# other is dict-like
def update_mapping_trans(pipe):
items = other.items(pipe=pipe) if use_redis else other.items()
pipe.multi()
for (place, value) in items:
self.set_location(place, value['latitude'], value['longitude'], pipe=pipe) # depends on [control=['for'], data=[]]
# other is a list of tuples
def update_tuples_trans(pipe):
items = other.__iter__(pipe=pipe) if use_redis else other.__iter__()
pipe.multi()
for (place, latitude, longitude) in items:
self.set_location(place, latitude, longitude, pipe=pipe) # depends on [control=['for'], data=[]]
watches = []
if self._same_redis(other, RedisCollection):
use_redis = True
watches.append(other.key) # depends on [control=['if'], data=[]]
else:
use_redis = False
if isinstance(other, SortedSetBase):
func = update_sortedset_trans # depends on [control=['if'], data=[]]
elif hasattr(other, 'items'):
func = update_mapping_trans # depends on [control=['if'], data=[]]
elif hasattr(other, '__iter__'):
func = update_tuples_trans # depends on [control=['if'], data=[]]
self._transaction(func, *watches) |
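
Hypothetical usage of the three accepted shapes; `geodb` and `other_geodb` are assumed to be existing GeoDB instances backed by a live Redis:

geodb.update({'NYC': {'latitude': 40.71, 'longitude': -74.01}})   # mapping
geodb.update([('SF', 37.77, -122.42), ('LA', 34.05, -118.24)])    # tuples
geodb.update(other_geodb)                                         # another GeoDB
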
def _int_to_key(keys, index):
'Convert int ``index`` to the corresponding key in ``keys``'
if isinstance(index, int):
try:
return keys[index]
except IndexError:
# use KeyError rather than IndexError for compatibility
raise KeyError('Index out of range of keys: %s' % (index,))
return index | def function[_int_to_key, parameter[keys, index]]:
constant[Convert int ``index`` to the corresponding key in ``keys``]
if call[name[isinstance], parameter[name[index], name[int]]] begin[:]
<ast.Try object at 0x7da18fe91840>
return[name[index]] | keyword[def] identifier[_int_to_key] ( identifier[keys] , identifier[index] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[index] , identifier[int] ):
keyword[try] :
keyword[return] identifier[keys] [ identifier[index] ]
keyword[except] identifier[IndexError] :
keyword[raise] identifier[KeyError] ( literal[string] %( identifier[index] ,))
keyword[return] identifier[index] | def _int_to_key(keys, index):
"""Convert int ``index`` to the corresponding key in ``keys``"""
if isinstance(index, int):
try:
return keys[index] # depends on [control=['try'], data=[]]
except IndexError:
# use KeyError rather than IndexError for compatibility
raise KeyError('Index out of range of keys: %s' % (index,)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return index |
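
Behavior in the three cases, runnable as-is once _int_to_key is in scope:

keys = ['alpha', 'beta', 'gamma']
assert _int_to_key(keys, 1) == 'beta'       # int -> positional key
assert _int_to_key(keys, 'beta') == 'beta'  # non-int passes through unchanged
try:
    _int_to_key(keys, 7)
except KeyError as exc:                     # IndexError is re-raised as KeyError
    print(exc)
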
async def _process_message(self, message):
"""
Adds the given message to the list of messages that must be
    acknowledged and dispatches control to the appropriate ``_handle_*``
    method based on its type.
"""
self._pending_ack.add(message.msg_id)
handler = self._handlers.get(message.obj.CONSTRUCTOR_ID,
self._handle_update)
await handler(message) | <ast.AsyncFunctionDef object at 0x7da1b21dbac0> | keyword[async] keyword[def] identifier[_process_message] ( identifier[self] , identifier[message] ):
literal[string]
identifier[self] . identifier[_pending_ack] . identifier[add] ( identifier[message] . identifier[msg_id] )
identifier[handler] = identifier[self] . identifier[_handlers] . identifier[get] ( identifier[message] . identifier[obj] . identifier[CONSTRUCTOR_ID] ,
identifier[self] . identifier[_handle_update] )
keyword[await] identifier[handler] ( identifier[message] ) | async def _process_message(self, message):
"""
Adds the given message to the list of messages that must be
    acknowledged and dispatches control to the appropriate ``_handle_*``
    method based on its type.
"""
self._pending_ack.add(message.msg_id)
handler = self._handlers.get(message.obj.CONSTRUCTOR_ID, self._handle_update)
await handler(message) |
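
A minimal, runnable stand-in for the constructor-id dispatch; the handler names and the id value are made up for illustration:

import asyncio

async def _handle_ack(msg):       # hypothetical specific handler
    print('ack:', msg)

async def _handle_update(msg):    # default handler, as in the method above
    print('update:', msg)

HANDLERS = {0x62d6b459: _handle_ack}

async def process(msg, constructor_id):
    handler = HANDLERS.get(constructor_id, _handle_update)
    await handler(msg)

asyncio.run(process('pong', 0x12345678))  # unknown id falls back to _handle_update
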
def update(self, request, *args, **kwargs):
"""Update an entity.
    The original queryset produces a temporary database table whose rows
    cannot be selected for an update. As a workaround, we patch the
    get_queryset function to return only Entity objects, without the
    additional data that is not needed for the update.
"""
orig_get_queryset = self.get_queryset
def patched_get_queryset():
"""Patched get_queryset method."""
entity_ids = orig_get_queryset().values_list('id', flat=True)
return Entity.objects.filter(id__in=entity_ids)
self.get_queryset = patched_get_queryset
resp = super().update(request, *args, **kwargs)
self.get_queryset = orig_get_queryset
return resp | def function[update, parameter[self, request]]:
constant[Update an entity.
Original queryset produces a temporary database table whose rows
cannot be selected for an update. As a workaround, we patch
get_queryset function to return only Entity objects without
additional data that is not needed for the update.
]
variable[orig_get_queryset] assign[=] name[self].get_queryset
def function[patched_get_queryset, parameter[]]:
constant[Patched get_queryset method.]
variable[entity_ids] assign[=] call[call[name[orig_get_queryset], parameter[]].values_list, parameter[constant[id]]]
return[call[name[Entity].objects.filter, parameter[]]]
name[self].get_queryset assign[=] name[patched_get_queryset]
variable[resp] assign[=] call[call[name[super], parameter[]].update, parameter[name[request], <ast.Starred object at 0x7da1b1a9ce50>]]
name[self].get_queryset assign[=] name[orig_get_queryset]
return[name[resp]] | keyword[def] identifier[update] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[orig_get_queryset] = identifier[self] . identifier[get_queryset]
keyword[def] identifier[patched_get_queryset] ():
literal[string]
identifier[entity_ids] = identifier[orig_get_queryset] (). identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )
keyword[return] identifier[Entity] . identifier[objects] . identifier[filter] ( identifier[id__in] = identifier[entity_ids] )
identifier[self] . identifier[get_queryset] = identifier[patched_get_queryset]
identifier[resp] = identifier[super] (). identifier[update] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[get_queryset] = identifier[orig_get_queryset]
keyword[return] identifier[resp] | def update(self, request, *args, **kwargs):
"""Update an entity.
    The original queryset produces a temporary database table whose rows
    cannot be selected for an update. As a workaround, we patch the
    get_queryset function to return only Entity objects, without the
    additional data that is not needed for the update.
"""
orig_get_queryset = self.get_queryset
def patched_get_queryset():
"""Patched get_queryset method."""
entity_ids = orig_get_queryset().values_list('id', flat=True)
return Entity.objects.filter(id__in=entity_ids)
self.get_queryset = patched_get_queryset
resp = super().update(request, *args, **kwargs)
self.get_queryset = orig_get_queryset
return resp |
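
The temporary-override pattern used above, reduced to a runnable toy: an instance attribute shadows the method for the duration of the call and is restored afterwards:

class Box:
    def source(self):
        return [1, 2, 3]
    def total(self):
        return sum(self.source())

box = Box()
orig = box.source
box.source = lambda: [v for v in orig() if v > 1]  # instance-level override
print(box.total())  # 5, computed through the patched source
box.source = orig   # restore, as update() does
print(box.total())  # 6
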
def makefile(self, mode="r", buffering=None, **_3to2kwargs):
"""makefile(...) -> an I/O stream connected to the socket
The arguments are as for io.open() after the filename,
except the only mode characters supported are 'r', 'w' and 'b'.
The semantics are similar too. (XXX refactor to share code?)
"""
if 'newline' in _3to2kwargs: newline = _3to2kwargs['newline']; del _3to2kwargs['newline']
else: newline = None
if 'errors' in _3to2kwargs: errors = _3to2kwargs['errors']; del _3to2kwargs['errors']
else: errors = None
if 'encoding' in _3to2kwargs: encoding = _3to2kwargs['encoding']; del _3to2kwargs['encoding']
else: encoding = None
for c in mode:
if c not in ("r", "w", "b"):
raise ValueError("invalid mode %r (only r, w, b allowed)")
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text | def function[makefile, parameter[self, mode, buffering]]:
constant[makefile(...) -> an I/O stream connected to the socket
The arguments are as for io.open() after the filename,
except the only mode characters supported are 'r', 'w' and 'b'.
The semantics are similar too. (XXX refactor to share code?)
]
if compare[constant[newline] in name[_3to2kwargs]] begin[:]
variable[newline] assign[=] call[name[_3to2kwargs]][constant[newline]]
<ast.Delete object at 0x7da18f58e020>
if compare[constant[errors] in name[_3to2kwargs]] begin[:]
variable[errors] assign[=] call[name[_3to2kwargs]][constant[errors]]
<ast.Delete object at 0x7da18f58f070>
if compare[constant[encoding] in name[_3to2kwargs]] begin[:]
variable[encoding] assign[=] call[name[_3to2kwargs]][constant[encoding]]
<ast.Delete object at 0x7da18f58d870>
for taget[name[c]] in starred[name[mode]] begin[:]
if compare[name[c] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2044c07c0>, <ast.Constant object at 0x7da2044c3dc0>, <ast.Constant object at 0x7da2044c0070>]]] begin[:]
<ast.Raise object at 0x7da2044c0eb0>
variable[writing] assign[=] compare[constant[w] in name[mode]]
variable[reading] assign[=] <ast.BoolOp object at 0x7da2044c27d0>
assert[<ast.BoolOp object at 0x7da2044c1c00>]
variable[binary] assign[=] compare[constant[b] in name[mode]]
variable[rawmode] assign[=] constant[]
if name[reading] begin[:]
<ast.AugAssign object at 0x7da2044c1090>
if name[writing] begin[:]
<ast.AugAssign object at 0x7da2044c0a00>
variable[raw] assign[=] call[name[SocketIO], parameter[name[self], name[rawmode]]]
<ast.AugAssign object at 0x7da2044c2950>
if compare[name[buffering] is constant[None]] begin[:]
variable[buffering] assign[=] <ast.UnaryOp object at 0x7da2044c1a20>
if compare[name[buffering] less[<] constant[0]] begin[:]
variable[buffering] assign[=] name[io].DEFAULT_BUFFER_SIZE
if compare[name[buffering] equal[==] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da2044c13c0> begin[:]
<ast.Raise object at 0x7da2044c2e30>
return[name[raw]]
if <ast.BoolOp object at 0x7da2044c3370> begin[:]
variable[buffer] assign[=] call[name[io].BufferedRWPair, parameter[name[raw], name[raw], name[buffering]]]
if name[binary] begin[:]
return[name[buffer]]
variable[text] assign[=] call[name[io].TextIOWrapper, parameter[name[buffer], name[encoding], name[errors], name[newline]]]
name[text].mode assign[=] name[mode]
return[name[text]] | keyword[def] identifier[makefile] ( identifier[self] , identifier[mode] = literal[string] , identifier[buffering] = keyword[None] ,** identifier[_3to2kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[_3to2kwargs] : identifier[newline] = identifier[_3to2kwargs] [ literal[string] ]; keyword[del] identifier[_3to2kwargs] [ literal[string] ]
keyword[else] : identifier[newline] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[_3to2kwargs] : identifier[errors] = identifier[_3to2kwargs] [ literal[string] ]; keyword[del] identifier[_3to2kwargs] [ literal[string] ]
keyword[else] : identifier[errors] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[_3to2kwargs] : identifier[encoding] = identifier[_3to2kwargs] [ literal[string] ]; keyword[del] identifier[_3to2kwargs] [ literal[string] ]
keyword[else] : identifier[encoding] = keyword[None]
keyword[for] identifier[c] keyword[in] identifier[mode] :
keyword[if] identifier[c] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[mode] )
identifier[writing] = literal[string] keyword[in] identifier[mode]
identifier[reading] = literal[string] keyword[in] identifier[mode] keyword[or] keyword[not] identifier[writing]
keyword[assert] identifier[reading] keyword[or] identifier[writing]
identifier[binary] = literal[string] keyword[in] identifier[mode]
identifier[rawmode] = literal[string]
keyword[if] identifier[reading] :
identifier[rawmode] += literal[string]
keyword[if] identifier[writing] :
identifier[rawmode] += literal[string]
identifier[raw] = identifier[SocketIO] ( identifier[self] , identifier[rawmode] )
identifier[self] . identifier[_io_refs] += literal[int]
keyword[if] identifier[buffering] keyword[is] keyword[None] :
identifier[buffering] =- literal[int]
keyword[if] identifier[buffering] < literal[int] :
identifier[buffering] = identifier[io] . identifier[DEFAULT_BUFFER_SIZE]
keyword[if] identifier[buffering] == literal[int] :
keyword[if] keyword[not] identifier[binary] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[raw]
keyword[if] identifier[reading] keyword[and] identifier[writing] :
identifier[buffer] = identifier[io] . identifier[BufferedRWPair] ( identifier[raw] , identifier[raw] , identifier[buffering] )
keyword[elif] identifier[reading] :
identifier[buffer] = identifier[io] . identifier[BufferedReader] ( identifier[raw] , identifier[buffering] )
keyword[else] :
keyword[assert] identifier[writing]
identifier[buffer] = identifier[io] . identifier[BufferedWriter] ( identifier[raw] , identifier[buffering] )
keyword[if] identifier[binary] :
keyword[return] identifier[buffer]
identifier[text] = identifier[io] . identifier[TextIOWrapper] ( identifier[buffer] , identifier[encoding] , identifier[errors] , identifier[newline] )
identifier[text] . identifier[mode] = identifier[mode]
keyword[return] identifier[text] | def makefile(self, mode='r', buffering=None, **_3to2kwargs):
"""makefile(...) -> an I/O stream connected to the socket
The arguments are as for io.open() after the filename,
except the only mode characters supported are 'r', 'w' and 'b'.
The semantics are similar too. (XXX refactor to share code?)
"""
if 'newline' in _3to2kwargs:
newline = _3to2kwargs['newline']
del _3to2kwargs['newline'] # depends on [control=['if'], data=['_3to2kwargs']]
else:
newline = None
if 'errors' in _3to2kwargs:
errors = _3to2kwargs['errors']
del _3to2kwargs['errors'] # depends on [control=['if'], data=['_3to2kwargs']]
else:
errors = None
if 'encoding' in _3to2kwargs:
encoding = _3to2kwargs['encoding']
del _3to2kwargs['encoding'] # depends on [control=['if'], data=['_3to2kwargs']]
else:
encoding = None
for c in mode:
if c not in ('r', 'w', 'b'):
            raise ValueError('invalid mode %r (only r, w, b allowed)' % mode) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']]
writing = 'w' in mode
reading = 'r' in mode or not writing
assert reading or writing
binary = 'b' in mode
rawmode = ''
if reading:
rawmode += 'r' # depends on [control=['if'], data=[]]
if writing:
rawmode += 'w' # depends on [control=['if'], data=[]]
raw = SocketIO(self, rawmode)
self._io_refs += 1
if buffering is None:
buffering = -1 # depends on [control=['if'], data=['buffering']]
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE # depends on [control=['if'], data=['buffering']]
if buffering == 0:
if not binary:
raise ValueError('unbuffered streams must be binary') # depends on [control=['if'], data=[]]
return raw # depends on [control=['if'], data=[]]
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering) # depends on [control=['if'], data=[]]
elif reading:
buffer = io.BufferedReader(raw, buffering) # depends on [control=['if'], data=[]]
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer # depends on [control=['if'], data=[]]
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text |
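A minimal usage sketch of the makefile() wrapper above. The host, port, and request bytes are illustrative placeholders, not part of the source; any line-oriented TCP service works the same way. It wraps a connected socket in buffered binary streams and reads one reply line back.

import socket

# Hypothetical endpoint, assumed for illustration only.
sock = socket.create_connection(("example.com", 80))
rfile = sock.makefile("rb")   # io.BufferedReader over the raw socket
wfile = sock.makefile("wb")   # io.BufferedWriter over the raw socket
wfile.write(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
wfile.flush()                 # push the buffered bytes onto the wire
print(rfile.readline())       # first status line, e.g. b"HTTP/1.0 200 OK\r\n"
rfile.close()
wfile.close()
sock.close()                  # the socket fully closes once all file refs are gone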
def text_alignment(x, y):
"""
Align text labels based on the x- and y-axis coordinate values.
This function is used for computing the appropriate alignment of the text
label.
For example, if the text is on the "right" side of the plot, we want it to
be left-aligned. If the text is on the "top" side of the plot, we want it
to be bottom-aligned.
:param x, y: (`int` or `float`) x- and y-axis coordinate respectively.
:returns: A 2-tuple of strings, the horizontal and vertical alignments
respectively.
"""
if x == 0:
ha = "center"
elif x > 0:
ha = "left"
else:
ha = "right"
if y == 0:
va = "center"
elif y > 0:
va = "bottom"
else:
va = "top"
return ha, va | def function[text_alignment, parameter[x, y]]:
constant[
Align text labels based on the x- and y-axis coordinate values.
This function is used for computing the appropriate alignment of the text
label.
For example, if the text is on the "right" side of the plot, we want it to
be left-aligned. If the text is on the "top" side of the plot, we want it
to be bottom-aligned.
:param x, y: (`int` or `float`) x- and y-axis coordinate respectively.
:returns: A 2-tuple of strings, the horizontal and vertical alignments
respectively.
]
if compare[name[x] equal[==] constant[0]] begin[:]
variable[ha] assign[=] constant[center]
if compare[name[y] equal[==] constant[0]] begin[:]
variable[va] assign[=] constant[center]
return[tuple[[<ast.Name object at 0x7da1b1ed5390>, <ast.Name object at 0x7da1b1ed6ec0>]]] | keyword[def] identifier[text_alignment] ( identifier[x] , identifier[y] ):
literal[string]
keyword[if] identifier[x] == literal[int] :
identifier[ha] = literal[string]
keyword[elif] identifier[x] > literal[int] :
identifier[ha] = literal[string]
keyword[else] :
identifier[ha] = literal[string]
keyword[if] identifier[y] == literal[int] :
identifier[va] = literal[string]
keyword[elif] identifier[y] > literal[int] :
identifier[va] = literal[string]
keyword[else] :
identifier[va] = literal[string]
keyword[return] identifier[ha] , identifier[va] | def text_alignment(x, y):
"""
Align text labels based on the x- and y-axis coordinate values.
This function is used for computing the appropriate alignment of the text
label.
For example, if the text is on the "right" side of the plot, we want it to
be left-aligned. If the text is on the "top" side of the plot, we want it
to be bottom-aligned.
:param x, y: (`int` or `float`) x- and y-axis coordinate respectively.
:returns: A 2-tuple of strings, the horizontal and vertical alignments
respectively.
"""
if x == 0:
ha = 'center' # depends on [control=['if'], data=[]]
elif x > 0:
ha = 'left' # depends on [control=['if'], data=[]]
else:
ha = 'right'
if y == 0:
va = 'center' # depends on [control=['if'], data=[]]
elif y > 0:
va = 'bottom' # depends on [control=['if'], data=[]]
else:
va = 'top'
return (ha, va) |
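A short sketch of text_alignment() in use. The matplotlib scatter-labeling context is an assumption for illustration; text_alignment is the function from the row above, and pushing each label away from the origin keeps it clear of its marker.

import matplotlib.pyplot as plt

points = [(1.0, 0.5), (-0.8, 0.2), (0.0, -1.0), (0.0, 0.0)]
fig, ax = plt.subplots()
for x, y in points:
    ha, va = text_alignment(x, y)   # e.g. (1.0, 0.5) -> ("left", "bottom")
    ax.plot(x, y, "o")
    ax.text(x, y, "(%.1f, %.1f)" % (x, y), ha=ha, va=va)
plt.show()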