code (string, lengths 75–104k) | code_sememe (string, lengths 47–309k) | token_type (string, lengths 215–214k) | code_dependency (string, lengths 75–155k)
---|---|---|---
def byte_href_anchors_state_machine(self):
'''
byte-based state machine extractor of anchor tags, so we can
compute byte offsets for anchor texts and associate them with
their href.
Generates tuple(href_string, first_byte, byte_length, anchor_text)
'''
tag_depth = 0
a_tag_depth = 0
vals = []
href = None
idx_bytes = enumerate(self.clean_html)
while 1:
end_idx, val, next_b = read_to( idx_bytes, '<' )
tag_depth += 1
if href:
## must be inside an anchor tag, so accumulate the
## whole anchor
assert a_tag_depth > 0, (href, self.clean_html)
vals.append(val)
## figure out if start of an "A" anchor tag or close
## of a previous tag
idx, next_b1 = idx_bytes.next()
if next_b1.lower() == 'a':
## could be start of "A" tag
idx, next_b2 = idx_bytes.next()
if next_b2 == ' ':
a_tag_depth += 1
href = None
for idx, attr_name, attr_val in iter_attrs( idx_bytes ):
if attr_name.lower() == 'href':
href = attr_val
if idx is None:
## doc ended mid tag, so invalid HTML--> just bail
return
first = idx + 1
## if we got an href, then we want to keep the
## first byte idx of the anchor:
if href:
## Someone could nest an A tag inside another
## A tag, which is invalid (even in HTML5), so
## vals could be nonempty. We only generate
## the leaf-level A tags in these rare cases
## of nested A tags, so reset it:
vals = []
elif next_b1 == '/':
idx, next_b1 = idx_bytes.next()
if next_b1 == 'a':
## could be end of "A" tag
idx, next_b2 = idx_bytes.next()
if next_b2 == '>':
a_tag_depth -= 1
if href:
## join is much faster than using += above
anchor = b''.join(vals)
length = len(anchor)
## yield the data
yield href, first, len(anchor), anchor
## reset so we do not yield again inside a nested A tag
href = None
else:
## the next_b was not part of </a> or a nested <a tag,
## so keep it in the output
vals.append(next_b) | def function[byte_href_anchors_state_machine, parameter[self]]:
constant[
byte-based state machine extractor of anchor tags, so we can
compute byte offsets for anchor texts and associate them with
their href.
Generates tuple(href_string, first_byte, byte_length, anchor_text)
]
variable[tag_depth] assign[=] constant[0]
variable[a_tag_depth] assign[=] constant[0]
variable[vals] assign[=] list[[]]
variable[href] assign[=] constant[None]
variable[idx_bytes] assign[=] call[name[enumerate], parameter[name[self].clean_html]]
while constant[1] begin[:]
<ast.Tuple object at 0x7da20cabd120> assign[=] call[name[read_to], parameter[name[idx_bytes], constant[<]]]
<ast.AugAssign object at 0x7da20cabebf0>
if name[href] begin[:]
assert[compare[name[a_tag_depth] greater[>] constant[0]]]
call[name[vals].append, parameter[name[val]]]
<ast.Tuple object at 0x7da20cabf580> assign[=] call[name[idx_bytes].next, parameter[]]
if compare[call[name[next_b1].lower, parameter[]] equal[==] constant[a]] begin[:]
<ast.Tuple object at 0x7da20cabc700> assign[=] call[name[idx_bytes].next, parameter[]]
if compare[name[next_b2] equal[==] constant[ ]] begin[:]
<ast.AugAssign object at 0x7da20cabf910>
variable[href] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da20cabc460>, <ast.Name object at 0x7da20cabf2b0>, <ast.Name object at 0x7da20cabee00>]]] in starred[call[name[iter_attrs], parameter[name[idx_bytes]]]] begin[:]
if compare[call[name[attr_name].lower, parameter[]] equal[==] constant[href]] begin[:]
variable[href] assign[=] name[attr_val]
if compare[name[idx] is constant[None]] begin[:]
return[None]
variable[first] assign[=] binary_operation[name[idx] + constant[1]]
if name[href] begin[:]
variable[vals] assign[=] list[[]] | keyword[def] identifier[byte_href_anchors_state_machine] ( identifier[self] ):
literal[string]
identifier[tag_depth] = literal[int]
identifier[a_tag_depth] = literal[int]
identifier[vals] =[]
identifier[href] = keyword[None]
identifier[idx_bytes] = identifier[enumerate] ( identifier[self] . identifier[clean_html] )
keyword[while] literal[int] :
identifier[end_idx] , identifier[val] , identifier[next_b] = identifier[read_to] ( identifier[idx_bytes] , literal[string] )
identifier[tag_depth] += literal[int]
keyword[if] identifier[href] :
keyword[assert] identifier[a_tag_depth] > literal[int] ,( identifier[href] , identifier[self] . identifier[clean_html] )
identifier[vals] . identifier[append] ( identifier[val] )
identifier[idx] , identifier[next_b1] = identifier[idx_bytes] . identifier[next] ()
keyword[if] identifier[next_b1] . identifier[lower] ()== literal[string] :
identifier[idx] , identifier[next_b2] = identifier[idx_bytes] . identifier[next] ()
keyword[if] identifier[next_b2] == literal[string] :
identifier[a_tag_depth] += literal[int]
identifier[href] = keyword[None]
keyword[for] identifier[idx] , identifier[attr_name] , identifier[attr_val] keyword[in] identifier[iter_attrs] ( identifier[idx_bytes] ):
keyword[if] identifier[attr_name] . identifier[lower] ()== literal[string] :
identifier[href] = identifier[attr_val]
keyword[if] identifier[idx] keyword[is] keyword[None] :
keyword[return]
identifier[first] = identifier[idx] + literal[int]
keyword[if] identifier[href] :
identifier[vals] =[]
keyword[elif] identifier[next_b1] == literal[string] :
identifier[idx] , identifier[next_b1] = identifier[idx_bytes] . identifier[next] ()
keyword[if] identifier[next_b1] == literal[string] :
identifier[idx] , identifier[next_b2] = identifier[idx_bytes] . identifier[next] ()
keyword[if] identifier[next_b2] == literal[string] :
identifier[a_tag_depth] -= literal[int]
keyword[if] identifier[href] :
identifier[anchor] = literal[string] . identifier[join] ( identifier[vals] )
identifier[length] = identifier[len] ( identifier[anchor] )
keyword[yield] identifier[href] , identifier[first] , identifier[len] ( identifier[anchor] ), identifier[anchor]
identifier[href] = keyword[None]
keyword[else] :
identifier[vals] . identifier[append] ( identifier[next_b] ) | def byte_href_anchors_state_machine(self):
"""
byte-based state machine extractor of anchor tags, so we can
compute byte offsets for anchor texts and associate them with
their href.
Generates tuple(href_string, first_byte, byte_length, anchor_text)
"""
tag_depth = 0
a_tag_depth = 0
vals = []
href = None
idx_bytes = enumerate(self.clean_html)
while 1:
(end_idx, val, next_b) = read_to(idx_bytes, '<')
tag_depth += 1
if href:
## must be inside an anchor tag, so accumulate the
## whole anchor
assert a_tag_depth > 0, (href, self.clean_html)
vals.append(val) # depends on [control=['if'], data=[]]
## figure out if start of an "A" anchor tag or close
## of a previous tag
(idx, next_b1) = idx_bytes.next()
if next_b1.lower() == 'a':
## could be start of "A" tag
(idx, next_b2) = idx_bytes.next()
if next_b2 == ' ':
a_tag_depth += 1
href = None
for (idx, attr_name, attr_val) in iter_attrs(idx_bytes):
if attr_name.lower() == 'href':
href = attr_val # depends on [control=['if'], data=[]]
if idx is None:
## doc ended mid tag, so invalid HTML--> just bail
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
first = idx + 1
## if we got an href, then we want to keep the
## first byte idx of the anchor:
if href:
## Someone could nest an A tag inside another
## A tag, which is invalid (even in HTML5), so
## vals could be nonempty. We only generate
## the leaf-level A tags in these rare cases
## of nested A tags, so reset it:
vals = [] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif next_b1 == '/':
(idx, next_b1) = idx_bytes.next()
if next_b1 == 'a':
## could be end of "A" tag
(idx, next_b2) = idx_bytes.next()
if next_b2 == '>':
a_tag_depth -= 1
if href:
## join is much faster than using += above
anchor = b''.join(vals)
length = len(anchor)
## yield the data
yield (href, first, len(anchor), anchor)
## reset so we do not yield again inside a nested A tag
href = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['next_b1']]
else:
## the next_b was not part of </a> or a nested <a tag,
## so keep it in the output
vals.append(next_b) # depends on [control=['while'], data=[]] |
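A minimal driving sketch for the generator above. `HtmlDoc` is a hypothetical stand-in for the unnamed host class, which must supply clean_html plus the read_to and iter_attrs helpers the method calls internally.

html = '<p><a href="http://example.com">Example</a> text</p>'
doc = HtmlDoc(clean_html=html)  # hypothetical constructor, not shown in this row
for href, first, length, anchor in doc.byte_href_anchors_state_machine():
    # each record ties an href to the byte span of its visible anchor text
    print('%s covers bytes [%d, %d): %r' % (href, first, first + length, anchor))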
def checkPortAvailable(ha):
"""Checks whether the given port is available"""
# Not sure why an OS would allow binding to one type and not the other.
# Check that the port is available for both TCP and UDP.
sockTypes = (socket.SOCK_DGRAM, socket.SOCK_STREAM)
for typ in sockTypes:
sock = socket.socket(socket.AF_INET, typ)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(ha)
if typ == socket.SOCK_STREAM:
l_onoff = 1
l_linger = 0
sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
struct.pack('ii', l_onoff, l_linger))
except OSError as exc:
if exc.errno in [
errno.EADDRINUSE, errno.EADDRNOTAVAIL,
WS_SOCKET_BIND_ERROR_ALREADY_IN_USE,
WS_SOCKET_BIND_ERROR_NOT_AVAILABLE
]:
raise PortNotAvailable(ha)
else:
raise exc
finally:
sock.close() | def function[checkPortAvailable, parameter[ha]]:
constant[Checks whether the given port is available]
variable[sockTypes] assign[=] tuple[[<ast.Attribute object at 0x7da1b16c3310>, <ast.Attribute object at 0x7da1b16c08e0>]]
for taget[name[typ]] in starred[name[sockTypes]] begin[:]
variable[sock] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[typ]]]
<ast.Try object at 0x7da1b1737f40> | keyword[def] identifier[checkPortAvailable] ( identifier[ha] ):
literal[string]
identifier[sockTypes] =( identifier[socket] . identifier[SOCK_DGRAM] , identifier[socket] . identifier[SOCK_STREAM] )
keyword[for] identifier[typ] keyword[in] identifier[sockTypes] :
identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[typ] )
keyword[try] :
identifier[sock] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEADDR] , literal[int] )
identifier[sock] . identifier[bind] ( identifier[ha] )
keyword[if] identifier[typ] == identifier[socket] . identifier[SOCK_STREAM] :
identifier[l_onoff] = literal[int]
identifier[l_linger] = literal[int]
identifier[sock] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_LINGER] ,
identifier[struct] . identifier[pack] ( literal[string] , identifier[l_onoff] , identifier[l_linger] ))
keyword[except] identifier[OSError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] keyword[in] [
identifier[errno] . identifier[EADDRINUSE] , identifier[errno] . identifier[EADDRNOTAVAIL] ,
identifier[WS_SOCKET_BIND_ERROR_ALREADY_IN_USE] ,
identifier[WS_SOCKET_BIND_ERROR_NOT_AVAILABLE]
]:
keyword[raise] identifier[PortNotAvailable] ( identifier[ha] )
keyword[else] :
keyword[raise] identifier[exc]
keyword[finally] :
identifier[sock] . identifier[close] () | def checkPortAvailable(ha):
"""Checks whether the given port is available"""
# Not sure why an OS would allow binding to one type and not the other.
# Check that the port is available for both TCP and UDP.
sockTypes = (socket.SOCK_DGRAM, socket.SOCK_STREAM)
for typ in sockTypes:
sock = socket.socket(socket.AF_INET, typ)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(ha)
if typ == socket.SOCK_STREAM:
l_onoff = 1
l_linger = 0
sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', l_onoff, l_linger)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except OSError as exc:
if exc.errno in [errno.EADDRINUSE, errno.EADDRNOTAVAIL, WS_SOCKET_BIND_ERROR_ALREADY_IN_USE, WS_SOCKET_BIND_ERROR_NOT_AVAILABLE]:
raise PortNotAvailable(ha) # depends on [control=['if'], data=[]]
else:
raise exc # depends on [control=['except'], data=['exc']]
finally:
sock.close() # depends on [control=['for'], data=['typ']] |
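A call-site sketch for the check above: ha is a (host, port) pair as accepted by socket.bind, and PortNotAvailable is the module's own exception type.

try:
    checkPortAvailable(('0.0.0.0', 9702))
except PortNotAvailable:
    print('port 9702 is already bound for TCP or UDP')
else:
    print('port 9702 is free for both socket types')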
def get_bytes(self, line_number):
"""Get the bytes representing needle positions or None.
:param int line_number: the line number to take the bytes from
:rtype: bytes
:return: the bytes that represent the message or :obj:`None` if no
data is there for the line.
Depending on the :attr:`machine`, the length and result may vary.
"""
if line_number not in self._needle_position_bytes_cache:
line = self._get(line_number)
if line is None:
line_bytes = None
else:
line_bytes = self._machine.needle_positions_to_bytes(line)
self._needle_position_bytes_cache[line_number] = line_bytes
return self._needle_position_bytes_cache[line_number] | def function[get_bytes, parameter[self, line_number]]:
constant[Get the bytes representing needle positions or None.
:param int line_number: the line number to take the bytes from
:rtype: bytes
:return: the bytes that represent the message or :obj:`None` if no
data is there for the line.
Depending on the :attr:`machine`, the length and result may vary.
]
if compare[name[line_number] <ast.NotIn object at 0x7da2590d7190> name[self]._needle_position_bytes_cache] begin[:]
variable[line] assign[=] call[name[self]._get, parameter[name[line_number]]]
if compare[name[line] is constant[None]] begin[:]
variable[line_bytes] assign[=] constant[None]
call[name[self]._needle_position_bytes_cache][name[line_number]] assign[=] name[line_bytes]
return[call[name[self]._needle_position_bytes_cache][name[line_number]]] | keyword[def] identifier[get_bytes] ( identifier[self] , identifier[line_number] ):
literal[string]
keyword[if] identifier[line_number] keyword[not] keyword[in] identifier[self] . identifier[_needle_position_bytes_cache] :
identifier[line] = identifier[self] . identifier[_get] ( identifier[line_number] )
keyword[if] identifier[line] keyword[is] keyword[None] :
identifier[line_bytes] = keyword[None]
keyword[else] :
identifier[line_bytes] = identifier[self] . identifier[_machine] . identifier[needle_positions_to_bytes] ( identifier[line] )
identifier[self] . identifier[_needle_position_bytes_cache] [ identifier[line_number] ]= identifier[line_bytes]
keyword[return] identifier[self] . identifier[_needle_position_bytes_cache] [ identifier[line_number] ] | def get_bytes(self, line_number):
"""Get the bytes representing needle positions or None.
:param int line_number: the line number to take the bytes from
:rtype: bytes
:return: the bytes that represent the message or :obj:`None` if no
data is there for the line.
Depending on the :attr:`machine`, the length and result may vary.
"""
if line_number not in self._needle_position_bytes_cache:
line = self._get(line_number)
if line is None:
line_bytes = None # depends on [control=['if'], data=[]]
else:
line_bytes = self._machine.needle_positions_to_bytes(line)
self._needle_position_bytes_cache[line_number] = line_bytes # depends on [control=['if'], data=['line_number']]
return self._needle_position_bytes_cache[line_number] |
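get_bytes memoizes per line, caching None results as well so that absent lines are not recomputed. A self-contained sketch of that cache-then-compute shape, with the machine conversion replaced by a plain callable:

class LineBytesCache(object):
    def __init__(self, compute):
        self._compute = compute   # line_number -> bytes or None
        self._cache = {}
    def get_bytes(self, line_number):
        if line_number not in self._cache:
            # store the result even when it is None, mirroring the code above
            self._cache[line_number] = self._compute(line_number)
        return self._cache[line_number]

cache = LineBytesCache(lambda n: b'\x01\x02' if n < 10 else None)
assert cache.get_bytes(3) == b'\x01\x02' and cache.get_bytes(42) is None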
def load_shapefile(self, feature_type, base_path):
"""Load downloaded shape file to QGIS Main Window.
TODO: This is cut & paste from OSM - refactor to have one method
:param feature_type: What kind of features should be downloaded.
Currently 'buildings', 'building-points' or 'roads' are supported.
:type feature_type: str
:param base_path: The base path of the shape file (without extension).
:type base_path: str
:raises: FileMissingError - when buildings.shp does not exist
"""
path = '%s.shp' % base_path
if not os.path.exists(path):
message = self.tr(
'%s does not exist. The server does not have any data for '
'this extent.' % path)
raise FileMissingError(message)
self.iface.addVectorLayer(path, feature_type, 'ogr') | def function[load_shapefile, parameter[self, feature_type, base_path]]:
constant[Load downloaded shape file to QGIS Main Window.
TODO: This is cut & paste from OSM - refactor to have one method
:param feature_type: What kind of features should be downloaded.
Currently 'buildings', 'building-points' or 'roads' are supported.
:type feature_type: str
:param base_path: The base path of the shape file (without extension).
:type base_path: str
:raises: FileMissingError - when buildings.shp does not exist
]
variable[path] assign[=] binary_operation[constant[%s.shp] <ast.Mod object at 0x7da2590d6920> name[base_path]]
if <ast.UnaryOp object at 0x7da1b0c50ac0> begin[:]
variable[message] assign[=] call[name[self].tr, parameter[binary_operation[constant[%s does not exist. The server does not have any data for this extent.] <ast.Mod object at 0x7da2590d6920> name[path]]]]
<ast.Raise object at 0x7da1b0c50550>
call[name[self].iface.addVectorLayer, parameter[name[path], name[feature_type], constant[ogr]]] | keyword[def] identifier[load_shapefile] ( identifier[self] , identifier[feature_type] , identifier[base_path] ):
literal[string]
identifier[path] = literal[string] % identifier[base_path]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
identifier[message] = identifier[self] . identifier[tr] (
literal[string]
literal[string] % identifier[path] )
keyword[raise] identifier[FileMissingError] ( identifier[message] )
identifier[self] . identifier[iface] . identifier[addVectorLayer] ( identifier[path] , identifier[feature_type] , literal[string] ) | def load_shapefile(self, feature_type, base_path):
"""Load downloaded shape file to QGIS Main Window.
TODO: This is cut & paste from OSM - refactor to have one method
:param feature_type: What kind of features should be downloaded.
Currently 'buildings', 'building-points' or 'roads' are supported.
:type feature_type: str
:param base_path: The base path of the shape file (without extension).
:type base_path: str
:raises: FileMissingError - when buildings.shp does not exist
"""
path = '%s.shp' % base_path
if not os.path.exists(path):
message = self.tr('%s does not exist. The server does not have any data for this extent.' % path)
raise FileMissingError(message) # depends on [control=['if'], data=[]]
self.iface.addVectorLayer(path, feature_type, 'ogr') |
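A hedged call-site sketch; `dialog` stands in for the QGIS plugin instance that owns this method, and the path below is illustrative only (note it carries no .shp suffix).

try:
    dialog.load_shapefile('buildings', '/tmp/osm/buildings')
except FileMissingError as exc:
    print(exc)  # the server had no data for the requested extent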
def any_commaseparatedinteger_field(field, **kwargs):
"""
Return random value for CommaSeparatedIntegerField
>>> result = any_field(models.CommaSeparatedIntegerField(max_length=10))
>>> type(result)
<type 'str'>
>>> [int(num) for num in result.split(',')] and 'OK'
'OK'
"""
nums_count = field.max_length/2
nums = [str(xunit.any_int(min_value=0, max_value=9)) for _ in xrange(0, nums_count)]
return ",".join(nums) | def function[any_commaseparatedinteger_field, parameter[field]]:
constant[
Return random value for CommaSeparatedIntegerField
>>> result = any_field(models.CommaSeparatedIntegerField(max_length=10))
>>> type(result)
<type 'str'>
>>> [int(num) for num in result.split(',')] and 'OK'
'OK'
]
variable[nums_count] assign[=] binary_operation[name[field].max_length / constant[2]]
variable[nums] assign[=] <ast.ListComp object at 0x7da1b02dd240>
return[call[constant[,].join, parameter[name[nums]]]] | keyword[def] identifier[any_commaseparatedinteger_field] ( identifier[field] ,** identifier[kwargs] ):
literal[string]
identifier[nums_count] = identifier[field] . identifier[max_length] / literal[int]
identifier[nums] =[ identifier[str] ( identifier[xunit] . identifier[any_int] ( identifier[min_value] = literal[int] , identifier[max_value] = literal[int] )) keyword[for] identifier[_] keyword[in] identifier[xrange] ( literal[int] , identifier[nums_count] )]
keyword[return] literal[string] . identifier[join] ( identifier[nums] ) | def any_commaseparatedinteger_field(field, **kwargs):
"""
Return random value for CommaSeparatedIntegerField
>>> result = any_field(models.CommaSeparatedIntegerField(max_length=10))
>>> type(result)
<type 'str'>
>>> [int(num) for num in result.split(',')] and 'OK'
'OK'
"""
nums_count = field.max_length / 2
nums = [str(xunit.any_int(min_value=0, max_value=9)) for _ in xrange(0, nums_count)]
return ','.join(nums) |
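Why max_length/2 numbers: each single-digit value plus its separating comma costs at most two characters, so n digits joined by commas take 2n - 1 characters. A quick bound check of that arithmetic:

max_length = 10
nums_count = max_length // 2              # 5 single-digit numbers
worst_case = 2 * nums_count - 1           # digits plus commas = 9
assert worst_case <= max_length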
def instance(cls):
"""
Singleton to return only one instance of BaseManager.
:returns: instance of BaseManager
"""
if not hasattr(cls, "_instance") or cls._instance is None:
cls._instance = cls()
return cls._instance | def function[instance, parameter[cls]]:
constant[
Singleton to return only one instance of BaseManager.
:returns: instance of BaseManager
]
if <ast.BoolOp object at 0x7da2044c29e0> begin[:]
name[cls]._instance assign[=] call[name[cls], parameter[]]
return[name[cls]._instance] | keyword[def] identifier[instance] ( identifier[cls] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ) keyword[or] identifier[cls] . identifier[_instance] keyword[is] keyword[None] :
identifier[cls] . identifier[_instance] = identifier[cls] ()
keyword[return] identifier[cls] . identifier[_instance] | def instance(cls):
"""
Singleton to return only one instance of BaseManager.
:returns: instance of BaseManager
"""
if not hasattr(cls, '_instance') or cls._instance is None:
cls._instance = cls() # depends on [control=['if'], data=[]]
return cls._instance |
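Usage of the lazy singleton above, assuming the method is bound as a classmethod on BaseManager as its docstring suggests: every call after the first returns the cached instance.

m1 = BaseManager.instance()
m2 = BaseManager.instance()
assert m1 is m2   # constructed once, shared thereafter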
def portCnt(port):
"""
recursively count number of ports without children
"""
if port.children:
return sum(map(lambda p: portCnt(p), port.children))
else:
return 1 | def function[portCnt, parameter[port]]:
constant[
recursively count number of ports without children
]
if name[port].children begin[:]
return[call[name[sum], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b2240910>, name[port].children]]]]] | keyword[def] identifier[portCnt] ( identifier[port] ):
literal[string]
keyword[if] identifier[port] . identifier[children] :
keyword[return] identifier[sum] ( identifier[map] ( keyword[lambda] identifier[p] : identifier[portCnt] ( identifier[p] ), identifier[port] . identifier[children] ))
keyword[else] :
keyword[return] literal[int] | def portCnt(port):
"""
recursively count number of ports without children
"""
if port.children:
return sum(map(lambda p: portCnt(p), port.children)) # depends on [control=['if'], data=[]]
else:
return 1 |
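A self-contained check of the leaf-counting recursion, using a throwaway Port type (the real port class is not shown in this row):

class Port(object):
    def __init__(self, children=None):
        self.children = children or []

root = Port([Port(), Port([Port(), Port()])])
assert portCnt(root) == 3   # one direct leaf plus two nested leaves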
def get_coords(x, y, params):
"""
Transforms the given coordinates from plane-space to Mandelbrot-space (real and imaginary).
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Tuple containing the re-mapped coordinates in Mandelbrot-space.
"""
n_x = x * 2.0 / params.plane_w * params.plane_ratio - 1.0
n_y = y * 2.0 / params.plane_h - 1.0
mb_x = params.zoom * n_x
mb_y = params.zoom * n_y
return mb_x, mb_y | def function[get_coords, parameter[x, y, params]]:
constant[
Transforms the given coordinates from plane-space to Mandelbrot-space (real and imaginary).
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Tuple containing the re-mapped coordinates in Mandelbrot-space.
]
variable[n_x] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[x] * constant[2.0]] / name[params].plane_w] * name[params].plane_ratio] - constant[1.0]]
variable[n_y] assign[=] binary_operation[binary_operation[binary_operation[name[y] * constant[2.0]] / name[params].plane_h] - constant[1.0]]
variable[mb_x] assign[=] binary_operation[name[params].zoom * name[n_x]]
variable[mb_y] assign[=] binary_operation[name[params].zoom * name[n_y]]
return[tuple[[<ast.Name object at 0x7da1b05bd360>, <ast.Name object at 0x7da1b05bd390>]]] | keyword[def] identifier[get_coords] ( identifier[x] , identifier[y] , identifier[params] ):
literal[string]
identifier[n_x] = identifier[x] * literal[int] / identifier[params] . identifier[plane_w] * identifier[params] . identifier[plane_ratio] - literal[int]
identifier[n_y] = identifier[y] * literal[int] / identifier[params] . identifier[plane_h] - literal[int]
identifier[mb_x] = identifier[params] . identifier[zoom] * identifier[n_x]
identifier[mb_y] = identifier[params] . identifier[zoom] * identifier[n_y]
keyword[return] identifier[mb_x] , identifier[mb_y] | def get_coords(x, y, params):
"""
Transforms the given coordinates from plane-space to Mandelbrot-space (real and imaginary).
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Tuple containing the re-mapped coordinates in Mandelbrot-space.
"""
n_x = x * 2.0 / params.plane_w * params.plane_ratio - 1.0
n_y = y * 2.0 / params.plane_h - 1.0
mb_x = params.zoom * n_x
mb_y = params.zoom * n_y
return (mb_x, mb_y) |
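A worked example of the mapping: the centre of the plane should land on the Mandelbrot-space origin regardless of zoom. Params is faked here with just the attributes the function reads.

class FakeParams(object):
    plane_w, plane_h = 800, 600
    plane_ratio = 1.0
    zoom = 2.0

mb_x, mb_y = get_coords(400, 300, FakeParams())
assert (mb_x, mb_y) == (0.0, 0.0)   # plane centre maps to the origin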
def resolve_links(self, link_resolver):
"""
Banana banana
"""
self.type_link = None
self.type_tokens = []
for child in self.get_children_symbols():
child.resolve_links(link_resolver)
for tok in self.input_tokens:
if isinstance(tok, Link):
self.type_link = link_resolver.upsert_link(tok)
self.type_tokens.append(self.type_link)
else:
self.type_tokens.append(tok) | def function[resolve_links, parameter[self, link_resolver]]:
constant[
Banana banana
]
name[self].type_link assign[=] constant[None]
name[self].type_tokens assign[=] list[[]]
for taget[name[child]] in starred[call[name[self].get_children_symbols, parameter[]]] begin[:]
call[name[child].resolve_links, parameter[name[link_resolver]]]
for taget[name[tok]] in starred[name[self].input_tokens] begin[:]
if call[name[isinstance], parameter[name[tok], name[Link]]] begin[:]
name[self].type_link assign[=] call[name[link_resolver].upsert_link, parameter[name[tok]]]
call[name[self].type_tokens.append, parameter[name[self].type_link]] | keyword[def] identifier[resolve_links] ( identifier[self] , identifier[link_resolver] ):
literal[string]
identifier[self] . identifier[type_link] = keyword[None]
identifier[self] . identifier[type_tokens] =[]
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[get_children_symbols] ():
identifier[child] . identifier[resolve_links] ( identifier[link_resolver] )
keyword[for] identifier[tok] keyword[in] identifier[self] . identifier[input_tokens] :
keyword[if] identifier[isinstance] ( identifier[tok] , identifier[Link] ):
identifier[self] . identifier[type_link] = identifier[link_resolver] . identifier[upsert_link] ( identifier[tok] )
identifier[self] . identifier[type_tokens] . identifier[append] ( identifier[self] . identifier[type_link] )
keyword[else] :
identifier[self] . identifier[type_tokens] . identifier[append] ( identifier[tok] ) | def resolve_links(self, link_resolver):
"""
Banana banana
"""
self.type_link = None
self.type_tokens = []
for child in self.get_children_symbols():
child.resolve_links(link_resolver) # depends on [control=['for'], data=['child']]
for tok in self.input_tokens:
if isinstance(tok, Link):
self.type_link = link_resolver.upsert_link(tok)
self.type_tokens.append(self.type_link) # depends on [control=['if'], data=[]]
else:
self.type_tokens.append(tok) # depends on [control=['for'], data=['tok']] |
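A hedged driving sketch: the surrounding documentation tool hands every symbol the shared resolver, after which type_tokens holds a mix of plain tokens and resolved Link objects. Both root_symbol and link_resolver are assumed to come from that host tool.

root_symbol.resolve_links(link_resolver)
print(root_symbol.type_tokens)   # plain strings interleaved with resolved Links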
def update_position(self):
'''update position text'''
state = self.state
pos = self.mouse_pos
newtext = ''
alt = 0
if pos is not None:
(lat,lon) = self.coordinates(pos.x, pos.y)
newtext += 'Cursor: %f %f (%s)' % (lat, lon, mp_util.latlon_to_grid((lat, lon)))
if state.elevation:
alt = self.ElevationMap.GetElevation(lat, lon)
if alt is not None:
newtext += ' %.1fm' % alt
state.mt.set_download(state.download)
pending = 0
if state.download:
pending = state.mt.tiles_pending()
if pending:
newtext += ' Map Downloading %u ' % pending
if alt == -1:
newtext += ' SRTM Downloading '
newtext += '\n'
if self.click_pos is not None:
newtext += 'Click: %f %f (%s %s) (%s)' % (self.click_pos[0], self.click_pos[1],
mp_util.degrees_to_dms(self.click_pos[0]),
mp_util.degrees_to_dms(self.click_pos[1]),
mp_util.latlon_to_grid(self.click_pos))
if self.last_click_pos is not None:
distance = mp_util.gps_distance(self.last_click_pos[0], self.last_click_pos[1],
self.click_pos[0], self.click_pos[1])
bearing = mp_util.gps_bearing(self.last_click_pos[0], self.last_click_pos[1],
self.click_pos[0], self.click_pos[1])
newtext += ' Distance: %.1fm Bearing %.1f' % (distance, bearing)
if newtext != state.oldtext:
self.position.Clear()
self.position.WriteText(newtext)
state.oldtext = newtext | def function[update_position, parameter[self]]:
constant[update position text]
variable[state] assign[=] name[self].state
variable[pos] assign[=] name[self].mouse_pos
variable[newtext] assign[=] constant[]
variable[alt] assign[=] constant[0]
if compare[name[pos] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da20e962680> assign[=] call[name[self].coordinates, parameter[name[pos].x, name[pos].y]]
<ast.AugAssign object at 0x7da20e961ff0>
if name[state].elevation begin[:]
variable[alt] assign[=] call[name[self].ElevationMap.GetElevation, parameter[name[lat], name[lon]]]
if compare[name[alt] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da20e9612a0>
call[name[state].mt.set_download, parameter[name[state].download]]
variable[pending] assign[=] constant[0]
if name[state].download begin[:]
variable[pending] assign[=] call[name[state].mt.tiles_pending, parameter[]]
if name[pending] begin[:]
<ast.AugAssign object at 0x7da20e961420>
if compare[name[alt] equal[==] <ast.UnaryOp object at 0x7da20e963a90>] begin[:]
<ast.AugAssign object at 0x7da20e9600a0>
<ast.AugAssign object at 0x7da20e963be0>
if compare[name[self].click_pos is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da20e963610>
if compare[name[self].last_click_pos is_not constant[None]] begin[:]
variable[distance] assign[=] call[name[mp_util].gps_distance, parameter[call[name[self].last_click_pos][constant[0]], call[name[self].last_click_pos][constant[1]], call[name[self].click_pos][constant[0]], call[name[self].click_pos][constant[1]]]]
variable[bearing] assign[=] call[name[mp_util].gps_bearing, parameter[call[name[self].last_click_pos][constant[0]], call[name[self].last_click_pos][constant[1]], call[name[self].click_pos][constant[0]], call[name[self].click_pos][constant[1]]]]
<ast.AugAssign object at 0x7da204344460>
if compare[name[newtext] not_equal[!=] name[state].oldtext] begin[:]
call[name[self].position.Clear, parameter[]]
call[name[self].position.WriteText, parameter[name[newtext]]]
name[state].oldtext assign[=] name[newtext] | keyword[def] identifier[update_position] ( identifier[self] ):
literal[string]
identifier[state] = identifier[self] . identifier[state]
identifier[pos] = identifier[self] . identifier[mouse_pos]
identifier[newtext] = literal[string]
identifier[alt] = literal[int]
keyword[if] identifier[pos] keyword[is] keyword[not] keyword[None] :
( identifier[lat] , identifier[lon] )= identifier[self] . identifier[coordinates] ( identifier[pos] . identifier[x] , identifier[pos] . identifier[y] )
identifier[newtext] += literal[string] %( identifier[lat] , identifier[lon] , identifier[mp_util] . identifier[latlon_to_grid] (( identifier[lat] , identifier[lon] )))
keyword[if] identifier[state] . identifier[elevation] :
identifier[alt] = identifier[self] . identifier[ElevationMap] . identifier[GetElevation] ( identifier[lat] , identifier[lon] )
keyword[if] identifier[alt] keyword[is] keyword[not] keyword[None] :
identifier[newtext] += literal[string] % identifier[alt]
identifier[state] . identifier[mt] . identifier[set_download] ( identifier[state] . identifier[download] )
identifier[pending] = literal[int]
keyword[if] identifier[state] . identifier[download] :
identifier[pending] = identifier[state] . identifier[mt] . identifier[tiles_pending] ()
keyword[if] identifier[pending] :
identifier[newtext] += literal[string] % identifier[pending]
keyword[if] identifier[alt] ==- literal[int] :
identifier[newtext] += literal[string]
identifier[newtext] += literal[string]
keyword[if] identifier[self] . identifier[click_pos] keyword[is] keyword[not] keyword[None] :
identifier[newtext] += literal[string] %( identifier[self] . identifier[click_pos] [ literal[int] ], identifier[self] . identifier[click_pos] [ literal[int] ],
identifier[mp_util] . identifier[degrees_to_dms] ( identifier[self] . identifier[click_pos] [ literal[int] ]),
identifier[mp_util] . identifier[degrees_to_dms] ( identifier[self] . identifier[click_pos] [ literal[int] ]),
identifier[mp_util] . identifier[latlon_to_grid] ( identifier[self] . identifier[click_pos] ))
keyword[if] identifier[self] . identifier[last_click_pos] keyword[is] keyword[not] keyword[None] :
identifier[distance] = identifier[mp_util] . identifier[gps_distance] ( identifier[self] . identifier[last_click_pos] [ literal[int] ], identifier[self] . identifier[last_click_pos] [ literal[int] ],
identifier[self] . identifier[click_pos] [ literal[int] ], identifier[self] . identifier[click_pos] [ literal[int] ])
identifier[bearing] = identifier[mp_util] . identifier[gps_bearing] ( identifier[self] . identifier[last_click_pos] [ literal[int] ], identifier[self] . identifier[last_click_pos] [ literal[int] ],
identifier[self] . identifier[click_pos] [ literal[int] ], identifier[self] . identifier[click_pos] [ literal[int] ])
identifier[newtext] += literal[string] %( identifier[distance] , identifier[bearing] )
keyword[if] identifier[newtext] != identifier[state] . identifier[oldtext] :
identifier[self] . identifier[position] . identifier[Clear] ()
identifier[self] . identifier[position] . identifier[WriteText] ( identifier[newtext] )
identifier[state] . identifier[oldtext] = identifier[newtext] | def update_position(self):
"""update position text"""
state = self.state
pos = self.mouse_pos
newtext = ''
alt = 0
if pos is not None:
(lat, lon) = self.coordinates(pos.x, pos.y)
newtext += 'Cursor: %f %f (%s)' % (lat, lon, mp_util.latlon_to_grid((lat, lon)))
if state.elevation:
alt = self.ElevationMap.GetElevation(lat, lon)
if alt is not None:
newtext += ' %.1fm' % alt # depends on [control=['if'], data=['alt']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['pos']]
state.mt.set_download(state.download)
pending = 0
if state.download:
pending = state.mt.tiles_pending() # depends on [control=['if'], data=[]]
if pending:
newtext += ' Map Downloading %u ' % pending # depends on [control=['if'], data=[]]
if alt == -1:
newtext += ' SRTM Downloading ' # depends on [control=['if'], data=[]]
newtext += '\n'
if self.click_pos is not None:
newtext += 'Click: %f %f (%s %s) (%s)' % (self.click_pos[0], self.click_pos[1], mp_util.degrees_to_dms(self.click_pos[0]), mp_util.degrees_to_dms(self.click_pos[1]), mp_util.latlon_to_grid(self.click_pos)) # depends on [control=['if'], data=[]]
if self.last_click_pos is not None:
distance = mp_util.gps_distance(self.last_click_pos[0], self.last_click_pos[1], self.click_pos[0], self.click_pos[1])
bearing = mp_util.gps_bearing(self.last_click_pos[0], self.last_click_pos[1], self.click_pos[0], self.click_pos[1])
newtext += ' Distance: %.1fm Bearing %.1f' % (distance, bearing) # depends on [control=['if'], data=[]]
if newtext != state.oldtext:
self.position.Clear()
self.position.WriteText(newtext)
state.oldtext = newtext # depends on [control=['if'], data=['newtext']] |
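The distance/bearing readout above rests on two helpers; a hedged standalone use of them (the import path follows MAVProxy's layout and is an assumption here):

from MAVProxy.modules.lib import mp_util  # assumed import path
home = (-35.363261, 149.165230)
click = (-35.362938, 149.165085)
dist = mp_util.gps_distance(home[0], home[1], click[0], click[1])
bear = mp_util.gps_bearing(home[0], home[1], click[0], click[1])
print('Distance: %.1fm Bearing %.1f' % (dist, bear))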
def draw_text(self, content):
"""Draws text cell content to context"""
wx2pango_alignment = {
"left": pango.ALIGN_LEFT,
"center": pango.ALIGN_CENTER,
"right": pango.ALIGN_RIGHT,
}
cell_attributes = self.code_array.cell_attributes[self.key]
angle = cell_attributes["angle"]
if angle in [-90, 90]:
rect = self.rect[1], self.rect[0], self.rect[3], self.rect[2]
else:
rect = self.rect
# Text color attributes
self.context.set_source_rgb(*self._get_text_color())
ptx = pangocairo.CairoContext(self.context)
pango_layout = ptx.create_layout()
self.set_font(pango_layout)
pango_layout.set_wrap(pango.WRAP_WORD_CHAR)
pango_layout.set_width(int(round((rect[2] - 4.0) * pango.SCALE)))
try:
markup = cell_attributes["markup"]
except KeyError:
# Old file
markup = False
if markup:
with warnings.catch_warnings(record=True) as warning_lines:
warnings.resetwarnings()
warnings.simplefilter("always")
pango_layout.set_markup(unicode(content))
if warning_lines:
w2unicode = lambda m: unicode(m.message)
msg = u"\n".join(map(w2unicode, warning_lines))
pango_layout.set_text(msg)
else:
pango_layout.set_text(unicode(content))
alignment = cell_attributes["justification"]
pango_layout.set_alignment(wx2pango_alignment[alignment])
# Shift text for vertical alignment
extents = pango_layout.get_pixel_extents()
downshift = 0
if cell_attributes["vertical_align"] == "bottom":
downshift = rect[3] - extents[1][3] - 4
elif cell_attributes["vertical_align"] == "middle":
downshift = int((rect[3] - extents[1][3]) / 2) - 2
self.context.save()
self._rotate_cell(angle, rect)
self.context.translate(0, downshift)
# Spell check underline drawing
if SpellChecker is not None and self.spell_check:
text = unicode(pango_layout.get_text())
lang = config["spell_lang"]
for start, stop in self._check_spelling(text, lang=lang):
self._draw_error_underline(ptx, pango_layout, start, stop-1)
ptx.update_layout(pango_layout)
ptx.show_layout(pango_layout)
self.context.restore() | def function[draw_text, parameter[self, content]]:
constant[Draws text cell content to context]
variable[wx2pango_alignment] assign[=] dictionary[[<ast.Constant object at 0x7da1b1543dc0>, <ast.Constant object at 0x7da1b1543d90>, <ast.Constant object at 0x7da1b1543d60>], [<ast.Attribute object at 0x7da1b1543d30>, <ast.Attribute object at 0x7da1b1543cd0>, <ast.Attribute object at 0x7da1b1543c70>]]
variable[cell_attributes] assign[=] call[name[self].code_array.cell_attributes][name[self].key]
variable[angle] assign[=] call[name[cell_attributes]][constant[angle]]
if compare[name[angle] in list[[<ast.UnaryOp object at 0x7da1b15438e0>, <ast.Constant object at 0x7da1b1543880>]]] begin[:]
variable[rect] assign[=] tuple[[<ast.Subscript object at 0x7da1b15437c0>, <ast.Subscript object at 0x7da1b1543700>, <ast.Subscript object at 0x7da1b1543640>, <ast.Subscript object at 0x7da1b1543580>]]
call[name[self].context.set_source_rgb, parameter[<ast.Starred object at 0x7da1b1543310>]]
variable[ptx] assign[=] call[name[pangocairo].CairoContext, parameter[name[self].context]]
variable[pango_layout] assign[=] call[name[ptx].create_layout, parameter[]]
call[name[self].set_font, parameter[name[pango_layout]]]
call[name[pango_layout].set_wrap, parameter[name[pango].WRAP_WORD_CHAR]]
call[name[pango_layout].set_width, parameter[call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[call[name[rect]][constant[2]] - constant[4.0]] * name[pango].SCALE]]]]]]]
<ast.Try object at 0x7da1b1542a40>
if name[markup] begin[:]
with call[name[warnings].catch_warnings, parameter[]] begin[:]
call[name[warnings].resetwarnings, parameter[]]
call[name[warnings].simplefilter, parameter[constant[always]]]
call[name[pango_layout].set_markup, parameter[call[name[unicode], parameter[name[content]]]]]
if name[warning_lines] begin[:]
variable[w2unicode] assign[=] <ast.Lambda object at 0x7da1b15421d0>
variable[msg] assign[=] call[constant[
].join, parameter[call[name[map], parameter[name[w2unicode], name[warning_lines]]]]]
call[name[pango_layout].set_text, parameter[name[msg]]]
variable[alignment] assign[=] call[name[cell_attributes]][constant[justification]]
call[name[pango_layout].set_alignment, parameter[call[name[wx2pango_alignment]][name[alignment]]]]
variable[extents] assign[=] call[name[pango_layout].get_pixel_extents, parameter[]]
variable[downshift] assign[=] constant[0]
if compare[call[name[cell_attributes]][constant[vertical_align]] equal[==] constant[bottom]] begin[:]
variable[downshift] assign[=] binary_operation[binary_operation[call[name[rect]][constant[3]] - call[call[name[extents]][constant[1]]][constant[3]]] - constant[4]]
call[name[self].context.save, parameter[]]
call[name[self]._rotate_cell, parameter[name[angle], name[rect]]]
call[name[self].context.translate, parameter[constant[0], name[downshift]]]
if <ast.BoolOp object at 0x7da1b15402e0> begin[:]
variable[text] assign[=] call[name[unicode], parameter[call[name[pango_layout].get_text, parameter[]]]]
variable[lang] assign[=] call[name[config]][constant[spell_lang]]
for taget[tuple[[<ast.Name object at 0x7da1b1632d40>, <ast.Name object at 0x7da1b1632e00>]]] in starred[call[name[self]._check_spelling, parameter[name[text]]]] begin[:]
call[name[self]._draw_error_underline, parameter[name[ptx], name[pango_layout], name[start], binary_operation[name[stop] - constant[1]]]]
call[name[ptx].update_layout, parameter[name[pango_layout]]]
call[name[ptx].show_layout, parameter[name[pango_layout]]]
call[name[self].context.restore, parameter[]] | keyword[def] identifier[draw_text] ( identifier[self] , identifier[content] ):
literal[string]
identifier[wx2pango_alignment] ={
literal[string] : identifier[pango] . identifier[ALIGN_LEFT] ,
literal[string] : identifier[pango] . identifier[ALIGN_CENTER] ,
literal[string] : identifier[pango] . identifier[ALIGN_RIGHT] ,
}
identifier[cell_attributes] = identifier[self] . identifier[code_array] . identifier[cell_attributes] [ identifier[self] . identifier[key] ]
identifier[angle] = identifier[cell_attributes] [ literal[string] ]
keyword[if] identifier[angle] keyword[in] [- literal[int] , literal[int] ]:
identifier[rect] = identifier[self] . identifier[rect] [ literal[int] ], identifier[self] . identifier[rect] [ literal[int] ], identifier[self] . identifier[rect] [ literal[int] ], identifier[self] . identifier[rect] [ literal[int] ]
keyword[else] :
identifier[rect] = identifier[self] . identifier[rect]
identifier[self] . identifier[context] . identifier[set_source_rgb] (* identifier[self] . identifier[_get_text_color] ())
identifier[ptx] = identifier[pangocairo] . identifier[CairoContext] ( identifier[self] . identifier[context] )
identifier[pango_layout] = identifier[ptx] . identifier[create_layout] ()
identifier[self] . identifier[set_font] ( identifier[pango_layout] )
identifier[pango_layout] . identifier[set_wrap] ( identifier[pango] . identifier[WRAP_WORD_CHAR] )
identifier[pango_layout] . identifier[set_width] ( identifier[int] ( identifier[round] (( identifier[rect] [ literal[int] ]- literal[int] )* identifier[pango] . identifier[SCALE] )))
keyword[try] :
identifier[markup] = identifier[cell_attributes] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[markup] = keyword[False]
keyword[if] identifier[markup] :
keyword[with] identifier[warnings] . identifier[catch_warnings] ( identifier[record] = keyword[True] ) keyword[as] identifier[warning_lines] :
identifier[warnings] . identifier[resetwarnings] ()
identifier[warnings] . identifier[simplefilter] ( literal[string] )
identifier[pango_layout] . identifier[set_markup] ( identifier[unicode] ( identifier[content] ))
keyword[if] identifier[warning_lines] :
identifier[w2unicode] = keyword[lambda] identifier[m] : identifier[unicode] ( identifier[m] . identifier[message] )
identifier[msg] = literal[string] . identifier[join] ( identifier[map] ( identifier[w2unicode] , identifier[warning_lines] ))
identifier[pango_layout] . identifier[set_text] ( identifier[msg] )
keyword[else] :
identifier[pango_layout] . identifier[set_text] ( identifier[unicode] ( identifier[content] ))
identifier[alignment] = identifier[cell_attributes] [ literal[string] ]
identifier[pango_layout] . identifier[set_alignment] ( identifier[wx2pango_alignment] [ identifier[alignment] ])
identifier[extents] = identifier[pango_layout] . identifier[get_pixel_extents] ()
identifier[downshift] = literal[int]
keyword[if] identifier[cell_attributes] [ literal[string] ]== literal[string] :
identifier[downshift] = identifier[rect] [ literal[int] ]- identifier[extents] [ literal[int] ][ literal[int] ]- literal[int]
keyword[elif] identifier[cell_attributes] [ literal[string] ]== literal[string] :
identifier[downshift] = identifier[int] (( identifier[rect] [ literal[int] ]- identifier[extents] [ literal[int] ][ literal[int] ])/ literal[int] )- literal[int]
identifier[self] . identifier[context] . identifier[save] ()
identifier[self] . identifier[_rotate_cell] ( identifier[angle] , identifier[rect] )
identifier[self] . identifier[context] . identifier[translate] ( literal[int] , identifier[downshift] )
keyword[if] identifier[SpellChecker] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[spell_check] :
identifier[text] = identifier[unicode] ( identifier[pango_layout] . identifier[get_text] ())
identifier[lang] = identifier[config] [ literal[string] ]
keyword[for] identifier[start] , identifier[stop] keyword[in] identifier[self] . identifier[_check_spelling] ( identifier[text] , identifier[lang] = identifier[lang] ):
identifier[self] . identifier[_draw_error_underline] ( identifier[ptx] , identifier[pango_layout] , identifier[start] , identifier[stop] - literal[int] )
identifier[ptx] . identifier[update_layout] ( identifier[pango_layout] )
identifier[ptx] . identifier[show_layout] ( identifier[pango_layout] )
identifier[self] . identifier[context] . identifier[restore] () | def draw_text(self, content):
"""Draws text cell content to context"""
wx2pango_alignment = {'left': pango.ALIGN_LEFT, 'center': pango.ALIGN_CENTER, 'right': pango.ALIGN_RIGHT}
cell_attributes = self.code_array.cell_attributes[self.key]
angle = cell_attributes['angle']
if angle in [-90, 90]:
rect = (self.rect[1], self.rect[0], self.rect[3], self.rect[2]) # depends on [control=['if'], data=[]]
else:
rect = self.rect
# Text color attributes
self.context.set_source_rgb(*self._get_text_color())
ptx = pangocairo.CairoContext(self.context)
pango_layout = ptx.create_layout()
self.set_font(pango_layout)
pango_layout.set_wrap(pango.WRAP_WORD_CHAR)
pango_layout.set_width(int(round((rect[2] - 4.0) * pango.SCALE)))
try:
markup = cell_attributes['markup'] # depends on [control=['try'], data=[]]
except KeyError:
# Old file
markup = False # depends on [control=['except'], data=[]]
if markup:
with warnings.catch_warnings(record=True) as warning_lines:
warnings.resetwarnings()
warnings.simplefilter('always')
pango_layout.set_markup(unicode(content))
if warning_lines:
w2unicode = lambda m: unicode(m.message)
msg = u'\n'.join(map(w2unicode, warning_lines))
pango_layout.set_text(msg) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['warning_lines']] # depends on [control=['if'], data=[]]
else:
pango_layout.set_text(unicode(content))
alignment = cell_attributes['justification']
pango_layout.set_alignment(wx2pango_alignment[alignment])
# Shift text for vertical alignment
extents = pango_layout.get_pixel_extents()
downshift = 0
if cell_attributes['vertical_align'] == 'bottom':
downshift = rect[3] - extents[1][3] - 4 # depends on [control=['if'], data=[]]
elif cell_attributes['vertical_align'] == 'middle':
downshift = int((rect[3] - extents[1][3]) / 2) - 2 # depends on [control=['if'], data=[]]
self.context.save()
self._rotate_cell(angle, rect)
self.context.translate(0, downshift)
# Spell check underline drawing
if SpellChecker is not None and self.spell_check:
text = unicode(pango_layout.get_text())
lang = config['spell_lang']
for (start, stop) in self._check_spelling(text, lang=lang):
self._draw_error_underline(ptx, pango_layout, start, stop - 1) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
ptx.update_layout(pango_layout)
ptx.show_layout(pango_layout)
self.context.restore() |
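The vertical-alignment shift reduces to arithmetic on the layout's pixel extents; a standalone sketch of the same branches ("top" needs no shift):

def vertical_downshift(align, cell_height, text_height):
    # mirrors the bottom/middle handling in draw_text above
    if align == 'bottom':
        return cell_height - text_height - 4
    if align == 'middle':
        return int((cell_height - text_height) / 2) - 2
    return 0

assert vertical_downshift('middle', 40, 20) == 8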
def get_axis_grid(self, ind):
"""
Returns the grid for a particular axis.
Args:
ind (int): Axis index.
"""
ng = self.dim
num_pts = ng[ind]
lengths = self.structure.lattice.abc
return [i / num_pts * lengths[ind] for i in range(num_pts)] | def function[get_axis_grid, parameter[self, ind]]:
constant[
Returns the grid for a particular axis.
Args:
ind (int): Axis index.
]
variable[ng] assign[=] name[self].dim
variable[num_pts] assign[=] call[name[ng]][name[ind]]
variable[lengths] assign[=] name[self].structure.lattice.abc
return[<ast.ListComp object at 0x7da1b26aca00>] | keyword[def] identifier[get_axis_grid] ( identifier[self] , identifier[ind] ):
literal[string]
identifier[ng] = identifier[self] . identifier[dim]
identifier[num_pts] = identifier[ng] [ identifier[ind] ]
identifier[lengths] = identifier[self] . identifier[structure] . identifier[lattice] . identifier[abc]
keyword[return] [ identifier[i] / identifier[num_pts] * identifier[lengths] [ identifier[ind] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_pts] )] | def get_axis_grid(self, ind):
"""
Returns the grid for a particular axis.
Args:
ind (int): Axis index.
"""
ng = self.dim
num_pts = ng[ind]
lengths = self.structure.lattice.abc
return [i / num_pts * lengths[ind] for i in range(num_pts)] |
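A quick worked example of the grid formula: four points along a 10.0 Å axis give fractional steps scaled to length, with the periodic endpoint excluded. The float() guard below only matters under Python 2 division.

num_pts, length = 4, 10.0
grid = [i / float(num_pts) * length for i in range(num_pts)]
assert grid == [0.0, 2.5, 5.0, 7.5]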
def create_folder(dirpath, overwrite=False):
""" Will create dirpath folder. If dirpath already exists and overwrite is False,
will append a '+' suffix to dirpath until dirpath does not exist."""
if not overwrite:
while op.exists(dirpath):
dirpath += '+'
os.makedirs(dirpath, exist_ok=overwrite)
return dirpath | def function[create_folder, parameter[dirpath, overwrite]]:
constant[ Will create dirpath folder. If dirpath already exists and overwrite is False,
will append a '+' suffix to dirpath until dirpath does not exist.]
if <ast.UnaryOp object at 0x7da1afe39540> begin[:]
while call[name[op].exists, parameter[name[dirpath]]] begin[:]
<ast.AugAssign object at 0x7da1b004dd80>
call[name[os].makedirs, parameter[name[dirpath]]]
return[name[dirpath]] | keyword[def] identifier[create_folder] ( identifier[dirpath] , identifier[overwrite] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[overwrite] :
keyword[while] identifier[op] . identifier[exists] ( identifier[dirpath] ):
identifier[dirpath] += literal[string]
identifier[os] . identifier[makedirs] ( identifier[dirpath] , identifier[exist_ok] = identifier[overwrite] )
keyword[return] identifier[dirpath] | def create_folder(dirpath, overwrite=False):
""" Will create dirpath folder. If dirpath already exists and overwrite is False,
will append a '+' suffix to dirpath until dirpath does not exist."""
if not overwrite:
while op.exists(dirpath):
dirpath += '+' # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
os.makedirs(dirpath, exist_ok=overwrite)
return dirpath |
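Behavior sketch: with overwrite left False the function never reuses an existing directory, appending '+' until the name is free (note that exist_ok requires Python 3).

import tempfile
base = tempfile.mkdtemp()            # an existing directory
first = create_folder(base)          # base taken -> creates base + '+'
second = create_folder(base)         # now creates base + '++'
assert first == base + '+' and second == base + '++'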
def scs2e(sc, sclkch):
"""
Convert a spacecraft clock string to ephemeris seconds past J2000 (ET).
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scs2e_c.html
:param sc: NAIF integer code for a spacecraft.
:type sc: int
:param sclkch: An SCLK string.
:type sclkch: str
:return: Ephemeris time, seconds past J2000.
:rtype: float
"""
sc = ctypes.c_int(sc)
sclkch = stypes.stringToCharP(sclkch)
et = ctypes.c_double()
libspice.scs2e_c(sc, sclkch, ctypes.byref(et))
return et.value | def function[scs2e, parameter[sc, sclkch]]:
constant[
Convert a spacecraft clock string to ephemeris seconds past J2000 (ET).
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scs2e_c.html
:param sc: NAIF integer code for a spacecraft.
:type sc: int
:param sclkch: An SCLK string.
:type sclkch: str
:return: Ephemeris time, seconds past J2000.
:rtype: float
]
variable[sc] assign[=] call[name[ctypes].c_int, parameter[name[sc]]]
variable[sclkch] assign[=] call[name[stypes].stringToCharP, parameter[name[sclkch]]]
variable[et] assign[=] call[name[ctypes].c_double, parameter[]]
call[name[libspice].scs2e_c, parameter[name[sc], name[sclkch], call[name[ctypes].byref, parameter[name[et]]]]]
return[name[et].value] | keyword[def] identifier[scs2e] ( identifier[sc] , identifier[sclkch] ):
literal[string]
identifier[sc] = identifier[ctypes] . identifier[c_int] ( identifier[sc] )
identifier[sclkch] = identifier[stypes] . identifier[stringToCharP] ( identifier[sclkch] )
identifier[et] = identifier[ctypes] . identifier[c_double] ()
identifier[libspice] . identifier[scs2e_c] ( identifier[sc] , identifier[sclkch] , identifier[ctypes] . identifier[byref] ( identifier[et] ))
keyword[return] identifier[et] . identifier[value] | def scs2e(sc, sclkch):
"""
Convert a spacecraft clock string to ephemeris seconds past J2000 (ET).
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/scs2e_c.html
:param sc: NAIF integer code for a spacecraft.
:type sc: int
:param sclkch: An SCLK string.
:type sclkch: str
:return: Ephemeris time, seconds past J2000.
:rtype: float
"""
sc = ctypes.c_int(sc)
sclkch = stypes.stringToCharP(sclkch)
et = ctypes.c_double()
libspice.scs2e_c(sc, sclkch, ctypes.byref(et))
return et.value |
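A hedged usage sketch following SpiceyPy conventions: a leapseconds kernel and the spacecraft's SCLK kernel must be furnished before the conversion. Here -82 is Cassini's NAIF ID, and the kernel filenames are examples only.

furnsh('naif0012.tls')         # leapseconds kernel (example filename)
furnsh('cas00172.tsc')         # Cassini SCLK kernel (example filename)
et = scs2e(-82, '1/1465644281.165')
print(et)                      # ephemeris seconds past J2000 (TDB)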
def as_cache_key(self, ireq):
"""
Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards
compatibility with cache files. For a requirement without extras, this will return, for example:
("ipython", "2.1.0")
For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets,
like so:
("ipython", "2.1.0[nbconvert,notebook]")
"""
name, version, extras = as_tuple(ireq)
if not extras:
extras_string = ""
else:
extras_string = "[{}]".format(",".join(extras))
return name, "{}{}".format(version, extras_string) | def function[as_cache_key, parameter[self, ireq]]:
constant[
Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards
compatibility with cache files. For a requirement without extras, this will return, for example:
("ipython", "2.1.0")
For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets,
like so:
("ipython", "2.1.0[nbconvert,notebook]")
]
<ast.Tuple object at 0x7da18f00fd60> assign[=] call[name[as_tuple], parameter[name[ireq]]]
if <ast.UnaryOp object at 0x7da18f00ffd0> begin[:]
variable[extras_string] assign[=] constant[]
return[tuple[[<ast.Name object at 0x7da18f00cd90>, <ast.Call object at 0x7da18f00ece0>]]] | keyword[def] identifier[as_cache_key] ( identifier[self] , identifier[ireq] ):
literal[string]
identifier[name] , identifier[version] , identifier[extras] = identifier[as_tuple] ( identifier[ireq] )
keyword[if] keyword[not] identifier[extras] :
identifier[extras_string] = literal[string]
keyword[else] :
identifier[extras_string] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[extras] ))
keyword[return] identifier[name] , literal[string] . identifier[format] ( identifier[version] , identifier[extras_string] ) | def as_cache_key(self, ireq):
"""
Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards
compatibility with cache files. For a requirement without extras, this will return, for example:
("ipython", "2.1.0")
For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets,
like so:
("ipython", "2.1.0[nbconvert,notebook]")
"""
(name, version, extras) = as_tuple(ireq)
if not extras:
extras_string = '' # depends on [control=['if'], data=[]]
else:
extras_string = '[{}]'.format(','.join(extras))
return (name, '{}{}'.format(version, extras_string)) |
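The key's version component is simply the version string with a bracketed, comma-joined extras suffix when extras exist; spelled out standalone:

def format_cache_version(version, extras):
    # same string shape as_cache_key produces for its second element
    extras_string = '[{}]'.format(','.join(extras)) if extras else ''
    return '{}{}'.format(version, extras_string)

assert format_cache_version('2.1.0', ()) == '2.1.0'
assert format_cache_version('2.1.0', ('nbconvert', 'notebook')) == '2.1.0[nbconvert,notebook]'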
def get_developer_package(path, format=None):
"""Create a developer package.
Args:
path (str): Path to dir containing package definition file.
format (str): Package definition file format, detected if None.
Returns:
`DeveloperPackage`.
"""
from rez.developer_package import DeveloperPackage
return DeveloperPackage.from_path(path, format=format) | def function[get_developer_package, parameter[path, format]]:
constant[Create a developer package.
Args:
path (str): Path to dir containing package definition file.
format (str): Package definition file format, detected if None.
Returns:
`DeveloperPackage`.
]
from relative_module[rez.developer_package] import module[DeveloperPackage]
return[call[name[DeveloperPackage].from_path, parameter[name[path]]]] | keyword[def] identifier[get_developer_package] ( identifier[path] , identifier[format] = keyword[None] ):
literal[string]
keyword[from] identifier[rez] . identifier[developer_package] keyword[import] identifier[DeveloperPackage]
keyword[return] identifier[DeveloperPackage] . identifier[from_path] ( identifier[path] , identifier[format] = identifier[format] ) | def get_developer_package(path, format=None):
"""Create a developer package.
Args:
path (str): Path to dir containing package definition file.
format (str): Package definition file format, detected if None.
Returns:
`DeveloperPackage`.
"""
from rez.developer_package import DeveloperPackage
return DeveloperPackage.from_path(path, format=format) |
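A hedged call-site sketch: point it at a source checkout containing a package definition file (package.py and friends) and read attributes off the returned DeveloperPackage. The path is illustrative.

pkg = get_developer_package('/path/to/source/checkout')
print('%s-%s' % (pkg.name, pkg.version))   # attributes defined by package.py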
def check_time(self):
""" Make sure our Honeypot time is consistent, and not too far off
from the actual time. """
poll = self.config['timecheck']['poll']
ntp_poll = self.config['timecheck']['ntp_pool']
while True:
clnt = ntplib.NTPClient()
try:
response = clnt.request(ntp_poll, version=3)
diff = response.offset
if abs(diff) >= 15:
logger.error('Timings found to be far off, shutting down drone ({0})'.format(diff))
sys.exit(1)
else:
logger.debug('Polled ntp server and found that drone has {0} seconds offset.'.format(diff))
except (ntplib.NTPException, _socket.error) as ex:
logger.warning('Error while polling ntp server: {0}'.format(ex))
gevent.sleep(poll * 60 * 60) | def function[check_time, parameter[self]]:
constant[ Make sure our Honeypot time is consistent, and not too far off
from the actual time. ]
variable[poll] assign[=] call[call[name[self].config][constant[timecheck]]][constant[poll]]
variable[ntp_poll] assign[=] call[call[name[self].config][constant[timecheck]]][constant[ntp_pool]]
while constant[True] begin[:]
variable[clnt] assign[=] call[name[ntplib].NTPClient, parameter[]]
<ast.Try object at 0x7da1b10c1d20>
call[name[gevent].sleep, parameter[binary_operation[binary_operation[name[poll] * constant[60]] * constant[60]]]] | keyword[def] identifier[check_time] ( identifier[self] ):
literal[string]
identifier[poll] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]
identifier[ntp_poll] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ]
keyword[while] keyword[True] :
identifier[clnt] = identifier[ntplib] . identifier[NTPClient] ()
keyword[try] :
identifier[response] = identifier[clnt] . identifier[request] ( identifier[ntp_poll] , identifier[version] = literal[int] )
identifier[diff] = identifier[response] . identifier[offset]
keyword[if] identifier[abs] ( identifier[diff] )>= literal[int] :
identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[diff] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[diff] ))
keyword[except] ( identifier[ntplib] . identifier[NTPException] , identifier[_socket] . identifier[error] ) keyword[as] identifier[ex] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[ex] ))
identifier[gevent] . identifier[sleep] ( identifier[poll] * literal[int] * literal[int] ) | def check_time(self):
""" Make sure our Honeypot time is consistent, and not too far off
from the actual time. """
poll = self.config['timecheck']['poll']
ntp_poll = self.config['timecheck']['ntp_pool']
while True:
clnt = ntplib.NTPClient()
try:
response = clnt.request(ntp_poll, version=3)
diff = response.offset
if abs(diff) >= 15:
logger.error('Timings found to be far off, shutting down drone ({0})'.format(diff))
sys.exit(1) # depends on [control=['if'], data=[]]
else:
logger.debug('Polled ntp server and found that drone has {0} seconds offset.'.format(diff)) # depends on [control=['try'], data=[]]
except (ntplib.NTPException, _socket.error) as ex:
logger.warning('Error while polling ntp server: {0}'.format(ex)) # depends on [control=['except'], data=['ex']]
gevent.sleep(poll * 60 * 60) # depends on [control=['while'], data=[]] |
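A self-contained sketch of the single offset probe that check_time repeats; the pool hostname is illustrative and the call needs network access:

import ntplib

def ntp_offset(server='pool.ntp.org'):
    response = ntplib.NTPClient().request(server, version=3)
    return response.offset  # signed local clock drift in seconds

# abs(ntp_offset()) >= 15 corresponds to the shutdown branch above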
def get_or_create(
cls, name: sym.Symbol, module: types.ModuleType = None
) -> "Namespace":
"""Get the namespace bound to the symbol `name` in the global namespace
cache, creating it if it does not exist.
Return the namespace."""
return cls._NAMESPACES.swap(Namespace.__get_or_create, name, module=module)[
name
] | def function[get_or_create, parameter[cls, name, module]]:
constant[Get the namespace bound to the symbol `name` in the global namespace
cache, creating it if it does not exist.
Return the namespace.]
return[call[call[name[cls]._NAMESPACES.swap, parameter[name[Namespace].__get_or_create, name[name]]]][name[name]]] | keyword[def] identifier[get_or_create] (
identifier[cls] , identifier[name] : identifier[sym] . identifier[Symbol] , identifier[module] : identifier[types] . identifier[ModuleType] = keyword[None]
)-> literal[string] :
literal[string]
keyword[return] identifier[cls] . identifier[_NAMESPACES] . identifier[swap] ( identifier[Namespace] . identifier[__get_or_create] , identifier[name] , identifier[module] = identifier[module] )[
identifier[name]
] | def get_or_create(cls, name: sym.Symbol, module: types.ModuleType=None) -> 'Namespace':
"""Get the namespace bound to the symbol `name` in the global namespace
cache, creating it if it does not exist.
Return the namespace."""
return cls._NAMESPACES.swap(Namespace.__get_or_create, name, module=module)[name] |
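A hedged usage sketch; sym.symbol is assumed to be the symbol constructor used by this codebase, so the calls are shown as comments:

# ns = Namespace.get_or_create(sym.symbol('user'))
# same = Namespace.get_or_create(sym.symbol('user'))
# assert ns is same  # the swap on _NAMESPACES caches one Namespace per name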
def md5sum(self, f):
''' md5sums a file, returning the hex digest
Parameters:
- f filename string
'''
m = hashlib.md5()
fh = open(f, 'r')
while 1:
chunk = fh.read(BUF_SIZE)
if not chunk: break
m.update(chunk)
fh.close()
return m.hexdigest() | def function[md5sum, parameter[self, f]]:
constant[ md5sums a file, returning the hex digest
Parameters:
- f filename string
]
variable[m] assign[=] call[name[hashlib].md5, parameter[]]
variable[fh] assign[=] call[name[open], parameter[name[f], constant[r]]]
while constant[1] begin[:]
variable[chunk] assign[=] call[name[fh].read, parameter[name[BUF_SIZE]]]
if <ast.UnaryOp object at 0x7da18f00da80> begin[:]
break
call[name[m].update, parameter[name[chunk]]]
call[name[fh].close, parameter[]]
return[call[name[m].hexdigest, parameter[]]] | keyword[def] identifier[md5sum] ( identifier[self] , identifier[f] ):
literal[string]
identifier[m] = identifier[hashlib] . identifier[md5] ()
identifier[fh] = identifier[open] ( identifier[f] , literal[string] )
keyword[while] literal[int] :
identifier[chunk] = identifier[fh] . identifier[read] ( identifier[BUF_SIZE] )
keyword[if] keyword[not] identifier[chunk] : keyword[break]
identifier[m] . identifier[update] ( identifier[chunk] )
identifier[fh] . identifier[close] ()
keyword[return] identifier[m] . identifier[hexdigest] () | def md5sum(self, f):
""" md5sums a file, returning the hex digest
Parameters:
- f filename string
"""
m = hashlib.md5()
fh = open(f, 'r')
while 1:
chunk = fh.read(BUF_SIZE)
if not chunk:
break # depends on [control=['if'], data=[]]
m.update(chunk) # depends on [control=['while'], data=[]]
fh.close()
return m.hexdigest() |
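A self-contained Python 3 equivalent for illustration (the original is Python 2 style, where 'r' mode returns byte strings); 'rb' keeps hashlib fed with bytes, and buf_size stands in for the module-level BUF_SIZE:

import hashlib

def md5sum_file(path, buf_size=65536):
    m = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(buf_size), b''):
            m.update(chunk)
    return m.hexdigest()

print(md5sum_file(__file__))  # 32-character hex digest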
def iscm_md_append_array(self, arraypath, member):
"""
Append a member to a metadata array entry
"""
array_path = string.split(arraypath, ".")
array_key = array_path.pop()
current = self.metadata
for k in array_path:
if not current.has_key(k):
current[k] = {}
current = current[k]
if not current.has_key(array_key):
current[array_key] = []
if not type(current[array_key]) == list:
raise KeyError("%s doesn't point to an array" % arraypath)
current[array_key].append(member) | def function[iscm_md_append_array, parameter[self, arraypath, member]]:
constant[
Append a member to a metadata array entry
]
variable[array_path] assign[=] call[name[string].split, parameter[name[arraypath], constant[.]]]
variable[array_key] assign[=] call[name[array_path].pop, parameter[]]
variable[current] assign[=] name[self].metadata
for taget[name[k]] in starred[name[array_path]] begin[:]
if <ast.UnaryOp object at 0x7da18bcc9660> begin[:]
call[name[current]][name[k]] assign[=] dictionary[[], []]
variable[current] assign[=] call[name[current]][name[k]]
if <ast.UnaryOp object at 0x7da2041d86d0> begin[:]
call[name[current]][name[array_key]] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da2041d95d0> begin[:]
<ast.Raise object at 0x7da2041dbd30>
call[call[name[current]][name[array_key]].append, parameter[name[member]]] | keyword[def] identifier[iscm_md_append_array] ( identifier[self] , identifier[arraypath] , identifier[member] ):
literal[string]
identifier[array_path] = identifier[string] . identifier[split] ( identifier[arraypath] , literal[string] )
identifier[array_key] = identifier[array_path] . identifier[pop] ()
identifier[current] = identifier[self] . identifier[metadata]
keyword[for] identifier[k] keyword[in] identifier[array_path] :
keyword[if] keyword[not] identifier[current] . identifier[has_key] ( identifier[k] ):
identifier[current] [ identifier[k] ]={}
identifier[current] = identifier[current] [ identifier[k] ]
keyword[if] keyword[not] identifier[current] . identifier[has_key] ( identifier[array_key] ):
identifier[current] [ identifier[array_key] ]=[]
keyword[if] keyword[not] identifier[type] ( identifier[current] [ identifier[array_key] ])== identifier[list] :
keyword[raise] identifier[KeyError] ( literal[string] % identifier[arraypath] )
identifier[current] [ identifier[array_key] ]. identifier[append] ( identifier[member] ) | def iscm_md_append_array(self, arraypath, member):
"""
Append a member to a metadata array entry
"""
array_path = string.split(arraypath, '.')
array_key = array_path.pop()
current = self.metadata
for k in array_path:
if not current.has_key(k):
current[k] = {} # depends on [control=['if'], data=[]]
current = current[k] # depends on [control=['for'], data=['k']]
if not current.has_key(array_key):
current[array_key] = [] # depends on [control=['if'], data=[]]
if not type(current[array_key]) == list:
raise KeyError("%s doesn't point to an array" % arraypath) # depends on [control=['if'], data=[]]
current[array_key].append(member) |
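A standalone Python 3 rendering of the same dotted-path walk (has_key and string.split above are Python 2 idioms); append_array is a hypothetical stand-in for the method:

def append_array(metadata, arraypath, member):
    *path, key = arraypath.split('.')
    current = metadata
    for k in path:
        current = current.setdefault(k, {})
    arr = current.setdefault(key, [])
    if not isinstance(arr, list):
        raise KeyError("%s doesn't point to an array" % arraypath)
    arr.append(member)

md = {}
append_array(md, 'build.steps', 'compile')
append_array(md, 'build.steps', 'test')
print(md)  # {'build': {'steps': ['compile', 'test']}}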
def uand(self):
"""Unary AND reduction operator"""
return reduce(operator.and_, self._items, self.ftype.box(1)) | def function[uand, parameter[self]]:
constant[Unary AND reduction operator]
return[call[name[reduce], parameter[name[operator].and_, name[self]._items, call[name[self].ftype.box, parameter[constant[1]]]]]] | keyword[def] identifier[uand] ( identifier[self] ):
literal[string]
keyword[return] identifier[reduce] ( identifier[operator] . identifier[and_] , identifier[self] . identifier[_items] , identifier[self] . identifier[ftype] . identifier[box] ( literal[int] )) | def uand(self):
"""Unary AND reduction operator"""
return reduce(operator.and_, self._items, self.ftype.box(1)) |
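A plain-Python analogue of the fold, showing why box(1) works as the identity element: any zero item clears the AND:

from functools import reduce
import operator

print(reduce(operator.and_, [1, 1, 1], 1))  # 1
print(reduce(operator.and_, [1, 0, 1], 1))  # 0 -- a single zero forces the result to 0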
def keys(self, start=None, stop=None):
"""Like :meth:`items` but returns only the keys."""
return (item[0] for item in self.items(start, stop)) | def function[keys, parameter[self, start, stop]]:
constant[Like :meth:`items` but returns only the keys.]
return[<ast.GeneratorExp object at 0x7da1b0a1d660>] | keyword[def] identifier[keys] ( identifier[self] , identifier[start] = keyword[None] , identifier[stop] = keyword[None] ):
literal[string]
keyword[return] ( identifier[item] [ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[self] . identifier[items] ( identifier[start] , identifier[stop] )) | def keys(self, start=None, stop=None):
"""Like :meth:`items` but returns only the keys."""
return (item[0] for item in self.items(start, stop)) |
def _prm_read_dictionary(self, leaf, full_name):
"""Loads data that was originally a dictionary when stored
:param leaf:
PyTables table containing the dictionary data
:param full_name:
Full name of the parameter or result whose data is to be loaded
:return:
Data to be loaded
"""
try:
            # Load as Object Table
temp_table = self._prm_read_table(leaf, full_name)
# Turn the ObjectTable into a dictionary of lists (with length 1).
temp_dict = temp_table.to_dict('list')
innder_dict = {}
# Turn the dictionary of lists into a normal dictionary
for innerkey, vallist in temp_dict.items():
innder_dict[innerkey] = vallist[0]
return innder_dict
except:
self._logger.error('Failed loading `%s` of `%s`.' % (leaf._v_name, full_name))
raise | def function[_prm_read_dictionary, parameter[self, leaf, full_name]]:
constant[Loads data that was originally a dictionary when stored
:param leaf:
PyTables table containing the dictionary data
:param full_name:
Full name of the parameter or result whose data is to be loaded
:return:
Data to be loaded
]
<ast.Try object at 0x7da1b0388d00> | keyword[def] identifier[_prm_read_dictionary] ( identifier[self] , identifier[leaf] , identifier[full_name] ):
literal[string]
keyword[try] :
identifier[temp_table] = identifier[self] . identifier[_prm_read_table] ( identifier[leaf] , identifier[full_name] )
identifier[temp_dict] = identifier[temp_table] . identifier[to_dict] ( literal[string] )
identifier[innder_dict] ={}
keyword[for] identifier[innerkey] , identifier[vallist] keyword[in] identifier[temp_dict] . identifier[items] ():
identifier[innder_dict] [ identifier[innerkey] ]= identifier[vallist] [ literal[int] ]
keyword[return] identifier[innder_dict]
keyword[except] :
identifier[self] . identifier[_logger] . identifier[error] ( literal[string] %( identifier[leaf] . identifier[_v_name] , identifier[full_name] ))
keyword[raise] | def _prm_read_dictionary(self, leaf, full_name):
"""Loads data that was originally a dictionary when stored
:param leaf:
PyTables table containing the dictionary data
:param full_name:
Full name of the parameter or result whose data is to be loaded
:return:
Data to be loaded
"""
try:
            # Load as Object Table
temp_table = self._prm_read_table(leaf, full_name)
# Turn the ObjectTable into a dictionary of lists (with length 1).
temp_dict = temp_table.to_dict('list')
innder_dict = {}
# Turn the dictionary of lists into a normal dictionary
for (innerkey, vallist) in temp_dict.items():
innder_dict[innerkey] = vallist[0] # depends on [control=['for'], data=[]]
return innder_dict # depends on [control=['try'], data=[]]
except:
self._logger.error('Failed loading `%s` of `%s`.' % (leaf._v_name, full_name))
raise # depends on [control=['except'], data=[]] |
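A standalone sketch of the table-to-dict flattening done above, using a pandas DataFrame as a stand-in for the ObjectTable (assuming its to_dict('list') semantics match):

import pandas as pd

table = pd.DataFrame({'a': [1], 'b': ['x']})  # one-row table, as stored
lists = table.to_dict('list')                 # {'a': [1], 'b': ['x']}
flat = {k: v[0] for k, v in lists.items()}    # unwrap the length-1 lists
print(flat)                                   # {'a': 1, 'b': 'x'}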
def unquote_header_value(value):
r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1].replace('\\\\', '\\').replace('\\"', '"')
return value | def function[unquote_header_value, parameter[value]]:
constant[Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
]
if <ast.BoolOp object at 0x7da20e9552a0> begin[:]
variable[value] assign[=] call[call[call[name[value]][<ast.Slice object at 0x7da20e9543d0>].replace, parameter[constant[\\], constant[\]]].replace, parameter[constant[\"], constant["]]]
return[name[value]] | keyword[def] identifier[unquote_header_value] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[and] identifier[value] [ literal[int] ]== identifier[value] [- literal[int] ]== literal[string] :
identifier[value] = identifier[value] [ literal[int] :- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[value] | def unquote_header_value(value):
"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1].replace('\\\\', '\\').replace('\\"', '"') # depends on [control=['if'], data=[]]
return value |
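Two quick checks of the browser-style behaviour, using the function defined above (inputs are illustrative):

print(unquote_header_value('"C:\\\\foo\\\\bar.txt"'))  # C:\foo\bar.txt
print(unquote_header_value('plain'))                   # plain -- unquoted values pass through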
def subnet_get(auth=None, **kwargs):
'''
Get a single subnet
filters
A Python dictionary of filter conditions to push down
CLI Example:
.. code-block:: bash
salt '*' neutronng.subnet_get name=subnet1
'''
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.get_subnet(**kwargs) | def function[subnet_get, parameter[auth]]:
constant[
Get a single subnet
filters
A Python dictionary of filter conditions to push down
CLI Example:
.. code-block:: bash
salt '*' neutronng.subnet_get name=subnet1
]
variable[cloud] assign[=] call[name[get_operator_cloud], parameter[name[auth]]]
variable[kwargs] assign[=] call[name[_clean_kwargs], parameter[]]
return[call[name[cloud].get_subnet, parameter[]]] | keyword[def] identifier[subnet_get] ( identifier[auth] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[cloud] = identifier[get_operator_cloud] ( identifier[auth] )
identifier[kwargs] = identifier[_clean_kwargs] (** identifier[kwargs] )
keyword[return] identifier[cloud] . identifier[get_subnet] (** identifier[kwargs] ) | def subnet_get(auth=None, **kwargs):
"""
Get a single subnet
filters
A Python dictionary of filter conditions to push down
CLI Example:
.. code-block:: bash
salt '*' neutronng.subnet_get name=subnet1
"""
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.get_subnet(**kwargs) |
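A Python-side sketch mirroring the CLI example; it requires reachable OpenStack credentials, and the subnet field names are assumed from shade's dicts, so the calls are shown as comments:

# subnet = subnet_get(auth=None, name='subnet1')
# if subnet:
#     print(subnet['id'], subnet['cidr'])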
def get_functions_and_classes(package):
"""Retun lists of functions and classes from a package.
Parameters
----------
package : python package object
Returns
--------
list, list : list of classes and functions
Each sublist consists of [name, member] sublists.
"""
classes, functions = [], []
for name, member in inspect.getmembers(package):
if not name.startswith('_'):
if inspect.isclass(member):
classes.append([name, member])
elif inspect.isfunction(member):
functions.append([name, member])
return classes, functions | def function[get_functions_and_classes, parameter[package]]:
    constant[Return lists of functions and classes from a package.
Parameters
----------
package : python package object
Returns
--------
list, list : list of classes and functions
Each sublist consists of [name, member] sublists.
]
<ast.Tuple object at 0x7da1b0c426b0> assign[=] tuple[[<ast.List object at 0x7da1b0c41db0>, <ast.List object at 0x7da1b0c40fd0>]]
for taget[tuple[[<ast.Name object at 0x7da1b0c42980>, <ast.Name object at 0x7da1b0c43700>]]] in starred[call[name[inspect].getmembers, parameter[name[package]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0c402b0> begin[:]
if call[name[inspect].isclass, parameter[name[member]]] begin[:]
call[name[classes].append, parameter[list[[<ast.Name object at 0x7da207f9a5c0>, <ast.Name object at 0x7da207f9af20>]]]]
return[tuple[[<ast.Name object at 0x7da1b0e2c280>, <ast.Name object at 0x7da1b0e2c460>]]] | keyword[def] identifier[get_functions_and_classes] ( identifier[package] ):
literal[string]
identifier[classes] , identifier[functions] =[],[]
keyword[for] identifier[name] , identifier[member] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[package] ):
keyword[if] keyword[not] identifier[name] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[inspect] . identifier[isclass] ( identifier[member] ):
identifier[classes] . identifier[append] ([ identifier[name] , identifier[member] ])
keyword[elif] identifier[inspect] . identifier[isfunction] ( identifier[member] ):
identifier[functions] . identifier[append] ([ identifier[name] , identifier[member] ])
keyword[return] identifier[classes] , identifier[functions] | def get_functions_and_classes(package):
"""Retun lists of functions and classes from a package.
Parameters
----------
package : python package object
Returns
--------
list, list : list of classes and functions
Each sublist consists of [name, member] sublists.
"""
(classes, functions) = ([], [])
for (name, member) in inspect.getmembers(package):
if not name.startswith('_'):
if inspect.isclass(member):
classes.append([name, member]) # depends on [control=['if'], data=[]]
elif inspect.isfunction(member):
functions.append([name, member]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (classes, functions) |
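A usage sketch against a standard-library module (json chosen arbitrarily; member lists vary by Python version):

import json

classes, functions = get_functions_and_classes(json)
print([name for name, _ in classes])    # e.g. ['JSONDecodeError', 'JSONDecoder', 'JSONEncoder']
print([name for name, _ in functions])  # e.g. ['dump', 'dumps', 'load', 'loads']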
def findSubCommand(args):
"""
Given a list ['foo','bar', 'baz'], attempts to create a command name in the
format 'foo-bar-baz'. If that command exists, we run it. If it doesn't, we
check to see if foo-bar exists, in which case we run `foo-bar baz`. We keep
taking chunks off the end of the command name and adding them to the argument
list until we find a valid command name we can run.
This allows us to easily make git-style command drivers where for example we
have a driver script, foo, and subcommand scripts foo-bar and foo-baz, and when
the user types `foo bar foobar` we find the foo-bar script and run it as
`foo-bar foobar`
:param list|tuple args: list to try and convert to a command args pair
:returns: command and arguments list
:rtype: tuple
:raises StandardError: if the args can't be matched to an executable subcommand
"""
# If the only command we find is the first element of args, we've found the
# driver script itself and re-executing it will cause an infinite loop, so
# don't even look at the first element on its own.
for n in range(len(args) - 1):
command = '-'.join(args[:(len(args) - n)])
commandArgs = args[len(args) - n:]
if isProgram(command):
return (command, commandArgs)
raise StandardError("Could not find a %s subcommand executable" % command) | def function[findSubCommand, parameter[args]]:
constant[
Given a list ['foo','bar', 'baz'], attempts to create a command name in the
format 'foo-bar-baz'. If that command exists, we run it. If it doesn't, we
check to see if foo-bar exists, in which case we run `foo-bar baz`. We keep
taking chunks off the end of the command name and adding them to the argument
list until we find a valid command name we can run.
This allows us to easily make git-style command drivers where for example we
have a driver script, foo, and subcommand scripts foo-bar and foo-baz, and when
the user types `foo bar foobar` we find the foo-bar script and run it as
`foo-bar foobar`
:param list|tuple args: list to try and convert to a command args pair
:returns: command and arguments list
:rtype: tuple
:raises StandardError: if the args can't be matched to an executable subcommand
]
for taget[name[n]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[args]]] - constant[1]]]]] begin[:]
variable[command] assign[=] call[constant[-].join, parameter[call[name[args]][<ast.Slice object at 0x7da207f02890>]]]
variable[commandArgs] assign[=] call[name[args]][<ast.Slice object at 0x7da18f722200>]
if call[name[isProgram], parameter[name[command]]] begin[:]
return[tuple[[<ast.Name object at 0x7da18f7239a0>, <ast.Name object at 0x7da18f720640>]]]
<ast.Raise object at 0x7da18f721690> | keyword[def] identifier[findSubCommand] ( identifier[args] ):
literal[string]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[args] )- literal[int] ):
identifier[command] = literal[string] . identifier[join] ( identifier[args] [:( identifier[len] ( identifier[args] )- identifier[n] )])
identifier[commandArgs] = identifier[args] [ identifier[len] ( identifier[args] )- identifier[n] :]
keyword[if] identifier[isProgram] ( identifier[command] ):
keyword[return] ( identifier[command] , identifier[commandArgs] )
keyword[raise] identifier[StandardError] ( literal[string] % identifier[command] ) | def findSubCommand(args):
"""
Given a list ['foo','bar', 'baz'], attempts to create a command name in the
format 'foo-bar-baz'. If that command exists, we run it. If it doesn't, we
check to see if foo-bar exists, in which case we run `foo-bar baz`. We keep
taking chunks off the end of the command name and adding them to the argument
list until we find a valid command name we can run.
This allows us to easily make git-style command drivers where for example we
have a driver script, foo, and subcommand scripts foo-bar and foo-baz, and when
the user types `foo bar foobar` we find the foo-bar script and run it as
`foo-bar foobar`
:param list|tuple args: list to try and convert to a command args pair
:returns: command and arguments list
:rtype: tuple
:raises StandardError: if the args can't be matched to an executable subcommand
"""
# If the only command we find is the first element of args, we've found the
# driver script itself and re-executing it will cause an infinite loop, so
# don't even look at the first element on its own.
for n in range(len(args) - 1):
command = '-'.join(args[:len(args) - n])
commandArgs = args[len(args) - n:]
if isProgram(command):
return (command, commandArgs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
raise StandardError('Could not find a %s subcommand executable' % command) |
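A small trace of the candidate order the loop tries; trace_candidates is a hypothetical helper that omits the isProgram check:

def trace_candidates(args):
    for n in range(len(args) - 1):
        yield '-'.join(args[:len(args) - n]), args[len(args) - n:]

print(list(trace_candidates(['foo', 'bar', 'baz'])))
# [('foo-bar-baz', []), ('foo-bar', ['baz'])] -- 'foo' alone is never tried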
def forum_topic_list(self, title_matches=None, title=None,
category_id=None):
"""Function to get forum topics.
Parameters:
title_matches (str): Search body for the given terms.
title (str): Exact title match.
category_id (int): Can be: 0, 1, 2 (General, Tags, Bugs & Features
respectively).
"""
params = {
'search[title_matches]': title_matches,
'search[title]': title,
'search[category_id]': category_id
}
return self._get('forum_topics.json', params) | def function[forum_topic_list, parameter[self, title_matches, title, category_id]]:
constant[Function to get forum topics.
Parameters:
title_matches (str): Search body for the given terms.
title (str): Exact title match.
category_id (int): Can be: 0, 1, 2 (General, Tags, Bugs & Features
respectively).
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2041dad70>, <ast.Constant object at 0x7da2041da260>, <ast.Constant object at 0x7da2041d8670>], [<ast.Name object at 0x7da2041d9b10>, <ast.Name object at 0x7da2041dbd60>, <ast.Name object at 0x7da2041d8c40>]]
return[call[name[self]._get, parameter[constant[forum_topics.json], name[params]]]] | keyword[def] identifier[forum_topic_list] ( identifier[self] , identifier[title_matches] = keyword[None] , identifier[title] = keyword[None] ,
identifier[category_id] = keyword[None] ):
literal[string]
identifier[params] ={
literal[string] : identifier[title_matches] ,
literal[string] : identifier[title] ,
literal[string] : identifier[category_id]
}
keyword[return] identifier[self] . identifier[_get] ( literal[string] , identifier[params] ) | def forum_topic_list(self, title_matches=None, title=None, category_id=None):
"""Function to get forum topics.
Parameters:
title_matches (str): Search body for the given terms.
title (str): Exact title match.
category_id (int): Can be: 0, 1, 2 (General, Tags, Bugs & Features
respectively).
"""
params = {'search[title_matches]': title_matches, 'search[title]': title, 'search[category_id]': category_id}
return self._get('forum_topics.json', params) |
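A hedged usage sketch; client stands for an instance of the wrapping API class and the call needs a live endpoint, so it is shown as comments:

# topics = client.forum_topic_list(title_matches='tagging', category_id=0)
# for topic in topics:
#     print(topic['id'], topic['title'])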
def bytes2human(n, fmt='%(value).1f %(symbol)s', symbols='customary'):
"""
Convert n bytes into a human readable string based on format.
symbols can be either "customary", "customary_ext", "iec" or "iec_ext",
see: http://goo.gl/kTQMs
"""
n = int(n)
if n < 0:
raise ValueError("n < 0")
symbols = SYMBOLS[symbols]
prefix = {}
for i, s in enumerate(symbols[1:]):
prefix[s] = 1 << (i+1)*10
for symbol in reversed(symbols[1:]):
if n >= prefix[symbol]:
value = old_div(float(n), prefix[symbol])
return fmt % locals()
return fmt % dict(symbol=symbols[0], value=n) | def function[bytes2human, parameter[n, fmt, symbols]]:
constant[
Convert n bytes into a human readable string based on format.
symbols can be either "customary", "customary_ext", "iec" or "iec_ext",
see: http://goo.gl/kTQMs
]
variable[n] assign[=] call[name[int], parameter[name[n]]]
if compare[name[n] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1eed840>
variable[symbols] assign[=] call[name[SYMBOLS]][name[symbols]]
variable[prefix] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1eef220>, <ast.Name object at 0x7da1b1eee4a0>]]] in starred[call[name[enumerate], parameter[call[name[symbols]][<ast.Slice object at 0x7da1b1eed9f0>]]]] begin[:]
call[name[prefix]][name[s]] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> binary_operation[binary_operation[name[i] + constant[1]] * constant[10]]]
for taget[name[symbol]] in starred[call[name[reversed], parameter[call[name[symbols]][<ast.Slice object at 0x7da1b1eede70>]]]] begin[:]
if compare[name[n] greater_or_equal[>=] call[name[prefix]][name[symbol]]] begin[:]
variable[value] assign[=] call[name[old_div], parameter[call[name[float], parameter[name[n]]], call[name[prefix]][name[symbol]]]]
return[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]]]
return[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> call[name[dict], parameter[]]]] | keyword[def] identifier[bytes2human] ( identifier[n] , identifier[fmt] = literal[string] , identifier[symbols] = literal[string] ):
literal[string]
identifier[n] = identifier[int] ( identifier[n] )
keyword[if] identifier[n] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[symbols] = identifier[SYMBOLS] [ identifier[symbols] ]
identifier[prefix] ={}
keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[symbols] [ literal[int] :]):
identifier[prefix] [ identifier[s] ]= literal[int] <<( identifier[i] + literal[int] )* literal[int]
keyword[for] identifier[symbol] keyword[in] identifier[reversed] ( identifier[symbols] [ literal[int] :]):
keyword[if] identifier[n] >= identifier[prefix] [ identifier[symbol] ]:
identifier[value] = identifier[old_div] ( identifier[float] ( identifier[n] ), identifier[prefix] [ identifier[symbol] ])
keyword[return] identifier[fmt] % identifier[locals] ()
keyword[return] identifier[fmt] % identifier[dict] ( identifier[symbol] = identifier[symbols] [ literal[int] ], identifier[value] = identifier[n] ) | def bytes2human(n, fmt='%(value).1f %(symbol)s', symbols='customary'):
"""
Convert n bytes into a human readable string based on format.
symbols can be either "customary", "customary_ext", "iec" or "iec_ext",
see: http://goo.gl/kTQMs
"""
n = int(n)
if n < 0:
raise ValueError('n < 0') # depends on [control=['if'], data=[]]
symbols = SYMBOLS[symbols]
prefix = {}
for (i, s) in enumerate(symbols[1:]):
prefix[s] = 1 << (i + 1) * 10 # depends on [control=['for'], data=[]]
for symbol in reversed(symbols[1:]):
if n >= prefix[symbol]:
value = old_div(float(n), prefix[symbol])
return fmt % locals() # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=['symbol']]
return fmt % dict(symbol=symbols[0], value=n) |
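Sample calls, assuming the customary symbol table ('B', 'K', 'M', 'G', ...) from the recipe this derives from:

print(bytes2human(0))        # 0.0 B
print(bytes2human(1024))     # 1.0 K
print(bytes2human(1048576))  # 1.0 M
print(bytes2human(10000, fmt='%(value).2f %(symbol)s'))  # 9.77 K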
def _build_connection_pool(cls, session: AppSession):
'''Create connection pool.'''
args = session.args
connect_timeout = args.connect_timeout
read_timeout = args.read_timeout
if args.timeout:
connect_timeout = read_timeout = args.timeout
if args.limit_rate:
bandwidth_limiter = session.factory.new('BandwidthLimiter',
args.limit_rate)
else:
bandwidth_limiter = None
connection_factory = functools.partial(
Connection,
timeout=read_timeout,
connect_timeout=connect_timeout,
bind_host=session.args.bind_address,
bandwidth_limiter=bandwidth_limiter,
)
ssl_connection_factory = functools.partial(
SSLConnection,
timeout=read_timeout,
connect_timeout=connect_timeout,
bind_host=session.args.bind_address,
ssl_context=session.ssl_context,
)
if not session.args.no_proxy:
if session.args.https_proxy:
http_proxy = session.args.http_proxy.split(':', 1)
proxy_ssl = True
elif session.args.http_proxy:
http_proxy = session.args.http_proxy.split(':', 1)
proxy_ssl = False
else:
http_proxy = None
proxy_ssl = None
if http_proxy:
http_proxy[1] = int(http_proxy[1])
if session.args.proxy_user:
authentication = (session.args.proxy_user,
session.args.proxy_password)
else:
authentication = None
session.factory.class_map['ConnectionPool'] = \
HTTPProxyConnectionPool
host_filter = session.factory.new(
'ProxyHostFilter',
accept_domains=session.args.proxy_domains,
reject_domains=session.args.proxy_exclude_domains,
accept_hostnames=session.args.proxy_hostnames,
reject_hostnames=session.args.proxy_exclude_hostnames
)
return session.factory.new(
'ConnectionPool',
http_proxy,
proxy_ssl=proxy_ssl,
authentication=authentication,
resolver=session.factory['Resolver'],
connection_factory=connection_factory,
ssl_connection_factory=ssl_connection_factory,
host_filter=host_filter,
)
return session.factory.new(
'ConnectionPool',
resolver=session.factory['Resolver'],
connection_factory=connection_factory,
ssl_connection_factory=ssl_connection_factory
) | def function[_build_connection_pool, parameter[cls, session]]:
constant[Create connection pool.]
variable[args] assign[=] name[session].args
variable[connect_timeout] assign[=] name[args].connect_timeout
variable[read_timeout] assign[=] name[args].read_timeout
if name[args].timeout begin[:]
variable[connect_timeout] assign[=] name[args].timeout
if name[args].limit_rate begin[:]
variable[bandwidth_limiter] assign[=] call[name[session].factory.new, parameter[constant[BandwidthLimiter], name[args].limit_rate]]
variable[connection_factory] assign[=] call[name[functools].partial, parameter[name[Connection]]]
variable[ssl_connection_factory] assign[=] call[name[functools].partial, parameter[name[SSLConnection]]]
if <ast.UnaryOp object at 0x7da204346b30> begin[:]
if name[session].args.https_proxy begin[:]
variable[http_proxy] assign[=] call[name[session].args.http_proxy.split, parameter[constant[:], constant[1]]]
variable[proxy_ssl] assign[=] constant[True]
if name[http_proxy] begin[:]
call[name[http_proxy]][constant[1]] assign[=] call[name[int], parameter[call[name[http_proxy]][constant[1]]]]
if name[session].args.proxy_user begin[:]
variable[authentication] assign[=] tuple[[<ast.Attribute object at 0x7da204347880>, <ast.Attribute object at 0x7da204344b20>]]
call[name[session].factory.class_map][constant[ConnectionPool]] assign[=] name[HTTPProxyConnectionPool]
variable[host_filter] assign[=] call[name[session].factory.new, parameter[constant[ProxyHostFilter]]]
return[call[name[session].factory.new, parameter[constant[ConnectionPool], name[http_proxy]]]]
return[call[name[session].factory.new, parameter[constant[ConnectionPool]]]] | keyword[def] identifier[_build_connection_pool] ( identifier[cls] , identifier[session] : identifier[AppSession] ):
literal[string]
identifier[args] = identifier[session] . identifier[args]
identifier[connect_timeout] = identifier[args] . identifier[connect_timeout]
identifier[read_timeout] = identifier[args] . identifier[read_timeout]
keyword[if] identifier[args] . identifier[timeout] :
identifier[connect_timeout] = identifier[read_timeout] = identifier[args] . identifier[timeout]
keyword[if] identifier[args] . identifier[limit_rate] :
identifier[bandwidth_limiter] = identifier[session] . identifier[factory] . identifier[new] ( literal[string] ,
identifier[args] . identifier[limit_rate] )
keyword[else] :
identifier[bandwidth_limiter] = keyword[None]
identifier[connection_factory] = identifier[functools] . identifier[partial] (
identifier[Connection] ,
identifier[timeout] = identifier[read_timeout] ,
identifier[connect_timeout] = identifier[connect_timeout] ,
identifier[bind_host] = identifier[session] . identifier[args] . identifier[bind_address] ,
identifier[bandwidth_limiter] = identifier[bandwidth_limiter] ,
)
identifier[ssl_connection_factory] = identifier[functools] . identifier[partial] (
identifier[SSLConnection] ,
identifier[timeout] = identifier[read_timeout] ,
identifier[connect_timeout] = identifier[connect_timeout] ,
identifier[bind_host] = identifier[session] . identifier[args] . identifier[bind_address] ,
identifier[ssl_context] = identifier[session] . identifier[ssl_context] ,
)
keyword[if] keyword[not] identifier[session] . identifier[args] . identifier[no_proxy] :
keyword[if] identifier[session] . identifier[args] . identifier[https_proxy] :
identifier[http_proxy] = identifier[session] . identifier[args] . identifier[http_proxy] . identifier[split] ( literal[string] , literal[int] )
identifier[proxy_ssl] = keyword[True]
keyword[elif] identifier[session] . identifier[args] . identifier[http_proxy] :
identifier[http_proxy] = identifier[session] . identifier[args] . identifier[http_proxy] . identifier[split] ( literal[string] , literal[int] )
identifier[proxy_ssl] = keyword[False]
keyword[else] :
identifier[http_proxy] = keyword[None]
identifier[proxy_ssl] = keyword[None]
keyword[if] identifier[http_proxy] :
identifier[http_proxy] [ literal[int] ]= identifier[int] ( identifier[http_proxy] [ literal[int] ])
keyword[if] identifier[session] . identifier[args] . identifier[proxy_user] :
identifier[authentication] =( identifier[session] . identifier[args] . identifier[proxy_user] ,
identifier[session] . identifier[args] . identifier[proxy_password] )
keyword[else] :
identifier[authentication] = keyword[None]
identifier[session] . identifier[factory] . identifier[class_map] [ literal[string] ]= identifier[HTTPProxyConnectionPool]
identifier[host_filter] = identifier[session] . identifier[factory] . identifier[new] (
literal[string] ,
identifier[accept_domains] = identifier[session] . identifier[args] . identifier[proxy_domains] ,
identifier[reject_domains] = identifier[session] . identifier[args] . identifier[proxy_exclude_domains] ,
identifier[accept_hostnames] = identifier[session] . identifier[args] . identifier[proxy_hostnames] ,
identifier[reject_hostnames] = identifier[session] . identifier[args] . identifier[proxy_exclude_hostnames]
)
keyword[return] identifier[session] . identifier[factory] . identifier[new] (
literal[string] ,
identifier[http_proxy] ,
identifier[proxy_ssl] = identifier[proxy_ssl] ,
identifier[authentication] = identifier[authentication] ,
identifier[resolver] = identifier[session] . identifier[factory] [ literal[string] ],
identifier[connection_factory] = identifier[connection_factory] ,
identifier[ssl_connection_factory] = identifier[ssl_connection_factory] ,
identifier[host_filter] = identifier[host_filter] ,
)
keyword[return] identifier[session] . identifier[factory] . identifier[new] (
literal[string] ,
identifier[resolver] = identifier[session] . identifier[factory] [ literal[string] ],
identifier[connection_factory] = identifier[connection_factory] ,
identifier[ssl_connection_factory] = identifier[ssl_connection_factory]
) | def _build_connection_pool(cls, session: AppSession):
"""Create connection pool."""
args = session.args
connect_timeout = args.connect_timeout
read_timeout = args.read_timeout
if args.timeout:
connect_timeout = read_timeout = args.timeout # depends on [control=['if'], data=[]]
if args.limit_rate:
bandwidth_limiter = session.factory.new('BandwidthLimiter', args.limit_rate) # depends on [control=['if'], data=[]]
else:
bandwidth_limiter = None
connection_factory = functools.partial(Connection, timeout=read_timeout, connect_timeout=connect_timeout, bind_host=session.args.bind_address, bandwidth_limiter=bandwidth_limiter)
ssl_connection_factory = functools.partial(SSLConnection, timeout=read_timeout, connect_timeout=connect_timeout, bind_host=session.args.bind_address, ssl_context=session.ssl_context)
if not session.args.no_proxy:
if session.args.https_proxy:
http_proxy = session.args.http_proxy.split(':', 1)
proxy_ssl = True # depends on [control=['if'], data=[]]
elif session.args.http_proxy:
http_proxy = session.args.http_proxy.split(':', 1)
proxy_ssl = False # depends on [control=['if'], data=[]]
else:
http_proxy = None
proxy_ssl = None
if http_proxy:
http_proxy[1] = int(http_proxy[1])
if session.args.proxy_user:
authentication = (session.args.proxy_user, session.args.proxy_password) # depends on [control=['if'], data=[]]
else:
authentication = None
session.factory.class_map['ConnectionPool'] = HTTPProxyConnectionPool
host_filter = session.factory.new('ProxyHostFilter', accept_domains=session.args.proxy_domains, reject_domains=session.args.proxy_exclude_domains, accept_hostnames=session.args.proxy_hostnames, reject_hostnames=session.args.proxy_exclude_hostnames)
return session.factory.new('ConnectionPool', http_proxy, proxy_ssl=proxy_ssl, authentication=authentication, resolver=session.factory['Resolver'], connection_factory=connection_factory, ssl_connection_factory=ssl_connection_factory, host_filter=host_filter) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return session.factory.new('ConnectionPool', resolver=session.factory['Resolver'], connection_factory=connection_factory, ssl_connection_factory=ssl_connection_factory) |
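A minimal sketch of the functools.partial factory pattern used above: timeouts are pre-bound here and the pool supplies per-connection arguments later (the Connection stub and values are illustrative):

import functools

class Connection:
    def __init__(self, address, timeout=None, connect_timeout=None):
        self.address = address
        self.timeout = timeout
        self.connect_timeout = connect_timeout

factory = functools.partial(Connection, timeout=30, connect_timeout=10)
conn = factory(('example.com', 80))  # address supplied at call time
print(conn.address, conn.timeout, conn.connect_timeout)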
def reports(self):
"""
Create reports from the abundance estimation
"""
logging.info('Creating CLARK report for {} files'.format(self.extension))
# Create a workbook to store the report. Using xlsxwriter rather than a simple csv format, as I want to be
# able to have appropriately sized, multi-line cells
workbook = xlsxwriter.Workbook(self.report)
make_path(self.reportpath)
# New worksheet to store the data
worksheet = workbook.add_worksheet()
# Add a bold format for header cells. Using a monotype font size 8
bold = workbook.add_format({'bold': True, 'font_name': 'Courier New', 'font_size': 8})
bold.set_align('center')
# Format for data cells. Monotype, size 8, top vertically justified
courier = workbook.add_format({'font_name': 'Courier New', 'font_size': 8})
courier.set_align('top')
# Set the custom width for 5 and 6 to be 15
worksheet.set_column(5, 5, 15)
worksheet.set_column(6, 6, 20)
# Initialise the position within the worksheet to be (0,0)
row = 0
col = 0
# List of the headers to use
headers = ['Strain', 'Name', 'TaxID', 'Lineage', 'Count', 'Proportion_All(%)', 'Proportion_Classified(%)']
# Add an additional header for .fasta analyses
if self.extension == 'fasta':
headers.insert(4, 'TotalBP')
# Populate the headers
for category in headers:
# Write the data in the specified cell (row, col) using the bold format
worksheet.write(row, col, category, bold)
# Move to the next column to write the next category
col += 1
# Data starts in row 1
row = 1
# Initialise variables to hold the longest names; used in setting the column width
longeststrain = 0
longestname = 0
longestlineage = 0
# Extract all the taxonomic groups that pass the cutoff from the abundance file
for sample in self.runmetadata.samples:
# Every record starts at column 0
col = 0
# Write the strain name
worksheet.write(row, col, sample.name, courier)
col += 1
# Initialise a dictionary to store the species above the cutoff in the sample
sample.general.passfilter = list()
try:
# Abundance file as a dictionary
abundancedict = DictReader(open(sample.general.abundance))
# Filter abundance to taxIDs with at least self.cutoff% of the total proportion
for result in abundancedict:
# The UNKNOWN category doesn't contain a 'Lineage' column, and therefore, subsequent columns are
# shifted out of proper alignment, and do not contain the appropriate data
try:
if float(result['Proportion_All(%)']) > self.cutoff:
sample.general.passfilter.append(result)
except ValueError:
pass
# Determine the longest name of all the strains, and use it to set the width of column 0
if len(sample.name) > longeststrain:
longeststrain = len(sample.name)
worksheet.set_column(0, 0, longeststrain)
# Sort the abundance results based on the highest count
sortedabundance = sorted(sample.general.passfilter, key=lambda x: int(x['Count']), reverse=True)
# Set of contigs from the classification file. For some reason, certain contigs are represented multiple
# times in the classification file. As far as I can tell, these multiple representations are always
# classified the same, and, therefore, should be treated as duplicates, and ignored
contigset = set()
for result in sortedabundance:
# Add the total number of base pairs classified for each TaxID. As only the total number of contigs
# classified as a particular TaxID are in the report, it can be misleading if a large number
# of small contigs are classified to a particular TaxID e.g. 56 contigs map to TaxID 28901, and 50
# contigs map to TaxID 630, however, added together, those 56 contigs are 4705838 bp, while the 50
                # contigs added together are only 69602 bp. While such a sample is unlikely to be a pure culture, only
# 69602 / (4705838 + 69602) = 1.5% of the total bp map to TaxID 630 compared to 45% of the contigs
if self.extension == 'fasta':
# Initialise a variable to store the total bp mapped to the TaxID
result['TotalBP'] = int()
# Read the classification file into a dictionary
classificationdict = DictReader(open(sample.general.classification))
# Read through each contig classification in the dictionary
for contig in classificationdict:
# Pull out each contig with a TaxID that matches the TaxID of the result of interest, and
# is not present in a set of contigs that have already been added to the dictionary
if result['TaxID'] == contig[' Assignment'] and contig['Object_ID'] not in contigset:
# Increment the total bp mapping to the TaxID by the integer of each contig
result['TotalBP'] += int(contig[' Length'])
# Avoid duplicates by adding the contig name to the set of contigs
contigset.add(contig['Object_ID'])
# Print the results to file
# Ignore the first header, as it is the strain name, which has already been added to the report
dictionaryheaders = headers[1:]
for header in dictionaryheaders:
data = result[header]
worksheet.write(row, col, data, courier)
col += 1
# Determine the longest name of all the matches, and use it to set the width of column 0
if len(result['Name']) > longestname:
longestname = len(result['Name'])
worksheet.set_column(1, 1, longestname)
# Do the same for the lineages
if len(result['Lineage']) > longestlineage:
longestlineage = len(result['Lineage'])
worksheet.set_column(3, 3, longestlineage)
# Increase the row
row += 1
# Set the column to 1
col = 1
except KeyError:
# Increase the row
row += 1
# Close the workbook
workbook.close() | def function[reports, parameter[self]]:
constant[
Create reports from the abundance estimation
]
call[name[logging].info, parameter[call[constant[Creating CLARK report for {} files].format, parameter[name[self].extension]]]]
variable[workbook] assign[=] call[name[xlsxwriter].Workbook, parameter[name[self].report]]
call[name[make_path], parameter[name[self].reportpath]]
variable[worksheet] assign[=] call[name[workbook].add_worksheet, parameter[]]
variable[bold] assign[=] call[name[workbook].add_format, parameter[dictionary[[<ast.Constant object at 0x7da1b1e0a230>, <ast.Constant object at 0x7da1b1e0b0d0>, <ast.Constant object at 0x7da1b1e091b0>], [<ast.Constant object at 0x7da1b1e0b820>, <ast.Constant object at 0x7da1b1e08550>, <ast.Constant object at 0x7da1b1e0bb50>]]]]
call[name[bold].set_align, parameter[constant[center]]]
variable[courier] assign[=] call[name[workbook].add_format, parameter[dictionary[[<ast.Constant object at 0x7da1b1e0b580>, <ast.Constant object at 0x7da1b1e085b0>], [<ast.Constant object at 0x7da1b1e08730>, <ast.Constant object at 0x7da1b1e0a440>]]]]
call[name[courier].set_align, parameter[constant[top]]]
call[name[worksheet].set_column, parameter[constant[5], constant[5], constant[15]]]
call[name[worksheet].set_column, parameter[constant[6], constant[6], constant[20]]]
variable[row] assign[=] constant[0]
variable[col] assign[=] constant[0]
variable[headers] assign[=] list[[<ast.Constant object at 0x7da1b1e0b010>, <ast.Constant object at 0x7da1b1e08c70>, <ast.Constant object at 0x7da1b1e0a950>, <ast.Constant object at 0x7da1b1e09c00>, <ast.Constant object at 0x7da1b1e08ee0>, <ast.Constant object at 0x7da1b1e084c0>, <ast.Constant object at 0x7da1b1e088e0>]]
if compare[name[self].extension equal[==] constant[fasta]] begin[:]
call[name[headers].insert, parameter[constant[4], constant[TotalBP]]]
for taget[name[category]] in starred[name[headers]] begin[:]
call[name[worksheet].write, parameter[name[row], name[col], name[category], name[bold]]]
<ast.AugAssign object at 0x7da1b1e09930>
variable[row] assign[=] constant[1]
variable[longeststrain] assign[=] constant[0]
variable[longestname] assign[=] constant[0]
variable[longestlineage] assign[=] constant[0]
for taget[name[sample]] in starred[name[self].runmetadata.samples] begin[:]
variable[col] assign[=] constant[0]
call[name[worksheet].write, parameter[name[row], name[col], name[sample].name, name[courier]]]
<ast.AugAssign object at 0x7da1b1e0a170>
name[sample].general.passfilter assign[=] call[name[list], parameter[]]
<ast.Try object at 0x7da1b1ec3bb0>
call[name[workbook].close, parameter[]] | keyword[def] identifier[reports] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[extension] ))
identifier[workbook] = identifier[xlsxwriter] . identifier[Workbook] ( identifier[self] . identifier[report] )
identifier[make_path] ( identifier[self] . identifier[reportpath] )
identifier[worksheet] = identifier[workbook] . identifier[add_worksheet] ()
identifier[bold] = identifier[workbook] . identifier[add_format] ({ literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] : literal[int] })
identifier[bold] . identifier[set_align] ( literal[string] )
identifier[courier] = identifier[workbook] . identifier[add_format] ({ literal[string] : literal[string] , literal[string] : literal[int] })
identifier[courier] . identifier[set_align] ( literal[string] )
identifier[worksheet] . identifier[set_column] ( literal[int] , literal[int] , literal[int] )
identifier[worksheet] . identifier[set_column] ( literal[int] , literal[int] , literal[int] )
identifier[row] = literal[int]
identifier[col] = literal[int]
identifier[headers] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[self] . identifier[extension] == literal[string] :
identifier[headers] . identifier[insert] ( literal[int] , literal[string] )
keyword[for] identifier[category] keyword[in] identifier[headers] :
identifier[worksheet] . identifier[write] ( identifier[row] , identifier[col] , identifier[category] , identifier[bold] )
identifier[col] += literal[int]
identifier[row] = literal[int]
identifier[longeststrain] = literal[int]
identifier[longestname] = literal[int]
identifier[longestlineage] = literal[int]
keyword[for] identifier[sample] keyword[in] identifier[self] . identifier[runmetadata] . identifier[samples] :
identifier[col] = literal[int]
identifier[worksheet] . identifier[write] ( identifier[row] , identifier[col] , identifier[sample] . identifier[name] , identifier[courier] )
identifier[col] += literal[int]
identifier[sample] . identifier[general] . identifier[passfilter] = identifier[list] ()
keyword[try] :
identifier[abundancedict] = identifier[DictReader] ( identifier[open] ( identifier[sample] . identifier[general] . identifier[abundance] ))
keyword[for] identifier[result] keyword[in] identifier[abundancedict] :
keyword[try] :
keyword[if] identifier[float] ( identifier[result] [ literal[string] ])> identifier[self] . identifier[cutoff] :
identifier[sample] . identifier[general] . identifier[passfilter] . identifier[append] ( identifier[result] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[if] identifier[len] ( identifier[sample] . identifier[name] )> identifier[longeststrain] :
identifier[longeststrain] = identifier[len] ( identifier[sample] . identifier[name] )
identifier[worksheet] . identifier[set_column] ( literal[int] , literal[int] , identifier[longeststrain] )
identifier[sortedabundance] = identifier[sorted] ( identifier[sample] . identifier[general] . identifier[passfilter] , identifier[key] = keyword[lambda] identifier[x] : identifier[int] ( identifier[x] [ literal[string] ]), identifier[reverse] = keyword[True] )
identifier[contigset] = identifier[set] ()
keyword[for] identifier[result] keyword[in] identifier[sortedabundance] :
keyword[if] identifier[self] . identifier[extension] == literal[string] :
identifier[result] [ literal[string] ]= identifier[int] ()
identifier[classificationdict] = identifier[DictReader] ( identifier[open] ( identifier[sample] . identifier[general] . identifier[classification] ))
keyword[for] identifier[contig] keyword[in] identifier[classificationdict] :
keyword[if] identifier[result] [ literal[string] ]== identifier[contig] [ literal[string] ] keyword[and] identifier[contig] [ literal[string] ] keyword[not] keyword[in] identifier[contigset] :
identifier[result] [ literal[string] ]+= identifier[int] ( identifier[contig] [ literal[string] ])
identifier[contigset] . identifier[add] ( identifier[contig] [ literal[string] ])
identifier[dictionaryheaders] = identifier[headers] [ literal[int] :]
keyword[for] identifier[header] keyword[in] identifier[dictionaryheaders] :
identifier[data] = identifier[result] [ identifier[header] ]
identifier[worksheet] . identifier[write] ( identifier[row] , identifier[col] , identifier[data] , identifier[courier] )
identifier[col] += literal[int]
keyword[if] identifier[len] ( identifier[result] [ literal[string] ])> identifier[longestname] :
identifier[longestname] = identifier[len] ( identifier[result] [ literal[string] ])
identifier[worksheet] . identifier[set_column] ( literal[int] , literal[int] , identifier[longestname] )
keyword[if] identifier[len] ( identifier[result] [ literal[string] ])> identifier[longestlineage] :
identifier[longestlineage] = identifier[len] ( identifier[result] [ literal[string] ])
identifier[worksheet] . identifier[set_column] ( literal[int] , literal[int] , identifier[longestlineage] )
identifier[row] += literal[int]
identifier[col] = literal[int]
keyword[except] identifier[KeyError] :
identifier[row] += literal[int]
identifier[workbook] . identifier[close] () | def reports(self):
"""
Create reports from the abundance estimation
"""
logging.info('Creating CLARK report for {} files'.format(self.extension))
# Create a workbook to store the report. Using xlsxwriter rather than a simple csv format, as I want to be
# able to have appropriately sized, multi-line cells
workbook = xlsxwriter.Workbook(self.report)
make_path(self.reportpath)
# New worksheet to store the data
worksheet = workbook.add_worksheet()
# Add a bold format for header cells. Using a monotype font size 8
bold = workbook.add_format({'bold': True, 'font_name': 'Courier New', 'font_size': 8})
bold.set_align('center')
# Format for data cells. Monotype, size 8, top vertically justified
courier = workbook.add_format({'font_name': 'Courier New', 'font_size': 8})
courier.set_align('top')
# Set the custom width for 5 and 6 to be 15
worksheet.set_column(5, 5, 15)
worksheet.set_column(6, 6, 20)
# Initialise the position within the worksheet to be (0,0)
row = 0
col = 0
# List of the headers to use
headers = ['Strain', 'Name', 'TaxID', 'Lineage', 'Count', 'Proportion_All(%)', 'Proportion_Classified(%)']
# Add an additional header for .fasta analyses
if self.extension == 'fasta':
headers.insert(4, 'TotalBP') # depends on [control=['if'], data=[]]
# Populate the headers
for category in headers:
# Write the data in the specified cell (row, col) using the bold format
worksheet.write(row, col, category, bold)
# Move to the next column to write the next category
col += 1 # depends on [control=['for'], data=['category']]
# Data starts in row 1
row = 1
# Initialise variables to hold the longest names; used in setting the column width
longeststrain = 0
longestname = 0
longestlineage = 0
# Extract all the taxonomic groups that pass the cutoff from the abundance file
for sample in self.runmetadata.samples:
# Every record starts at column 0
col = 0
# Write the strain name
worksheet.write(row, col, sample.name, courier)
col += 1
# Initialise a dictionary to store the species above the cutoff in the sample
sample.general.passfilter = list()
try:
# Abundance file as a dictionary
abundancedict = DictReader(open(sample.general.abundance))
# Filter abundance to taxIDs with at least self.cutoff% of the total proportion
for result in abundancedict:
# The UNKNOWN category doesn't contain a 'Lineage' column, and therefore, subsequent columns are
# shifted out of proper alignment, and do not contain the appropriate data
try:
if float(result['Proportion_All(%)']) > self.cutoff:
sample.general.passfilter.append(result) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['result']]
# Determine the longest name of all the strains, and use it to set the width of column 0
if len(sample.name) > longeststrain:
longeststrain = len(sample.name)
worksheet.set_column(0, 0, longeststrain) # depends on [control=['if'], data=['longeststrain']]
# Sort the abundance results based on the highest count
sortedabundance = sorted(sample.general.passfilter, key=lambda x: int(x['Count']), reverse=True)
# Set of contigs from the classification file. For some reason, certain contigs are represented multiple
# times in the classification file. As far as I can tell, these multiple representations are always
# classified the same, and, therefore, should be treated as duplicates, and ignored
contigset = set()
for result in sortedabundance:
# Add the total number of base pairs classified for each TaxID. As only the total number of contigs
# classified as a particular TaxID are in the report, it can be misleading if a large number
# of small contigs are classified to a particular TaxID e.g. 56 contigs map to TaxID 28901, and 50
# contigs map to TaxID 630, however, added together, those 56 contigs are 4705838 bp, while the 50
                # contigs added together are only 69602 bp. While such a sample is unlikely to be a pure culture, only
# 69602 / (4705838 + 69602) = 1.5% of the total bp map to TaxID 630 compared to 45% of the contigs
if self.extension == 'fasta':
# Initialise a variable to store the total bp mapped to the TaxID
result['TotalBP'] = int()
# Read the classification file into a dictionary
classificationdict = DictReader(open(sample.general.classification))
# Read through each contig classification in the dictionary
for contig in classificationdict:
# Pull out each contig with a TaxID that matches the TaxID of the result of interest, and
# is not present in a set of contigs that have already been added to the dictionary
if result['TaxID'] == contig[' Assignment'] and contig['Object_ID'] not in contigset:
# Increment the total bp mapping to the TaxID by the integer of each contig
result['TotalBP'] += int(contig[' Length'])
# Avoid duplicates by adding the contig name to the set of contigs
contigset.add(contig['Object_ID']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['contig']] # depends on [control=['if'], data=[]]
# Print the results to file
# Ignore the first header, as it is the strain name, which has already been added to the report
dictionaryheaders = headers[1:]
for header in dictionaryheaders:
data = result[header]
worksheet.write(row, col, data, courier)
col += 1
                # Determine the longest name of all the matches, and use it to set the width of column 1
if len(result['Name']) > longestname:
longestname = len(result['Name'])
worksheet.set_column(1, 1, longestname) # depends on [control=['if'], data=['longestname']]
# Do the same for the lineages
if len(result['Lineage']) > longestlineage:
longestlineage = len(result['Lineage'])
worksheet.set_column(3, 3, longestlineage) # depends on [control=['if'], data=['longestlineage']] # depends on [control=['for'], data=['header']]
# Increase the row
row += 1
# Set the column to 1
col = 1 # depends on [control=['for'], data=['result']] # depends on [control=['try'], data=[]]
except KeyError:
# Increase the row
row += 1 # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['sample']]
# Close the workbook
workbook.close() |
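A minimal, runnable sketch of the xlsxwriter pattern used in the report writer above (bold monospace headers, then widening a column to its longest value); the output file name and sample rows are hypothetical:

import xlsxwriter

workbook = xlsxwriter.Workbook('abundance_sketch.xlsx')
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': True, 'font_name': 'Courier New', 'font_size': 8})
courier = workbook.add_format({'font_name': 'Courier New', 'font_size': 8})
headers = ['Strain', 'Name', 'Count']
rows = [('strain_1', 'Salmonella enterica', 56), ('strain_2', 'Escherichia coli', 50)]
# Header row in bold, one category per column
for col, category in enumerate(headers):
    worksheet.write(0, col, category, bold)
longestname = 0
for row, record in enumerate(rows, start=1):
    for col, value in enumerate(record):
        worksheet.write(row, col, value, courier)
    # Widen the 'Name' column to fit its longest entry, mirroring the set_column calls above
    longestname = max(longestname, len(record[1]))
    worksheet.set_column(1, 1, longestname)
workbook.close()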
def is_agari(self, tiles_34, open_sets_34=None):
"""
        Determine whether the given hand is a winning hand (agari) or not
        :param tiles_34: array in the 34-tile format
        :param open_sets_34: array of arrays in the 34-tile format
        :return: boolean
"""
# we will modify them later, so we need to use a copy
tiles = copy.deepcopy(tiles_34)
        # With an open hand we need to remove the open sets from the hand and replace them with isolated pon sets;
        # this allows the agari state to be determined correctly
if open_sets_34:
isolated_tiles = find_isolated_tile_indices(tiles)
for meld in open_sets_34:
if not isolated_tiles:
break
isolated_tile = isolated_tiles.pop()
tiles[meld[0]] -= 1
tiles[meld[1]] -= 1
tiles[meld[2]] -= 1
tiles[isolated_tile] = 3
j = (1 << tiles[27]) | (1 << tiles[28]) | (1 << tiles[29]) | (1 << tiles[30]) | \
(1 << tiles[31]) | (1 << tiles[32]) | (1 << tiles[33])
if j >= 0x10:
return False
# 13 orphans
if ((j & 3) == 2) and (tiles[0] * tiles[8] * tiles[9] * tiles[17] * tiles[18] *
tiles[26] * tiles[27] * tiles[28] * tiles[29] * tiles[30] *
tiles[31] * tiles[32] * tiles[33] == 2):
return True
# seven pairs
if not (j & 10) and sum([tiles[i] == 2 for i in range(0, 34)]) == 7:
return True
if j & 2:
return False
n00 = tiles[0] + tiles[3] + tiles[6]
n01 = tiles[1] + tiles[4] + tiles[7]
n02 = tiles[2] + tiles[5] + tiles[8]
n10 = tiles[9] + tiles[12] + tiles[15]
n11 = tiles[10] + tiles[13] + tiles[16]
n12 = tiles[11] + tiles[14] + tiles[17]
n20 = tiles[18] + tiles[21] + tiles[24]
n21 = tiles[19] + tiles[22] + tiles[25]
n22 = tiles[20] + tiles[23] + tiles[26]
n0 = (n00 + n01 + n02) % 3
if n0 == 1:
return False
n1 = (n10 + n11 + n12) % 3
if n1 == 1:
return False
n2 = (n20 + n21 + n22) % 3
if n2 == 1:
return False
if ((n0 == 2) + (n1 == 2) + (n2 == 2) + (tiles[27] == 2) + (tiles[28] == 2) +
(tiles[29] == 2) + (tiles[30] == 2) + (tiles[31] == 2) + (tiles[32] == 2) +
(tiles[33] == 2) != 1):
return False
nn0 = (n00 * 1 + n01 * 2) % 3
m0 = self._to_meld(tiles, 0)
nn1 = (n10 * 1 + n11 * 2) % 3
m1 = self._to_meld(tiles, 9)
nn2 = (n20 * 1 + n21 * 2) % 3
m2 = self._to_meld(tiles, 18)
if j & 4:
return not (n0 | nn0 | n1 | nn1 | n2 | nn2) and self._is_mentsu(m0) \
and self._is_mentsu(m1) and self._is_mentsu(m2)
if n0 == 2:
return not (n1 | nn1 | n2 | nn2) and self._is_mentsu(m1) and self._is_mentsu(m2) \
and self._is_atama_mentsu(nn0, m0)
if n1 == 2:
return not (n2 | nn2 | n0 | nn0) and self._is_mentsu(m2) and self._is_mentsu(m0) \
and self._is_atama_mentsu(nn1, m1)
if n2 == 2:
return not (n0 | nn0 | n1 | nn1) and self._is_mentsu(m0) and self._is_mentsu(m1) \
and self._is_atama_mentsu(nn2, m2)
return False | def function[is_agari, parameter[self, tiles_34, open_sets_34]]:
constant[
    Determine whether the given hand is a winning hand (agari) or not
    :param tiles_34: array in the 34-tile format
    :param open_sets_34: array of arrays in the 34-tile format
    :return: boolean
]
variable[tiles] assign[=] call[name[copy].deepcopy, parameter[name[tiles_34]]]
if name[open_sets_34] begin[:]
variable[isolated_tiles] assign[=] call[name[find_isolated_tile_indices], parameter[name[tiles]]]
for taget[name[meld]] in starred[name[open_sets_34]] begin[:]
if <ast.UnaryOp object at 0x7da1b07e0c10> begin[:]
break
variable[isolated_tile] assign[=] call[name[isolated_tiles].pop, parameter[]]
<ast.AugAssign object at 0x7da1b07e3100>
<ast.AugAssign object at 0x7da1b07e3fd0>
<ast.AugAssign object at 0x7da1b07e3130>
call[name[tiles]][name[isolated_tile]] assign[=] constant[3]
variable[j] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[27]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[28]]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[29]]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[30]]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[31]]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[32]]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> call[name[tiles]][constant[33]]]]
if compare[name[j] greater_or_equal[>=] constant[16]] begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da18bccae00> begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da18bccb130> begin[:]
return[constant[True]]
if binary_operation[name[j] <ast.BitAnd object at 0x7da2590d6b60> constant[2]] begin[:]
return[constant[False]]
variable[n00] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[0]] + call[name[tiles]][constant[3]]] + call[name[tiles]][constant[6]]]
variable[n01] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[1]] + call[name[tiles]][constant[4]]] + call[name[tiles]][constant[7]]]
variable[n02] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[2]] + call[name[tiles]][constant[5]]] + call[name[tiles]][constant[8]]]
variable[n10] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[9]] + call[name[tiles]][constant[12]]] + call[name[tiles]][constant[15]]]
variable[n11] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[10]] + call[name[tiles]][constant[13]]] + call[name[tiles]][constant[16]]]
variable[n12] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[11]] + call[name[tiles]][constant[14]]] + call[name[tiles]][constant[17]]]
variable[n20] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[18]] + call[name[tiles]][constant[21]]] + call[name[tiles]][constant[24]]]
variable[n21] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[19]] + call[name[tiles]][constant[22]]] + call[name[tiles]][constant[25]]]
variable[n22] assign[=] binary_operation[binary_operation[call[name[tiles]][constant[20]] + call[name[tiles]][constant[23]]] + call[name[tiles]][constant[26]]]
variable[n0] assign[=] binary_operation[binary_operation[binary_operation[name[n00] + name[n01]] + name[n02]] <ast.Mod object at 0x7da2590d6920> constant[3]]
if compare[name[n0] equal[==] constant[1]] begin[:]
return[constant[False]]
variable[n1] assign[=] binary_operation[binary_operation[binary_operation[name[n10] + name[n11]] + name[n12]] <ast.Mod object at 0x7da2590d6920> constant[3]]
if compare[name[n1] equal[==] constant[1]] begin[:]
return[constant[False]]
variable[n2] assign[=] binary_operation[binary_operation[binary_operation[name[n20] + name[n21]] + name[n22]] <ast.Mod object at 0x7da2590d6920> constant[3]]
if compare[name[n2] equal[==] constant[1]] begin[:]
return[constant[False]]
if compare[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[compare[name[n0] equal[==] constant[2]] + compare[name[n1] equal[==] constant[2]]] + compare[name[n2] equal[==] constant[2]]] + compare[call[name[tiles]][constant[27]] equal[==] constant[2]]] + compare[call[name[tiles]][constant[28]] equal[==] constant[2]]] + compare[call[name[tiles]][constant[29]] equal[==] constant[2]]] + compare[call[name[tiles]][constant[30]] equal[==] constant[2]]] + compare[call[name[tiles]][constant[31]] equal[==] constant[2]]] + compare[call[name[tiles]][constant[32]] equal[==] constant[2]]] + compare[call[name[tiles]][constant[33]] equal[==] constant[2]]] not_equal[!=] constant[1]] begin[:]
return[constant[False]]
variable[nn0] assign[=] binary_operation[binary_operation[binary_operation[name[n00] * constant[1]] + binary_operation[name[n01] * constant[2]]] <ast.Mod object at 0x7da2590d6920> constant[3]]
variable[m0] assign[=] call[name[self]._to_meld, parameter[name[tiles], constant[0]]]
variable[nn1] assign[=] binary_operation[binary_operation[binary_operation[name[n10] * constant[1]] + binary_operation[name[n11] * constant[2]]] <ast.Mod object at 0x7da2590d6920> constant[3]]
variable[m1] assign[=] call[name[self]._to_meld, parameter[name[tiles], constant[9]]]
variable[nn2] assign[=] binary_operation[binary_operation[binary_operation[name[n20] * constant[1]] + binary_operation[name[n21] * constant[2]]] <ast.Mod object at 0x7da2590d6920> constant[3]]
variable[m2] assign[=] call[name[self]._to_meld, parameter[name[tiles], constant[18]]]
if binary_operation[name[j] <ast.BitAnd object at 0x7da2590d6b60> constant[4]] begin[:]
return[<ast.BoolOp object at 0x7da18f723ca0>]
if compare[name[n0] equal[==] constant[2]] begin[:]
return[<ast.BoolOp object at 0x7da18f723d30>]
if compare[name[n1] equal[==] constant[2]] begin[:]
return[<ast.BoolOp object at 0x7da1b07a67d0>]
if compare[name[n2] equal[==] constant[2]] begin[:]
return[<ast.BoolOp object at 0x7da18dc04d30>]
return[constant[False]] | keyword[def] identifier[is_agari] ( identifier[self] , identifier[tiles_34] , identifier[open_sets_34] = keyword[None] ):
literal[string]
identifier[tiles] = identifier[copy] . identifier[deepcopy] ( identifier[tiles_34] )
keyword[if] identifier[open_sets_34] :
identifier[isolated_tiles] = identifier[find_isolated_tile_indices] ( identifier[tiles] )
keyword[for] identifier[meld] keyword[in] identifier[open_sets_34] :
keyword[if] keyword[not] identifier[isolated_tiles] :
keyword[break]
identifier[isolated_tile] = identifier[isolated_tiles] . identifier[pop] ()
identifier[tiles] [ identifier[meld] [ literal[int] ]]-= literal[int]
identifier[tiles] [ identifier[meld] [ literal[int] ]]-= literal[int]
identifier[tiles] [ identifier[meld] [ literal[int] ]]-= literal[int]
identifier[tiles] [ identifier[isolated_tile] ]= literal[int]
identifier[j] =( literal[int] << identifier[tiles] [ literal[int] ])|( literal[int] << identifier[tiles] [ literal[int] ])|( literal[int] << identifier[tiles] [ literal[int] ])|( literal[int] << identifier[tiles] [ literal[int] ])|( literal[int] << identifier[tiles] [ literal[int] ])|( literal[int] << identifier[tiles] [ literal[int] ])|( literal[int] << identifier[tiles] [ literal[int] ])
keyword[if] identifier[j] >= literal[int] :
keyword[return] keyword[False]
keyword[if] (( identifier[j] & literal[int] )== literal[int] ) keyword[and] ( identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]*
identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]*
identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]* identifier[tiles] [ literal[int] ]== literal[int] ):
keyword[return] keyword[True]
keyword[if] keyword[not] ( identifier[j] & literal[int] ) keyword[and] identifier[sum] ([ identifier[tiles] [ identifier[i] ]== literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] )])== literal[int] :
keyword[return] keyword[True]
keyword[if] identifier[j] & literal[int] :
keyword[return] keyword[False]
identifier[n00] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n01] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n02] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n10] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n11] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n12] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n20] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n21] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n22] = identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]+ identifier[tiles] [ literal[int] ]
identifier[n0] =( identifier[n00] + identifier[n01] + identifier[n02] )% literal[int]
keyword[if] identifier[n0] == literal[int] :
keyword[return] keyword[False]
identifier[n1] =( identifier[n10] + identifier[n11] + identifier[n12] )% literal[int]
keyword[if] identifier[n1] == literal[int] :
keyword[return] keyword[False]
identifier[n2] =( identifier[n20] + identifier[n21] + identifier[n22] )% literal[int]
keyword[if] identifier[n2] == literal[int] :
keyword[return] keyword[False]
keyword[if] (( identifier[n0] == literal[int] )+( identifier[n1] == literal[int] )+( identifier[n2] == literal[int] )+( identifier[tiles] [ literal[int] ]== literal[int] )+( identifier[tiles] [ literal[int] ]== literal[int] )+
( identifier[tiles] [ literal[int] ]== literal[int] )+( identifier[tiles] [ literal[int] ]== literal[int] )+( identifier[tiles] [ literal[int] ]== literal[int] )+( identifier[tiles] [ literal[int] ]== literal[int] )+
( identifier[tiles] [ literal[int] ]== literal[int] )!= literal[int] ):
keyword[return] keyword[False]
identifier[nn0] =( identifier[n00] * literal[int] + identifier[n01] * literal[int] )% literal[int]
identifier[m0] = identifier[self] . identifier[_to_meld] ( identifier[tiles] , literal[int] )
identifier[nn1] =( identifier[n10] * literal[int] + identifier[n11] * literal[int] )% literal[int]
identifier[m1] = identifier[self] . identifier[_to_meld] ( identifier[tiles] , literal[int] )
identifier[nn2] =( identifier[n20] * literal[int] + identifier[n21] * literal[int] )% literal[int]
identifier[m2] = identifier[self] . identifier[_to_meld] ( identifier[tiles] , literal[int] )
keyword[if] identifier[j] & literal[int] :
keyword[return] keyword[not] ( identifier[n0] | identifier[nn0] | identifier[n1] | identifier[nn1] | identifier[n2] | identifier[nn2] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m0] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m1] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m2] )
keyword[if] identifier[n0] == literal[int] :
keyword[return] keyword[not] ( identifier[n1] | identifier[nn1] | identifier[n2] | identifier[nn2] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m1] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m2] ) keyword[and] identifier[self] . identifier[_is_atama_mentsu] ( identifier[nn0] , identifier[m0] )
keyword[if] identifier[n1] == literal[int] :
keyword[return] keyword[not] ( identifier[n2] | identifier[nn2] | identifier[n0] | identifier[nn0] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m2] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m0] ) keyword[and] identifier[self] . identifier[_is_atama_mentsu] ( identifier[nn1] , identifier[m1] )
keyword[if] identifier[n2] == literal[int] :
keyword[return] keyword[not] ( identifier[n0] | identifier[nn0] | identifier[n1] | identifier[nn1] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m0] ) keyword[and] identifier[self] . identifier[_is_mentsu] ( identifier[m1] ) keyword[and] identifier[self] . identifier[_is_atama_mentsu] ( identifier[nn2] , identifier[m2] )
keyword[return] keyword[False] | def is_agari(self, tiles_34, open_sets_34=None):
"""
    Determine whether the given hand is a winning hand (agari) or not
    :param tiles_34: array in the 34-tile format
    :param open_sets_34: array of arrays in the 34-tile format
    :return: boolean
"""
# we will modify them later, so we need to use a copy
tiles = copy.deepcopy(tiles_34)
    # With an open hand we need to remove the open sets from the hand and replace them with isolated pon sets;
    # this allows the agari state to be determined correctly
if open_sets_34:
isolated_tiles = find_isolated_tile_indices(tiles)
for meld in open_sets_34:
if not isolated_tiles:
break # depends on [control=['if'], data=[]]
isolated_tile = isolated_tiles.pop()
tiles[meld[0]] -= 1
tiles[meld[1]] -= 1
tiles[meld[2]] -= 1
tiles[isolated_tile] = 3 # depends on [control=['for'], data=['meld']] # depends on [control=['if'], data=[]]
j = 1 << tiles[27] | 1 << tiles[28] | 1 << tiles[29] | 1 << tiles[30] | 1 << tiles[31] | 1 << tiles[32] | 1 << tiles[33]
if j >= 16:
return False # depends on [control=['if'], data=[]]
# 13 orphans
if j & 3 == 2 and tiles[0] * tiles[8] * tiles[9] * tiles[17] * tiles[18] * tiles[26] * tiles[27] * tiles[28] * tiles[29] * tiles[30] * tiles[31] * tiles[32] * tiles[33] == 2:
return True # depends on [control=['if'], data=[]]
# seven pairs
if not j & 10 and sum([tiles[i] == 2 for i in range(0, 34)]) == 7:
return True # depends on [control=['if'], data=[]]
if j & 2:
return False # depends on [control=['if'], data=[]]
n00 = tiles[0] + tiles[3] + tiles[6]
n01 = tiles[1] + tiles[4] + tiles[7]
n02 = tiles[2] + tiles[5] + tiles[8]
n10 = tiles[9] + tiles[12] + tiles[15]
n11 = tiles[10] + tiles[13] + tiles[16]
n12 = tiles[11] + tiles[14] + tiles[17]
n20 = tiles[18] + tiles[21] + tiles[24]
n21 = tiles[19] + tiles[22] + tiles[25]
n22 = tiles[20] + tiles[23] + tiles[26]
n0 = (n00 + n01 + n02) % 3
if n0 == 1:
return False # depends on [control=['if'], data=[]]
n1 = (n10 + n11 + n12) % 3
if n1 == 1:
return False # depends on [control=['if'], data=[]]
n2 = (n20 + n21 + n22) % 3
if n2 == 1:
return False # depends on [control=['if'], data=[]]
if (n0 == 2) + (n1 == 2) + (n2 == 2) + (tiles[27] == 2) + (tiles[28] == 2) + (tiles[29] == 2) + (tiles[30] == 2) + (tiles[31] == 2) + (tiles[32] == 2) + (tiles[33] == 2) != 1:
return False # depends on [control=['if'], data=[]]
nn0 = (n00 * 1 + n01 * 2) % 3
m0 = self._to_meld(tiles, 0)
nn1 = (n10 * 1 + n11 * 2) % 3
m1 = self._to_meld(tiles, 9)
nn2 = (n20 * 1 + n21 * 2) % 3
m2 = self._to_meld(tiles, 18)
if j & 4:
return not n0 | nn0 | n1 | nn1 | n2 | nn2 and self._is_mentsu(m0) and self._is_mentsu(m1) and self._is_mentsu(m2) # depends on [control=['if'], data=[]]
if n0 == 2:
return not n1 | nn1 | n2 | nn2 and self._is_mentsu(m1) and self._is_mentsu(m2) and self._is_atama_mentsu(nn0, m0) # depends on [control=['if'], data=[]]
if n1 == 2:
return not n2 | nn2 | n0 | nn0 and self._is_mentsu(m2) and self._is_mentsu(m0) and self._is_atama_mentsu(nn1, m1) # depends on [control=['if'], data=[]]
if n2 == 2:
return not n0 | nn0 | n1 | nn1 and self._is_mentsu(m0) and self._is_mentsu(m1) and self._is_atama_mentsu(nn2, m2) # depends on [control=['if'], data=[]]
return False |
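A worked example of the seven-pairs branch of is_agari, using a hypothetical hand in the 34-tile count format (the full method additionally needs the _to_meld/_is_mentsu helpers, which are not shown in this row):

# Indexes 0-8 man, 9-17 pin, 18-26 sou, 27-33 honours; each value is a tile count.
tiles = [0] * 34
for idx in (0, 2, 4, 10, 12, 20, 27):  # seven arbitrary tile kinds, two of each
    tiles[idx] = 2
# Honour-tile bitmask, exactly as in is_agari: bit k is set when some honour count equals k.
j = 0
for h in range(27, 34):
    j |= 1 << tiles[h]
assert j < 0x10  # j >= 0x10 would mean an honour appears 4+ times: never a winning hand
# Seven pairs: no honour appears once or three times (bits 1 and 3 clear, i.e. j & 10 == 0)
# and exactly seven tile kinds appear twice.
print(not (j & 0b1010) and sum(tiles[i] == 2 for i in range(34)) == 7)  # True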
def _generate_request_head_bytes(method, endpoint, headers):
"""
:type method: str
:type endpoint: str
:type headers: dict[str, str]
:rtype: bytes
"""
head_string = _FORMAT_METHOD_AND_ENDPOINT.format(method, endpoint)
header_tuples = sorted((k, headers[k]) for k in headers)
for name, value in header_tuples:
if _should_sign_request_header(name):
head_string += _FORMAT_HEADER_STRING.format(name, value)
return (head_string + _DELIMITER_NEWLINE).encode() | def function[_generate_request_head_bytes, parameter[method, endpoint, headers]]:
constant[
:type method: str
:type endpoint: str
:type headers: dict[str, str]
:rtype: bytes
]
variable[head_string] assign[=] call[name[_FORMAT_METHOD_AND_ENDPOINT].format, parameter[name[method], name[endpoint]]]
variable[header_tuples] assign[=] call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b07aead0>]]
for taget[tuple[[<ast.Name object at 0x7da1b07ad630>, <ast.Name object at 0x7da1b07ac760>]]] in starred[name[header_tuples]] begin[:]
if call[name[_should_sign_request_header], parameter[name[name]]] begin[:]
<ast.AugAssign object at 0x7da1b07af7c0>
return[call[binary_operation[name[head_string] + name[_DELIMITER_NEWLINE]].encode, parameter[]]] | keyword[def] identifier[_generate_request_head_bytes] ( identifier[method] , identifier[endpoint] , identifier[headers] ):
literal[string]
identifier[head_string] = identifier[_FORMAT_METHOD_AND_ENDPOINT] . identifier[format] ( identifier[method] , identifier[endpoint] )
identifier[header_tuples] = identifier[sorted] (( identifier[k] , identifier[headers] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[headers] )
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[header_tuples] :
keyword[if] identifier[_should_sign_request_header] ( identifier[name] ):
identifier[head_string] += identifier[_FORMAT_HEADER_STRING] . identifier[format] ( identifier[name] , identifier[value] )
keyword[return] ( identifier[head_string] + identifier[_DELIMITER_NEWLINE] ). identifier[encode] () | def _generate_request_head_bytes(method, endpoint, headers):
"""
:type method: str
:type endpoint: str
:type headers: dict[str, str]
:rtype: bytes
"""
head_string = _FORMAT_METHOD_AND_ENDPOINT.format(method, endpoint)
header_tuples = sorted(((k, headers[k]) for k in headers))
for (name, value) in header_tuples:
if _should_sign_request_header(name):
head_string += _FORMAT_HEADER_STRING.format(name, value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (head_string + _DELIMITER_NEWLINE).encode() |
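A sketch of how the head-bytes helper above could behave end to end; the two format strings and the header filter are illustrative assumptions, not the library's actual module constants:

_FORMAT_METHOD_AND_ENDPOINT = '{} {}\n'  # assumed layout: 'METHOD endpoint'
_FORMAT_HEADER_STRING = '{}: {}\n'       # assumed 'Name: value' per signed header
_DELIMITER_NEWLINE = '\n'

def _should_sign_request_header(name):
    # Assumption: only a whitelist of headers participates in the signature.
    return name in ('Cache-Control', 'User-Agent') or name.startswith('X-Bunq-')

def generate_request_head_bytes(method, endpoint, headers):
    head_string = _FORMAT_METHOD_AND_ENDPOINT.format(method, endpoint)
    for name in sorted(headers):  # deterministic order, as in the sorted() call above
        if _should_sign_request_header(name):
            head_string += _FORMAT_HEADER_STRING.format(name, headers[name])
    return (head_string + _DELIMITER_NEWLINE).encode()

print(generate_request_head_bytes('POST', '/v1/user',
                                  {'User-Agent': 'sketch/1.0', 'Accept': 'application/json'}))
# b'POST /v1/user\nUser-Agent: sketch/1.0\n\n' -- 'Accept' is filtered out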
def set_context(pid_file, context_info):
"""Set context of running notebook.
:param context_info: dict of extra context parameters, see comm.py comments
"""
assert type(context_info) == dict
port_file = get_context_file_name(pid_file)
with open(port_file, "wt") as f:
f.write(json.dumps(context_info)) | def function[set_context, parameter[pid_file, context_info]]:
constant[Set context of running notebook.
:param context_info: dict of extra context parameters, see comm.py comments
]
assert[compare[call[name[type], parameter[name[context_info]]] equal[==] name[dict]]]
variable[port_file] assign[=] call[name[get_context_file_name], parameter[name[pid_file]]]
with call[name[open], parameter[name[port_file], constant[wt]]] begin[:]
call[name[f].write, parameter[call[name[json].dumps, parameter[name[context_info]]]]] | keyword[def] identifier[set_context] ( identifier[pid_file] , identifier[context_info] ):
literal[string]
keyword[assert] identifier[type] ( identifier[context_info] )== identifier[dict]
identifier[port_file] = identifier[get_context_file_name] ( identifier[pid_file] )
keyword[with] identifier[open] ( identifier[port_file] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[context_info] )) | def set_context(pid_file, context_info):
"""Set context of running notebook.
:param context_info: dict of extra context parameters, see comm.py comments
"""
assert type(context_info) == dict
port_file = get_context_file_name(pid_file)
with open(port_file, 'wt') as f:
f.write(json.dumps(context_info)) # depends on [control=['with'], data=['f']] |
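A usage sketch for set_context, assuming the companion helper get_context_file_name simply maps foo.pid to foo.context (an assumption; the real helper lives elsewhere in the package):

import json

def get_context_file_name(pid_file):
    return pid_file.replace('.pid', '.context')  # assumed naming scheme

def set_context(pid_file, context_info):
    assert type(context_info) == dict
    with open(get_context_file_name(pid_file), 'wt') as f:
        f.write(json.dumps(context_info))

set_context('notebook.pid', {'http_port': 8888})
print(open('notebook.context').read())  # {"http_port": 8888}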
def _instantiateFont(self, path):
"""
Return a instance of a font object
with all the given subclasses
"""
return self._fontClass(path,
libClass=self._libClass,
kerningClass=self._kerningClass,
groupsClass=self._groupsClass,
infoClass=self._infoClass,
featuresClass=self._featuresClass,
glyphClass=self._glyphClass,
glyphContourClass=self._glyphContourClass,
glyphPointClass=self._glyphPointClass,
glyphComponentClass=self._glyphComponentClass,
glyphAnchorClass=self._glyphAnchorClass) | def function[_instantiateFont, parameter[self, path]]:
constant[
        Return an instance of a font object
with all the given subclasses
]
return[call[name[self]._fontClass, parameter[name[path]]]] | keyword[def] identifier[_instantiateFont] ( identifier[self] , identifier[path] ):
literal[string]
keyword[return] identifier[self] . identifier[_fontClass] ( identifier[path] ,
identifier[libClass] = identifier[self] . identifier[_libClass] ,
identifier[kerningClass] = identifier[self] . identifier[_kerningClass] ,
identifier[groupsClass] = identifier[self] . identifier[_groupsClass] ,
identifier[infoClass] = identifier[self] . identifier[_infoClass] ,
identifier[featuresClass] = identifier[self] . identifier[_featuresClass] ,
identifier[glyphClass] = identifier[self] . identifier[_glyphClass] ,
identifier[glyphContourClass] = identifier[self] . identifier[_glyphContourClass] ,
identifier[glyphPointClass] = identifier[self] . identifier[_glyphPointClass] ,
identifier[glyphComponentClass] = identifier[self] . identifier[_glyphComponentClass] ,
identifier[glyphAnchorClass] = identifier[self] . identifier[_glyphAnchorClass] ) | def _instantiateFont(self, path):
"""
    Return an instance of a font object
with all the given subclasses
"""
return self._fontClass(path, libClass=self._libClass, kerningClass=self._kerningClass, groupsClass=self._groupsClass, infoClass=self._infoClass, featuresClass=self._featuresClass, glyphClass=self._glyphClass, glyphContourClass=self._glyphContourClass, glyphPointClass=self._glyphPointClass, glyphComponentClass=self._glyphComponentClass, glyphAnchorClass=self._glyphAnchorClass) |
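_instantiateFont is a class-injection factory: every collaborator class is stored on the wrapper and threaded into the font constructor, so callers can swap in subclasses without touching the loading code. A minimal sketch of the same pattern with hypothetical classes:

class Glyph:
    pass

class Font:
    def __init__(self, path, glyphClass=Glyph):
        self.path = path
        self.glyphClass = glyphClass

class FontLoader:
    def __init__(self, fontClass=Font, glyphClass=Glyph):
        self._fontClass = fontClass
        self._glyphClass = glyphClass

    def _instantiateFont(self, path):
        # Same shape as above: forward every injected class to the font constructor
        return self._fontClass(path, glyphClass=self._glyphClass)

font = FontLoader()._instantiateFont('MyFont.ufo')
print(type(font).__name__, font.glyphClass.__name__)  # Font Glyph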
def vm_disk_save(name, kwargs=None, call=None):
'''
Sets the disk to be saved in the given image.
.. versionadded:: 2016.3.0
name
The name of the VM containing the disk to save.
disk_id
The ID of the disk to save.
image_name
The name of the new image where the disk will be saved.
image_type
The type for the new image. If not set, then the default ``ONED`` Configuration
will be used. Other valid types include: OS, CDROM, DATABLOCK, KERNEL, RAMDISK,
and CONTEXT.
snapshot_id
The ID of the snapshot to export. If not set, the current image state will be
used.
CLI Example:
.. code-block:: bash
salt-cloud -a vm_disk_save my-vm disk_id=1 image_name=my-new-image
salt-cloud -a vm_disk_save my-vm disk_id=1 image_name=my-new-image image_type=CONTEXT snapshot_id=10
'''
if call != 'action':
raise SaltCloudSystemExit(
'The vm_disk_save action must be called with -a or --action.'
)
if kwargs is None:
kwargs = {}
disk_id = kwargs.get('disk_id', None)
image_name = kwargs.get('image_name', None)
image_type = kwargs.get('image_type', '')
snapshot_id = int(kwargs.get('snapshot_id', '-1'))
if disk_id is None or image_name is None:
raise SaltCloudSystemExit(
'The vm_disk_save function requires a \'disk_id\' and an \'image_name\' '
'to be provided.'
)
server, user, password = _get_xml_rpc()
auth = ':'.join([user, password])
vm_id = int(get_vm_id(kwargs={'name': name}))
response = server.one.vm.disksave(auth,
vm_id,
int(disk_id),
image_name,
image_type,
snapshot_id)
data = {
'action': 'vm.disksave',
'saved': response[0],
'image_id': response[1],
'error_code': response[2],
}
return data | def function[vm_disk_save, parameter[name, kwargs, call]]:
constant[
Sets the disk to be saved in the given image.
.. versionadded:: 2016.3.0
name
The name of the VM containing the disk to save.
disk_id
The ID of the disk to save.
image_name
The name of the new image where the disk will be saved.
image_type
The type for the new image. If not set, then the default ``ONED`` Configuration
will be used. Other valid types include: OS, CDROM, DATABLOCK, KERNEL, RAMDISK,
and CONTEXT.
snapshot_id
The ID of the snapshot to export. If not set, the current image state will be
used.
CLI Example:
.. code-block:: bash
salt-cloud -a vm_disk_save my-vm disk_id=1 image_name=my-new-image
salt-cloud -a vm_disk_save my-vm disk_id=1 image_name=my-new-image image_type=CONTEXT snapshot_id=10
]
if compare[name[call] not_equal[!=] constant[action]] begin[:]
<ast.Raise object at 0x7da20e74afe0>
if compare[name[kwargs] is constant[None]] begin[:]
variable[kwargs] assign[=] dictionary[[], []]
variable[disk_id] assign[=] call[name[kwargs].get, parameter[constant[disk_id], constant[None]]]
variable[image_name] assign[=] call[name[kwargs].get, parameter[constant[image_name], constant[None]]]
variable[image_type] assign[=] call[name[kwargs].get, parameter[constant[image_type], constant[]]]
variable[snapshot_id] assign[=] call[name[int], parameter[call[name[kwargs].get, parameter[constant[snapshot_id], constant[-1]]]]]
if <ast.BoolOp object at 0x7da18dc060b0> begin[:]
<ast.Raise object at 0x7da18dc048e0>
<ast.Tuple object at 0x7da18dc04460> assign[=] call[name[_get_xml_rpc], parameter[]]
variable[auth] assign[=] call[constant[:].join, parameter[list[[<ast.Name object at 0x7da18dc067d0>, <ast.Name object at 0x7da18dc04d00>]]]]
variable[vm_id] assign[=] call[name[int], parameter[call[name[get_vm_id], parameter[]]]]
variable[response] assign[=] call[name[server].one.vm.disksave, parameter[name[auth], name[vm_id], call[name[int], parameter[name[disk_id]]], name[image_name], name[image_type], name[snapshot_id]]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20c76e200>, <ast.Constant object at 0x7da20c76c2b0>, <ast.Constant object at 0x7da20c76d9f0>, <ast.Constant object at 0x7da207f9a860>], [<ast.Constant object at 0x7da207f98f40>, <ast.Subscript object at 0x7da207f99c30>, <ast.Subscript object at 0x7da207f9bb50>, <ast.Subscript object at 0x7da207f9a530>]]
return[name[data]] | keyword[def] identifier[vm_disk_save] ( identifier[name] , identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] identifier[kwargs] keyword[is] keyword[None] :
identifier[kwargs] ={}
identifier[disk_id] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[image_name] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[image_type] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[snapshot_id] = identifier[int] ( identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ))
keyword[if] identifier[disk_id] keyword[is] keyword[None] keyword[or] identifier[image_name] keyword[is] keyword[None] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
identifier[server] , identifier[user] , identifier[password] = identifier[_get_xml_rpc] ()
identifier[auth] = literal[string] . identifier[join] ([ identifier[user] , identifier[password] ])
identifier[vm_id] = identifier[int] ( identifier[get_vm_id] ( identifier[kwargs] ={ literal[string] : identifier[name] }))
identifier[response] = identifier[server] . identifier[one] . identifier[vm] . identifier[disksave] ( identifier[auth] ,
identifier[vm_id] ,
identifier[int] ( identifier[disk_id] ),
identifier[image_name] ,
identifier[image_type] ,
identifier[snapshot_id] )
identifier[data] ={
literal[string] : literal[string] ,
literal[string] : identifier[response] [ literal[int] ],
literal[string] : identifier[response] [ literal[int] ],
literal[string] : identifier[response] [ literal[int] ],
}
keyword[return] identifier[data] | def vm_disk_save(name, kwargs=None, call=None):
"""
Sets the disk to be saved in the given image.
.. versionadded:: 2016.3.0
name
The name of the VM containing the disk to save.
disk_id
The ID of the disk to save.
image_name
The name of the new image where the disk will be saved.
image_type
The type for the new image. If not set, then the default ``ONED`` Configuration
will be used. Other valid types include: OS, CDROM, DATABLOCK, KERNEL, RAMDISK,
and CONTEXT.
snapshot_id
The ID of the snapshot to export. If not set, the current image state will be
used.
CLI Example:
.. code-block:: bash
salt-cloud -a vm_disk_save my-vm disk_id=1 image_name=my-new-image
salt-cloud -a vm_disk_save my-vm disk_id=1 image_name=my-new-image image_type=CONTEXT snapshot_id=10
"""
if call != 'action':
raise SaltCloudSystemExit('The vm_disk_save action must be called with -a or --action.') # depends on [control=['if'], data=[]]
if kwargs is None:
kwargs = {} # depends on [control=['if'], data=['kwargs']]
disk_id = kwargs.get('disk_id', None)
image_name = kwargs.get('image_name', None)
image_type = kwargs.get('image_type', '')
snapshot_id = int(kwargs.get('snapshot_id', '-1'))
if disk_id is None or image_name is None:
raise SaltCloudSystemExit("The vm_disk_save function requires a 'disk_id' and an 'image_name' to be provided.") # depends on [control=['if'], data=[]]
(server, user, password) = _get_xml_rpc()
auth = ':'.join([user, password])
vm_id = int(get_vm_id(kwargs={'name': name}))
response = server.one.vm.disksave(auth, vm_id, int(disk_id), image_name, image_type, snapshot_id)
data = {'action': 'vm.disksave', 'saved': response[0], 'image_id': response[1], 'error_code': response[2]}
return data |
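OpenNebula XML-RPC methods return a [success, value, error_code] triple, which the action above repackages into a dict; a sketch of that unpacking with the server call stubbed out:

def disksave_stub(auth, vm_id, disk_id, image_name, image_type, snapshot_id):
    # Stand-in for server.one.vm.disksave; the real call goes over XML-RPC.
    return [True, 42, 0]  # [succeeded, new image ID, error code]

response = disksave_stub('user:pass', 7, 1, 'my-new-image', '', -1)
data = {
    'action': 'vm.disksave',
    'saved': response[0],
    'image_id': response[1],
    'error_code': response[2],
}
print(data)  # {'action': 'vm.disksave', 'saved': True, 'image_id': 42, 'error_code': 0}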
def _index_file(self):
"""Open and index the contour file
This function populates the internal list of contours
as strings which will be available as `self.data`.
"""
with self.filename.open() as fd:
data = fd.read()
ident = "Contour in frame"
self._data = data.split(ident)[1:]
self._initialized = True | def function[_index_file, parameter[self]]:
constant[Open and index the contour file
This function populates the internal list of contours
as strings which will be available as `self.data`.
]
with call[name[self].filename.open, parameter[]] begin[:]
variable[data] assign[=] call[name[fd].read, parameter[]]
variable[ident] assign[=] constant[Contour in frame]
name[self]._data assign[=] call[call[name[data].split, parameter[name[ident]]]][<ast.Slice object at 0x7da1b18641c0>]
name[self]._initialized assign[=] constant[True] | keyword[def] identifier[_index_file] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[filename] . identifier[open] () keyword[as] identifier[fd] :
identifier[data] = identifier[fd] . identifier[read] ()
identifier[ident] = literal[string]
identifier[self] . identifier[_data] = identifier[data] . identifier[split] ( identifier[ident] )[ literal[int] :]
identifier[self] . identifier[_initialized] = keyword[True] | def _index_file(self):
"""Open and index the contour file
This function populates the internal list of contours
as strings which will be available as `self.data`.
"""
with self.filename.open() as fd:
data = fd.read() # depends on [control=['with'], data=['fd']]
ident = 'Contour in frame'
self._data = data.split(ident)[1:]
self._initialized = True |
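The indexing above leans on str.split with the header text as delimiter; a standalone sketch on a made-up contour file body shows the resulting per-contour strings:

data = (
    'header junk\n'
    'Contour in frame 0\n1 2\n3 4\n'
    'Contour in frame 1\n5 6\n'
)
# split() removes the marker itself; [1:] drops everything before the first contour
contours = data.split('Contour in frame')[1:]
print(len(contours))      # 2
print(repr(contours[0]))  # ' 0\n1 2\n3 4\n'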
def embed_check_categorical_event_shape(
categorical_param, name="embed_check_categorical_event_shape"):
"""Embeds checks that categorical distributions don't have too many classes.
A categorical-type distribution is one which, e.g., returns the class label
rather than a one-hot encoding. E.g., `Categorical(probs)`.
Since distributions output samples in the same dtype as the parameters, we
must ensure that casting doesn't lose precision. That is, the
`parameter.dtype` implies a maximum number of classes. However, since shape is
`int32` and categorical variables are presumed to be indexes into a `Tensor`,
we must also ensure that the number of classes is no larger than the largest
possible `int32` index, i.e., `2**31-1`.
In other words the number of classes, `K`, must satisfy the following
condition:
```python
K <= min(
      int(2**31 - 1),  # Largest possible int32 index.
{
tf.float16: int(2**11), # Largest int as a float16.
tf.float32: int(2**24),
tf.float64: int(2**53),
}.get(dtype_util.base_dtype(categorical_param.dtype), 0))
```
Args:
categorical_param: Floating-point `Tensor` representing parameters of
distribution over categories. The rightmost shape is presumed to be the
number of categories.
name: A name for this operation (optional).
Returns:
categorical_param: Input `Tensor` with appropriate assertions embedded.
Raises:
TypeError: if `categorical_param` has an unknown `dtype`.
ValueError: if we can statically identify `categorical_param` as being too
large (for being closed under int32/float casting).
"""
with tf.name_scope(name):
x = tf.convert_to_tensor(value=categorical_param, name="categorical_param")
# The size must not exceed both of:
# - The largest possible int32 (since categorical values are presumed to be
# indexes into a Tensor).
# - The largest possible integer exactly representable under the given
# floating-point dtype (since we need to cast to/from).
#
# The chosen floating-point thresholds are 2**(1 + mantissa_bits).
# For more details, see:
# https://en.wikipedia.org/wiki/Floating-point_arithmetic#Internal_representation
x_dtype = dtype_util.base_dtype(x.dtype)
max_event_size = (
_largest_integer_by_dtype(x_dtype)
if dtype_util.is_floating(x_dtype) else 0)
    if max_event_size == 0:
raise TypeError("Unable to validate size of unrecognized dtype "
"({}).".format(dtype_util.name(x_dtype)))
try:
x_shape_static = tensorshape_util.with_rank_at_least(x.shape, 1)
except ValueError:
raise ValueError("A categorical-distribution parameter must have "
"at least 1 dimension.")
event_size = tf.compat.dimension_value(x_shape_static[-1])
if event_size is not None:
if event_size < 2:
raise ValueError("A categorical-distribution parameter must have at "
"least 2 events.")
if event_size > max_event_size:
raise ValueError("Number of classes exceeds `dtype` precision, i.e., "
"{} implies shape ({}) cannot exceed {}.".format(
dtype_util.name(x_dtype), event_size,
max_event_size))
return x
else:
event_size = tf.shape(input=x, out_type=tf.int64, name="x_shape")[-1]
return with_dependencies([
assert_util.assert_rank_at_least(
x,
1,
message=("A categorical-distribution parameter must have "
"at least 1 dimension.")),
assert_util.assert_greater_equal(
tf.shape(input=x)[-1],
2,
message=("A categorical-distribution parameter must have at "
"least 2 events.")),
assert_util.assert_less_equal(
event_size,
tf.convert_to_tensor(max_event_size, dtype=tf.int64),
message="Number of classes exceeds `dtype` precision, "
"i.e., {} dtype cannot exceed {} shape.".format(
dtype_util.name(x_dtype), max_event_size)),
], x) | def function[embed_check_categorical_event_shape, parameter[categorical_param, name]]:
constant[Embeds checks that categorical distributions don't have too many classes.
A categorical-type distribution is one which, e.g., returns the class label
rather than a one-hot encoding. E.g., `Categorical(probs)`.
Since distributions output samples in the same dtype as the parameters, we
must ensure that casting doesn't lose precision. That is, the
`parameter.dtype` implies a maximum number of classes. However, since shape is
`int32` and categorical variables are presumed to be indexes into a `Tensor`,
we must also ensure that the number of classes is no larger than the largest
possible `int32` index, i.e., `2**31-1`.
In other words the number of classes, `K`, must satisfy the following
condition:
```python
K <= min(
      int(2**31 - 1),  # Largest possible int32 index.
{
tf.float16: int(2**11), # Largest int as a float16.
tf.float32: int(2**24),
tf.float64: int(2**53),
}.get(dtype_util.base_dtype(categorical_param.dtype), 0))
```
Args:
categorical_param: Floating-point `Tensor` representing parameters of
distribution over categories. The rightmost shape is presumed to be the
number of categories.
name: A name for this operation (optional).
Returns:
categorical_param: Input `Tensor` with appropriate assertions embedded.
Raises:
TypeError: if `categorical_param` has an unknown `dtype`.
ValueError: if we can statically identify `categorical_param` as being too
large (for being closed under int32/float casting).
]
with call[name[tf].name_scope, parameter[name[name]]] begin[:]
variable[x] assign[=] call[name[tf].convert_to_tensor, parameter[]]
variable[x_dtype] assign[=] call[name[dtype_util].base_dtype, parameter[name[x].dtype]]
variable[max_event_size] assign[=] <ast.IfExp object at 0x7da1b0210cd0>
        if compare[name[max_event_size] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b02122c0>
<ast.Try object at 0x7da1b02120e0>
variable[event_size] assign[=] call[name[tf].compat.dimension_value, parameter[call[name[x_shape_static]][<ast.UnaryOp object at 0x7da1b0321720>]]]
if compare[name[event_size] is_not constant[None]] begin[:]
if compare[name[event_size] less[<] constant[2]] begin[:]
<ast.Raise object at 0x7da1b03235e0>
if compare[name[event_size] greater[>] name[max_event_size]] begin[:]
<ast.Raise object at 0x7da1b0320f10>
return[name[x]] | keyword[def] identifier[embed_check_categorical_event_shape] (
identifier[categorical_param] , identifier[name] = literal[string] ):
literal[string]
keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] ):
identifier[x] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[categorical_param] , identifier[name] = literal[string] )
identifier[x_dtype] = identifier[dtype_util] . identifier[base_dtype] ( identifier[x] . identifier[dtype] )
identifier[max_event_size] =(
identifier[_largest_integer_by_dtype] ( identifier[x_dtype] )
keyword[if] identifier[dtype_util] . identifier[is_floating] ( identifier[x_dtype] ) keyword[else] literal[int] )
    keyword[if] identifier[max_event_size] == literal[int] :
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] . identifier[format] ( identifier[dtype_util] . identifier[name] ( identifier[x_dtype] )))
keyword[try] :
identifier[x_shape_static] = identifier[tensorshape_util] . identifier[with_rank_at_least] ( identifier[x] . identifier[shape] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[event_size] = identifier[tf] . identifier[compat] . identifier[dimension_value] ( identifier[x_shape_static] [- literal[int] ])
keyword[if] identifier[event_size] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[event_size] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[event_size] > identifier[max_event_size] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] (
identifier[dtype_util] . identifier[name] ( identifier[x_dtype] ), identifier[event_size] ,
identifier[max_event_size] ))
keyword[return] identifier[x]
keyword[else] :
identifier[event_size] = identifier[tf] . identifier[shape] ( identifier[input] = identifier[x] , identifier[out_type] = identifier[tf] . identifier[int64] , identifier[name] = literal[string] )[- literal[int] ]
keyword[return] identifier[with_dependencies] ([
identifier[assert_util] . identifier[assert_rank_at_least] (
identifier[x] ,
literal[int] ,
identifier[message] =( literal[string]
literal[string] )),
identifier[assert_util] . identifier[assert_greater_equal] (
identifier[tf] . identifier[shape] ( identifier[input] = identifier[x] )[- literal[int] ],
literal[int] ,
identifier[message] =( literal[string]
literal[string] )),
identifier[assert_util] . identifier[assert_less_equal] (
identifier[event_size] ,
identifier[tf] . identifier[convert_to_tensor] ( identifier[max_event_size] , identifier[dtype] = identifier[tf] . identifier[int64] ),
identifier[message] = literal[string]
literal[string] . identifier[format] (
identifier[dtype_util] . identifier[name] ( identifier[x_dtype] ), identifier[max_event_size] )),
], identifier[x] ) | def embed_check_categorical_event_shape(categorical_param, name='embed_check_categorical_event_shape'):
"""Embeds checks that categorical distributions don't have too many classes.
A categorical-type distribution is one which, e.g., returns the class label
rather than a one-hot encoding. E.g., `Categorical(probs)`.
Since distributions output samples in the same dtype as the parameters, we
must ensure that casting doesn't lose precision. That is, the
`parameter.dtype` implies a maximum number of classes. However, since shape is
`int32` and categorical variables are presumed to be indexes into a `Tensor`,
we must also ensure that the number of classes is no larger than the largest
possible `int32` index, i.e., `2**31-1`.
In other words the number of classes, `K`, must satisfy the following
condition:
```python
K <= min(
      int(2**31 - 1),  # Largest possible int32 index.
{
tf.float16: int(2**11), # Largest int as a float16.
tf.float32: int(2**24),
tf.float64: int(2**53),
}.get(dtype_util.base_dtype(categorical_param.dtype), 0))
```
Args:
categorical_param: Floating-point `Tensor` representing parameters of
distribution over categories. The rightmost shape is presumed to be the
number of categories.
name: A name for this operation (optional).
Returns:
categorical_param: Input `Tensor` with appropriate assertions embedded.
Raises:
TypeError: if `categorical_param` has an unknown `dtype`.
ValueError: if we can statically identify `categorical_param` as being too
large (for being closed under int32/float casting).
"""
with tf.name_scope(name):
x = tf.convert_to_tensor(value=categorical_param, name='categorical_param')
# The size must not exceed both of:
# - The largest possible int32 (since categorical values are presumed to be
# indexes into a Tensor).
# - The largest possible integer exactly representable under the given
# floating-point dtype (since we need to cast to/from).
#
# The chosen floating-point thresholds are 2**(1 + mantissa_bits).
# For more details, see:
# https://en.wikipedia.org/wiki/Floating-point_arithmetic#Internal_representation
x_dtype = dtype_util.base_dtype(x.dtype)
max_event_size = _largest_integer_by_dtype(x_dtype) if dtype_util.is_floating(x_dtype) else 0
    if max_event_size == 0:
raise TypeError('Unable to validate size of unrecognized dtype ({}).'.format(dtype_util.name(x_dtype))) # depends on [control=['if'], data=[]]
try:
x_shape_static = tensorshape_util.with_rank_at_least(x.shape, 1) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('A categorical-distribution parameter must have at least 1 dimension.') # depends on [control=['except'], data=[]]
event_size = tf.compat.dimension_value(x_shape_static[-1])
if event_size is not None:
if event_size < 2:
raise ValueError('A categorical-distribution parameter must have at least 2 events.') # depends on [control=['if'], data=[]]
if event_size > max_event_size:
raise ValueError('Number of classes exceeds `dtype` precision, i.e., {} implies shape ({}) cannot exceed {}.'.format(dtype_util.name(x_dtype), event_size, max_event_size)) # depends on [control=['if'], data=['event_size', 'max_event_size']]
return x # depends on [control=['if'], data=['event_size']]
else:
event_size = tf.shape(input=x, out_type=tf.int64, name='x_shape')[-1]
return with_dependencies([assert_util.assert_rank_at_least(x, 1, message='A categorical-distribution parameter must have at least 1 dimension.'), assert_util.assert_greater_equal(tf.shape(input=x)[-1], 2, message='A categorical-distribution parameter must have at least 2 events.'), assert_util.assert_less_equal(event_size, tf.convert_to_tensor(max_event_size, dtype=tf.int64), message='Number of classes exceeds `dtype` precision, i.e., {} dtype cannot exceed {} shape.'.format(dtype_util.name(x_dtype), max_event_size))], x) # depends on [control=['with'], data=[]] |
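The dtype thresholds quoted in the docstring follow from the mantissa widths, and the float32 case can be checked directly (a pure-NumPy sketch; TensorFlow is not needed for the arithmetic):

import numpy as np

# 2**(1 + mantissa_bits) is the largest N with all integers 0..N exactly representable:
# float16 -> 2**11 = 2048, float32 -> 2**24 = 16777216, float64 -> 2**53 = 9007199254740992
print(int(np.float32(2 ** 24)) == 2 ** 24)          # True: 16777216 survives the round trip
print(int(np.float32(2 ** 24 + 1)) == 2 ** 24 + 1)  # False: 16777217 rounds back to 16777216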
def output(self, _filename):
"""
_filename is not used
Args:
_filename(string)
"""
txt = ''
for contract in self.slither.contracts_derived:
txt += '\n{}:\n'.format(contract.name)
table = PrettyTable(['Name', 'Type'])
for variable in contract.state_variables:
if not variable.is_constant:
table.add_row([variable.name, str(variable.type)])
txt += str(table) + '\n'
self.info(txt) | def function[output, parameter[self, _filename]]:
constant[
_filename is not used
Args:
_filename(string)
]
variable[txt] assign[=] constant[]
for taget[name[contract]] in starred[name[self].slither.contracts_derived] begin[:]
<ast.AugAssign object at 0x7da20c7cb9d0>
variable[table] assign[=] call[name[PrettyTable], parameter[list[[<ast.Constant object at 0x7da20c7cb5e0>, <ast.Constant object at 0x7da20c7c89a0>]]]]
for taget[name[variable]] in starred[name[contract].state_variables] begin[:]
if <ast.UnaryOp object at 0x7da20c7c9f00> begin[:]
call[name[table].add_row, parameter[list[[<ast.Attribute object at 0x7da20c7c9a50>, <ast.Call object at 0x7da20c7cb670>]]]]
<ast.AugAssign object at 0x7da20c7cab90>
call[name[self].info, parameter[name[txt]]] | keyword[def] identifier[output] ( identifier[self] , identifier[_filename] ):
literal[string]
identifier[txt] = literal[string]
keyword[for] identifier[contract] keyword[in] identifier[self] . identifier[slither] . identifier[contracts_derived] :
identifier[txt] += literal[string] . identifier[format] ( identifier[contract] . identifier[name] )
identifier[table] = identifier[PrettyTable] ([ literal[string] , literal[string] ])
keyword[for] identifier[variable] keyword[in] identifier[contract] . identifier[state_variables] :
keyword[if] keyword[not] identifier[variable] . identifier[is_constant] :
identifier[table] . identifier[add_row] ([ identifier[variable] . identifier[name] , identifier[str] ( identifier[variable] . identifier[type] )])
identifier[txt] += identifier[str] ( identifier[table] )+ literal[string]
identifier[self] . identifier[info] ( identifier[txt] ) | def output(self, _filename):
"""
_filename is not used
Args:
_filename(string)
"""
txt = ''
for contract in self.slither.contracts_derived:
txt += '\n{}:\n'.format(contract.name)
table = PrettyTable(['Name', 'Type'])
for variable in contract.state_variables:
if not variable.is_constant:
table.add_row([variable.name, str(variable.type)]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['variable']]
txt += str(table) + '\n' # depends on [control=['for'], data=['contract']]
self.info(txt) |
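A quick sketch of the PrettyTable layout the printer above produces, with made-up contract data standing in for slither's state variables:

from prettytable import PrettyTable

table = PrettyTable(['Name', 'Type'])
for name, typ in [('owner', 'address'), ('balances', 'mapping(address => uint256)')]:
    table.add_row([name, typ])
print('\nMyToken:\n' + str(table))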
def check_background(qpi):
"""Check QPimage background data
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails
"""
for imdat in [qpi._amp, qpi._pha]:
try:
fit, attrs = imdat.get_bg(key="fit", ret_attrs=True)
except KeyError:
# No bg correction performed
pass
else:
kwargs = dict(attrs)
# check if we have a user-defined mask image
binkey = "estimate_bg_from_mask"
if binkey in imdat.h5:
kwargs["from_mask"] = imdat.h5[binkey][:]
else:
kwargs["from_mask"] = None
# compute background correction
with h5py.File("check.h5",
driver="core",
backing_store=False) as h5:
# imdat.__class__ is "Amplitude" or "Phase"
testimdat = imdat.__class__(h5)
testimdat["raw"] = imdat.raw
# Set experimental bg data if given
try:
bg = imdat.get_bg("data")
except KeyError:
pass
else:
testimdat.set_bg(bg, key="data")
# fit bg
testimdat.estimate_bg(**kwargs)
# compare
if not np.allclose(testimdat.get_bg(key="fit"), fit):
msg = "Wrong estimated (fitted) background!"
raise IntegrityCheckError(msg) | def function[check_background, parameter[qpi]]:
constant[Check QPimage background data
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails
]
for taget[name[imdat]] in starred[list[[<ast.Attribute object at 0x7da1b11404f0>, <ast.Attribute object at 0x7da1b1142710>]]] begin[:]
<ast.Try object at 0x7da1b11421d0> | keyword[def] identifier[check_background] ( identifier[qpi] ):
literal[string]
keyword[for] identifier[imdat] keyword[in] [ identifier[qpi] . identifier[_amp] , identifier[qpi] . identifier[_pha] ]:
keyword[try] :
identifier[fit] , identifier[attrs] = identifier[imdat] . identifier[get_bg] ( identifier[key] = literal[string] , identifier[ret_attrs] = keyword[True] )
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[kwargs] = identifier[dict] ( identifier[attrs] )
identifier[binkey] = literal[string]
keyword[if] identifier[binkey] keyword[in] identifier[imdat] . identifier[h5] :
identifier[kwargs] [ literal[string] ]= identifier[imdat] . identifier[h5] [ identifier[binkey] ][:]
keyword[else] :
identifier[kwargs] [ literal[string] ]= keyword[None]
keyword[with] identifier[h5py] . identifier[File] ( literal[string] ,
identifier[driver] = literal[string] ,
identifier[backing_store] = keyword[False] ) keyword[as] identifier[h5] :
identifier[testimdat] = identifier[imdat] . identifier[__class__] ( identifier[h5] )
identifier[testimdat] [ literal[string] ]= identifier[imdat] . identifier[raw]
keyword[try] :
identifier[bg] = identifier[imdat] . identifier[get_bg] ( literal[string] )
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[testimdat] . identifier[set_bg] ( identifier[bg] , identifier[key] = literal[string] )
identifier[testimdat] . identifier[estimate_bg] (** identifier[kwargs] )
keyword[if] keyword[not] identifier[np] . identifier[allclose] ( identifier[testimdat] . identifier[get_bg] ( identifier[key] = literal[string] ), identifier[fit] ):
identifier[msg] = literal[string]
keyword[raise] identifier[IntegrityCheckError] ( identifier[msg] ) | def check_background(qpi):
"""Check QPimage background data
Parameters
----------
qpi: qpimage.core.QPImage
Raises
------
IntegrityCheckError
if the check fails
"""
for imdat in [qpi._amp, qpi._pha]:
try:
(fit, attrs) = imdat.get_bg(key='fit', ret_attrs=True) # depends on [control=['try'], data=[]]
except KeyError:
# No bg correction performed
pass # depends on [control=['except'], data=[]]
else:
kwargs = dict(attrs)
# check if we have a user-defined mask image
binkey = 'estimate_bg_from_mask'
if binkey in imdat.h5:
kwargs['from_mask'] = imdat.h5[binkey][:] # depends on [control=['if'], data=['binkey']]
else:
kwargs['from_mask'] = None
# compute background correction
with h5py.File('check.h5', driver='core', backing_store=False) as h5:
# imdat.__class__ is "Amplitude" or "Phase"
testimdat = imdat.__class__(h5)
testimdat['raw'] = imdat.raw
# Set experimental bg data if given
try:
bg = imdat.get_bg('data') # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
testimdat.set_bg(bg, key='data')
# fit bg
testimdat.estimate_bg(**kwargs)
# compare
if not np.allclose(testimdat.get_bg(key='fit'), fit):
msg = 'Wrong estimated (fitted) background!'
raise IntegrityCheckError(msg) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['h5']] # depends on [control=['for'], data=['imdat']] |
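The integrity check above recomputes the background fit inside a throwaway in-memory HDF5 file and compares it to the stored fit; the core-driver idiom it relies on looks like this (with the background estimation stubbed out):

import h5py
import numpy as np

# driver='core' with backing_store=False keeps the file purely in RAM,
# so 'check.h5' is never written to disk.
with h5py.File('check.h5', 'w', driver='core', backing_store=False) as h5:
    h5['raw'] = np.arange(6.0).reshape(2, 3)
    recomputed = h5['raw'][:] * 2  # stand-in for estimate_bg() + get_bg(key='fit')
    stored_fit = np.arange(6.0).reshape(2, 3) * 2
    if not np.allclose(recomputed, stored_fit):
        raise ValueError('Wrong estimated (fitted) background!')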
def add(self, type):
# type: (InternalType) -> None
"""
Add type to the runtime type samples.
"""
try:
if isinstance(type, SetType):
if EMPTY_SET_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_SET_TYPE)
elif isinstance(type, ListType):
if EMPTY_LIST_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_LIST_TYPE)
elif isinstance(type, IteratorType):
if EMPTY_ITERATOR_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_ITERATOR_TYPE)
elif isinstance(type, DictType):
if EMPTY_DICT_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_DICT_TYPE)
for item in self.types_hashable:
if isinstance(item, DictType):
if item.key_type == type.key_type:
item.val_type.merge(type.val_type)
return
self.types_hashable.add(type)
except (TypeError, AttributeError):
try:
if type not in self.types:
self.types.append(type)
except AttributeError:
if TypeWasIncomparable not in self.types:
self.types.append(TypeWasIncomparable) | def function[add, parameter[self, type]]:
constant[
Add type to the runtime type samples.
]
<ast.Try object at 0x7da18c4cca60> | keyword[def] identifier[add] ( identifier[self] , identifier[type] ):
literal[string]
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[type] , identifier[SetType] ):
keyword[if] identifier[EMPTY_SET_TYPE] keyword[in] identifier[self] . identifier[types_hashable] :
identifier[self] . identifier[types_hashable] . identifier[remove] ( identifier[EMPTY_SET_TYPE] )
keyword[elif] identifier[isinstance] ( identifier[type] , identifier[ListType] ):
keyword[if] identifier[EMPTY_LIST_TYPE] keyword[in] identifier[self] . identifier[types_hashable] :
identifier[self] . identifier[types_hashable] . identifier[remove] ( identifier[EMPTY_LIST_TYPE] )
keyword[elif] identifier[isinstance] ( identifier[type] , identifier[IteratorType] ):
keyword[if] identifier[EMPTY_ITERATOR_TYPE] keyword[in] identifier[self] . identifier[types_hashable] :
identifier[self] . identifier[types_hashable] . identifier[remove] ( identifier[EMPTY_ITERATOR_TYPE] )
keyword[elif] identifier[isinstance] ( identifier[type] , identifier[DictType] ):
keyword[if] identifier[EMPTY_DICT_TYPE] keyword[in] identifier[self] . identifier[types_hashable] :
identifier[self] . identifier[types_hashable] . identifier[remove] ( identifier[EMPTY_DICT_TYPE] )
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[types_hashable] :
keyword[if] identifier[isinstance] ( identifier[item] , identifier[DictType] ):
keyword[if] identifier[item] . identifier[key_type] == identifier[type] . identifier[key_type] :
identifier[item] . identifier[val_type] . identifier[merge] ( identifier[type] . identifier[val_type] )
keyword[return]
identifier[self] . identifier[types_hashable] . identifier[add] ( identifier[type] )
keyword[except] ( identifier[TypeError] , identifier[AttributeError] ):
keyword[try] :
keyword[if] identifier[type] keyword[not] keyword[in] identifier[self] . identifier[types] :
identifier[self] . identifier[types] . identifier[append] ( identifier[type] )
keyword[except] identifier[AttributeError] :
keyword[if] identifier[TypeWasIncomparable] keyword[not] keyword[in] identifier[self] . identifier[types] :
identifier[self] . identifier[types] . identifier[append] ( identifier[TypeWasIncomparable] ) | def add(self, type):
# type: (InternalType) -> None
'\n Add type to the runtime type samples.\n '
try:
if isinstance(type, SetType):
if EMPTY_SET_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_SET_TYPE) # depends on [control=['if'], data=['EMPTY_SET_TYPE']] # depends on [control=['if'], data=[]]
elif isinstance(type, ListType):
if EMPTY_LIST_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_LIST_TYPE) # depends on [control=['if'], data=['EMPTY_LIST_TYPE']] # depends on [control=['if'], data=[]]
elif isinstance(type, IteratorType):
if EMPTY_ITERATOR_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_ITERATOR_TYPE) # depends on [control=['if'], data=['EMPTY_ITERATOR_TYPE']] # depends on [control=['if'], data=[]]
elif isinstance(type, DictType):
if EMPTY_DICT_TYPE in self.types_hashable:
self.types_hashable.remove(EMPTY_DICT_TYPE) # depends on [control=['if'], data=['EMPTY_DICT_TYPE']]
for item in self.types_hashable:
if isinstance(item, DictType):
if item.key_type == type.key_type:
item.val_type.merge(type.val_type)
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
self.types_hashable.add(type) # depends on [control=['try'], data=[]]
except (TypeError, AttributeError):
try:
if type not in self.types:
self.types.append(type) # depends on [control=['if'], data=['type']] # depends on [control=['try'], data=[]]
except AttributeError:
if TypeWasIncomparable not in self.types:
self.types.append(TypeWasIncomparable) # depends on [control=['if'], data=['TypeWasIncomparable']] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] |
def mkArrayUpdater(nextItemVal: Value, indexes: Tuple[Value],
invalidate: bool):
"""
    Create a value updater for a value of array type, for use in simulation
    :param nextItemVal: instance of Value which will be assigned to the signal
    :param indexes: tuple of indexes where the value should be updated
        in the target array
    :param invalidate: if True, the valid mask of the assigned item is cleared
    :return: function(value) -> tuple(valueHasChangedFlag, nextVal)
"""
def updater(currentVal):
if len(indexes) > 1:
raise NotImplementedError("[TODO] implement for more indexes")
_nextItemVal = nextItemVal.clone()
if invalidate:
_nextItemVal.vldMask = 0
index = indexes[0]
change = valueHasChanged(currentVal._getitem__val(index), _nextItemVal)
currentVal._setitem__val(index, _nextItemVal)
return (change, currentVal)
return updater | def function[mkArrayUpdater, parameter[nextItemVal, indexes, invalidate]]:
constant[
    Create a value updater for a value of array type, for use in simulation
    :param nextItemVal: instance of Value which will be assigned to the signal
    :param indexes: tuple of indexes where the value should be updated
        in the target array
    :param invalidate: if True, the valid mask of the assigned item is cleared
    :return: function(value) -> tuple(valueHasChangedFlag, nextVal)
]
def function[updater, parameter[currentVal]]:
if compare[call[name[len], parameter[name[indexes]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da18c4ce170>
variable[_nextItemVal] assign[=] call[name[nextItemVal].clone, parameter[]]
if name[invalidate] begin[:]
name[_nextItemVal].vldMask assign[=] constant[0]
variable[index] assign[=] call[name[indexes]][constant[0]]
variable[change] assign[=] call[name[valueHasChanged], parameter[call[name[currentVal]._getitem__val, parameter[name[index]]], name[_nextItemVal]]]
call[name[currentVal]._setitem__val, parameter[name[index], name[_nextItemVal]]]
return[tuple[[<ast.Name object at 0x7da1b26af580>, <ast.Name object at 0x7da1b26adae0>]]]
return[name[updater]] | keyword[def] identifier[mkArrayUpdater] ( identifier[nextItemVal] : identifier[Value] , identifier[indexes] : identifier[Tuple] [ identifier[Value] ],
identifier[invalidate] : identifier[bool] ):
literal[string]
keyword[def] identifier[updater] ( identifier[currentVal] ):
keyword[if] identifier[len] ( identifier[indexes] )> literal[int] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
identifier[_nextItemVal] = identifier[nextItemVal] . identifier[clone] ()
keyword[if] identifier[invalidate] :
identifier[_nextItemVal] . identifier[vldMask] = literal[int]
identifier[index] = identifier[indexes] [ literal[int] ]
identifier[change] = identifier[valueHasChanged] ( identifier[currentVal] . identifier[_getitem__val] ( identifier[index] ), identifier[_nextItemVal] )
identifier[currentVal] . identifier[_setitem__val] ( identifier[index] , identifier[_nextItemVal] )
keyword[return] ( identifier[change] , identifier[currentVal] )
keyword[return] identifier[updater] | def mkArrayUpdater(nextItemVal: Value, indexes: Tuple[Value], invalidate: bool):
"""
    Create a value updater for a value of array type, for use in simulation
    :param nextItemVal: instance of Value which will be assigned to the signal
    :param indexes: tuple of indexes where the value should be updated
        in the target array
    :param invalidate: if True, the valid mask of the assigned item is cleared
    :return: function(value) -> tuple(valueHasChangedFlag, nextVal)
"""
def updater(currentVal):
if len(indexes) > 1:
raise NotImplementedError('[TODO] implement for more indexes') # depends on [control=['if'], data=[]]
_nextItemVal = nextItemVal.clone()
if invalidate:
_nextItemVal.vldMask = 0 # depends on [control=['if'], data=[]]
index = indexes[0]
change = valueHasChanged(currentVal._getitem__val(index), _nextItemVal)
currentVal._setitem__val(index, _nextItemVal)
return (change, currentVal)
return updater |
def showEditor(self):
"""Creates and shows an editor for this Stimulus"""
if self.editor is not None:
editor = self.editor()
editor.setModel(self)
factory = get_stimulus_factory(self._stim.stimType())
editor.editingFinished.connect(factory.update)
return editor
else:
logger = logging.getLogger('main')
logger.warning('Erm, no editor available :(') | def function[showEditor, parameter[self]]:
constant[Creates and shows an editor for this Stimulus]
if compare[name[self].editor is_not constant[None]] begin[:]
variable[editor] assign[=] call[name[self].editor, parameter[]]
call[name[editor].setModel, parameter[name[self]]]
variable[factory] assign[=] call[name[get_stimulus_factory], parameter[call[name[self]._stim.stimType, parameter[]]]]
call[name[editor].editingFinished.connect, parameter[name[factory].update]]
return[name[editor]] | keyword[def] identifier[showEditor] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[editor] keyword[is] keyword[not] keyword[None] :
identifier[editor] = identifier[self] . identifier[editor] ()
identifier[editor] . identifier[setModel] ( identifier[self] )
identifier[factory] = identifier[get_stimulus_factory] ( identifier[self] . identifier[_stim] . identifier[stimType] ())
identifier[editor] . identifier[editingFinished] . identifier[connect] ( identifier[factory] . identifier[update] )
keyword[return] identifier[editor]
keyword[else] :
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[logger] . identifier[warning] ( literal[string] ) | def showEditor(self):
"""Creates and shows an editor for this Stimulus"""
if self.editor is not None:
editor = self.editor()
editor.setModel(self)
factory = get_stimulus_factory(self._stim.stimType())
editor.editingFinished.connect(factory.update)
return editor # depends on [control=['if'], data=[]]
else:
logger = logging.getLogger('main')
logger.warning('Erm, no editor available :(') |
def write_to_file(self, out_data):
"""
    Outputs data to an open netCDF file. If the variable does not yet exist in the file, it will be created.
    Otherwise, the existing variable is overwritten
    Args:
        out_data: open netCDF Dataset object that the variable is written into
"""
full_var_name = self.consensus_type + "_" + self.variable
if "-hour" in self.consensus_type:
if full_var_name not in out_data.variables.keys():
var = out_data.createVariable(full_var_name, "f4", ("y", "x"), zlib=True,
least_significant_digit=3, shuffle=True)
else:
var = out_data.variables[full_var_name]
var.coordinates = "y x"
else:
if full_var_name not in out_data.variables.keys():
var = out_data.createVariable(full_var_name, "f4", ("time", "y", "x"), zlib=True,
least_significant_digit=3, shuffle=True)
else:
var = out_data.variables[full_var_name]
var.coordinates = "time y x"
var[:] = self.data
var.units = self.units
var.long_name = self.consensus_type + "_" + self.variable
return | def function[write_to_file, parameter[self, out_data]]:
constant[
    Outputs data to an open netCDF file. If the variable does not yet exist in the file, it will be created.
    Otherwise, the existing variable is overwritten
    Args:
        out_data: open netCDF Dataset object that the variable is written into
]
variable[full_var_name] assign[=] binary_operation[binary_operation[name[self].consensus_type + constant[_]] + name[self].variable]
if compare[constant[-hour] in name[self].consensus_type] begin[:]
if compare[name[full_var_name] <ast.NotIn object at 0x7da2590d7190> call[name[out_data].variables.keys, parameter[]]] begin[:]
variable[var] assign[=] call[name[out_data].createVariable, parameter[name[full_var_name], constant[f4], tuple[[<ast.Constant object at 0x7da1b0ea3a00>, <ast.Constant object at 0x7da1b0ea2b60>]]]]
name[var].coordinates assign[=] constant[y x]
call[name[var]][<ast.Slice object at 0x7da1b0ea3100>] assign[=] name[self].data
name[var].units assign[=] name[self].units
name[var].long_name assign[=] binary_operation[binary_operation[name[self].consensus_type + constant[_]] + name[self].variable]
return[None] | keyword[def] identifier[write_to_file] ( identifier[self] , identifier[out_data] ):
literal[string]
identifier[full_var_name] = identifier[self] . identifier[consensus_type] + literal[string] + identifier[self] . identifier[variable]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[consensus_type] :
keyword[if] identifier[full_var_name] keyword[not] keyword[in] identifier[out_data] . identifier[variables] . identifier[keys] ():
identifier[var] = identifier[out_data] . identifier[createVariable] ( identifier[full_var_name] , literal[string] ,( literal[string] , literal[string] ), identifier[zlib] = keyword[True] ,
identifier[least_significant_digit] = literal[int] , identifier[shuffle] = keyword[True] )
keyword[else] :
identifier[var] = identifier[out_data] . identifier[variables] [ identifier[full_var_name] ]
identifier[var] . identifier[coordinates] = literal[string]
keyword[else] :
keyword[if] identifier[full_var_name] keyword[not] keyword[in] identifier[out_data] . identifier[variables] . identifier[keys] ():
identifier[var] = identifier[out_data] . identifier[createVariable] ( identifier[full_var_name] , literal[string] ,( literal[string] , literal[string] , literal[string] ), identifier[zlib] = keyword[True] ,
identifier[least_significant_digit] = literal[int] , identifier[shuffle] = keyword[True] )
keyword[else] :
identifier[var] = identifier[out_data] . identifier[variables] [ identifier[full_var_name] ]
identifier[var] . identifier[coordinates] = literal[string]
identifier[var] [:]= identifier[self] . identifier[data]
identifier[var] . identifier[units] = identifier[self] . identifier[units]
identifier[var] . identifier[long_name] = identifier[self] . identifier[consensus_type] + literal[string] + identifier[self] . identifier[variable]
keyword[return] | def write_to_file(self, out_data):
"""
    Outputs data to an open netCDF file. If the variable does not yet exist in the file, it will be created.
    Otherwise, the existing variable is overwritten
    Args:
        out_data: open netCDF Dataset object that the variable is written into
"""
full_var_name = self.consensus_type + '_' + self.variable
if '-hour' in self.consensus_type:
if full_var_name not in out_data.variables.keys():
var = out_data.createVariable(full_var_name, 'f4', ('y', 'x'), zlib=True, least_significant_digit=3, shuffle=True) # depends on [control=['if'], data=['full_var_name']]
else:
var = out_data.variables[full_var_name]
var.coordinates = 'y x' # depends on [control=['if'], data=[]]
else:
if full_var_name not in out_data.variables.keys():
var = out_data.createVariable(full_var_name, 'f4', ('time', 'y', 'x'), zlib=True, least_significant_digit=3, shuffle=True) # depends on [control=['if'], data=['full_var_name']]
else:
var = out_data.variables[full_var_name]
var.coordinates = 'time y x'
var[:] = self.data
var.units = self.units
var.long_name = self.consensus_type + '_' + self.variable
return |
def write_quick(self, addr):
"""write_quick(addr)
Perform SMBus Quick transaction.
"""
self._set_addr(addr)
if SMBUS.i2c_smbus_write_quick(self._fd, SMBUS.I2C_SMBUS_WRITE) != 0:
raise IOError(ffi.errno) | def function[write_quick, parameter[self, addr]]:
constant[write_quick(addr)
Perform SMBus Quick transaction.
]
call[name[self]._set_addr, parameter[name[addr]]]
if compare[call[name[SMBUS].i2c_smbus_write_quick, parameter[name[self]._fd, name[SMBUS].I2C_SMBUS_WRITE]] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da2054a77f0> | keyword[def] identifier[write_quick] ( identifier[self] , identifier[addr] ):
literal[string]
identifier[self] . identifier[_set_addr] ( identifier[addr] )
keyword[if] identifier[SMBUS] . identifier[i2c_smbus_write_quick] ( identifier[self] . identifier[_fd] , identifier[SMBUS] . identifier[I2C_SMBUS_WRITE] )!= literal[int] :
keyword[raise] identifier[IOError] ( identifier[ffi] . identifier[errno] ) | def write_quick(self, addr):
"""write_quick(addr)
Perform SMBus Quick transaction.
"""
self._set_addr(addr)
if SMBUS.i2c_smbus_write_quick(self._fd, SMBUS.I2C_SMBUS_WRITE) != 0:
raise IOError(ffi.errno) # depends on [control=['if'], data=[]] |
def get_snpeff_info(snpeff_string, snpeff_header):
"""Make the vep annotations into a dictionaries
A snpeff dictionary will have the snpeff column names as keys and
the vep annotations as values.
The dictionaries are stored in a list.
One dictionary for each transcript.
Args:
snpeff_string (string): A string with the ANN annotation
snpeff_header (list): A list with the vep header
Return:
snpeff_annotations (list): A list of vep dicts
"""
snpeff_annotations = [
dict(zip(snpeff_header, snpeff_annotation.split('|')))
for snpeff_annotation in snpeff_string.split(',')
]
return snpeff_annotations | def function[get_snpeff_info, parameter[snpeff_string, snpeff_header]]:
    constant[Make the snpeff annotations into dictionaries
    A snpeff dictionary will have the snpeff column names as keys and
    the snpeff annotations as values.
    The dictionaries are stored in a list.
    One dictionary for each transcript.
    Args:
        snpeff_string (string): A string with the ANN annotation
        snpeff_header (list): A list with the snpeff header
    Return:
        snpeff_annotations (list): A list of snpeff dicts
]
variable[snpeff_annotations] assign[=] <ast.ListComp object at 0x7da2047e8640>
return[name[snpeff_annotations]] | keyword[def] identifier[get_snpeff_info] ( identifier[snpeff_string] , identifier[snpeff_header] ):
literal[string]
identifier[snpeff_annotations] =[
identifier[dict] ( identifier[zip] ( identifier[snpeff_header] , identifier[snpeff_annotation] . identifier[split] ( literal[string] )))
keyword[for] identifier[snpeff_annotation] keyword[in] identifier[snpeff_string] . identifier[split] ( literal[string] )
]
keyword[return] identifier[snpeff_annotations] | def get_snpeff_info(snpeff_string, snpeff_header):
"""Make the vep annotations into a dictionaries
A snpeff dictionary will have the snpeff column names as keys and
the vep annotations as values.
The dictionaries are stored in a list.
One dictionary for each transcript.
Args:
snpeff_string (string): A string with the ANN annotation
snpeff_header (list): A list with the vep header
Return:
snpeff_annotations (list): A list of vep dicts
"""
snpeff_annotations = [dict(zip(snpeff_header, snpeff_annotation.split('|'))) for snpeff_annotation in snpeff_string.split(',')]
return snpeff_annotations |
def concat(cls, variables, dim='concat_dim', positions=None,
shortcut=False):
"""Specialized version of Variable.concat for IndexVariable objects.
This exists because we want to avoid converting Index objects to NumPy
arrays, if possible.
"""
if not isinstance(dim, str):
dim, = dim.dims
variables = list(variables)
first_var = variables[0]
if any(not isinstance(v, cls) for v in variables):
raise TypeError('IndexVariable.concat requires that all input '
'variables be IndexVariable objects')
indexes = [v._data.array for v in variables]
if not indexes:
data = []
else:
data = indexes[0].append(indexes[1:])
if positions is not None:
indices = nputils.inverse_permutation(
np.concatenate(positions))
data = data.take(indices)
attrs = OrderedDict(first_var.attrs)
if not shortcut:
for var in variables:
if var.dims != first_var.dims:
raise ValueError('inconsistent dimensions')
utils.remove_incompatible_items(attrs, var.attrs)
return cls(first_var.dims, data, attrs) | def function[concat, parameter[cls, variables, dim, positions, shortcut]]:
constant[Specialized version of Variable.concat for IndexVariable objects.
This exists because we want to avoid converting Index objects to NumPy
arrays, if possible.
]
if <ast.UnaryOp object at 0x7da18ede6350> begin[:]
<ast.Tuple object at 0x7da18ede6140> assign[=] name[dim].dims
variable[variables] assign[=] call[name[list], parameter[name[variables]]]
variable[first_var] assign[=] call[name[variables]][constant[0]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18ede4c70>]] begin[:]
<ast.Raise object at 0x7da18ede76a0>
variable[indexes] assign[=] <ast.ListComp object at 0x7da18ede5450>
if <ast.UnaryOp object at 0x7da18ede74f0> begin[:]
variable[data] assign[=] list[[]]
variable[attrs] assign[=] call[name[OrderedDict], parameter[name[first_var].attrs]]
if <ast.UnaryOp object at 0x7da18ede6fe0> begin[:]
for taget[name[var]] in starred[name[variables]] begin[:]
if compare[name[var].dims not_equal[!=] name[first_var].dims] begin[:]
<ast.Raise object at 0x7da18ede6c50>
call[name[utils].remove_incompatible_items, parameter[name[attrs], name[var].attrs]]
return[call[name[cls], parameter[name[first_var].dims, name[data], name[attrs]]]] | keyword[def] identifier[concat] ( identifier[cls] , identifier[variables] , identifier[dim] = literal[string] , identifier[positions] = keyword[None] ,
identifier[shortcut] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[dim] , identifier[str] ):
identifier[dim] ,= identifier[dim] . identifier[dims]
identifier[variables] = identifier[list] ( identifier[variables] )
identifier[first_var] = identifier[variables] [ literal[int] ]
keyword[if] identifier[any] ( keyword[not] identifier[isinstance] ( identifier[v] , identifier[cls] ) keyword[for] identifier[v] keyword[in] identifier[variables] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] )
identifier[indexes] =[ identifier[v] . identifier[_data] . identifier[array] keyword[for] identifier[v] keyword[in] identifier[variables] ]
keyword[if] keyword[not] identifier[indexes] :
identifier[data] =[]
keyword[else] :
identifier[data] = identifier[indexes] [ literal[int] ]. identifier[append] ( identifier[indexes] [ literal[int] :])
keyword[if] identifier[positions] keyword[is] keyword[not] keyword[None] :
identifier[indices] = identifier[nputils] . identifier[inverse_permutation] (
identifier[np] . identifier[concatenate] ( identifier[positions] ))
identifier[data] = identifier[data] . identifier[take] ( identifier[indices] )
identifier[attrs] = identifier[OrderedDict] ( identifier[first_var] . identifier[attrs] )
keyword[if] keyword[not] identifier[shortcut] :
keyword[for] identifier[var] keyword[in] identifier[variables] :
keyword[if] identifier[var] . identifier[dims] != identifier[first_var] . identifier[dims] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[utils] . identifier[remove_incompatible_items] ( identifier[attrs] , identifier[var] . identifier[attrs] )
keyword[return] identifier[cls] ( identifier[first_var] . identifier[dims] , identifier[data] , identifier[attrs] ) | def concat(cls, variables, dim='concat_dim', positions=None, shortcut=False):
"""Specialized version of Variable.concat for IndexVariable objects.
This exists because we want to avoid converting Index objects to NumPy
arrays, if possible.
"""
if not isinstance(dim, str):
(dim,) = dim.dims # depends on [control=['if'], data=[]]
variables = list(variables)
first_var = variables[0]
if any((not isinstance(v, cls) for v in variables)):
raise TypeError('IndexVariable.concat requires that all input variables be IndexVariable objects') # depends on [control=['if'], data=[]]
indexes = [v._data.array for v in variables]
if not indexes:
data = [] # depends on [control=['if'], data=[]]
else:
data = indexes[0].append(indexes[1:])
if positions is not None:
indices = nputils.inverse_permutation(np.concatenate(positions))
data = data.take(indices) # depends on [control=['if'], data=['positions']]
attrs = OrderedDict(first_var.attrs)
if not shortcut:
for var in variables:
if var.dims != first_var.dims:
raise ValueError('inconsistent dimensions') # depends on [control=['if'], data=[]]
utils.remove_incompatible_items(attrs, var.attrs) # depends on [control=['for'], data=['var']] # depends on [control=['if'], data=[]]
return cls(first_var.dims, data, attrs) |
def point_before_card(self, card, x, y):
"""Return whether ``(x, y)`` is somewhere before ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is above or to the left
of the card.
"""
def ycmp():
if self.card_y_hint_step == 0:
return False
elif self.card_y_hint_step > 0:
# stacking upward
return y < card.y
else:
# stacking downward
return y > card.top
if self.card_x_hint_step > 0:
# stacking to the right
if x < card.x:
return True
return ycmp()
elif self.card_x_hint_step == 0:
return ycmp()
else:
# stacking to the left
if x > card.right:
return True
return ycmp() | def function[point_before_card, parameter[self, card, x, y]]:
constant[Return whether ``(x, y)`` is somewhere before ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is above or to the left
of the card.
]
def function[ycmp, parameter[]]:
if compare[name[self].card_y_hint_step equal[==] constant[0]] begin[:]
return[constant[False]]
if compare[name[self].card_x_hint_step greater[>] constant[0]] begin[:]
if compare[name[x] less[<] name[card].x] begin[:]
return[constant[True]]
return[call[name[ycmp], parameter[]]] | keyword[def] identifier[point_before_card] ( identifier[self] , identifier[card] , identifier[x] , identifier[y] ):
literal[string]
keyword[def] identifier[ycmp] ():
keyword[if] identifier[self] . identifier[card_y_hint_step] == literal[int] :
keyword[return] keyword[False]
keyword[elif] identifier[self] . identifier[card_y_hint_step] > literal[int] :
keyword[return] identifier[y] < identifier[card] . identifier[y]
keyword[else] :
keyword[return] identifier[y] > identifier[card] . identifier[top]
keyword[if] identifier[self] . identifier[card_x_hint_step] > literal[int] :
keyword[if] identifier[x] < identifier[card] . identifier[x] :
keyword[return] keyword[True]
keyword[return] identifier[ycmp] ()
keyword[elif] identifier[self] . identifier[card_x_hint_step] == literal[int] :
keyword[return] identifier[ycmp] ()
keyword[else] :
keyword[if] identifier[x] > identifier[card] . identifier[right] :
keyword[return] keyword[True]
keyword[return] identifier[ycmp] () | def point_before_card(self, card, x, y):
"""Return whether ``(x, y)`` is somewhere before ``card``, given how I
know cards to be arranged.
If the cards are being stacked down and to the right, that
means I'm testing whether ``(x, y)`` is above or to the left
of the card.
"""
def ycmp():
if self.card_y_hint_step == 0:
return False # depends on [control=['if'], data=[]]
elif self.card_y_hint_step > 0:
# stacking upward
return y < card.y # depends on [control=['if'], data=[]]
else:
# stacking downward
return y > card.top
if self.card_x_hint_step > 0:
# stacking to the right
if x < card.x:
return True # depends on [control=['if'], data=[]]
return ycmp() # depends on [control=['if'], data=[]]
elif self.card_x_hint_step == 0:
return ycmp() # depends on [control=['if'], data=[]]
else:
# stacking to the left
if x > card.right:
return True # depends on [control=['if'], data=[]]
return ycmp() |
def filesizeformat(bytes, sep=' '):
"""
Formats the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
102 B, 2.3 GB etc).
Grabbed from Django (http://www.djangoproject.com), slightly modified.
:param bytes: size in bytes (as integer)
:param sep: string separator between number and abbreviation
"""
try:
bytes = float(bytes)
except (TypeError, ValueError, UnicodeDecodeError):
return '0%sB' % sep
if bytes < 1024:
size = bytes
template = '%.0f%sB'
elif bytes < 1024 * 1024:
size = bytes / 1024
template = '%.0f%sKB'
elif bytes < 1024 * 1024 * 1024:
size = bytes / 1024 / 1024
template = '%.1f%sMB'
else:
size = bytes / 1024 / 1024 / 1024
template = '%.2f%sGB'
return template % (size, sep) | def function[filesizeformat, parameter[bytes, sep]]:
constant[
Formats the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
102 B, 2.3 GB etc).
Grabbed from Django (http://www.djangoproject.com), slightly modified.
:param bytes: size in bytes (as integer)
:param sep: string separator between number and abbreviation
]
<ast.Try object at 0x7da1b2544340>
if compare[name[bytes] less[<] constant[1024]] begin[:]
variable[size] assign[=] name[bytes]
variable[template] assign[=] constant[%.0f%sB]
return[binary_operation[name[template] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2546bc0>, <ast.Name object at 0x7da1b2546800>]]]] | keyword[def] identifier[filesizeformat] ( identifier[bytes] , identifier[sep] = literal[string] ):
literal[string]
keyword[try] :
identifier[bytes] = identifier[float] ( identifier[bytes] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[UnicodeDecodeError] ):
keyword[return] literal[string] % identifier[sep]
keyword[if] identifier[bytes] < literal[int] :
identifier[size] = identifier[bytes]
identifier[template] = literal[string]
keyword[elif] identifier[bytes] < literal[int] * literal[int] :
identifier[size] = identifier[bytes] / literal[int]
identifier[template] = literal[string]
keyword[elif] identifier[bytes] < literal[int] * literal[int] * literal[int] :
identifier[size] = identifier[bytes] / literal[int] / literal[int]
identifier[template] = literal[string]
keyword[else] :
identifier[size] = identifier[bytes] / literal[int] / literal[int] / literal[int]
identifier[template] = literal[string]
keyword[return] identifier[template] %( identifier[size] , identifier[sep] ) | def filesizeformat(bytes, sep=' '):
"""
Formats the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
102 B, 2.3 GB etc).
Grabbed from Django (http://www.djangoproject.com), slightly modified.
:param bytes: size in bytes (as integer)
:param sep: string separator between number and abbreviation
"""
try:
bytes = float(bytes) # depends on [control=['try'], data=[]]
except (TypeError, ValueError, UnicodeDecodeError):
return '0%sB' % sep # depends on [control=['except'], data=[]]
if bytes < 1024:
size = bytes
template = '%.0f%sB' # depends on [control=['if'], data=['bytes']]
elif bytes < 1024 * 1024:
size = bytes / 1024
template = '%.0f%sKB' # depends on [control=['if'], data=['bytes']]
elif bytes < 1024 * 1024 * 1024:
size = bytes / 1024 / 1024
template = '%.1f%sMB' # depends on [control=['if'], data=['bytes']]
else:
size = bytes / 1024 / 1024 / 1024
template = '%.2f%sGB'
return template % (size, sep) |
def engine(self):
"""
Return an engine instance, creating it if it doesn't exist.
Recreate the engine connection if it wasn't originally created
by the current process.
"""
pid = os.getpid()
conn = SQLAlchemyTarget._engine_dict.get(self.connection_string)
if not conn or conn.pid != pid:
# create and reset connection
engine = sqlalchemy.create_engine(
self.connection_string,
connect_args=self.connect_args,
echo=self.echo
)
SQLAlchemyTarget._engine_dict[self.connection_string] = self.Connection(engine, pid)
return SQLAlchemyTarget._engine_dict[self.connection_string].engine | def function[engine, parameter[self]]:
constant[
Return an engine instance, creating it if it doesn't exist.
Recreate the engine connection if it wasn't originally created
by the current process.
]
variable[pid] assign[=] call[name[os].getpid, parameter[]]
variable[conn] assign[=] call[name[SQLAlchemyTarget]._engine_dict.get, parameter[name[self].connection_string]]
if <ast.BoolOp object at 0x7da1b21e8e80> begin[:]
variable[engine] assign[=] call[name[sqlalchemy].create_engine, parameter[name[self].connection_string]]
call[name[SQLAlchemyTarget]._engine_dict][name[self].connection_string] assign[=] call[name[self].Connection, parameter[name[engine], name[pid]]]
return[call[name[SQLAlchemyTarget]._engine_dict][name[self].connection_string].engine] | keyword[def] identifier[engine] ( identifier[self] ):
literal[string]
identifier[pid] = identifier[os] . identifier[getpid] ()
identifier[conn] = identifier[SQLAlchemyTarget] . identifier[_engine_dict] . identifier[get] ( identifier[self] . identifier[connection_string] )
keyword[if] keyword[not] identifier[conn] keyword[or] identifier[conn] . identifier[pid] != identifier[pid] :
identifier[engine] = identifier[sqlalchemy] . identifier[create_engine] (
identifier[self] . identifier[connection_string] ,
identifier[connect_args] = identifier[self] . identifier[connect_args] ,
identifier[echo] = identifier[self] . identifier[echo]
)
identifier[SQLAlchemyTarget] . identifier[_engine_dict] [ identifier[self] . identifier[connection_string] ]= identifier[self] . identifier[Connection] ( identifier[engine] , identifier[pid] )
keyword[return] identifier[SQLAlchemyTarget] . identifier[_engine_dict] [ identifier[self] . identifier[connection_string] ]. identifier[engine] | def engine(self):
"""
Return an engine instance, creating it if it doesn't exist.
Recreate the engine connection if it wasn't originally created
by the current process.
"""
pid = os.getpid()
conn = SQLAlchemyTarget._engine_dict.get(self.connection_string)
if not conn or conn.pid != pid:
# create and reset connection
engine = sqlalchemy.create_engine(self.connection_string, connect_args=self.connect_args, echo=self.echo)
SQLAlchemyTarget._engine_dict[self.connection_string] = self.Connection(engine, pid) # depends on [control=['if'], data=[]]
return SQLAlchemyTarget._engine_dict[self.connection_string].engine |
def returnTradeHistory(self, currencyPair, start=None, end=None):
"""Returns the past 200 trades for a given market, or up to 50,000
trades between a range specified in UNIX timestamps by the "start"
and "end" GET parameters."""
return self._public('returnTradeHistory', currencyPair=currencyPair,
start=start, end=end) | def function[returnTradeHistory, parameter[self, currencyPair, start, end]]:
constant[Returns the past 200 trades for a given market, or up to 50,000
trades between a range specified in UNIX timestamps by the "start"
and "end" GET parameters.]
return[call[name[self]._public, parameter[constant[returnTradeHistory]]]] | keyword[def] identifier[returnTradeHistory] ( identifier[self] , identifier[currencyPair] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_public] ( literal[string] , identifier[currencyPair] = identifier[currencyPair] ,
identifier[start] = identifier[start] , identifier[end] = identifier[end] ) | def returnTradeHistory(self, currencyPair, start=None, end=None):
"""Returns the past 200 trades for a given market, or up to 50,000
trades between a range specified in UNIX timestamps by the "start"
and "end" GET parameters."""
return self._public('returnTradeHistory', currencyPair=currencyPair, start=start, end=end) |
def all_logging_disabled(highest_level=logging.CRITICAL):
"""Disable all logging temporarily.
A context manager that will prevent any logging messages triggered during the body from being processed.
Args:
highest_level: the maximum logging level that is being blocked
"""
previous_level = logging.root.manager.disable
logging.disable(highest_level)
try:
yield
finally:
logging.disable(previous_level) | def function[all_logging_disabled, parameter[highest_level]]:
constant[Disable all logging temporarily.
A context manager that will prevent any logging messages triggered during the body from being processed.
Args:
highest_level: the maximum logging level that is being blocked
]
variable[previous_level] assign[=] name[logging].root.manager.disable
call[name[logging].disable, parameter[name[highest_level]]]
<ast.Try object at 0x7da18f58dcc0> | keyword[def] identifier[all_logging_disabled] ( identifier[highest_level] = identifier[logging] . identifier[CRITICAL] ):
literal[string]
identifier[previous_level] = identifier[logging] . identifier[root] . identifier[manager] . identifier[disable]
identifier[logging] . identifier[disable] ( identifier[highest_level] )
keyword[try] :
keyword[yield]
keyword[finally] :
identifier[logging] . identifier[disable] ( identifier[previous_level] ) | def all_logging_disabled(highest_level=logging.CRITICAL):
"""Disable all logging temporarily.
A context manager that will prevent any logging messages triggered during the body from being processed.
Args:
highest_level: the maximum logging level that is being blocked
"""
previous_level = logging.root.manager.disable
logging.disable(highest_level)
try:
yield # depends on [control=['try'], data=[]]
finally:
logging.disable(previous_level) |
def get_content_hashes(image_path,
level=None,
regexp=None,
include_files=None,
tag_root=True,
level_filter=None,
skip_files=None,
version=None,
include_sizes=True):
'''get_content_hashes is like get_image_hash, but it returns a complete dictionary
    of file names (keys) and their respective hashes (values). This function is intended
    more for research purposes and was used to generate the levels in the first place.
If include_sizes is True, we include a second data structure with sizes
'''
if level_filter is not None:
file_filter = level_filter
elif level is None:
file_filter = get_level("REPLICATE",version=version,
skip_files=skip_files,
include_files=include_files)
else:
file_filter = get_level(level,version=version,
skip_files=skip_files,
include_files=include_files)
file_obj,tar = get_image_tar(image_path)
results = extract_guts(image_path=image_path,
tar=tar,
file_filter=file_filter,
tag_root=tag_root,
include_sizes=include_sizes)
delete_image_tar(file_obj, tar)
return results | def function[get_content_hashes, parameter[image_path, level, regexp, include_files, tag_root, level_filter, skip_files, version, include_sizes]]:
constant[get_content_hashes is like get_image_hash, but it returns a complete dictionary
    of file names (keys) and their respective hashes (values). This function is intended
    more for research purposes and was used to generate the levels in the first place.
If include_sizes is True, we include a second data structure with sizes
]
if compare[name[level_filter] is_not constant[None]] begin[:]
variable[file_filter] assign[=] name[level_filter]
<ast.Tuple object at 0x7da18f00e2c0> assign[=] call[name[get_image_tar], parameter[name[image_path]]]
variable[results] assign[=] call[name[extract_guts], parameter[]]
call[name[delete_image_tar], parameter[name[file_obj], name[tar]]]
return[name[results]] | keyword[def] identifier[get_content_hashes] ( identifier[image_path] ,
identifier[level] = keyword[None] ,
identifier[regexp] = keyword[None] ,
identifier[include_files] = keyword[None] ,
identifier[tag_root] = keyword[True] ,
identifier[level_filter] = keyword[None] ,
identifier[skip_files] = keyword[None] ,
identifier[version] = keyword[None] ,
identifier[include_sizes] = keyword[True] ):
literal[string]
keyword[if] identifier[level_filter] keyword[is] keyword[not] keyword[None] :
identifier[file_filter] = identifier[level_filter]
keyword[elif] identifier[level] keyword[is] keyword[None] :
identifier[file_filter] = identifier[get_level] ( literal[string] , identifier[version] = identifier[version] ,
identifier[skip_files] = identifier[skip_files] ,
identifier[include_files] = identifier[include_files] )
keyword[else] :
identifier[file_filter] = identifier[get_level] ( identifier[level] , identifier[version] = identifier[version] ,
identifier[skip_files] = identifier[skip_files] ,
identifier[include_files] = identifier[include_files] )
identifier[file_obj] , identifier[tar] = identifier[get_image_tar] ( identifier[image_path] )
identifier[results] = identifier[extract_guts] ( identifier[image_path] = identifier[image_path] ,
identifier[tar] = identifier[tar] ,
identifier[file_filter] = identifier[file_filter] ,
identifier[tag_root] = identifier[tag_root] ,
identifier[include_sizes] = identifier[include_sizes] )
identifier[delete_image_tar] ( identifier[file_obj] , identifier[tar] )
keyword[return] identifier[results] | def get_content_hashes(image_path, level=None, regexp=None, include_files=None, tag_root=True, level_filter=None, skip_files=None, version=None, include_sizes=True):
"""get_content_hashes is like get_image_hash, but it returns a complete dictionary
    of file names (keys) and their respective hashes (values). This function is intended
    more for research purposes and was used to generate the levels in the first place.
If include_sizes is True, we include a second data structure with sizes
"""
if level_filter is not None:
file_filter = level_filter # depends on [control=['if'], data=['level_filter']]
elif level is None:
file_filter = get_level('REPLICATE', version=version, skip_files=skip_files, include_files=include_files) # depends on [control=['if'], data=[]]
else:
file_filter = get_level(level, version=version, skip_files=skip_files, include_files=include_files)
(file_obj, tar) = get_image_tar(image_path)
results = extract_guts(image_path=image_path, tar=tar, file_filter=file_filter, tag_root=tag_root, include_sizes=include_sizes)
delete_image_tar(file_obj, tar)
return results |
def Connect(cls, usb, banner=b'notadb', rsa_keys=None, auth_timeout_ms=100):
"""Establish a new connection to the device.
Args:
usb: A USBHandle with BulkRead and BulkWrite methods.
banner: A string to send as a host identifier.
rsa_keys: List of AuthSigner subclass instances to be used for
authentication. The device can either accept one of these via the Sign
method, or we will send the result of GetPublicKey from the first one
if the device doesn't accept any of them.
auth_timeout_ms: Timeout to wait for when sending a new public key. This
is only relevant when we send a new public key. The device shows a
dialog and this timeout is how long to wait for that dialog. If used
in automation, this should be low to catch such a case as a failure
quickly; while in interactive settings it should be high to allow
users to accept the dialog. We default to automation here, so it's low
by default.
Returns:
The device's reported banner. Always starts with the state (device,
recovery, or sideload), sometimes includes information after a : with
various product information.
Raises:
usb_exceptions.DeviceAuthError: When the device expects authentication,
but we weren't given any valid keys.
InvalidResponseError: When the device does authentication in an
unexpected way.
"""
# In py3, convert unicode to bytes. In py2, convert str to bytes.
# It's later joined into a byte string, so in py2, this ends up kind of being a no-op.
if isinstance(banner, str):
banner = bytearray(banner, 'utf-8')
msg = cls(
command=b'CNXN', arg0=VERSION, arg1=MAX_ADB_DATA,
data=b'host::%s\0' % banner)
msg.Send(usb)
cmd, arg0, arg1, banner = cls.Read(usb, [b'CNXN', b'AUTH'])
if cmd == b'AUTH':
if not rsa_keys:
raise usb_exceptions.DeviceAuthError(
'Device authentication required, no keys available.')
# Loop through our keys, signing the last 'banner' or token.
for rsa_key in rsa_keys:
if arg0 != AUTH_TOKEN:
raise InvalidResponseError(
'Unknown AUTH response: %s %s %s' % (arg0, arg1, banner))
# Do not mangle the banner property here by converting it to a string
signed_token = rsa_key.Sign(banner)
msg = cls(
command=b'AUTH', arg0=AUTH_SIGNATURE, arg1=0, data=signed_token)
msg.Send(usb)
cmd, arg0, unused_arg1, banner = cls.Read(usb, [b'CNXN', b'AUTH'])
if cmd == b'CNXN':
return banner
# None of the keys worked, so send a public key.
msg = cls(
command=b'AUTH', arg0=AUTH_RSAPUBLICKEY, arg1=0,
data=rsa_keys[0].GetPublicKey() + b'\0')
msg.Send(usb)
try:
cmd, arg0, unused_arg1, banner = cls.Read(
usb, [b'CNXN'], timeout_ms=auth_timeout_ms)
except usb_exceptions.ReadFailedError as e:
if e.usb_error.value == -7: # Timeout.
raise usb_exceptions.DeviceAuthError(
'Accept auth key on device, then retry.')
raise
# This didn't time-out, so we got a CNXN response.
return banner
return banner | def function[Connect, parameter[cls, usb, banner, rsa_keys, auth_timeout_ms]]:
constant[Establish a new connection to the device.
Args:
usb: A USBHandle with BulkRead and BulkWrite methods.
banner: A string to send as a host identifier.
rsa_keys: List of AuthSigner subclass instances to be used for
authentication. The device can either accept one of these via the Sign
method, or we will send the result of GetPublicKey from the first one
if the device doesn't accept any of them.
auth_timeout_ms: Timeout to wait for when sending a new public key. This
is only relevant when we send a new public key. The device shows a
dialog and this timeout is how long to wait for that dialog. If used
in automation, this should be low to catch such a case as a failure
quickly; while in interactive settings it should be high to allow
users to accept the dialog. We default to automation here, so it's low
by default.
Returns:
The device's reported banner. Always starts with the state (device,
recovery, or sideload), sometimes includes information after a : with
various product information.
Raises:
usb_exceptions.DeviceAuthError: When the device expects authentication,
but we weren't given any valid keys.
InvalidResponseError: When the device does authentication in an
unexpected way.
]
if call[name[isinstance], parameter[name[banner], name[str]]] begin[:]
variable[banner] assign[=] call[name[bytearray], parameter[name[banner], constant[utf-8]]]
variable[msg] assign[=] call[name[cls], parameter[]]
call[name[msg].Send, parameter[name[usb]]]
<ast.Tuple object at 0x7da1b1710f40> assign[=] call[name[cls].Read, parameter[name[usb], list[[<ast.Constant object at 0x7da1b1711c60>, <ast.Constant object at 0x7da1b1711d80>]]]]
if compare[name[cmd] equal[==] constant[b'AUTH']] begin[:]
if <ast.UnaryOp object at 0x7da1b1710370> begin[:]
<ast.Raise object at 0x7da1b1713cd0>
for taget[name[rsa_key]] in starred[name[rsa_keys]] begin[:]
if compare[name[arg0] not_equal[!=] name[AUTH_TOKEN]] begin[:]
<ast.Raise object at 0x7da1b1711330>
variable[signed_token] assign[=] call[name[rsa_key].Sign, parameter[name[banner]]]
variable[msg] assign[=] call[name[cls], parameter[]]
call[name[msg].Send, parameter[name[usb]]]
<ast.Tuple object at 0x7da1b1713010> assign[=] call[name[cls].Read, parameter[name[usb], list[[<ast.Constant object at 0x7da1b1713b80>, <ast.Constant object at 0x7da1b1712c50>]]]]
if compare[name[cmd] equal[==] constant[b'CNXN']] begin[:]
return[name[banner]]
variable[msg] assign[=] call[name[cls], parameter[]]
call[name[msg].Send, parameter[name[usb]]]
<ast.Try object at 0x7da1b1716890>
return[name[banner]]
return[name[banner]] | keyword[def] identifier[Connect] ( identifier[cls] , identifier[usb] , identifier[banner] = literal[string] , identifier[rsa_keys] = keyword[None] , identifier[auth_timeout_ms] = literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[banner] , identifier[str] ):
identifier[banner] = identifier[bytearray] ( identifier[banner] , literal[string] )
identifier[msg] = identifier[cls] (
identifier[command] = literal[string] , identifier[arg0] = identifier[VERSION] , identifier[arg1] = identifier[MAX_ADB_DATA] ,
identifier[data] = literal[string] % identifier[banner] )
identifier[msg] . identifier[Send] ( identifier[usb] )
identifier[cmd] , identifier[arg0] , identifier[arg1] , identifier[banner] = identifier[cls] . identifier[Read] ( identifier[usb] ,[ literal[string] , literal[string] ])
keyword[if] identifier[cmd] == literal[string] :
keyword[if] keyword[not] identifier[rsa_keys] :
keyword[raise] identifier[usb_exceptions] . identifier[DeviceAuthError] (
literal[string] )
keyword[for] identifier[rsa_key] keyword[in] identifier[rsa_keys] :
keyword[if] identifier[arg0] != identifier[AUTH_TOKEN] :
keyword[raise] identifier[InvalidResponseError] (
literal[string] %( identifier[arg0] , identifier[arg1] , identifier[banner] ))
identifier[signed_token] = identifier[rsa_key] . identifier[Sign] ( identifier[banner] )
identifier[msg] = identifier[cls] (
identifier[command] = literal[string] , identifier[arg0] = identifier[AUTH_SIGNATURE] , identifier[arg1] = literal[int] , identifier[data] = identifier[signed_token] )
identifier[msg] . identifier[Send] ( identifier[usb] )
identifier[cmd] , identifier[arg0] , identifier[unused_arg1] , identifier[banner] = identifier[cls] . identifier[Read] ( identifier[usb] ,[ literal[string] , literal[string] ])
keyword[if] identifier[cmd] == literal[string] :
keyword[return] identifier[banner]
identifier[msg] = identifier[cls] (
identifier[command] = literal[string] , identifier[arg0] = identifier[AUTH_RSAPUBLICKEY] , identifier[arg1] = literal[int] ,
identifier[data] = identifier[rsa_keys] [ literal[int] ]. identifier[GetPublicKey] ()+ literal[string] )
identifier[msg] . identifier[Send] ( identifier[usb] )
keyword[try] :
identifier[cmd] , identifier[arg0] , identifier[unused_arg1] , identifier[banner] = identifier[cls] . identifier[Read] (
identifier[usb] ,[ literal[string] ], identifier[timeout_ms] = identifier[auth_timeout_ms] )
keyword[except] identifier[usb_exceptions] . identifier[ReadFailedError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[usb_error] . identifier[value] ==- literal[int] :
keyword[raise] identifier[usb_exceptions] . identifier[DeviceAuthError] (
literal[string] )
keyword[raise]
keyword[return] identifier[banner]
keyword[return] identifier[banner] | def Connect(cls, usb, banner=b'notadb', rsa_keys=None, auth_timeout_ms=100):
"""Establish a new connection to the device.
Args:
usb: A USBHandle with BulkRead and BulkWrite methods.
banner: A string to send as a host identifier.
rsa_keys: List of AuthSigner subclass instances to be used for
authentication. The device can either accept one of these via the Sign
method, or we will send the result of GetPublicKey from the first one
if the device doesn't accept any of them.
auth_timeout_ms: Timeout to wait for when sending a new public key. This
is only relevant when we send a new public key. The device shows a
dialog and this timeout is how long to wait for that dialog. If used
in automation, this should be low to catch such a case as a failure
quickly; while in interactive settings it should be high to allow
users to accept the dialog. We default to automation here, so it's low
by default.
Returns:
The device's reported banner. Always starts with the state (device,
recovery, or sideload), sometimes includes information after a : with
various product information.
Raises:
usb_exceptions.DeviceAuthError: When the device expects authentication,
but we weren't given any valid keys.
InvalidResponseError: When the device does authentication in an
unexpected way.
"""
# In py3, convert unicode to bytes. In py2, convert str to bytes.
# It's later joined into a byte string, so in py2, this ends up kind of being a no-op.
if isinstance(banner, str):
banner = bytearray(banner, 'utf-8') # depends on [control=['if'], data=[]]
msg = cls(command=b'CNXN', arg0=VERSION, arg1=MAX_ADB_DATA, data=b'host::%s\x00' % banner)
msg.Send(usb)
(cmd, arg0, arg1, banner) = cls.Read(usb, [b'CNXN', b'AUTH'])
if cmd == b'AUTH':
if not rsa_keys:
raise usb_exceptions.DeviceAuthError('Device authentication required, no keys available.') # depends on [control=['if'], data=[]]
# Loop through our keys, signing the last 'banner' or token.
for rsa_key in rsa_keys:
if arg0 != AUTH_TOKEN:
raise InvalidResponseError('Unknown AUTH response: %s %s %s' % (arg0, arg1, banner)) # depends on [control=['if'], data=['arg0']]
# Do not mangle the banner property here by converting it to a string
signed_token = rsa_key.Sign(banner)
msg = cls(command=b'AUTH', arg0=AUTH_SIGNATURE, arg1=0, data=signed_token)
msg.Send(usb)
(cmd, arg0, unused_arg1, banner) = cls.Read(usb, [b'CNXN', b'AUTH'])
if cmd == b'CNXN':
return banner # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rsa_key']]
# None of the keys worked, so send a public key.
msg = cls(command=b'AUTH', arg0=AUTH_RSAPUBLICKEY, arg1=0, data=rsa_keys[0].GetPublicKey() + b'\x00')
msg.Send(usb)
try:
(cmd, arg0, unused_arg1, banner) = cls.Read(usb, [b'CNXN'], timeout_ms=auth_timeout_ms) # depends on [control=['try'], data=[]]
except usb_exceptions.ReadFailedError as e:
if e.usb_error.value == -7: # Timeout.
raise usb_exceptions.DeviceAuthError('Accept auth key on device, then retry.') # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['e']]
# This didn't time-out, so we got a CNXN response.
return banner # depends on [control=['if'], data=['cmd']]
return banner |
def get_names():
"""
Return a list of names.
"""
return [n.strip() for n in codecs.open(os.path.join("data", "names.txt"),"rb",'utf8').readlines()] | def function[get_names, parameter[]]:
constant[
Return a list of names.
]
return[<ast.ListComp object at 0x7da20e9610f0>] | keyword[def] identifier[get_names] ():
literal[string]
keyword[return] [ identifier[n] . identifier[strip] () keyword[for] identifier[n] keyword[in] identifier[codecs] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] ), literal[string] , literal[string] ). identifier[readlines] ()] | def get_names():
"""
Return a list of names.
"""
return [n.strip() for n in codecs.open(os.path.join('data', 'names.txt'), 'rb', 'utf8').readlines()] |
def patch_namespaced_replica_set(self, name, namespace, body, **kwargs): # noqa: E501
"""patch_namespaced_replica_set # noqa: E501
partially update the specified ReplicaSet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_replica_set(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ReplicaSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1beta1ReplicaSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_replica_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501
else:
(data) = self.patch_namespaced_replica_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data | def function[patch_namespaced_replica_set, parameter[self, name, namespace, body]]:
constant[patch_namespaced_replica_set # noqa: E501
partially update the specified ReplicaSet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_replica_set(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ReplicaSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1beta1ReplicaSet
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].patch_namespaced_replica_set_with_http_info, parameter[name[name], name[namespace], name[body]]]] | keyword[def] identifier[patch_namespaced_replica_set] ( identifier[self] , identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[patch_namespaced_replica_set_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[patch_namespaced_replica_set_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def patch_namespaced_replica_set(self, name, namespace, body, **kwargs): # noqa: E501
"patch_namespaced_replica_set # noqa: E501\n\n partially update the specified ReplicaSet # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_replica_set(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the ReplicaSet (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param UNKNOWN_BASE_TYPE body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1beta1ReplicaSet\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_replica_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.patch_namespaced_replica_set_with_http_info(name, namespace, body, **kwargs) # noqa: E501
return data |
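A usage sketch for the patch wrapper above. The client bootstrap and the strategic-merge patch body are illustrative assumptions, not taken from this source; ExtensionsV1beta1Api is assumed to be the API class exposing this generated method, matching the V1beta1ReplicaSet return type.

# Hypothetical usage of patch_namespaced_replica_set; assumes a kubeconfig
# is available and that ExtensionsV1beta1Api exposes this generated method.
from kubernetes import client, config

config.load_kube_config()
api = client.ExtensionsV1beta1Api()
patch = {'spec': {'replicas': 3}}  # illustrative strategic-merge patch body

# Synchronous call: returns the V1beta1ReplicaSet directly.
rs = api.patch_namespaced_replica_set('frontend', 'default', patch)

# Asynchronous call: returns a thread whose .get() yields the result.
thread = api.patch_namespaced_replica_set('frontend', 'default', patch,
                                          async_req=True)
rs = thread.get()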
def add_child_gradebook(self, gradebook_id, child_id):
"""Adds a child to a gradebook.
arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: AlreadyExists - ``gradebook_id`` is already a parent of
``child_id``
raise: NotFound - ``gradebook_id`` or ``child_id`` not found
raise: NullArgument - ``gradebook_id`` or ``child_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.add_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.add_child_catalog(catalog_id=gradebook_id, child_id=child_id)
return self._hierarchy_session.add_child(id_=gradebook_id, child_id=child_id) | def function[add_child_gradebook, parameter[self, gradebook_id, child_id]]:
constant[Adds a child to a gradebook.
arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: AlreadyExists - ``gradebook_id`` is already a parent of
``child_id``
raise: NotFound - ``gradebook_id`` or ``child_id`` not found
raise: NullArgument - ``gradebook_id`` or ``child_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.add_child_catalog, parameter[]]]
return[call[name[self]._hierarchy_session.add_child, parameter[]]] | keyword[def] identifier[add_child_gradebook] ( identifier[self] , identifier[gradebook_id] , identifier[child_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[add_child_catalog] ( identifier[catalog_id] = identifier[gradebook_id] , identifier[child_id] = identifier[child_id] )
keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[add_child] ( identifier[id_] = identifier[gradebook_id] , identifier[child_id] = identifier[child_id] ) | def add_child_gradebook(self, gradebook_id, child_id):
"""Adds a child to a gradebook.
arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: AlreadyExists - ``gradebook_id`` is already a parent of
``child_id``
raise: NotFound - ``gradebook_id`` or ``child_id`` not found
raise: NullArgument - ``gradebook_id`` or ``child_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.add_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.add_child_catalog(catalog_id=gradebook_id, child_id=child_id) # depends on [control=['if'], data=[]]
return self._hierarchy_session.add_child(id_=gradebook_id, child_id=child_id) |
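Because add_child_gradebook is defined at module level here, the two dispatch paths (pluggable catalog session first, raw hierarchy session as fallback) can be exercised with stubs. The stub classes below are hypothetical and not part of any osid implementation.

# Hypothetical stubs illustrating the catalog-session fallback pattern.
class _CatalogSession:
    def add_child_catalog(self, catalog_id, child_id):
        return 'catalog: {0} -> {1}'.format(catalog_id, child_id)

class _HierarchySession:
    def add_child(self, id_, child_id):
        return 'hierarchy: {0} -> {1}'.format(id_, child_id)

class _StubSession:
    add_child_gradebook = add_child_gradebook  # reuse the function above

    def __init__(self, catalog_session=None):
        self._catalog_session = catalog_session
        self._hierarchy_session = _HierarchySession()

print(_StubSession().add_child_gradebook('gb-1', 'gb-2'))
# hierarchy: gb-1 -> gb-2
print(_StubSession(_CatalogSession()).add_child_gradebook('gb-1', 'gb-2'))
# catalog: gb-1 -> gb-2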
def resolve_feats(feat_list, seqin, seqref, start, locus, missing, verbose=False, verbosity=0):
"""
resolve_feats - Resolves features from alignments
:param feat_list: List of the found features
:type feat_list: ``List``
:param seqin: The input sequence
:type seqin: ``str``
:param locus: The input locus
:type locus: ``str``
:param start: Where the sequence start in the alignment
:type start: ``int``
:param missing: List of the unmapped features
:type missing: ``List``
:param verbose: Flag for running in verbose mode.
:type verbose: ``bool``
:param verbosity: Numerical value to indicate how verbose the output will be in verbose mode.
:type verbosity: ``int``
:rtype: :ref:`ann`
"""
structures = get_structures()
logger = logging.getLogger("Logger." + __name__)
seq = SeqRecord(seq=Seq("".join(seqin), SingleLetterAlphabet()))
seq_covered = len(seq.seq)
coordinates = dict(map(lambda x: [x, 1],
[i for i in range(0, len(seq.seq)+1)]))
mapping = dict(map(lambda x: [x, 1],
[i for i in range(0, len(seq.seq)+1)]))
diff = 0
if len(feat_list) > 1:
if verbose:
logger.error("resolve_feats error")
return Annotation(complete_annotation=False)
else:
features = {}
full_annotation = {}
features = feat_list[0]
# Need to sort
feature_list = sorted(features.keys(),
key=lambda f: structures[locus][f])
diff_f = True
for feat in feature_list:
if feat in missing:
f = features[feat]
seqrec = f.extract(seq)
seq_covered -= len(seqrec.seq)
if re.search("-", str(seqrec.seq)):
l1 = len(seqrec.seq)
newseq = re.sub(r'-', '', str(seqrec.seq))
seqrec.seq = Seq(newseq, IUPAC.unambiguous_dna)
tmdiff = l1 - len(newseq)
diff += tmdiff
if seqrec.seq:
#logger.error("FEAT HAS SEQ " + feat)
if diff_f and diff > 0:
sp = f.location.start + start
diff_f = False
else:
sp = f.location.start + start - diff
ep = f.location.end + start - diff
featn = SeqFeature(FeatureLocation(ExactPosition(sp),
ExactPosition(ep),
strand=1), type=f.type)
features.update({feat: featn})
full_annotation.update({feat: seqrec})
for i in range(featn.location.start, featn.location.end):
if i in coordinates:
del coordinates[i]
mapping[i] = feat
else:
f = features[feat]
seqrec = f.extract(seq)
seq_covered -= len(seqrec.seq)
if re.search("-", str(seqrec.seq)):
l1 = len(seqrec.seq)
newseq = re.sub(r'-', '', str(seqrec.seq))
seqrec.seq = Seq(newseq, IUPAC.unambiguous_dna)
tmdiff = l1 - len(newseq)
diff += tmdiff
blocks = getblocks(coordinates)
rmapping = {k+start: mapping[k] for k in mapping.keys()}
        # Log which features were successfully resolved
if verbose and verbosity > 0 and len(full_annotation.keys()) > 1:
logger.info("Features resolved:")
for f in full_annotation:
logger.info(f)
else:
if verbose:
logger.info("Failed to resolve")
if not full_annotation or len(full_annotation) == 0:
if verbose:
logger.info("Failed to align missing features")
return Annotation(complete_annotation=False)
else:
return Annotation(annotation=full_annotation,
method="clustalo",
features=features,
mapping=rmapping,
blocks=blocks,
seq=seq) | def function[resolve_feats, parameter[feat_list, seqin, seqref, start, locus, missing, verbose, verbosity]]:
constant[
resolve_feats - Resolves features from alignments
:param feat_list: List of the found features
:type feat_list: ``List``
:param seqin: The input sequence
:type seqin: ``str``
:param locus: The input locus
:type locus: ``str``
:param start: Where the sequence start in the alignment
:type start: ``int``
:param missing: List of the unmapped features
:type missing: ``List``
:param verbose: Flag for running in verbose mode.
:type verbose: ``bool``
:param verbosity: Numerical value to indicate how verbose the output will be in verbose mode.
:type verbosity: ``int``
:rtype: :ref:`ann`
]
variable[structures] assign[=] call[name[get_structures], parameter[]]
variable[logger] assign[=] call[name[logging].getLogger, parameter[binary_operation[constant[Logger.] + name[__name__]]]]
variable[seq] assign[=] call[name[SeqRecord], parameter[]]
variable[seq_covered] assign[=] call[name[len], parameter[name[seq].seq]]
variable[coordinates] assign[=] call[name[dict], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b25def20>, <ast.ListComp object at 0x7da1b25df2e0>]]]]
variable[mapping] assign[=] call[name[dict], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b25de9b0>, <ast.ListComp object at 0x7da1b25dee00>]]]]
variable[diff] assign[=] constant[0]
if compare[call[name[len], parameter[name[feat_list]]] greater[>] constant[1]] begin[:]
if name[verbose] begin[:]
call[name[logger].error, parameter[constant[resolve_feats error]]]
return[call[name[Annotation], parameter[]]] | keyword[def] identifier[resolve_feats] ( identifier[feat_list] , identifier[seqin] , identifier[seqref] , identifier[start] , identifier[locus] , identifier[missing] , identifier[verbose] = keyword[False] , identifier[verbosity] = literal[int] ):
literal[string]
identifier[structures] = identifier[get_structures] ()
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] + identifier[__name__] )
identifier[seq] = identifier[SeqRecord] ( identifier[seq] = identifier[Seq] ( literal[string] . identifier[join] ( identifier[seqin] ), identifier[SingleLetterAlphabet] ()))
identifier[seq_covered] = identifier[len] ( identifier[seq] . identifier[seq] )
identifier[coordinates] = identifier[dict] ( identifier[map] ( keyword[lambda] identifier[x] :[ identifier[x] , literal[int] ],
[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[seq] . identifier[seq] )+ literal[int] )]))
identifier[mapping] = identifier[dict] ( identifier[map] ( keyword[lambda] identifier[x] :[ identifier[x] , literal[int] ],
[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[seq] . identifier[seq] )+ literal[int] )]))
identifier[diff] = literal[int]
keyword[if] identifier[len] ( identifier[feat_list] )> literal[int] :
keyword[if] identifier[verbose] :
identifier[logger] . identifier[error] ( literal[string] )
keyword[return] identifier[Annotation] ( identifier[complete_annotation] = keyword[False] )
keyword[else] :
identifier[features] ={}
identifier[full_annotation] ={}
identifier[features] = identifier[feat_list] [ literal[int] ]
identifier[feature_list] = identifier[sorted] ( identifier[features] . identifier[keys] (),
identifier[key] = keyword[lambda] identifier[f] : identifier[structures] [ identifier[locus] ][ identifier[f] ])
identifier[diff_f] = keyword[True]
keyword[for] identifier[feat] keyword[in] identifier[feature_list] :
keyword[if] identifier[feat] keyword[in] identifier[missing] :
identifier[f] = identifier[features] [ identifier[feat] ]
identifier[seqrec] = identifier[f] . identifier[extract] ( identifier[seq] )
identifier[seq_covered] -= identifier[len] ( identifier[seqrec] . identifier[seq] )
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[str] ( identifier[seqrec] . identifier[seq] )):
identifier[l1] = identifier[len] ( identifier[seqrec] . identifier[seq] )
identifier[newseq] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[str] ( identifier[seqrec] . identifier[seq] ))
identifier[seqrec] . identifier[seq] = identifier[Seq] ( identifier[newseq] , identifier[IUPAC] . identifier[unambiguous_dna] )
identifier[tmdiff] = identifier[l1] - identifier[len] ( identifier[newseq] )
identifier[diff] += identifier[tmdiff]
keyword[if] identifier[seqrec] . identifier[seq] :
keyword[if] identifier[diff_f] keyword[and] identifier[diff] > literal[int] :
identifier[sp] = identifier[f] . identifier[location] . identifier[start] + identifier[start]
identifier[diff_f] = keyword[False]
keyword[else] :
identifier[sp] = identifier[f] . identifier[location] . identifier[start] + identifier[start] - identifier[diff]
identifier[ep] = identifier[f] . identifier[location] . identifier[end] + identifier[start] - identifier[diff]
identifier[featn] = identifier[SeqFeature] ( identifier[FeatureLocation] ( identifier[ExactPosition] ( identifier[sp] ),
identifier[ExactPosition] ( identifier[ep] ),
identifier[strand] = literal[int] ), identifier[type] = identifier[f] . identifier[type] )
identifier[features] . identifier[update] ({ identifier[feat] : identifier[featn] })
identifier[full_annotation] . identifier[update] ({ identifier[feat] : identifier[seqrec] })
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[featn] . identifier[location] . identifier[start] , identifier[featn] . identifier[location] . identifier[end] ):
keyword[if] identifier[i] keyword[in] identifier[coordinates] :
keyword[del] identifier[coordinates] [ identifier[i] ]
identifier[mapping] [ identifier[i] ]= identifier[feat]
keyword[else] :
identifier[f] = identifier[features] [ identifier[feat] ]
identifier[seqrec] = identifier[f] . identifier[extract] ( identifier[seq] )
identifier[seq_covered] -= identifier[len] ( identifier[seqrec] . identifier[seq] )
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[str] ( identifier[seqrec] . identifier[seq] )):
identifier[l1] = identifier[len] ( identifier[seqrec] . identifier[seq] )
identifier[newseq] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[str] ( identifier[seqrec] . identifier[seq] ))
identifier[seqrec] . identifier[seq] = identifier[Seq] ( identifier[newseq] , identifier[IUPAC] . identifier[unambiguous_dna] )
identifier[tmdiff] = identifier[l1] - identifier[len] ( identifier[newseq] )
identifier[diff] += identifier[tmdiff]
identifier[blocks] = identifier[getblocks] ( identifier[coordinates] )
identifier[rmapping] ={ identifier[k] + identifier[start] : identifier[mapping] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[mapping] . identifier[keys] ()}
keyword[if] identifier[verbose] keyword[and] identifier[verbosity] > literal[int] keyword[and] identifier[len] ( identifier[full_annotation] . identifier[keys] ())> literal[int] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[for] identifier[f] keyword[in] identifier[full_annotation] :
identifier[logger] . identifier[info] ( identifier[f] )
keyword[else] :
keyword[if] identifier[verbose] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] keyword[not] identifier[full_annotation] keyword[or] identifier[len] ( identifier[full_annotation] )== literal[int] :
keyword[if] identifier[verbose] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] identifier[Annotation] ( identifier[complete_annotation] = keyword[False] )
keyword[else] :
keyword[return] identifier[Annotation] ( identifier[annotation] = identifier[full_annotation] ,
identifier[method] = literal[string] ,
identifier[features] = identifier[features] ,
identifier[mapping] = identifier[rmapping] ,
identifier[blocks] = identifier[blocks] ,
identifier[seq] = identifier[seq] ) | def resolve_feats(feat_list, seqin, seqref, start, locus, missing, verbose=False, verbosity=0):
"""
resolve_feats - Resolves features from alignments
:param feat_list: List of the found features
:type feat_list: ``List``
:param seqin: The input sequence
:type seqin: ``str``
:param locus: The input locus
:type locus: ``str``
:param start: Where the sequence start in the alignment
:type start: ``int``
:param missing: List of the unmapped features
:type missing: ``List``
:param verbose: Flag for running in verbose mode.
:type verbose: ``bool``
:param verbosity: Numerical value to indicate how verbose the output will be in verbose mode.
:type verbosity: ``int``
:rtype: :ref:`ann`
"""
structures = get_structures()
logger = logging.getLogger('Logger.' + __name__)
seq = SeqRecord(seq=Seq(''.join(seqin), SingleLetterAlphabet()))
seq_covered = len(seq.seq)
coordinates = dict(map(lambda x: [x, 1], [i for i in range(0, len(seq.seq) + 1)]))
mapping = dict(map(lambda x: [x, 1], [i for i in range(0, len(seq.seq) + 1)]))
diff = 0
if len(feat_list) > 1:
if verbose:
logger.error('resolve_feats error') # depends on [control=['if'], data=[]]
return Annotation(complete_annotation=False) # depends on [control=['if'], data=[]]
else:
features = {}
full_annotation = {}
features = feat_list[0]
# Need to sort
feature_list = sorted(features.keys(), key=lambda f: structures[locus][f])
diff_f = True
for feat in feature_list:
if feat in missing:
f = features[feat]
seqrec = f.extract(seq)
seq_covered -= len(seqrec.seq)
if re.search('-', str(seqrec.seq)):
l1 = len(seqrec.seq)
newseq = re.sub('-', '', str(seqrec.seq))
seqrec.seq = Seq(newseq, IUPAC.unambiguous_dna)
tmdiff = l1 - len(newseq)
diff += tmdiff # depends on [control=['if'], data=[]]
if seqrec.seq:
#logger.error("FEAT HAS SEQ " + feat)
if diff_f and diff > 0:
sp = f.location.start + start
diff_f = False # depends on [control=['if'], data=[]]
else:
sp = f.location.start + start - diff
ep = f.location.end + start - diff
featn = SeqFeature(FeatureLocation(ExactPosition(sp), ExactPosition(ep), strand=1), type=f.type)
features.update({feat: featn})
full_annotation.update({feat: seqrec})
for i in range(featn.location.start, featn.location.end):
if i in coordinates:
del coordinates[i] # depends on [control=['if'], data=['i', 'coordinates']]
mapping[i] = feat # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['feat']]
else:
f = features[feat]
seqrec = f.extract(seq)
seq_covered -= len(seqrec.seq)
if re.search('-', str(seqrec.seq)):
l1 = len(seqrec.seq)
newseq = re.sub('-', '', str(seqrec.seq))
seqrec.seq = Seq(newseq, IUPAC.unambiguous_dna)
tmdiff = l1 - len(newseq)
diff += tmdiff # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['feat']]
blocks = getblocks(coordinates)
rmapping = {k + start: mapping[k] for k in mapping.keys()}
        # Log which features were successfully resolved
if verbose and verbosity > 0 and (len(full_annotation.keys()) > 1):
logger.info('Features resolved:')
for f in full_annotation:
logger.info(f) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
elif verbose:
logger.info('Failed to resolve') # depends on [control=['if'], data=[]]
if not full_annotation or len(full_annotation) == 0:
if verbose:
logger.info('Failed to align missing features') # depends on [control=['if'], data=[]]
return Annotation(complete_annotation=False) # depends on [control=['if'], data=[]]
else:
return Annotation(annotation=full_annotation, method='clustalo', features=features, mapping=rmapping, blocks=blocks, seq=seq) |
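The core bookkeeping in resolve_feats — stripping alignment gaps ('-') from each extracted feature and shifting every later feature left by the cumulative gap count — can be shown in isolation. A self-contained sketch; the feature tuples and coordinates are made up for illustration:

import re

def shift_features(feats, start):
    # feats: list of (name, start_in_alignment, gapped_seq)
    diff = 0  # cumulative gap characters removed so far
    out = []
    for name, sp, seq in feats:
        ungapped = re.sub(r'-', '', seq)
        new_sp = sp + start - diff
        out.append((name, new_sp, new_sp + len(ungapped)))
        diff += len(seq) - len(ungapped)  # later features shift left
    return out

print(shift_features([('exon1', 0, 'AC-GT'), ('exon2', 5, 'TTAA')], 10))
# [('exon1', 10, 14), ('exon2', 14, 18)]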
def p_SimpleSyntax(self, p):
"""SimpleSyntax : INTEGER
| INTEGER integerSubType
| INTEGER enumSpec
| INTEGER32
| INTEGER32 integerSubType
| UPPERCASE_IDENTIFIER enumSpec
| UPPERCASE_IDENTIFIER integerSubType
| OCTET STRING
| OCTET STRING octetStringSubType
| UPPERCASE_IDENTIFIER octetStringSubType
| OBJECT IDENTIFIER anySubType"""
n = len(p)
if n == 2:
p[0] = ('SimpleSyntax', p[1])
elif n == 3:
if p[1] == 'OCTET':
p[0] = ('SimpleSyntax', p[1] + ' ' + p[2])
else:
p[0] = ('SimpleSyntax', p[1], p[2])
elif n == 4:
p[0] = ('SimpleSyntax', p[1] + ' ' + p[2], p[3]) | def function[p_SimpleSyntax, parameter[self, p]]:
constant[SimpleSyntax : INTEGER
| INTEGER integerSubType
| INTEGER enumSpec
| INTEGER32
| INTEGER32 integerSubType
| UPPERCASE_IDENTIFIER enumSpec
| UPPERCASE_IDENTIFIER integerSubType
| OCTET STRING
| OCTET STRING octetStringSubType
| UPPERCASE_IDENTIFIER octetStringSubType
| OBJECT IDENTIFIER anySubType]
variable[n] assign[=] call[name[len], parameter[name[p]]]
if compare[name[n] equal[==] constant[2]] begin[:]
call[name[p]][constant[0]] assign[=] tuple[[<ast.Constant object at 0x7da1b016c490>, <ast.Subscript object at 0x7da1b016cd00>]] | keyword[def] identifier[p_SimpleSyntax] ( identifier[self] , identifier[p] ):
literal[string]
identifier[n] = identifier[len] ( identifier[p] )
keyword[if] identifier[n] == literal[int] :
identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ])
keyword[elif] identifier[n] == literal[int] :
keyword[if] identifier[p] [ literal[int] ]== literal[string] :
identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ]+ literal[string] + identifier[p] [ literal[int] ])
keyword[else] :
identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ], identifier[p] [ literal[int] ])
keyword[elif] identifier[n] == literal[int] :
identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ]+ literal[string] + identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]) | def p_SimpleSyntax(self, p):
"""SimpleSyntax : INTEGER
| INTEGER integerSubType
| INTEGER enumSpec
| INTEGER32
| INTEGER32 integerSubType
| UPPERCASE_IDENTIFIER enumSpec
| UPPERCASE_IDENTIFIER integerSubType
| OCTET STRING
| OCTET STRING octetStringSubType
| UPPERCASE_IDENTIFIER octetStringSubType
| OBJECT IDENTIFIER anySubType"""
n = len(p)
if n == 2:
p[0] = ('SimpleSyntax', p[1]) # depends on [control=['if'], data=[]]
elif n == 3:
if p[1] == 'OCTET':
p[0] = ('SimpleSyntax', p[1] + ' ' + p[2]) # depends on [control=['if'], data=[]]
else:
p[0] = ('SimpleSyntax', p[1], p[2]) # depends on [control=['if'], data=[]]
elif n == 4:
p[0] = ('SimpleSyntax', p[1] + ' ' + p[2], p[3]) # depends on [control=['if'], data=[]] |
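For context, methods named p_* like the one above are PLY grammar rules: the docstring is the production, p is the parse stack, and len(p) discriminates between the alternatives. A toy, self-contained PLY parser using the same tuple-building convention; the token set and grammar are illustrative and unrelated to the MIB grammar above.

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr(p):
    """expr : expr PLUS NUMBER
            | NUMBER"""
    if len(p) == 2:           # matched the single-token alternative
        p[0] = ('Num', p[1])
    else:                     # matched 'expr PLUS NUMBER'
        p[0] = ('Plus', p[1], p[3])

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc()
print(parser.parse('1 + 2 + 3', lexer=lexer))
# ('Plus', ('Plus', ('Num', 1), 2), 3)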
def get_app(
database_uri,
exclude_tables=None,
user_models=None,
reflect_all=True,
read_only=False,
schema=None):
"""Return an application instance connected to the database described in
*database_uri*.
:param str database_uri: The URI connection string for the database
:param list exclude_tables: A list of tables to exclude from the API
service
:param list user_models: A list of user-defined models to include in the
API service
:param bool reflect_all: Include all database tables in the API service
:param bool read_only: Only allow HTTP GET commands for all endpoints
:param str schema: Use the specified named schema instead of the default
"""
app = Flask('sandman2')
app.config['SQLALCHEMY_DATABASE_URI'] = database_uri
app.config['SANDMAN2_READ_ONLY'] = read_only
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.classes = []
db.init_app(app)
admin = Admin(app, base_template='layout.html', template_mode='bootstrap3')
_register_error_handlers(app)
if user_models:
with app.app_context():
_register_user_models(user_models, admin, schema=schema)
elif reflect_all:
with app.app_context():
_reflect_all(exclude_tables, admin, read_only, schema=schema)
@app.route('/')
def index():
"""Return a list of routes to the registered classes."""
routes = {}
for cls in app.classes:
routes[cls.__model__.__name__] = '{}{{/{}}}'.format(
cls.__model__.__url__,
cls.__model__.primary_key())
return jsonify(routes)
return app | def function[get_app, parameter[database_uri, exclude_tables, user_models, reflect_all, read_only, schema]]:
constant[Return an application instance connected to the database described in
*database_uri*.
:param str database_uri: The URI connection string for the database
:param list exclude_tables: A list of tables to exclude from the API
service
:param list user_models: A list of user-defined models to include in the
API service
:param bool reflect_all: Include all database tables in the API service
:param bool read_only: Only allow HTTP GET commands for all endpoints
:param str schema: Use the specified named schema instead of the default
]
variable[app] assign[=] call[name[Flask], parameter[constant[sandman2]]]
call[name[app].config][constant[SQLALCHEMY_DATABASE_URI]] assign[=] name[database_uri]
call[name[app].config][constant[SANDMAN2_READ_ONLY]] assign[=] name[read_only]
call[name[app].config][constant[SQLALCHEMY_TRACK_MODIFICATIONS]] assign[=] constant[False]
name[app].classes assign[=] list[[]]
call[name[db].init_app, parameter[name[app]]]
variable[admin] assign[=] call[name[Admin], parameter[name[app]]]
call[name[_register_error_handlers], parameter[name[app]]]
if name[user_models] begin[:]
with call[name[app].app_context, parameter[]] begin[:]
call[name[_register_user_models], parameter[name[user_models], name[admin]]]
def function[index, parameter[]]:
constant[Return a list of routes to the registered classes.]
variable[routes] assign[=] dictionary[[], []]
for taget[name[cls]] in starred[name[app].classes] begin[:]
call[name[routes]][name[cls].__model__.__name__] assign[=] call[constant[{}{{/{}}}].format, parameter[name[cls].__model__.__url__, call[name[cls].__model__.primary_key, parameter[]]]]
return[call[name[jsonify], parameter[name[routes]]]]
return[name[app]] | keyword[def] identifier[get_app] (
identifier[database_uri] ,
identifier[exclude_tables] = keyword[None] ,
identifier[user_models] = keyword[None] ,
identifier[reflect_all] = keyword[True] ,
identifier[read_only] = keyword[False] ,
identifier[schema] = keyword[None] ):
literal[string]
identifier[app] = identifier[Flask] ( literal[string] )
identifier[app] . identifier[config] [ literal[string] ]= identifier[database_uri]
identifier[app] . identifier[config] [ literal[string] ]= identifier[read_only]
identifier[app] . identifier[config] [ literal[string] ]= keyword[False]
identifier[app] . identifier[classes] =[]
identifier[db] . identifier[init_app] ( identifier[app] )
identifier[admin] = identifier[Admin] ( identifier[app] , identifier[base_template] = literal[string] , identifier[template_mode] = literal[string] )
identifier[_register_error_handlers] ( identifier[app] )
keyword[if] identifier[user_models] :
keyword[with] identifier[app] . identifier[app_context] ():
identifier[_register_user_models] ( identifier[user_models] , identifier[admin] , identifier[schema] = identifier[schema] )
keyword[elif] identifier[reflect_all] :
keyword[with] identifier[app] . identifier[app_context] ():
identifier[_reflect_all] ( identifier[exclude_tables] , identifier[admin] , identifier[read_only] , identifier[schema] = identifier[schema] )
@ identifier[app] . identifier[route] ( literal[string] )
keyword[def] identifier[index] ():
literal[string]
identifier[routes] ={}
keyword[for] identifier[cls] keyword[in] identifier[app] . identifier[classes] :
identifier[routes] [ identifier[cls] . identifier[__model__] . identifier[__name__] ]= literal[string] . identifier[format] (
identifier[cls] . identifier[__model__] . identifier[__url__] ,
identifier[cls] . identifier[__model__] . identifier[primary_key] ())
keyword[return] identifier[jsonify] ( identifier[routes] )
keyword[return] identifier[app] | def get_app(database_uri, exclude_tables=None, user_models=None, reflect_all=True, read_only=False, schema=None):
"""Return an application instance connected to the database described in
*database_uri*.
:param str database_uri: The URI connection string for the database
:param list exclude_tables: A list of tables to exclude from the API
service
:param list user_models: A list of user-defined models to include in the
API service
:param bool reflect_all: Include all database tables in the API service
:param bool read_only: Only allow HTTP GET commands for all endpoints
:param str schema: Use the specified named schema instead of the default
"""
app = Flask('sandman2')
app.config['SQLALCHEMY_DATABASE_URI'] = database_uri
app.config['SANDMAN2_READ_ONLY'] = read_only
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.classes = []
db.init_app(app)
admin = Admin(app, base_template='layout.html', template_mode='bootstrap3')
_register_error_handlers(app)
if user_models:
with app.app_context():
_register_user_models(user_models, admin, schema=schema) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
elif reflect_all:
with app.app_context():
_reflect_all(exclude_tables, admin, read_only, schema=schema) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
@app.route('/')
def index():
"""Return a list of routes to the registered classes."""
routes = {}
for cls in app.classes:
routes[cls.__model__.__name__] = '{}{{/{}}}'.format(cls.__model__.__url__, cls.__model__.primary_key()) # depends on [control=['for'], data=['cls']]
return jsonify(routes)
return app |
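A hedged launch sketch for get_app; the import path and the SQLite file name are assumptions for illustration.

# Hypothetical: serve a REST API over every table of a local SQLite database.
from sandman2 import get_app  # assumes get_app is re-exported by the package

app = get_app('sqlite:///chinook.db', read_only=True)

if __name__ == '__main__':
    app.run(port=5000)
# GET / then returns one route per reflected model, e.g.
# {"Artist": "/artist{/ArtistId}", ...}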
def MakeSubparser(subparsers, parents, method, arguments=None):
"""Returns an argparse subparser to create a 'subcommand' to adb."""
name = ('-'.join(re.split(r'([A-Z][a-z]+)', method.__name__)[1:-1:2])).lower()
help = method.__doc__.splitlines()[0]
subparser = subparsers.add_parser(
name=name, description=help, help=help.rstrip('.'), parents=parents)
subparser.set_defaults(method=method, positional=[])
argspec = inspect.getargspec(method)
# Figure out positionals and default argument, if any. Explicitly includes
# arguments that default to '' but excludes arguments that default to None.
offset = len(argspec.args) - len(argspec.defaults or []) - 1
positional = []
for i in range(1, len(argspec.args)):
if i > offset and argspec.defaults[i - offset - 1] is None:
break
positional.append(argspec.args[i])
defaults = [None] * offset + list(argspec.defaults or [])
# Add all arguments so they append to args.positional.
args_help = _DocToArgs(method.__doc__)
for name, default in zip(positional, defaults):
if not isinstance(default, (None.__class__, str)):
continue
subparser.add_argument(
name, help=(arguments or {}).get(name, args_help.get(name)),
default=default, nargs='?' if default is not None else None,
action=PositionalArg)
if argspec.varargs:
subparser.add_argument(
argspec.varargs, nargs=argparse.REMAINDER,
help=(arguments or {}).get(argspec.varargs, args_help.get(argspec.varargs)))
return subparser | def function[MakeSubparser, parameter[subparsers, parents, method, arguments]]:
constant[Returns an argparse subparser to create a 'subcommand' to adb.]
variable[name] assign[=] call[call[constant[-].join, parameter[call[call[name[re].split, parameter[constant[([A-Z][a-z]+)], name[method].__name__]]][<ast.Slice object at 0x7da1b1714eb0>]]].lower, parameter[]]
variable[help] assign[=] call[call[name[method].__doc__.splitlines, parameter[]]][constant[0]]
variable[subparser] assign[=] call[name[subparsers].add_parser, parameter[]]
call[name[subparser].set_defaults, parameter[]]
variable[argspec] assign[=] call[name[inspect].getargspec, parameter[name[method]]]
variable[offset] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[argspec].args]] - call[name[len], parameter[<ast.BoolOp object at 0x7da1b19dbac0>]]] - constant[1]]
variable[positional] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[argspec].args]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b19daef0> begin[:]
break
call[name[positional].append, parameter[call[name[argspec].args][name[i]]]]
variable[defaults] assign[=] binary_operation[binary_operation[list[[<ast.Constant object at 0x7da1b19db010>]] * name[offset]] + call[name[list], parameter[<ast.BoolOp object at 0x7da1b19db1c0>]]]
variable[args_help] assign[=] call[name[_DocToArgs], parameter[name[method].__doc__]]
for taget[tuple[[<ast.Name object at 0x7da1b19d97b0>, <ast.Name object at 0x7da1b19da710>]]] in starred[call[name[zip], parameter[name[positional], name[defaults]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b19d9150> begin[:]
continue
call[name[subparser].add_argument, parameter[name[name]]]
if name[argspec].varargs begin[:]
call[name[subparser].add_argument, parameter[name[argspec].varargs]]
return[name[subparser]] | keyword[def] identifier[MakeSubparser] ( identifier[subparsers] , identifier[parents] , identifier[method] , identifier[arguments] = keyword[None] ):
literal[string]
identifier[name] =( literal[string] . identifier[join] ( identifier[re] . identifier[split] ( literal[string] , identifier[method] . identifier[__name__] )[ literal[int] :- literal[int] : literal[int] ])). identifier[lower] ()
identifier[help] = identifier[method] . identifier[__doc__] . identifier[splitlines] ()[ literal[int] ]
identifier[subparser] = identifier[subparsers] . identifier[add_parser] (
identifier[name] = identifier[name] , identifier[description] = identifier[help] , identifier[help] = identifier[help] . identifier[rstrip] ( literal[string] ), identifier[parents] = identifier[parents] )
identifier[subparser] . identifier[set_defaults] ( identifier[method] = identifier[method] , identifier[positional] =[])
identifier[argspec] = identifier[inspect] . identifier[getargspec] ( identifier[method] )
identifier[offset] = identifier[len] ( identifier[argspec] . identifier[args] )- identifier[len] ( identifier[argspec] . identifier[defaults] keyword[or] [])- literal[int]
identifier[positional] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[argspec] . identifier[args] )):
keyword[if] identifier[i] > identifier[offset] keyword[and] identifier[argspec] . identifier[defaults] [ identifier[i] - identifier[offset] - literal[int] ] keyword[is] keyword[None] :
keyword[break]
identifier[positional] . identifier[append] ( identifier[argspec] . identifier[args] [ identifier[i] ])
identifier[defaults] =[ keyword[None] ]* identifier[offset] + identifier[list] ( identifier[argspec] . identifier[defaults] keyword[or] [])
identifier[args_help] = identifier[_DocToArgs] ( identifier[method] . identifier[__doc__] )
keyword[for] identifier[name] , identifier[default] keyword[in] identifier[zip] ( identifier[positional] , identifier[defaults] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[default] ,( keyword[None] . identifier[__class__] , identifier[str] )):
keyword[continue]
identifier[subparser] . identifier[add_argument] (
identifier[name] , identifier[help] =( identifier[arguments] keyword[or] {}). identifier[get] ( identifier[name] , identifier[args_help] . identifier[get] ( identifier[name] )),
identifier[default] = identifier[default] , identifier[nargs] = literal[string] keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] ,
identifier[action] = identifier[PositionalArg] )
keyword[if] identifier[argspec] . identifier[varargs] :
identifier[subparser] . identifier[add_argument] (
identifier[argspec] . identifier[varargs] , identifier[nargs] = identifier[argparse] . identifier[REMAINDER] ,
identifier[help] =( identifier[arguments] keyword[or] {}). identifier[get] ( identifier[argspec] . identifier[varargs] , identifier[args_help] . identifier[get] ( identifier[argspec] . identifier[varargs] )))
keyword[return] identifier[subparser] | def MakeSubparser(subparsers, parents, method, arguments=None):
"""Returns an argparse subparser to create a 'subcommand' to adb."""
name = '-'.join(re.split('([A-Z][a-z]+)', method.__name__)[1:-1:2]).lower()
help = method.__doc__.splitlines()[0]
subparser = subparsers.add_parser(name=name, description=help, help=help.rstrip('.'), parents=parents)
subparser.set_defaults(method=method, positional=[])
argspec = inspect.getargspec(method)
# Figure out positionals and default argument, if any. Explicitly includes
# arguments that default to '' but excludes arguments that default to None.
offset = len(argspec.args) - len(argspec.defaults or []) - 1
positional = []
for i in range(1, len(argspec.args)):
if i > offset and argspec.defaults[i - offset - 1] is None:
break # depends on [control=['if'], data=[]]
positional.append(argspec.args[i]) # depends on [control=['for'], data=['i']]
defaults = [None] * offset + list(argspec.defaults or [])
# Add all arguments so they append to args.positional.
args_help = _DocToArgs(method.__doc__)
for (name, default) in zip(positional, defaults):
if not isinstance(default, (None.__class__, str)):
continue # depends on [control=['if'], data=[]]
subparser.add_argument(name, help=(arguments or {}).get(name, args_help.get(name)), default=default, nargs='?' if default is not None else None, action=PositionalArg) # depends on [control=['for'], data=[]]
if argspec.varargs:
subparser.add_argument(argspec.varargs, nargs=argparse.REMAINDER, help=(arguments or {}).get(argspec.varargs, args_help.get(argspec.varargs))) # depends on [control=['if'], data=[]]
return subparser |
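The subcommand name derived on the first line of MakeSubparser splits camel-case method names on [A-Z][a-z]+ runs and joins them with dashes. The transformation in isolation, with made-up method names:

import re

def subcommand_name(method_name):
    # Same expression as in MakeSubparser, extracted for clarity.
    return '-'.join(re.split(r'([A-Z][a-z]+)', method_name)[1:-1:2]).lower()

for n in ('ListDevices', 'InstallApk', 'Reboot'):
    print(subcommand_name(n))
# list-devices
# install-apk
# reboot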
def _assemble(self):
"""Calls the appropriate assemble method based on policies."""
for stmt in self.statements:
pol = self.processed_policies[stmt.uuid]
if _is_whitelisted(stmt):
self._dispatch(stmt, 'assemble', self.model, self.agent_set,
pol.parameters) | def function[_assemble, parameter[self]]:
constant[Calls the appropriate assemble method based on policies.]
for taget[name[stmt]] in starred[name[self].statements] begin[:]
variable[pol] assign[=] call[name[self].processed_policies][name[stmt].uuid]
if call[name[_is_whitelisted], parameter[name[stmt]]] begin[:]
call[name[self]._dispatch, parameter[name[stmt], constant[assemble], name[self].model, name[self].agent_set, name[pol].parameters]] | keyword[def] identifier[_assemble] ( identifier[self] ):
literal[string]
keyword[for] identifier[stmt] keyword[in] identifier[self] . identifier[statements] :
identifier[pol] = identifier[self] . identifier[processed_policies] [ identifier[stmt] . identifier[uuid] ]
keyword[if] identifier[_is_whitelisted] ( identifier[stmt] ):
identifier[self] . identifier[_dispatch] ( identifier[stmt] , literal[string] , identifier[self] . identifier[model] , identifier[self] . identifier[agent_set] ,
identifier[pol] . identifier[parameters] ) | def _assemble(self):
"""Calls the appropriate assemble method based on policies."""
for stmt in self.statements:
pol = self.processed_policies[stmt.uuid]
if _is_whitelisted(stmt):
self._dispatch(stmt, 'assemble', self.model, self.agent_set, pol.parameters) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stmt']] |
def set_selinux_context(path,
user=None,
role=None,
type=None, # pylint: disable=W0622
range=None, # pylint: disable=W0622
persist=False):
'''
.. versionchanged:: Neon
Added persist option
Set a specific SELinux label on a given path
CLI Example:
.. code-block:: bash
salt '*' file.set_selinux_context path <user> <role> <type> <range>
salt '*' file.set_selinux_context /etc/yum.repos.d/epel.repo system_u object_r system_conf_t s0
'''
if not any((user, role, type, range)):
return False
if persist:
fcontext_result = __salt__['selinux.fcontext_add_policy'](path,
sel_type=type, sel_user=user, sel_level=range)
        if fcontext_result.get('retcode', None) != 0:
# Problem setting fcontext policy
raise CommandExecutionError(
'Problem setting fcontext: {0}'.format(fcontext_result)
)
cmd = ['chcon']
if user:
cmd.extend(['-u', user])
if role:
cmd.extend(['-r', role])
if type:
cmd.extend(['-t', type])
if range:
cmd.extend(['-l', range])
cmd.append(path)
ret = not __salt__['cmd.retcode'](cmd, python_shell=False)
if ret:
return get_selinux_context(path)
else:
return ret | def function[set_selinux_context, parameter[path, user, role, type, range, persist]]:
constant[
.. versionchanged:: Neon
Added persist option
Set a specific SELinux label on a given path
CLI Example:
.. code-block:: bash
salt '*' file.set_selinux_context path <user> <role> <type> <range>
salt '*' file.set_selinux_context /etc/yum.repos.d/epel.repo system_u object_r system_conf_t s0
]
if <ast.UnaryOp object at 0x7da18f00dbd0> begin[:]
return[constant[False]]
if name[persist] begin[:]
variable[fcontext_result] assign[=] call[call[name[__salt__]][constant[selinux.fcontext_add_policy]], parameter[name[path]]]
if compare[call[name[fcontext_result].get, parameter[constant[retcode], constant[None]]] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da18f00cf70>
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da18f00fb50>]]
if name[user] begin[:]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da18f00c100>, <ast.Name object at 0x7da18f00d540>]]]]
if name[role] begin[:]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da18f00cdc0>, <ast.Name object at 0x7da18f00ea10>]]]]
if name[type] begin[:]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da18f00f340>, <ast.Name object at 0x7da18f00d9c0>]]]]
if name[range] begin[:]
call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da207f02b60>, <ast.Name object at 0x7da207f01210>]]]]
call[name[cmd].append, parameter[name[path]]]
variable[ret] assign[=] <ast.UnaryOp object at 0x7da207f014b0>
if name[ret] begin[:]
return[call[name[get_selinux_context], parameter[name[path]]]] | keyword[def] identifier[set_selinux_context] ( identifier[path] ,
identifier[user] = keyword[None] ,
identifier[role] = keyword[None] ,
identifier[type] = keyword[None] ,
identifier[range] = keyword[None] ,
identifier[persist] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[any] (( identifier[user] , identifier[role] , identifier[type] , identifier[range] )):
keyword[return] keyword[False]
keyword[if] identifier[persist] :
identifier[fcontext_result] = identifier[__salt__] [ literal[string] ]( identifier[path] ,
identifier[sel_type] = identifier[type] , identifier[sel_user] = identifier[user] , identifier[sel_level] = identifier[range] )
keyword[if] identifier[fcontext_result] . identifier[get] ( literal[string] , keyword[None] ) != literal[int] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[fcontext_result] )
)
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[user] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[user] ])
keyword[if] identifier[role] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[role] ])
keyword[if] identifier[type] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[type] ])
keyword[if] identifier[range] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[range] ])
identifier[cmd] . identifier[append] ( identifier[path] )
identifier[ret] = keyword[not] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )
keyword[if] identifier[ret] :
keyword[return] identifier[get_selinux_context] ( identifier[path] )
keyword[else] :
keyword[return] identifier[ret] | def set_selinux_context(path, user=None, role=None, type=None, range=None, persist=False): # pylint: disable=W0622
# pylint: disable=W0622
"\n .. versionchanged:: Neon\n\n Added persist option\n\n Set a specific SELinux label on a given path\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' file.set_selinux_context path <user> <role> <type> <range>\n salt '*' file.set_selinux_context /etc/yum.repos.d/epel.repo system_u object_r system_conf_t s0\n "
if not any((user, role, type, range)):
return False # depends on [control=['if'], data=[]]
if persist:
fcontext_result = __salt__['selinux.fcontext_add_policy'](path, sel_type=type, sel_user=user, sel_level=range)
        if fcontext_result.get('retcode', None) != 0:
# Problem setting fcontext policy
raise CommandExecutionError('Problem setting fcontext: {0}'.format(fcontext_result)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
cmd = ['chcon']
if user:
cmd.extend(['-u', user]) # depends on [control=['if'], data=[]]
if role:
cmd.extend(['-r', role]) # depends on [control=['if'], data=[]]
if type:
cmd.extend(['-t', type]) # depends on [control=['if'], data=[]]
if range:
cmd.extend(['-l', range]) # depends on [control=['if'], data=[]]
cmd.append(path)
ret = not __salt__['cmd.retcode'](cmd, python_shell=False)
if ret:
return get_selinux_context(path) # depends on [control=['if'], data=[]]
else:
return ret |
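The command assembly above can be exercised without salt; a pure-Python sketch of the same guard and flag handling (the helper name is made up, and it shadows the type/range builtins only to mirror the original signature):

def build_chcon_cmd(path, user=None, role=None, type=None, range=None):
    # Mirrors the guard and flag assembly in set_selinux_context.
    if not any((user, role, type, range)):
        return None
    cmd = ['chcon']
    for flag, val in (('-u', user), ('-r', role), ('-t', type), ('-l', range)):
        if val:
            cmd.extend([flag, val])
    cmd.append(path)
    return cmd

print(build_chcon_cmd('/etc/yum.repos.d/epel.repo', user='system_u',
                      role='object_r', type='system_conf_t', range='s0'))
# ['chcon', '-u', 'system_u', '-r', 'object_r', '-t', 'system_conf_t',
#  '-l', 's0', '/etc/yum.repos.d/epel.repo']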
def _add_cloud_distro_check(cloud_archive_release, openstack_release):
"""Add the cloud pocket, but also check the cloud_archive_release against
the current distro, and use the openstack_release as the full lookup.
This just calls _add_cloud_pocket() with the openstack_release as pocket
to get the correct cloud-archive.list for dpkg to work with.
    :param cloud_archive_release: String, codename for the distro release.
:param openstack_release: String, spec for the release to look up in the
CLOUD_ARCHIVE_POCKETS
:raises: SourceConfigError if this is the wrong distro, or the pocket spec
doesn't exist.
"""
_verify_is_ubuntu_rel(cloud_archive_release, openstack_release)
_add_cloud_pocket("{}-{}".format(cloud_archive_release, openstack_release)) | def function[_add_cloud_distro_check, parameter[cloud_archive_release, openstack_release]]:
constant[Add the cloud pocket, but also check the cloud_archive_release against
the current distro, and use the openstack_release as the full lookup.
This just calls _add_cloud_pocket() with the openstack_release as pocket
to get the correct cloud-archive.list for dpkg to work with.
    :param cloud_archive_release: String, codename for the distro release.
:param openstack_release: String, spec for the release to look up in the
CLOUD_ARCHIVE_POCKETS
:raises: SourceConfigError if this is the wrong distro, or the pocket spec
doesn't exist.
]
call[name[_verify_is_ubuntu_rel], parameter[name[cloud_archive_release], name[openstack_release]]]
call[name[_add_cloud_pocket], parameter[call[constant[{}-{}].format, parameter[name[cloud_archive_release], name[openstack_release]]]]] | keyword[def] identifier[_add_cloud_distro_check] ( identifier[cloud_archive_release] , identifier[openstack_release] ):
literal[string]
identifier[_verify_is_ubuntu_rel] ( identifier[cloud_archive_release] , identifier[openstack_release] )
identifier[_add_cloud_pocket] ( literal[string] . identifier[format] ( identifier[cloud_archive_release] , identifier[openstack_release] )) | def _add_cloud_distro_check(cloud_archive_release, openstack_release):
"""Add the cloud pocket, but also check the cloud_archive_release against
the current distro, and use the openstack_release as the full lookup.
This just calls _add_cloud_pocket() with the openstack_release as pocket
to get the correct cloud-archive.list for dpkg to work with.
    :param cloud_archive_release: String, codename for the distro release.
:param openstack_release: String, spec for the release to look up in the
CLOUD_ARCHIVE_POCKETS
:raises: SourceConfigError if this is the wrong distro, or the pocket spec
doesn't exist.
"""
_verify_is_ubuntu_rel(cloud_archive_release, openstack_release)
_add_cloud_pocket('{}-{}'.format(cloud_archive_release, openstack_release)) |
def query(self, sql, *args, **kwargs):
"""Executes an SQL SELECT query and returns rows generator.
:param sql: query to execute
:param args: parameters iterable
:param kwargs: parameters iterable
:return: rows generator
:rtype: generator
"""
with self.locked() as conn:
for row in conn.query(sql, *args, **kwargs):
yield row | def function[query, parameter[self, sql]]:
    constant[Executes an SQL SELECT query and returns a generator of rows.
:param sql: query to execute
:param args: parameters iterable
:param kwargs: parameters iterable
:return: rows generator
:rtype: generator
]
with call[name[self].locked, parameter[]] begin[:]
for taget[name[row]] in starred[call[name[conn].query, parameter[name[sql], <ast.Starred object at 0x7da18dc079d0>]]] begin[:]
<ast.Yield object at 0x7da18dc07430> | keyword[def] identifier[query] ( identifier[self] , identifier[sql] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[with] identifier[self] . identifier[locked] () keyword[as] identifier[conn] :
keyword[for] identifier[row] keyword[in] identifier[conn] . identifier[query] ( identifier[sql] ,* identifier[args] ,** identifier[kwargs] ):
keyword[yield] identifier[row] | def query(self, sql, *args, **kwargs):
"""Executes an SQL SELECT query and returns rows generator.
:param sql: query to execute
:param args: parameters iterable
:param kwargs: parameters iterable
:return: rows generator
:rtype: generator
"""
with self.locked() as conn:
for row in conn.query(sql, *args, **kwargs):
yield row # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['conn']] |
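A usage sketch for the locked query generator; the pool object, table, and placeholder style are assumptions. Note the design consequence: because rows are yielded lazily inside the `with self.locked()` block, the connection stays checked out until the caller finishes (or abandons) the iteration.

# Hypothetical usage: the connection remains locked for the whole scan.
for row in pool.query('SELECT id, name FROM users WHERE age > %s', 18):
    print(row)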
def top(
df,
value: str,
limit: int,
order: str = 'asc',
group: Union[str, List[str]] = None
):
"""
Get the top or flop N results based on a column value for each specified group columns
---
### Parameters
*mandatory :*
- `value` (*str*): column name on which you will rank the results
- `limit` (*int*): Number to specify the N results you want to retrieve.
Use a positive number x to retrieve the first x results.
Use a negative number -x to retrieve the last x results.
*optional :*
    - `order` (*str*): `"asc"` or `"desc"` to sort by ascending or descending order. By default: `"asc"`.
- `group` (*str*, *list of str*): name(s) of columns on which you want to perform the group operation.
---
### Example
**Input**
| variable | Category | value |
|:--------:|:--------:|:-----:|
| lili | 1 | 50 |
| lili | 1 | 20 |
| toto | 1 | 100 |
| toto | 1 | 200 |
| toto | 1 | 300 |
| lala | 1 | 100 |
| lala | 1 | 150 |
| lala | 1 | 250 |
| lala | 2 | 350 |
| lala | 2 | 450 |
```cson
top:
value: 'value'
limit: 4
order: 'asc'
```
**Output**
| variable | Category | value |
|:--------:|:--------:|:-----:|
| lala | 1 | 250 |
| toto | 1 | 300 |
| lala | 2 | 350 |
| lala | 2 | 450 |
"""
ascending = order != 'desc'
limit = int(limit)
filter_func = 'nlargest' if (limit > 0) ^ ascending else 'nsmallest'
def _top(df):
return getattr(df, filter_func)(abs(limit), value).sort_values(by=value,
ascending=ascending)
if group is None:
df = _top(df)
else:
df = df.groupby(group).apply(_top)
return df | def function[top, parameter[df, value, limit, order, group]]:
constant[
Get the top or flop N results based on a column value for each specified group columns
---
### Parameters
*mandatory :*
- `value` (*str*): column name on which you will rank the results
- `limit` (*int*): Number to specify the N results you want to retrieve.
Use a positive number x to retrieve the first x results.
Use a negative number -x to retrieve the last x results.
*optional :*
    - `order` (*str*): `"asc"` or `"desc"` to sort by ascending or descending order. By default: `"asc"`.
- `group` (*str*, *list of str*): name(s) of columns on which you want to perform the group operation.
---
### Example
**Input**
| variable | Category | value |
|:--------:|:--------:|:-----:|
| lili | 1 | 50 |
| lili | 1 | 20 |
| toto | 1 | 100 |
| toto | 1 | 200 |
| toto | 1 | 300 |
| lala | 1 | 100 |
| lala | 1 | 150 |
| lala | 1 | 250 |
| lala | 2 | 350 |
| lala | 2 | 450 |
```cson
top:
value: 'value'
limit: 4
order: 'asc'
```
**Output**
| variable | Category | value |
|:--------:|:--------:|:-----:|
| lala | 1 | 250 |
| toto | 1 | 300 |
| lala | 2 | 350 |
| lala | 2 | 450 |
]
variable[ascending] assign[=] compare[name[order] not_equal[!=] constant[desc]]
variable[limit] assign[=] call[name[int], parameter[name[limit]]]
variable[filter_func] assign[=] <ast.IfExp object at 0x7da1b03b96f0>
def function[_top, parameter[df]]:
return[call[call[call[name[getattr], parameter[name[df], name[filter_func]]], parameter[call[name[abs], parameter[name[limit]]], name[value]]].sort_values, parameter[]]]
if compare[name[group] is constant[None]] begin[:]
variable[df] assign[=] call[name[_top], parameter[name[df]]]
return[name[df]] | keyword[def] identifier[top] (
identifier[df] ,
identifier[value] : identifier[str] ,
identifier[limit] : identifier[int] ,
identifier[order] : identifier[str] = literal[string] ,
identifier[group] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]]= keyword[None]
):
literal[string]
identifier[ascending] = identifier[order] != literal[string]
identifier[limit] = identifier[int] ( identifier[limit] )
identifier[filter_func] = literal[string] keyword[if] ( identifier[limit] > literal[int] )^ identifier[ascending] keyword[else] literal[string]
keyword[def] identifier[_top] ( identifier[df] ):
keyword[return] identifier[getattr] ( identifier[df] , identifier[filter_func] )( identifier[abs] ( identifier[limit] ), identifier[value] ). identifier[sort_values] ( identifier[by] = identifier[value] ,
identifier[ascending] = identifier[ascending] )
keyword[if] identifier[group] keyword[is] keyword[None] :
identifier[df] = identifier[_top] ( identifier[df] )
keyword[else] :
identifier[df] = identifier[df] . identifier[groupby] ( identifier[group] ). identifier[apply] ( identifier[_top] )
keyword[return] identifier[df] | def top(df, value: str, limit: int, order: str='asc', group: Union[str, List[str]]=None):
"""
Get the top or flop N results based on a column value for each specified group columns
---
### Parameters
*mandatory :*
- `value` (*str*): column name on which you will rank the results
- `limit` (*int*): Number to specify the N results you want to retrieve.
Use a positive number x to retrieve the first x results.
Use a negative number -x to retrieve the last x results.
*optional :*
    - `order` (*str*): `"asc"` or `"desc"` to sort by ascending or descending order. By default: `"asc"`.
- `group` (*str*, *list of str*): name(s) of columns on which you want to perform the group operation.
---
### Example
**Input**
| variable | Category | value |
|:--------:|:--------:|:-----:|
| lili | 1 | 50 |
| lili | 1 | 20 |
| toto | 1 | 100 |
| toto | 1 | 200 |
| toto | 1 | 300 |
| lala | 1 | 100 |
| lala | 1 | 150 |
| lala | 1 | 250 |
| lala | 2 | 350 |
| lala | 2 | 450 |
```cson
top:
value: 'value'
limit: 4
order: 'asc'
```
**Output**
| variable | Category | value |
|:--------:|:--------:|:-----:|
| lala | 1 | 250 |
| toto | 1 | 300 |
| lala | 2 | 350 |
| lala | 2 | 450 |
"""
ascending = order != 'desc'
limit = int(limit)
filter_func = 'nlargest' if (limit > 0) ^ ascending else 'nsmallest'
def _top(df):
return getattr(df, filter_func)(abs(limit), value).sort_values(by=value, ascending=ascending)
if group is None:
df = _top(df) # depends on [control=['if'], data=[]]
else:
df = df.groupby(group).apply(_top)
return df |
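A hedged usage sketch. Note how the sign of limit interacts with order in the code above: with order='asc' a positive limit keeps the smallest rows, so the largest rows are obtained with order='desc' (or a negative limit); the DataFrame below is made up.

import pandas as pd

df = pd.DataFrame({'variable': ['lili', 'toto', 'toto', 'lala', 'lala'],
                   'Category': [1, 1, 1, 2, 2],
                   'value': [50, 100, 300, 350, 450]})

# (limit > 0) ^ ascending is True here, so nlargest(2) is used,
# then the rows are displayed in descending order: 450, 350.
print(top(df, value='value', limit=2, order='desc'))

# Smallest row within each 'variable' group (nsmallest(1) per group).
print(top(df, value='value', limit=1, order='asc', group='variable'))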
def register(cls, name, encode, decode):
"""Add a codec to the registry.
Registers a codec with the given `name` (a string) to be used with the
given `encode` and `decode` functions, which take a `bytes` object and
return another one. An existing codec is replaced.
>>> import binascii
>>> CodecReg.register('uu', binascii.b2a_uu, binascii.a2b_uu)
>>> CodecReg.get_decoder('uu') is binascii.a2b_uu
True
>>> CodecReg.reset()
>>> 'uu' in CodecReg
False
"""
cls._codecs[name] = cls._codec(encode, decode) | def function[register, parameter[cls, name, encode, decode]]:
constant[Add a codec to the registry.
Registers a codec with the given `name` (a string) to be used with the
given `encode` and `decode` functions, which take a `bytes` object and
return another one. An existing codec is replaced.
>>> import binascii
>>> CodecReg.register('uu', binascii.b2a_uu, binascii.a2b_uu)
>>> CodecReg.get_decoder('uu') is binascii.a2b_uu
True
>>> CodecReg.reset()
>>> 'uu' in CodecReg
False
]
call[name[cls]._codecs][name[name]] assign[=] call[name[cls]._codec, parameter[name[encode], name[decode]]] | keyword[def] identifier[register] ( identifier[cls] , identifier[name] , identifier[encode] , identifier[decode] ):
literal[string]
identifier[cls] . identifier[_codecs] [ identifier[name] ]= identifier[cls] . identifier[_codec] ( identifier[encode] , identifier[decode] ) | def register(cls, name, encode, decode):
"""Add a codec to the registry.
Registers a codec with the given `name` (a string) to be used with the
given `encode` and `decode` functions, which take a `bytes` object and
return another one. An existing codec is replaced.
>>> import binascii
>>> CodecReg.register('uu', binascii.b2a_uu, binascii.a2b_uu)
>>> CodecReg.get_decoder('uu') is binascii.a2b_uu
True
>>> CodecReg.reset()
>>> 'uu' in CodecReg
False
"""
cls._codecs[name] = cls._codec(encode, decode) |
def run_hook(self, hook, *args, **kwargs):
"""
Loop over all plugins and invoke function `hook` with `args` and
`kwargs` in each of them. If the plugin does not have the function, it
is skipped.
"""
for plugin in self.raw_plugins:
if hasattr(plugin, hook):
self.logger.debug('Calling hook {0} in plugin {1}'.format(hook, plugin.__name__))
getattr(plugin, hook)(*args, **kwargs) | def function[run_hook, parameter[self, hook]]:
constant[
Loop over all plugins and invoke function `hook` with `args` and
`kwargs` in each of them. If the plugin does not have the function, it
is skipped.
]
for taget[name[plugin]] in starred[name[self].raw_plugins] begin[:]
if call[name[hasattr], parameter[name[plugin], name[hook]]] begin[:]
call[name[self].logger.debug, parameter[call[constant[Calling hook {0} in plugin {1}].format, parameter[name[hook], name[plugin].__name__]]]]
call[call[name[getattr], parameter[name[plugin], name[hook]]], parameter[<ast.Starred object at 0x7da1b24b4190>]] | keyword[def] identifier[run_hook] ( identifier[self] , identifier[hook] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[plugin] keyword[in] identifier[self] . identifier[raw_plugins] :
keyword[if] identifier[hasattr] ( identifier[plugin] , identifier[hook] ):
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[hook] , identifier[plugin] . identifier[__name__] ))
identifier[getattr] ( identifier[plugin] , identifier[hook] )(* identifier[args] ,** identifier[kwargs] ) | def run_hook(self, hook, *args, **kwargs):
"""
Loop over all plugins and invoke function `hook` with `args` and
`kwargs` in each of them. If the plugin does not have the function, it
is skipped.
"""
for plugin in self.raw_plugins:
if hasattr(plugin, hook):
self.logger.debug('Calling hook {0} in plugin {1}'.format(hook, plugin.__name__))
getattr(plugin, hook)(*args, **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plugin']] |
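The hook loop only requires each entry in raw_plugins to expose the hook as a callable attribute; a hypothetical plugin (names here are illustrative, not from the source project) can be as small as:

class AuditPlugin:
    """Handles on_save; silently skipped for hooks it does not define."""
    @staticmethod
    def on_save(record, user=None):
        print('saving %r for %s' % (record, user))

# manager.raw_plugins = [AuditPlugin]
# manager.run_hook('on_save', {'id': 1}, user='alice')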
def get_object(self, *args, **kwargs):
'''Only talk owners can see talks, unless they've been accepted'''
object_ = super(TalkView, self).get_object(*args, **kwargs)
if not object_.can_view(self.request.user):
raise PermissionDenied
return object_ | def function[get_object, parameter[self]]:
constant[Only talk owners can see talks, unless they've been accepted]
variable[object_] assign[=] call[call[name[super], parameter[name[TalkView], name[self]]].get_object, parameter[<ast.Starred object at 0x7da1b0ea1a80>]]
if <ast.UnaryOp object at 0x7da1b0ea0280> begin[:]
<ast.Raise object at 0x7da1b0ea2590>
return[name[object_]] | keyword[def] identifier[get_object] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[object_] = identifier[super] ( identifier[TalkView] , identifier[self] ). identifier[get_object] (* identifier[args] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[object_] . identifier[can_view] ( identifier[self] . identifier[request] . identifier[user] ):
keyword[raise] identifier[PermissionDenied]
keyword[return] identifier[object_] | def get_object(self, *args, **kwargs):
"""Only talk owners can see talks, unless they've been accepted"""
object_ = super(TalkView, self).get_object(*args, **kwargs)
if not object_.can_view(self.request.user):
raise PermissionDenied # depends on [control=['if'], data=[]]
return object_ |
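A hedged sketch of the can_view contract the view relies on; the model below is a plain stand-in, and the field and constant names are illustrative rather than taken from the project:

ACCEPTED = 'accepted'

class Talk:
    def __init__(self, owner, status):
        self.owner = owner
        self.status = status

    def can_view(self, user):
        # Owners always see their talk; everyone else only once accepted.
        return user == self.owner or self.status == ACCEPTED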
def _get_two_data_sources(self):
"""Get two sensible data sources, which may be the same."""
selected_display_items = self.selected_display_items
if len(selected_display_items) < 2:
selected_display_items = list()
display_item = self.selected_display_item
if display_item:
selected_display_items.append(display_item)
if len(selected_display_items) == 1:
display_item = selected_display_items[0]
data_item = display_item.data_item if display_item else None
if display_item and len(display_item.graphic_selection.indexes) == 2:
index1 = display_item.graphic_selection.anchor_index
index2 = list(display_item.graphic_selection.indexes.difference({index1}))[0]
graphic1 = display_item.graphics[index1]
graphic2 = display_item.graphics[index2]
if data_item:
if data_item.is_datum_1d and isinstance(graphic1, Graphics.IntervalGraphic) and isinstance(graphic2, Graphics.IntervalGraphic):
crop_graphic1 = graphic1
crop_graphic2 = graphic2
elif data_item.is_datum_2d and isinstance(graphic1, Graphics.RectangleTypeGraphic) and isinstance(graphic2, Graphics.RectangleTypeGraphic):
crop_graphic1 = graphic1
crop_graphic2 = graphic2
else:
crop_graphic1 = self.__get_crop_graphic(display_item)
crop_graphic2 = crop_graphic1
else:
crop_graphic1 = self.__get_crop_graphic(display_item)
crop_graphic2 = crop_graphic1
else:
crop_graphic1 = self.__get_crop_graphic(display_item)
crop_graphic2 = crop_graphic1
return (display_item, crop_graphic1), (display_item, crop_graphic2)
if len(selected_display_items) == 2:
display_item1 = selected_display_items[0]
crop_graphic1 = self.__get_crop_graphic(display_item1)
display_item2 = selected_display_items[1]
crop_graphic2 = self.__get_crop_graphic(display_item2)
return (display_item1, crop_graphic1), (display_item2, crop_graphic2)
return None | def function[_get_two_data_sources, parameter[self]]:
constant[Get two sensible data sources, which may be the same.]
variable[selected_display_items] assign[=] name[self].selected_display_items
if compare[call[name[len], parameter[name[selected_display_items]]] less[<] constant[2]] begin[:]
variable[selected_display_items] assign[=] call[name[list], parameter[]]
variable[display_item] assign[=] name[self].selected_display_item
if name[display_item] begin[:]
call[name[selected_display_items].append, parameter[name[display_item]]]
if compare[call[name[len], parameter[name[selected_display_items]]] equal[==] constant[1]] begin[:]
variable[display_item] assign[=] call[name[selected_display_items]][constant[0]]
variable[data_item] assign[=] <ast.IfExp object at 0x7da1b0e74820>
if <ast.BoolOp object at 0x7da1b0e74100> begin[:]
variable[index1] assign[=] name[display_item].graphic_selection.anchor_index
variable[index2] assign[=] call[call[name[list], parameter[call[name[display_item].graphic_selection.indexes.difference, parameter[<ast.Set object at 0x7da1b0e755d0>]]]]][constant[0]]
variable[graphic1] assign[=] call[name[display_item].graphics][name[index1]]
variable[graphic2] assign[=] call[name[display_item].graphics][name[index2]]
if name[data_item] begin[:]
if <ast.BoolOp object at 0x7da1b0e75ff0> begin[:]
variable[crop_graphic1] assign[=] name[graphic1]
variable[crop_graphic2] assign[=] name[graphic2]
return[tuple[[<ast.Tuple object at 0x7da1b0e765f0>, <ast.Tuple object at 0x7da1b0e74b50>]]]
if compare[call[name[len], parameter[name[selected_display_items]]] equal[==] constant[2]] begin[:]
variable[display_item1] assign[=] call[name[selected_display_items]][constant[0]]
variable[crop_graphic1] assign[=] call[name[self].__get_crop_graphic, parameter[name[display_item1]]]
variable[display_item2] assign[=] call[name[selected_display_items]][constant[1]]
variable[crop_graphic2] assign[=] call[name[self].__get_crop_graphic, parameter[name[display_item2]]]
return[tuple[[<ast.Tuple object at 0x7da18f813be0>, <ast.Tuple object at 0x7da18f8113f0>]]]
return[constant[None]] | keyword[def] identifier[_get_two_data_sources] ( identifier[self] ):
literal[string]
identifier[selected_display_items] = identifier[self] . identifier[selected_display_items]
keyword[if] identifier[len] ( identifier[selected_display_items] )< literal[int] :
identifier[selected_display_items] = identifier[list] ()
identifier[display_item] = identifier[self] . identifier[selected_display_item]
keyword[if] identifier[display_item] :
identifier[selected_display_items] . identifier[append] ( identifier[display_item] )
keyword[if] identifier[len] ( identifier[selected_display_items] )== literal[int] :
identifier[display_item] = identifier[selected_display_items] [ literal[int] ]
identifier[data_item] = identifier[display_item] . identifier[data_item] keyword[if] identifier[display_item] keyword[else] keyword[None]
keyword[if] identifier[display_item] keyword[and] identifier[len] ( identifier[display_item] . identifier[graphic_selection] . identifier[indexes] )== literal[int] :
identifier[index1] = identifier[display_item] . identifier[graphic_selection] . identifier[anchor_index]
identifier[index2] = identifier[list] ( identifier[display_item] . identifier[graphic_selection] . identifier[indexes] . identifier[difference] ({ identifier[index1] }))[ literal[int] ]
identifier[graphic1] = identifier[display_item] . identifier[graphics] [ identifier[index1] ]
identifier[graphic2] = identifier[display_item] . identifier[graphics] [ identifier[index2] ]
keyword[if] identifier[data_item] :
keyword[if] identifier[data_item] . identifier[is_datum_1d] keyword[and] identifier[isinstance] ( identifier[graphic1] , identifier[Graphics] . identifier[IntervalGraphic] ) keyword[and] identifier[isinstance] ( identifier[graphic2] , identifier[Graphics] . identifier[IntervalGraphic] ):
identifier[crop_graphic1] = identifier[graphic1]
identifier[crop_graphic2] = identifier[graphic2]
keyword[elif] identifier[data_item] . identifier[is_datum_2d] keyword[and] identifier[isinstance] ( identifier[graphic1] , identifier[Graphics] . identifier[RectangleTypeGraphic] ) keyword[and] identifier[isinstance] ( identifier[graphic2] , identifier[Graphics] . identifier[RectangleTypeGraphic] ):
identifier[crop_graphic1] = identifier[graphic1]
identifier[crop_graphic2] = identifier[graphic2]
keyword[else] :
identifier[crop_graphic1] = identifier[self] . identifier[__get_crop_graphic] ( identifier[display_item] )
identifier[crop_graphic2] = identifier[crop_graphic1]
keyword[else] :
identifier[crop_graphic1] = identifier[self] . identifier[__get_crop_graphic] ( identifier[display_item] )
identifier[crop_graphic2] = identifier[crop_graphic1]
keyword[else] :
identifier[crop_graphic1] = identifier[self] . identifier[__get_crop_graphic] ( identifier[display_item] )
identifier[crop_graphic2] = identifier[crop_graphic1]
keyword[return] ( identifier[display_item] , identifier[crop_graphic1] ),( identifier[display_item] , identifier[crop_graphic2] )
keyword[if] identifier[len] ( identifier[selected_display_items] )== literal[int] :
identifier[display_item1] = identifier[selected_display_items] [ literal[int] ]
identifier[crop_graphic1] = identifier[self] . identifier[__get_crop_graphic] ( identifier[display_item1] )
identifier[display_item2] = identifier[selected_display_items] [ literal[int] ]
identifier[crop_graphic2] = identifier[self] . identifier[__get_crop_graphic] ( identifier[display_item2] )
keyword[return] ( identifier[display_item1] , identifier[crop_graphic1] ),( identifier[display_item2] , identifier[crop_graphic2] )
keyword[return] keyword[None] | def _get_two_data_sources(self):
"""Get two sensible data sources, which may be the same."""
selected_display_items = self.selected_display_items
if len(selected_display_items) < 2:
selected_display_items = list()
display_item = self.selected_display_item
if display_item:
selected_display_items.append(display_item) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if len(selected_display_items) == 1:
display_item = selected_display_items[0]
data_item = display_item.data_item if display_item else None
if display_item and len(display_item.graphic_selection.indexes) == 2:
index1 = display_item.graphic_selection.anchor_index
index2 = list(display_item.graphic_selection.indexes.difference({index1}))[0]
graphic1 = display_item.graphics[index1]
graphic2 = display_item.graphics[index2]
if data_item:
if data_item.is_datum_1d and isinstance(graphic1, Graphics.IntervalGraphic) and isinstance(graphic2, Graphics.IntervalGraphic):
crop_graphic1 = graphic1
crop_graphic2 = graphic2 # depends on [control=['if'], data=[]]
elif data_item.is_datum_2d and isinstance(graphic1, Graphics.RectangleTypeGraphic) and isinstance(graphic2, Graphics.RectangleTypeGraphic):
crop_graphic1 = graphic1
crop_graphic2 = graphic2 # depends on [control=['if'], data=[]]
else:
crop_graphic1 = self.__get_crop_graphic(display_item)
crop_graphic2 = crop_graphic1 # depends on [control=['if'], data=[]]
else:
crop_graphic1 = self.__get_crop_graphic(display_item)
crop_graphic2 = crop_graphic1 # depends on [control=['if'], data=[]]
else:
crop_graphic1 = self.__get_crop_graphic(display_item)
crop_graphic2 = crop_graphic1
return ((display_item, crop_graphic1), (display_item, crop_graphic2)) # depends on [control=['if'], data=[]]
if len(selected_display_items) == 2:
display_item1 = selected_display_items[0]
crop_graphic1 = self.__get_crop_graphic(display_item1)
display_item2 = selected_display_items[1]
crop_graphic2 = self.__get_crop_graphic(display_item2)
return ((display_item1, crop_graphic1), (display_item2, crop_graphic2)) # depends on [control=['if'], data=[]]
return None |
def _iter_text_wave(
self, text, numbers, step=1,
fore=None, back=None, style=None, rgb_mode=False):
""" Yield colorized characters from `text`, using a wave of `numbers`.
Arguments:
text : String to be colorized.
numbers : A list/tuple of numbers (256 colors).
step : Number of characters to colorize per color.
fore : Fore color to use (name or number).
(Back will be gradient)
back : Background color to use (name or number).
(Fore will be gradient)
style : Style name to use.
rgb_mode : Use number for rgb value.
This should never be used when the numbers
are rgb values themselves.
"""
if fore and back:
raise ValueError('Both fore and back colors cannot be specified.')
pos = 0
end = len(text)
numbergen = self._iter_wave(numbers)
def make_color(n):
try:
r, g, b = n
except TypeError:
if rgb_mode:
return n, n, n
return n
return r, g, b
for value in numbergen:
lastchar = pos + step
yield self.color(
text[pos:lastchar],
fore=make_color(value) if fore is None else fore,
back=make_color(value) if fore is not None else back,
style=style
)
if lastchar >= end:
numbergen.send(True)
pos = lastchar | def function[_iter_text_wave, parameter[self, text, numbers, step, fore, back, style, rgb_mode]]:
constant[ Yield colorized characters from `text`, using a wave of `numbers`.
Arguments:
text : String to be colorized.
numbers : A list/tuple of numbers (256 colors).
step : Number of characters to colorize per color.
fore : Fore color to use (name or number).
(Back will be gradient)
back : Background color to use (name or number).
(Fore will be gradient)
style : Style name to use.
rgb_mode : Use number for rgb value.
This should never be used when the numbers
are rgb values themselves.
]
if <ast.BoolOp object at 0x7da1b0328400> begin[:]
<ast.Raise object at 0x7da1b03288e0>
variable[pos] assign[=] constant[0]
variable[end] assign[=] call[name[len], parameter[name[text]]]
variable[numbergen] assign[=] call[name[self]._iter_wave, parameter[name[numbers]]]
def function[make_color, parameter[n]]:
<ast.Try object at 0x7da1b032a110>
return[tuple[[<ast.Name object at 0x7da1b032a290>, <ast.Name object at 0x7da1b03285e0>, <ast.Name object at 0x7da1b03287f0>]]]
for taget[name[value]] in starred[name[numbergen]] begin[:]
variable[lastchar] assign[=] binary_operation[name[pos] + name[step]]
<ast.Yield object at 0x7da1b0329cc0>
if compare[name[lastchar] greater_or_equal[>=] name[end]] begin[:]
call[name[numbergen].send, parameter[constant[True]]]
variable[pos] assign[=] name[lastchar] | keyword[def] identifier[_iter_text_wave] (
identifier[self] , identifier[text] , identifier[numbers] , identifier[step] = literal[int] ,
identifier[fore] = keyword[None] , identifier[back] = keyword[None] , identifier[style] = keyword[None] , identifier[rgb_mode] = keyword[False] ):
literal[string]
keyword[if] identifier[fore] keyword[and] identifier[back] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[pos] = literal[int]
identifier[end] = identifier[len] ( identifier[text] )
identifier[numbergen] = identifier[self] . identifier[_iter_wave] ( identifier[numbers] )
keyword[def] identifier[make_color] ( identifier[n] ):
keyword[try] :
identifier[r] , identifier[g] , identifier[b] = identifier[n]
keyword[except] identifier[TypeError] :
keyword[if] identifier[rgb_mode] :
keyword[return] identifier[n] , identifier[n] , identifier[n]
keyword[return] identifier[n]
keyword[return] identifier[r] , identifier[g] , identifier[b]
keyword[for] identifier[value] keyword[in] identifier[numbergen] :
identifier[lastchar] = identifier[pos] + identifier[step]
keyword[yield] identifier[self] . identifier[color] (
identifier[text] [ identifier[pos] : identifier[lastchar] ],
identifier[fore] = identifier[make_color] ( identifier[value] ) keyword[if] identifier[fore] keyword[is] keyword[None] keyword[else] identifier[fore] ,
identifier[back] = identifier[make_color] ( identifier[value] ) keyword[if] identifier[fore] keyword[is] keyword[not] keyword[None] keyword[else] identifier[back] ,
identifier[style] = identifier[style]
)
keyword[if] identifier[lastchar] >= identifier[end] :
identifier[numbergen] . identifier[send] ( keyword[True] )
identifier[pos] = identifier[lastchar] | def _iter_text_wave(self, text, numbers, step=1, fore=None, back=None, style=None, rgb_mode=False):
""" Yield colorized characters from `text`, using a wave of `numbers`.
Arguments:
text : String to be colorized.
numbers : A list/tuple of numbers (256 colors).
step : Number of characters to colorize per color.
fore : Fore color to use (name or number).
(Back will be gradient)
back : Background color to use (name or number).
(Fore will be gradient)
style : Style name to use.
rgb_mode : Use number for rgb value.
This should never be used when the numbers
are rgb values themselves.
"""
if fore and back:
raise ValueError('Both fore and back colors cannot be specified.') # depends on [control=['if'], data=[]]
pos = 0
end = len(text)
numbergen = self._iter_wave(numbers)
def make_color(n):
try:
(r, g, b) = n # depends on [control=['try'], data=[]]
except TypeError:
if rgb_mode:
return (n, n, n) # depends on [control=['if'], data=[]]
return n # depends on [control=['except'], data=[]]
return (r, g, b)
for value in numbergen:
lastchar = pos + step
yield self.color(text[pos:lastchar], fore=make_color(value) if fore is None else fore, back=make_color(value) if fore is not None else back, style=style)
if lastchar >= end:
numbergen.send(True) # depends on [control=['if'], data=[]]
pos = lastchar # depends on [control=['for'], data=['value']] |
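Stripped of the bouncing wave generator, the chunking idea is simply "color `step` characters at a time from a repeating palette"; a minimal standalone sketch (it cycles rather than bounces, and the names are mine, not the library's):

from itertools import cycle

def iter_text_chunks(text, numbers, step=1):
    palette = cycle(numbers)
    for pos in range(0, len(text), step):
        yield next(palette), text[pos:pos + step]

print(list(iter_text_chunks('rainbow', [196, 202, 226], step=2)))
# [(196, 'ra'), (202, 'in'), (226, 'bo'), (196, 'w')]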
def to_string(self, buf=None, columns=None, col_space=None, header=True,
index=True, na_rep='NaN', formatters=None, float_format=None,
sparsify=None, index_names=True, justify=None,
max_rows=None, max_cols=None, show_dimensions=False,
decimal='.', line_width=None):
"""
Render a DataFrame to a console-friendly tabular output.
%(shared_params)s
line_width : int, optional
Width to wrap a line in characters.
%(returns)s
See Also
--------
to_html : Convert DataFrame to HTML.
Examples
--------
>>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]}
>>> df = pd.DataFrame(d)
>>> print(df.to_string())
col1 col2
0 1 4
1 2 5
2 3 6
"""
formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns,
col_space=col_space, na_rep=na_rep,
formatters=formatters,
float_format=float_format,
sparsify=sparsify, justify=justify,
index_names=index_names,
header=header, index=index,
max_rows=max_rows,
max_cols=max_cols,
show_dimensions=show_dimensions,
decimal=decimal,
line_width=line_width)
formatter.to_string()
if buf is None:
result = formatter.buf.getvalue()
return result | def function[to_string, parameter[self, buf, columns, col_space, header, index, na_rep, formatters, float_format, sparsify, index_names, justify, max_rows, max_cols, show_dimensions, decimal, line_width]]:
constant[
Render a DataFrame to a console-friendly tabular output.
%(shared_params)s
line_width : int, optional
Width to wrap a line in characters.
%(returns)s
See Also
--------
to_html : Convert DataFrame to HTML.
Examples
--------
>>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]}
>>> df = pd.DataFrame(d)
>>> print(df.to_string())
col1 col2
0 1 4
1 2 5
2 3 6
]
variable[formatter] assign[=] call[name[fmt].DataFrameFormatter, parameter[name[self]]]
call[name[formatter].to_string, parameter[]]
if compare[name[buf] is constant[None]] begin[:]
variable[result] assign[=] call[name[formatter].buf.getvalue, parameter[]]
return[name[result]] | keyword[def] identifier[to_string] ( identifier[self] , identifier[buf] = keyword[None] , identifier[columns] = keyword[None] , identifier[col_space] = keyword[None] , identifier[header] = keyword[True] ,
identifier[index] = keyword[True] , identifier[na_rep] = literal[string] , identifier[formatters] = keyword[None] , identifier[float_format] = keyword[None] ,
identifier[sparsify] = keyword[None] , identifier[index_names] = keyword[True] , identifier[justify] = keyword[None] ,
identifier[max_rows] = keyword[None] , identifier[max_cols] = keyword[None] , identifier[show_dimensions] = keyword[False] ,
identifier[decimal] = literal[string] , identifier[line_width] = keyword[None] ):
literal[string]
identifier[formatter] = identifier[fmt] . identifier[DataFrameFormatter] ( identifier[self] , identifier[buf] = identifier[buf] , identifier[columns] = identifier[columns] ,
identifier[col_space] = identifier[col_space] , identifier[na_rep] = identifier[na_rep] ,
identifier[formatters] = identifier[formatters] ,
identifier[float_format] = identifier[float_format] ,
identifier[sparsify] = identifier[sparsify] , identifier[justify] = identifier[justify] ,
identifier[index_names] = identifier[index_names] ,
identifier[header] = identifier[header] , identifier[index] = identifier[index] ,
identifier[max_rows] = identifier[max_rows] ,
identifier[max_cols] = identifier[max_cols] ,
identifier[show_dimensions] = identifier[show_dimensions] ,
identifier[decimal] = identifier[decimal] ,
identifier[line_width] = identifier[line_width] )
identifier[formatter] . identifier[to_string] ()
keyword[if] identifier[buf] keyword[is] keyword[None] :
identifier[result] = identifier[formatter] . identifier[buf] . identifier[getvalue] ()
keyword[return] identifier[result] | def to_string(self, buf=None, columns=None, col_space=None, header=True, index=True, na_rep='NaN', formatters=None, float_format=None, sparsify=None, index_names=True, justify=None, max_rows=None, max_cols=None, show_dimensions=False, decimal='.', line_width=None):
"""
Render a DataFrame to a console-friendly tabular output.
%(shared_params)s
line_width : int, optional
Width to wrap a line in characters.
%(returns)s
See Also
--------
to_html : Convert DataFrame to HTML.
Examples
--------
>>> d = {'col1': [1, 2, 3], 'col2': [4, 5, 6]}
>>> df = pd.DataFrame(d)
>>> print(df.to_string())
col1 col2
0 1 4
1 2 5
2 3 6
"""
formatter = fmt.DataFrameFormatter(self, buf=buf, columns=columns, col_space=col_space, na_rep=na_rep, formatters=formatters, float_format=float_format, sparsify=sparsify, justify=justify, index_names=index_names, header=header, index=index, max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, decimal=decimal, line_width=line_width)
formatter.to_string()
if buf is None:
result = formatter.buf.getvalue()
return result # depends on [control=['if'], data=[]] |
def prune(self):
"""
On a subtree where the root node's s_center is empty,
return a new subtree with no empty s_centers.
"""
if not self[0] or not self[1]: # if I have an empty branch
direction = not self[0] # graft the other branch here
#if trace:
# print('Grafting {} branch'.format(
# 'right' if direction else 'left'))
result = self[direction]
#if result: result.verify()
return result
else:
# Replace the root node with the greatest predecessor.
heir, self[0] = self[0].pop_greatest_child()
#if trace:
# print('Replacing {} with {}.'.format(
# self.x_center, heir.x_center
# ))
# print('Removed greatest predecessor:')
# self.print_structure()
#if self[0]: self[0].verify()
#if self[1]: self[1].verify()
# Set up the heir as the new root node
(heir[0], heir[1]) = (self[0], self[1])
#if trace: print('Setting up the heir:')
#if trace: heir.print_structure()
# popping the predecessor may have unbalanced this node;
# fix it
heir.refresh_balance()
heir = heir.rotate()
#heir.verify()
#if trace: print('Rotated the heir:')
#if trace: heir.print_structure()
return heir | def function[prune, parameter[self]]:
constant[
On a subtree where the root node's s_center is empty,
return a new subtree with no empty s_centers.
]
if <ast.BoolOp object at 0x7da1b12cb010> begin[:]
variable[direction] assign[=] <ast.UnaryOp object at 0x7da18f58d3c0>
variable[result] assign[=] call[name[self]][name[direction]]
return[name[result]] | keyword[def] identifier[prune] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] [ literal[int] ] keyword[or] keyword[not] identifier[self] [ literal[int] ]:
identifier[direction] = keyword[not] identifier[self] [ literal[int] ]
identifier[result] = identifier[self] [ identifier[direction] ]
keyword[return] identifier[result]
keyword[else] :
identifier[heir] , identifier[self] [ literal[int] ]= identifier[self] [ literal[int] ]. identifier[pop_greatest_child] ()
( identifier[heir] [ literal[int] ], identifier[heir] [ literal[int] ])=( identifier[self] [ literal[int] ], identifier[self] [ literal[int] ])
identifier[heir] . identifier[refresh_balance] ()
identifier[heir] = identifier[heir] . identifier[rotate] ()
keyword[return] identifier[heir] | def prune(self):
"""
On a subtree where the root node's s_center is empty,
return a new subtree with no empty s_centers.
"""
if not self[0] or not self[1]: # if I have an empty branch
direction = not self[0] # graft the other branch here
#if trace:
# print('Grafting {} branch'.format(
# 'right' if direction else 'left'))
result = self[direction]
#if result: result.verify()
return result # depends on [control=['if'], data=[]]
else:
# Replace the root node with the greatest predecessor.
(heir, self[0]) = self[0].pop_greatest_child()
#if trace:
# print('Replacing {} with {}.'.format(
# self.x_center, heir.x_center
# ))
# print('Removed greatest predecessor:')
# self.print_structure()
#if self[0]: self[0].verify()
#if self[1]: self[1].verify()
# Set up the heir as the new root node
(heir[0], heir[1]) = (self[0], self[1])
#if trace: print('Setting up the heir:')
#if trace: heir.print_structure()
# popping the predecessor may have unbalanced this node;
# fix it
heir.refresh_balance()
heir = heir.rotate()
#heir.verify()
#if trace: print('Rotated the heir:')
#if trace: heir.print_structure()
return heir |
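The same delete-the-root strategy is easier to see on a plain BST; this is an assumed analogue, not code from the interval-tree library, and it omits the AVL rebalancing step (refresh_balance/rotate):

class Node:
    def __init__(self, key, left=None, right=None):
        self.key, self.left, self.right = key, left, right

def pop_greatest(node):
    # Remove and return (greatest node, remaining subtree).
    if node.right is None:
        return node, node.left
    greatest, node.right = pop_greatest(node.right)
    return greatest, node

def prune_root(root):
    if root.left is None or root.right is None:   # an empty branch:
        return root.left or root.right            # graft the other one
    heir, root.left = pop_greatest(root.left)     # greatest predecessor
    heir.left, heir.right = root.left, root.right
    return heir                                   # heir is the new root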
def generateName(nodeName: str, instId: int):
"""
Create and return the name for a replica using its nodeName and
instanceId.
Ex: Alpha:1
"""
if isinstance(nodeName, str):
# Because sometimes it is bytes (why?)
if ":" in nodeName:
# Because in some cases (for requested messages) it
# already has ':'. This should be fixed.
return nodeName
return "{}:{}".format(nodeName, instId) | def function[generateName, parameter[nodeName, instId]]:
constant[
Create and return the name for a replica using its nodeName and
instanceId.
Ex: Alpha:1
]
if call[name[isinstance], parameter[name[nodeName], name[str]]] begin[:]
if compare[constant[:] in name[nodeName]] begin[:]
return[name[nodeName]]
return[call[constant[{}:{}].format, parameter[name[nodeName], name[instId]]]] | keyword[def] identifier[generateName] ( identifier[nodeName] : identifier[str] , identifier[instId] : identifier[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[nodeName] , identifier[str] ):
keyword[if] literal[string] keyword[in] identifier[nodeName] :
keyword[return] identifier[nodeName]
keyword[return] literal[string] . identifier[format] ( identifier[nodeName] , identifier[instId] ) | def generateName(nodeName: str, instId: int):
"""
Create and return the name for a replica using its nodeName and
instanceId.
Ex: Alpha:1
"""
if isinstance(nodeName, str):
# Because sometimes it is bytes (why?)
if ':' in nodeName:
# Because in some cases (for requested messages) it
# already has ':'. This should be fixed.
return nodeName # depends on [control=['if'], data=['nodeName']] # depends on [control=['if'], data=[]]
return '{}:{}'.format(nodeName, instId) |
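The contract in two asserts, treating the method as a plain function (it takes neither self nor cls, so a staticmethod is assumed):

assert generateName('Alpha', 1) == 'Alpha:1'
assert generateName('Alpha:1', 1) == 'Alpha:1'  # already qualified, returned as-is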
def supports_calendar_type(self, calendar_type=None):
"""Tests if the given calendar type is supported.
arg: calendar_type (osid.type.Type): a calendar Type
return: (boolean) - ``true`` if the type is supported, ``false``
otherwise
raise: IllegalState - syntax is not a ``DATETIME`` or
``DURATION``
raise: NullArgument - ``calendar_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.Metadata.supports_coordinate_type
from .osid_errors import IllegalState, NullArgument
if not calendar_type:
raise NullArgument('no input Type provided')
if self._kwargs['syntax'] not in ['``DATETIME``', '``DURATION``']:
            raise IllegalState('syntax must be DATETIME or DURATION')
return calendar_type in self.get_calendar_types | def function[supports_calendar_type, parameter[self, calendar_type]]:
constant[Tests if the given calendar type is supported.
arg: calendar_type (osid.type.Type): a calendar Type
return: (boolean) - ``true`` if the type is supported, ``false``
otherwise
raise: IllegalState - syntax is not a ``DATETIME`` or
``DURATION``
raise: NullArgument - ``calendar_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
]
from relative_module[osid_errors] import module[IllegalState], module[NullArgument]
if <ast.UnaryOp object at 0x7da1b26ad7b0> begin[:]
<ast.Raise object at 0x7da1b26afac0>
if compare[call[name[self]._kwargs][constant[syntax]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b26ac550>, <ast.Constant object at 0x7da1b26aee60>]]] begin[:]
<ast.Raise object at 0x7da1b26afd90>
return[compare[name[calendar_type] in name[self].get_calendar_types]] | keyword[def] identifier[supports_calendar_type] ( identifier[self] , identifier[calendar_type] = keyword[None] ):
literal[string]
keyword[from] . identifier[osid_errors] keyword[import] identifier[IllegalState] , identifier[NullArgument]
keyword[if] keyword[not] identifier[calendar_type] :
keyword[raise] identifier[NullArgument] ( literal[string] )
keyword[if] identifier[self] . identifier[_kwargs] [ literal[string] ] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[raise] identifier[IllegalState] ( literal[string] )
keyword[return] identifier[calendar_type] keyword[in] identifier[self] . identifier[get_calendar_types] | def supports_calendar_type(self, calendar_type=None):
"""Tests if the given calendar type is supported.
arg: calendar_type (osid.type.Type): a calendar Type
return: (boolean) - ``true`` if the type is supported, ``false``
otherwise
raise: IllegalState - syntax is not a ``DATETIME`` or
``DURATION``
raise: NullArgument - ``calendar_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.Metadata.supports_coordinate_type
from .osid_errors import IllegalState, NullArgument
if not calendar_type:
raise NullArgument('no input Type provided') # depends on [control=['if'], data=[]]
if self._kwargs['syntax'] not in ['``DATETIME``', '``DURATION``']:
raise IllegalState('put more meaninful message here') # depends on [control=['if'], data=[]]
return calendar_type in self.get_calendar_types |
def setup_logging(args, conf):
"""Setup logging"""
logging_format = "%(asctime)s [%(levelname)s]: %(name)s (%(module)s.%(funcName)s; " \
"%(filename)s:%(lineno)d)\n %(message)s"
if getattr(args, "verbose", None):
debug_logger = logging.StreamHandler(sys.stdout)
debug_logger.addFilter(LogLevelFilter(max_level=logging.WARNING))
debug_logger.setFormatter(logging.Formatter(logging_format))
logging.root.addHandler(debug_logger)
logging.root.level = logging.DEBUG
error_logger = logging.StreamHandler(sys.stderr)
error_logger.addFilter(LogLevelFilter(min_level=logging.ERROR))
error_logger.setFormatter(logging.Formatter(logging_format))
logging.root.addHandler(error_logger)
if conf['logging']:
logging.config.dictConfig(conf['logging']) | def function[setup_logging, parameter[args, conf]]:
constant[Setup logging]
variable[logging_format] assign[=] constant[%(asctime)s [%(levelname)s]: %(name)s (%(module)s.%(funcName)s; %(filename)s:%(lineno)d)
%(message)s]
if call[name[getattr], parameter[name[args], constant[verbose], constant[None]]] begin[:]
variable[debug_logger] assign[=] call[name[logging].StreamHandler, parameter[name[sys].stdout]]
call[name[debug_logger].addFilter, parameter[call[name[LogLevelFilter], parameter[]]]]
call[name[debug_logger].setFormatter, parameter[call[name[logging].Formatter, parameter[name[logging_format]]]]]
call[name[logging].root.addHandler, parameter[name[debug_logger]]]
name[logging].root.level assign[=] name[logging].DEBUG
variable[error_logger] assign[=] call[name[logging].StreamHandler, parameter[name[sys].stderr]]
call[name[error_logger].addFilter, parameter[call[name[LogLevelFilter], parameter[]]]]
call[name[error_logger].setFormatter, parameter[call[name[logging].Formatter, parameter[name[logging_format]]]]]
call[name[logging].root.addHandler, parameter[name[error_logger]]]
if call[name[conf]][constant[logging]] begin[:]
call[name[logging].config.dictConfig, parameter[call[name[conf]][constant[logging]]]] | keyword[def] identifier[setup_logging] ( identifier[args] , identifier[conf] ):
literal[string]
identifier[logging_format] = literal[string] literal[string]
keyword[if] identifier[getattr] ( identifier[args] , literal[string] , keyword[None] ):
identifier[debug_logger] = identifier[logging] . identifier[StreamHandler] ( identifier[sys] . identifier[stdout] )
identifier[debug_logger] . identifier[addFilter] ( identifier[LogLevelFilter] ( identifier[max_level] = identifier[logging] . identifier[WARNING] ))
identifier[debug_logger] . identifier[setFormatter] ( identifier[logging] . identifier[Formatter] ( identifier[logging_format] ))
identifier[logging] . identifier[root] . identifier[addHandler] ( identifier[debug_logger] )
identifier[logging] . identifier[root] . identifier[level] = identifier[logging] . identifier[DEBUG]
identifier[error_logger] = identifier[logging] . identifier[StreamHandler] ( identifier[sys] . identifier[stderr] )
identifier[error_logger] . identifier[addFilter] ( identifier[LogLevelFilter] ( identifier[min_level] = identifier[logging] . identifier[ERROR] ))
identifier[error_logger] . identifier[setFormatter] ( identifier[logging] . identifier[Formatter] ( identifier[logging_format] ))
identifier[logging] . identifier[root] . identifier[addHandler] ( identifier[error_logger] )
keyword[if] identifier[conf] [ literal[string] ]:
identifier[logging] . identifier[config] . identifier[dictConfig] ( identifier[conf] [ literal[string] ]) | def setup_logging(args, conf):
"""Setup logging"""
logging_format = '%(asctime)s [%(levelname)s]: %(name)s (%(module)s.%(funcName)s; %(filename)s:%(lineno)d)\n %(message)s'
if getattr(args, 'verbose', None):
debug_logger = logging.StreamHandler(sys.stdout)
debug_logger.addFilter(LogLevelFilter(max_level=logging.WARNING))
debug_logger.setFormatter(logging.Formatter(logging_format))
logging.root.addHandler(debug_logger)
logging.root.level = logging.DEBUG # depends on [control=['if'], data=[]]
error_logger = logging.StreamHandler(sys.stderr)
error_logger.addFilter(LogLevelFilter(min_level=logging.ERROR))
error_logger.setFormatter(logging.Formatter(logging_format))
logging.root.addHandler(error_logger)
if conf['logging']:
logging.config.dictConfig(conf['logging']) # depends on [control=['if'], data=[]] |
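LogLevelFilter is not shown in this snippet; a minimal sketch compatible with the two calls above (max_level caps stdout at WARNING, min_level starts stderr at ERROR) might be:

import logging

class LogLevelFilter(logging.Filter):
    def __init__(self, min_level=logging.NOTSET, max_level=logging.CRITICAL):
        super().__init__()
        self.min_level = min_level
        self.max_level = max_level

    def filter(self, record):
        # Keep only records whose level falls inside the window.
        return self.min_level <= record.levelno <= self.max_level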
def load_modules(self, data=None, proxy=None):
'''
Load the modules into the state
'''
log.info('Loading fresh modules for state activity')
# Load a modified client interface that looks like the interface used
# from the minion, but uses remote execution
#
self.functions = salt.client.FunctionWrapper(
self.opts,
self.opts['id']
)
# Load the states, but they should not be used in this class apart
# from inspection
self.utils = salt.loader.utils(self.opts)
self.serializers = salt.loader.serializers(self.opts)
self.states = salt.loader.states(self.opts, self.functions, self.utils, self.serializers)
self.rend = salt.loader.render(self.opts, self.functions, states=self.states, context=self.state_con) | def function[load_modules, parameter[self, data, proxy]]:
constant[
Load the modules into the state
]
call[name[log].info, parameter[constant[Loading fresh modules for state activity]]]
name[self].functions assign[=] call[name[salt].client.FunctionWrapper, parameter[name[self].opts, call[name[self].opts][constant[id]]]]
name[self].utils assign[=] call[name[salt].loader.utils, parameter[name[self].opts]]
name[self].serializers assign[=] call[name[salt].loader.serializers, parameter[name[self].opts]]
name[self].states assign[=] call[name[salt].loader.states, parameter[name[self].opts, name[self].functions, name[self].utils, name[self].serializers]]
name[self].rend assign[=] call[name[salt].loader.render, parameter[name[self].opts, name[self].functions]] | keyword[def] identifier[load_modules] ( identifier[self] , identifier[data] = keyword[None] , identifier[proxy] = keyword[None] ):
literal[string]
identifier[log] . identifier[info] ( literal[string] )
identifier[self] . identifier[functions] = identifier[salt] . identifier[client] . identifier[FunctionWrapper] (
identifier[self] . identifier[opts] ,
identifier[self] . identifier[opts] [ literal[string] ]
)
identifier[self] . identifier[utils] = identifier[salt] . identifier[loader] . identifier[utils] ( identifier[self] . identifier[opts] )
identifier[self] . identifier[serializers] = identifier[salt] . identifier[loader] . identifier[serializers] ( identifier[self] . identifier[opts] )
identifier[self] . identifier[states] = identifier[salt] . identifier[loader] . identifier[states] ( identifier[self] . identifier[opts] , identifier[self] . identifier[functions] , identifier[self] . identifier[utils] , identifier[self] . identifier[serializers] )
identifier[self] . identifier[rend] = identifier[salt] . identifier[loader] . identifier[render] ( identifier[self] . identifier[opts] , identifier[self] . identifier[functions] , identifier[states] = identifier[self] . identifier[states] , identifier[context] = identifier[self] . identifier[state_con] ) | def load_modules(self, data=None, proxy=None):
"""
Load the modules into the state
"""
log.info('Loading fresh modules for state activity')
# Load a modified client interface that looks like the interface used
# from the minion, but uses remote execution
#
self.functions = salt.client.FunctionWrapper(self.opts, self.opts['id'])
# Load the states, but they should not be used in this class apart
# from inspection
self.utils = salt.loader.utils(self.opts)
self.serializers = salt.loader.serializers(self.opts)
self.states = salt.loader.states(self.opts, self.functions, self.utils, self.serializers)
self.rend = salt.loader.render(self.opts, self.functions, states=self.states, context=self.state_con) |
def downcase_word(event):
"""
Lowercase the current (or following) word.
"""
buff = event.current_buffer
for i in range(event.arg): # XXX: not DRY: see meta_c and meta_u!!
pos = buff.document.find_next_word_ending()
words = buff.document.text_after_cursor[:pos]
buff.insert_text(words.lower(), overwrite=True) | def function[downcase_word, parameter[event]]:
constant[
Lowercase the current (or following) word.
]
variable[buff] assign[=] name[event].current_buffer
for taget[name[i]] in starred[call[name[range], parameter[name[event].arg]]] begin[:]
variable[pos] assign[=] call[name[buff].document.find_next_word_ending, parameter[]]
variable[words] assign[=] call[name[buff].document.text_after_cursor][<ast.Slice object at 0x7da1b08df070>]
call[name[buff].insert_text, parameter[call[name[words].lower, parameter[]]]] | keyword[def] identifier[downcase_word] ( identifier[event] ):
literal[string]
identifier[buff] = identifier[event] . identifier[current_buffer]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[event] . identifier[arg] ):
identifier[pos] = identifier[buff] . identifier[document] . identifier[find_next_word_ending] ()
identifier[words] = identifier[buff] . identifier[document] . identifier[text_after_cursor] [: identifier[pos] ]
identifier[buff] . identifier[insert_text] ( identifier[words] . identifier[lower] (), identifier[overwrite] = keyword[True] ) | def downcase_word(event):
"""
Lowercase the current (or following) word.
"""
buff = event.current_buffer
for i in range(event.arg): # XXX: not DRY: see meta_c and meta_u!!
pos = buff.document.find_next_word_ending()
words = buff.document.text_after_cursor[:pos]
buff.insert_text(words.lower(), overwrite=True) # depends on [control=['for'], data=[]] |
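Outside prompt_toolkit, each iteration amounts to "lowercase from the cursor through the end of the next word"; an illustrative standalone version on a plain string (names are mine):

import re

def downcase_next_word(text, cursor):
    match = re.search(r'\w+', text[cursor:])
    if match is None:
        return text
    end = cursor + match.end()
    return text[:cursor] + text[cursor:end].lower() + text[end:]

print(downcase_next_word('hello WORLD example', 6))  # hello world example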
def status_favourite(self, id):
"""
Favourite a status.
Returns a `toot dict`_ with the favourited status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/favourite'.format(str(id))
return self.__api_request('POST', url) | def function[status_favourite, parameter[self, id]]:
constant[
Favourite a status.
Returns a `toot dict`_ with the favourited status.
]
variable[id] assign[=] call[name[self].__unpack_id, parameter[name[id]]]
variable[url] assign[=] call[constant[/api/v1/statuses/{0}/favourite].format, parameter[call[name[str], parameter[name[id]]]]]
return[call[name[self].__api_request, parameter[constant[POST], name[url]]]] | keyword[def] identifier[status_favourite] ( identifier[self] , identifier[id] ):
literal[string]
identifier[id] = identifier[self] . identifier[__unpack_id] ( identifier[id] )
identifier[url] = literal[string] . identifier[format] ( identifier[str] ( identifier[id] ))
keyword[return] identifier[self] . identifier[__api_request] ( literal[string] , identifier[url] ) | def status_favourite(self, id):
"""
Favourite a status.
Returns a `toot dict`_ with the favourited status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/favourite'.format(str(id))
return self.__api_request('POST', url) |
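Typical call shape from client code; the token and instance URL are placeholders, and the returned toot dict carries a favourited flag:

from mastodon import Mastodon

api = Mastodon(access_token='YOUR_TOKEN', api_base_url='https://example.social')
toot = api.status_favourite(108234567890123456)  # accepts an id or a status dict
print(toot['favourited'])  # True once the favourite is recorded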
def _set_rbridge_id(self, v, load=False):
"""
Setter method for rbridge_id, mapped from YANG variable /preprovision/rbridge_id (list)
If this variable is read-only (config: false) in the
    source YANG file, then _set_rbridge_id is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rbridge_id() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("rbridge_id wwn",rbridge_id.rbridge_id, yang_name="rbridge-id", rest_name="rbridge-id", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rbridge-id wwn', extensions={u'tailf-common': {u'info': u'Rbridge Id for Pre-provision configuration', u'callpoint': u'switch_attributes_callpoint', u'display-when': u'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))', u'cli-mode-name': u'config-preprovision-rbridge-id-$(rbridge-id)'}}), is_container='list', yang_name="rbridge-id", rest_name="rbridge-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Rbridge Id for Pre-provision configuration', u'callpoint': u'switch_attributes_callpoint', u'display-when': u'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))', u'cli-mode-name': u'config-preprovision-rbridge-id-$(rbridge-id)'}}, namespace='urn:brocade.com:mgmt:brocade-preprovision', defining_module='brocade-preprovision', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """rbridge_id must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("rbridge_id wwn",rbridge_id.rbridge_id, yang_name="rbridge-id", rest_name="rbridge-id", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rbridge-id wwn', extensions={u'tailf-common': {u'info': u'Rbridge Id for Pre-provision configuration', u'callpoint': u'switch_attributes_callpoint', u'display-when': u'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))', u'cli-mode-name': u'config-preprovision-rbridge-id-$(rbridge-id)'}}), is_container='list', yang_name="rbridge-id", rest_name="rbridge-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Rbridge Id for Pre-provision configuration', u'callpoint': u'switch_attributes_callpoint', u'display-when': u'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))', u'cli-mode-name': u'config-preprovision-rbridge-id-$(rbridge-id)'}}, namespace='urn:brocade.com:mgmt:brocade-preprovision', defining_module='brocade-preprovision', yang_type='list', is_config=True)""",
})
self.__rbridge_id = t
if hasattr(self, '_set'):
self._set() | def function[_set_rbridge_id, parameter[self, v, load]]:
constant[
Setter method for rbridge_id, mapped from YANG variable /preprovision/rbridge_id (list)
If this variable is read-only (config: false) in the
    source YANG file, then _set_rbridge_id is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rbridge_id() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da2041dafe0>
name[self].__rbridge_id assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_rbridge_id] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[rbridge_id] . identifier[rbridge_id] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__rbridge_id] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_rbridge_id(self, v, load=False):
"""
Setter method for rbridge_id, mapped from YANG variable /preprovision/rbridge_id (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_rbridge_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rbridge_id() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('rbridge_id wwn', rbridge_id.rbridge_id, yang_name='rbridge-id', rest_name='rbridge-id', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rbridge-id wwn', extensions={u'tailf-common': {u'info': u'Rbridge Id for Pre-provision configuration', u'callpoint': u'switch_attributes_callpoint', u'display-when': u'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))', u'cli-mode-name': u'config-preprovision-rbridge-id-$(rbridge-id)'}}), is_container='list', yang_name='rbridge-id', rest_name='rbridge-id', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Rbridge Id for Pre-provision configuration', u'callpoint': u'switch_attributes_callpoint', u'display-when': u'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))', u'cli-mode-name': u'config-preprovision-rbridge-id-$(rbridge-id)'}}, namespace='urn:brocade.com:mgmt:brocade-preprovision', defining_module='brocade-preprovision', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'rbridge_id must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("rbridge_id wwn",rbridge_id.rbridge_id, yang_name="rbridge-id", rest_name="rbridge-id", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'rbridge-id wwn\', extensions={u\'tailf-common\': {u\'info\': u\'Rbridge Id for Pre-provision configuration\', u\'callpoint\': u\'switch_attributes_callpoint\', u\'display-when\': u\'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))\', u\'cli-mode-name\': u\'config-preprovision-rbridge-id-$(rbridge-id)\'}}), is_container=\'list\', yang_name="rbridge-id", rest_name="rbridge-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Rbridge Id for Pre-provision configuration\', u\'callpoint\': u\'switch_attributes_callpoint\', u\'display-when\': u\'((/vcsmode/vcs-mode = "true") and (/vcsmode/vcs-cluster-mode = "true"))\', u\'cli-mode-name\': u\'config-preprovision-rbridge-id-$(rbridge-id)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-preprovision\', defining_module=\'brocade-preprovision\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__rbridge_id = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def check_json(json_type):
"""
Checks whether json_type is a dict or a string. If it is already a dict, it is returned as-is.
If it is not, it is converted to a dict by means of json.loads(json_type)
:param json_type:
:return:
"""
try:
str_types = (str, unicode)
except NameError:
str_types = (str,)
if type(json_type) == dict:
return json_type
elif type(json_type) in str_types:
return json.loads(json_type)
else:
raise ValueError("json_type should be a json dict or string.") | def function[check_json, parameter[json_type]]:
constant[
Checks whether json_type is a dict or a string. If it is already a dict, it is returned as-is.
If it is not, it is converted to a dict by means of json.loads(json_type)
:param json_type:
:return:
]
<ast.Try object at 0x7da1b215fe50>
if compare[call[name[type], parameter[name[json_type]]] equal[==] name[dict]] begin[:]
return[name[json_type]] | keyword[def] identifier[check_json] ( identifier[json_type] ):
literal[string]
keyword[try] :
identifier[str_types] =( identifier[str] , identifier[unicode] )
keyword[except] identifier[NameError] :
identifier[str_types] =( identifier[str] ,)
keyword[if] identifier[type] ( identifier[json_type] )== identifier[dict] :
keyword[return] identifier[json_type]
keyword[elif] identifier[type] ( identifier[json_type] ) keyword[in] identifier[str_types] :
keyword[return] identifier[json] . identifier[loads] ( identifier[json_type] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def check_json(json_type):
"""
Checks whether json_type is a dict or a string. If it is already a dict, it is returned as-is.
If it is not, it is converted to a dict by means of json.loads(json_type)
:param json_type:
:return:
"""
try:
str_types = (str, unicode) # depends on [control=['try'], data=[]]
except NameError:
str_types = (str,) # depends on [control=['except'], data=[]]
if type(json_type) == dict:
return json_type # depends on [control=['if'], data=[]]
elif type(json_type) in str_types:
return json.loads(json_type) # depends on [control=['if'], data=[]]
else:
raise ValueError('json_type should be a json dict or string.') |
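Both accepted input shapes, per the docstring (assuming check_json is imported from its module):

assert check_json({'a': 1}) == {'a': 1}      # dict: returned unchanged
assert check_json('{"a": 1}') == {'a': 1}    # str: parsed with json.loads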
def hardware_connector_sfp_breakout(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hardware = ET.SubElement(config, "hardware", xmlns="urn:brocade.com:mgmt:brocade-hardware")
connector = ET.SubElement(hardware, "connector")
name_key = ET.SubElement(connector, "name")
name_key.text = kwargs.pop('name')
sfp = ET.SubElement(connector, "sfp")
breakout = ET.SubElement(sfp, "breakout")
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[hardware_connector_sfp_breakout, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[hardware] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hardware]]]
variable[connector] assign[=] call[name[ET].SubElement, parameter[name[hardware], constant[connector]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[connector], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[sfp] assign[=] call[name[ET].SubElement, parameter[name[connector], constant[sfp]]]
variable[breakout] assign[=] call[name[ET].SubElement, parameter[name[sfp], constant[breakout]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[hardware_connector_sfp_breakout] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[hardware] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[connector] = identifier[ET] . identifier[SubElement] ( identifier[hardware] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[connector] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[sfp] = identifier[ET] . identifier[SubElement] ( identifier[connector] , literal[string] )
identifier[breakout] = identifier[ET] . identifier[SubElement] ( identifier[sfp] , literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def hardware_connector_sfp_breakout(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
hardware = ET.SubElement(config, 'hardware', xmlns='urn:brocade.com:mgmt:brocade-hardware')
connector = ET.SubElement(hardware, 'connector')
name_key = ET.SubElement(connector, 'name')
name_key.text = kwargs.pop('name')
sfp = ET.SubElement(connector, 'sfp')
breakout = ET.SubElement(sfp, 'breakout')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def keypoint_rot90(keypoint, factor, rows, cols, **params):
"""Rotates a keypoint by 90 degrees CCW (see np.rot90)
Args:
keypoint (tuple): A tuple (x, y, angle, scale).
factor (int): Number of CCW rotations. Must be in range [0;3] See np.rot90.
rows (int): Image rows.
cols (int): Image cols.
"""
if factor < 0 or factor > 3:
        raise ValueError('Parameter factor must be in range [0;3]')
x, y, angle, scale = keypoint
if factor == 1:
keypoint = [y, (cols - 1) - x, angle - math.pi / 2, scale]
if factor == 2:
keypoint = [(cols - 1) - x, (rows - 1) - y, angle - math.pi, scale]
if factor == 3:
keypoint = [(rows - 1) - y, x, angle + math.pi / 2, scale]
return keypoint | def function[keypoint_rot90, parameter[keypoint, factor, rows, cols]]:
constant[Rotates a keypoint by 90 degrees CCW (see np.rot90)
Args:
keypoint (tuple): A tuple (x, y, angle, scale).
factor (int): Number of CCW rotations. Must be in range [0;3] See np.rot90.
rows (int): Image rows.
cols (int): Image cols.
]
if <ast.BoolOp object at 0x7da1b1f75420> begin[:]
<ast.Raise object at 0x7da1b1f75030>
<ast.Tuple object at 0x7da1b1f75c90> assign[=] name[keypoint]
if compare[name[factor] equal[==] constant[1]] begin[:]
variable[keypoint] assign[=] list[[<ast.Name object at 0x7da1b1f39180>, <ast.BinOp object at 0x7da1b1f3a1a0>, <ast.BinOp object at 0x7da1b1f38640>, <ast.Name object at 0x7da1b1f392a0>]]
if compare[name[factor] equal[==] constant[2]] begin[:]
variable[keypoint] assign[=] list[[<ast.BinOp object at 0x7da1b1f39d20>, <ast.BinOp object at 0x7da1b1f385b0>, <ast.BinOp object at 0x7da1b1f3a290>, <ast.Name object at 0x7da1b1f386a0>]]
if compare[name[factor] equal[==] constant[3]] begin[:]
variable[keypoint] assign[=] list[[<ast.BinOp object at 0x7da1b1f39420>, <ast.Name object at 0x7da1b1f3a440>, <ast.BinOp object at 0x7da1b1f381c0>, <ast.Name object at 0x7da1b1f38ac0>]]
return[name[keypoint]] | keyword[def] identifier[keypoint_rot90] ( identifier[keypoint] , identifier[factor] , identifier[rows] , identifier[cols] ,** identifier[params] ):
literal[string]
keyword[if] identifier[factor] < literal[int] keyword[or] identifier[factor] > literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[x] , identifier[y] , identifier[angle] , identifier[scale] = identifier[keypoint]
keyword[if] identifier[factor] == literal[int] :
identifier[keypoint] =[ identifier[y] ,( identifier[cols] - literal[int] )- identifier[x] , identifier[angle] - identifier[math] . identifier[pi] / literal[int] , identifier[scale] ]
keyword[if] identifier[factor] == literal[int] :
identifier[keypoint] =[( identifier[cols] - literal[int] )- identifier[x] ,( identifier[rows] - literal[int] )- identifier[y] , identifier[angle] - identifier[math] . identifier[pi] , identifier[scale] ]
keyword[if] identifier[factor] == literal[int] :
identifier[keypoint] =[( identifier[rows] - literal[int] )- identifier[y] , identifier[x] , identifier[angle] + identifier[math] . identifier[pi] / literal[int] , identifier[scale] ]
keyword[return] identifier[keypoint] | def keypoint_rot90(keypoint, factor, rows, cols, **params):
"""Rotates a keypoint by 90 degrees CCW (see np.rot90)
Args:
keypoint (tuple): A tuple (x, y, angle, scale).
factor (int): Number of CCW rotations. Must be in range [0;3]. See np.rot90.
rows (int): Image rows.
cols (int): Image cols.
"""
if factor < 0 or factor > 3:
raise ValueError('Parameter factor must be in range [0;3]') # depends on [control=['if'], data=[]]
(x, y, angle, scale) = keypoint
if factor == 1:
keypoint = [y, cols - 1 - x, angle - math.pi / 2, scale] # depends on [control=['if'], data=[]]
if factor == 2:
keypoint = [cols - 1 - x, rows - 1 - y, angle - math.pi, scale] # depends on [control=['if'], data=[]]
if factor == 3:
keypoint = [rows - 1 - y, x, angle + math.pi / 2, scale] # depends on [control=['if'], data=[]]
return keypoint |
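A quick sanity check of the coordinate mapping above, assuming keypoint_rot90 is in scope; the keypoint and image size are arbitrary.

kp = (10, 20, 0.0, 1.0)  # x, y, angle, scale
# One CCW quarter turn: (x, y) -> (y, cols - 1 - x)
print(keypoint_rot90(kp, 1, rows=100, cols=200))  # [20, 189, -pi/2, 1.0]
# Half turn: (x, y) -> (cols - 1 - x, rows - 1 - y)
print(keypoint_rot90(kp, 2, rows=100, cols=200))  # [189, 79, -pi, 1.0]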
def _view_filter(self):
"""
Overrides OsidSession._view_filter to add sequestering filter.
"""
view_filter = OsidSession._view_filter(self)
if self._sequestered_view == SEQUESTERED:
view_filter['sequestered'] = False
return view_filter | def function[_view_filter, parameter[self]]:
constant[
Overrides OsidSession._view_filter to add sequestering filter.
]
variable[view_filter] assign[=] call[name[OsidSession]._view_filter, parameter[name[self]]]
if compare[name[self]._sequestered_view equal[==] name[SEQUESTERED]] begin[:]
call[name[view_filter]][constant[sequestered]] assign[=] constant[False]
return[name[view_filter]] | keyword[def] identifier[_view_filter] ( identifier[self] ):
literal[string]
identifier[view_filter] = identifier[OsidSession] . identifier[_view_filter] ( identifier[self] )
keyword[if] identifier[self] . identifier[_sequestered_view] == identifier[SEQUESTERED] :
identifier[view_filter] [ literal[string] ]= keyword[False]
keyword[return] identifier[view_filter] | def _view_filter(self):
"""
Overrides OsidSession._view_filter to add sequestering filter.
"""
view_filter = OsidSession._view_filter(self)
if self._sequestered_view == SEQUESTERED:
view_filter['sequestered'] = False # depends on [control=['if'], data=[]]
return view_filter |
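A contrived stand-alone run of the override above. OsidSession and SEQUESTERED are stubbed here; the real ones come from the OSID/dlkit framework, so this only illustrates the control flow.

SEQUESTERED = 1  # hypothetical stand-in for the framework constant

class OsidSession:
    def _view_filter(self):
        return {}  # the real base class assembles its own filter dict

class DemoSession(OsidSession):
    _sequestered_view = SEQUESTERED

print(_view_filter(DemoSession()))  # {'sequestered': False}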
def upload_delete(self, token, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/attachments#delete-upload"
api_path = "/api/v2/uploads/{token}.json"
api_path = api_path.format(token=token)
return self.call(api_path, method="DELETE", **kwargs) | def function[upload_delete, parameter[self, token]]:
constant[https://developer.zendesk.com/rest_api/docs/core/attachments#delete-upload]
variable[api_path] assign[=] constant[/api/v2/uploads/{token}.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[upload_delete] ( identifier[self] , identifier[token] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[token] = identifier[token] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] ,** identifier[kwargs] ) | def upload_delete(self, token, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/attachments#delete-upload"""
api_path = '/api/v2/uploads/{token}.json'
api_path = api_path.format(token=token)
return self.call(api_path, method='DELETE', **kwargs) |
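Tracing the request the wrapper above builds, with a fake client standing in for the real Zendesk API object; the token value is made up.

class FakeZendesk:
    upload_delete = upload_delete  # reuse the method above
    def call(self, api_path, method='GET', **kwargs):
        return method, api_path, kwargs

print(FakeZendesk().upload_delete('6bk3gql82em5'))
# ('DELETE', '/api/v2/uploads/6bk3gql82em5.json', {})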
def where_pivot(self, column, operator=None, value=None, boolean="and"):
"""
Set a where clause for a pivot table column.
:param column: The column of the where clause; can also be a QueryBuilder instance for a sub-where clause
:type column: str|Builder
:param operator: The operator of the where clause
:type operator: str
:param value: The value of the where clause
:type value: mixed
:param boolean: The boolean of the where clause
:type boolean: str
:return: self
:rtype: self
"""
self._pivot_wheres.append([column, operator, value, boolean])
return self._query.where(
"%s.%s" % (self._table, column), operator, value, boolean
) | def function[where_pivot, parameter[self, column, operator, value, boolean]]:
constant[
Set a where clause for a pivot table column.
:param column: The column of the where clause; can also be a QueryBuilder instance for a sub-where clause
:type column: str|Builder
:param operator: The operator of the where clause
:type operator: str
:param value: The value of the where clause
:type value: mixed
:param boolean: The boolean of the where clause
:type boolean: str
:return: self
:rtype: self
]
call[name[self]._pivot_wheres.append, parameter[list[[<ast.Name object at 0x7da18f09d7b0>, <ast.Name object at 0x7da18f09e8f0>, <ast.Name object at 0x7da18f09c790>, <ast.Name object at 0x7da18f09d030>]]]]
return[call[name[self]._query.where, parameter[binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f09fd90>, <ast.Name object at 0x7da18f09fbe0>]]], name[operator], name[value], name[boolean]]]] | keyword[def] identifier[where_pivot] ( identifier[self] , identifier[column] , identifier[operator] = keyword[None] , identifier[value] = keyword[None] , identifier[boolean] = literal[string] ):
literal[string]
identifier[self] . identifier[_pivot_wheres] . identifier[append] ([ identifier[column] , identifier[operator] , identifier[value] , identifier[boolean] ])
keyword[return] identifier[self] . identifier[_query] . identifier[where] (
literal[string] %( identifier[self] . identifier[_table] , identifier[column] ), identifier[operator] , identifier[value] , identifier[boolean]
) | def where_pivot(self, column, operator=None, value=None, boolean='and'):
"""
Set a where clause for a pivot table column.
:param column: The column of the where clause; can also be a QueryBuilder instance for a sub-where clause
:type column: str|Builder
:param operator: The operator of the where clause
:type operator: str
:param value: The value of the where clause
:type value: mixed
:param boolean: The boolean of the where clause
:type boolean: str
:return: self
:rtype: self
"""
self._pivot_wheres.append([column, operator, value, boolean])
return self._query.where('%s.%s' % (self._table, column), operator, value, boolean) |
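A rough trace of how the pivot constraint above reaches the underlying query; both stubs are hypothetical stand-ins for Orator's relation and query builder.

class FakeQuery:
    def where(self, column, operator=None, value=None, boolean='and'):
        print('WHERE', column, operator, value, boolean)
        return self

class FakeRelation:
    where_pivot = where_pivot  # reuse the method above
    def __init__(self):
        self._table = 'role_user'   # made-up pivot table name
        self._pivot_wheres = []
        self._query = FakeQuery()

FakeRelation().where_pivot('approved', '=', 1)  # WHERE role_user.approved = 1 and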
def master_compile(master_opts, minion_opts, grains, id_, saltenv):
'''
Compile the master side low state data, and build the hidden state file
'''
st_ = MasterHighState(master_opts, minion_opts, grains, id_, saltenv)
return st_.compile_highstate() | def function[master_compile, parameter[master_opts, minion_opts, grains, id_, saltenv]]:
constant[
Compile the master side low state data, and build the hidden state file
]
variable[st_] assign[=] call[name[MasterHighState], parameter[name[master_opts], name[minion_opts], name[grains], name[id_], name[saltenv]]]
return[call[name[st_].compile_highstate, parameter[]]] | keyword[def] identifier[master_compile] ( identifier[master_opts] , identifier[minion_opts] , identifier[grains] , identifier[id_] , identifier[saltenv] ):
literal[string]
identifier[st_] = identifier[MasterHighState] ( identifier[master_opts] , identifier[minion_opts] , identifier[grains] , identifier[id_] , identifier[saltenv] )
keyword[return] identifier[st_] . identifier[compile_highstate] () | def master_compile(master_opts, minion_opts, grains, id_, saltenv):
"""
Compile the master side low state data, and build the hidden state file
"""
st_ = MasterHighState(master_opts, minion_opts, grains, id_, saltenv)
return st_.compile_highstate() |
def multithreader(args, paths):
"""Execute multiple processes at once."""
def shellprocess(path):
"""Return a ready-to-use subprocess."""
import subprocess
return subprocess.Popen(args + [path],
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL)
processes = [shellprocess(path) for path in paths]
for process in processes:
process.wait() | def function[multithreader, parameter[args, paths]]:
constant[Execute multiple processes at once.]
def function[shellprocess, parameter[path]]:
constant[Return a ready-to-use subprocess.]
import module[subprocess]
return[call[name[subprocess].Popen, parameter[binary_operation[name[args] + list[[<ast.Name object at 0x7da1b1341840>]]]]]]
variable[processes] assign[=] <ast.ListComp object at 0x7da1b13533a0>
for taget[name[process]] in starred[name[processes]] begin[:]
call[name[process].wait, parameter[]] | keyword[def] identifier[multithreader] ( identifier[args] , identifier[paths] ):
literal[string]
keyword[def] identifier[shellprocess] ( identifier[path] ):
literal[string]
keyword[import] identifier[subprocess]
keyword[return] identifier[subprocess] . identifier[Popen] ( identifier[args] +[ identifier[path] ],
identifier[stderr] = identifier[subprocess] . identifier[DEVNULL] ,
identifier[stdout] = identifier[subprocess] . identifier[DEVNULL] )
identifier[processes] =[ identifier[shellprocess] ( identifier[path] ) keyword[for] identifier[path] keyword[in] identifier[paths] ]
keyword[for] identifier[process] keyword[in] identifier[processes] :
identifier[process] . identifier[wait] () | def multithreader(args, paths):
"""Execute multiple processes at once."""
def shellprocess(path):
"""Return a ready-to-use subprocess."""
import subprocess
return subprocess.Popen(args + [path], stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
processes = [shellprocess(path) for path in paths]
for process in processes:
process.wait() # depends on [control=['for'], data=['process']] |
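Example run of multithreader on POSIX (assumes the function above is in scope and touch is available): create three files in parallel, then verify.

import os
import tempfile

tmp = tempfile.mkdtemp()
paths = [os.path.join(tmp, name) for name in ('a', 'b', 'c')]
multithreader(['touch'], paths)
print(all(os.path.exists(p) for p in paths))  # True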
def create(self, label=None, name=None, cidr=None):
"""
Wraps the basic create() call to handle specific failures.
"""
try:
return super(CloudNetworkClient, self).create(label=label,
name=name, cidr=cidr)
except exc.BadRequest as e:
msg = e.message
if "too many networks" in msg:
raise exc.NetworkCountExceeded("Cannot create network; the "
"maximum number of isolated networks already exist.")
elif "does not contain enough" in msg:
raise exc.NetworkCIDRInvalid("Networks must contain two or "
"more hosts; the CIDR '%s' is too restrictive." % cidr)
elif "CIDR is malformed" in msg:
raise exc.NetworkCIDRMalformed("The CIDR '%s' is not valid." % cidr)
else:
# Something unexpected
raise | def function[create, parameter[self, label, name, cidr]]:
constant[
Wraps the basic create() call to handle specific failures.
]
<ast.Try object at 0x7da2054a4820> | keyword[def] identifier[create] ( identifier[self] , identifier[label] = keyword[None] , identifier[name] = keyword[None] , identifier[cidr] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[super] ( identifier[CloudNetworkClient] , identifier[self] ). identifier[create] ( identifier[label] = identifier[label] ,
identifier[name] = identifier[name] , identifier[cidr] = identifier[cidr] )
keyword[except] identifier[exc] . identifier[BadRequest] keyword[as] identifier[e] :
identifier[msg] = identifier[e] . identifier[message]
keyword[if] literal[string] keyword[in] identifier[msg] :
keyword[raise] identifier[exc] . identifier[NetworkCountExceeded] ( literal[string]
literal[string] )
keyword[elif] literal[string] keyword[in] identifier[msg] :
keyword[raise] identifier[exc] . identifier[NetworkCIDRInvalid] ( literal[string]
literal[string] % identifier[cidr] )
keyword[elif] literal[string] keyword[in] identifier[msg] :
keyword[raise] identifier[exc] . identifier[NetworkCIDRMalformed] ( literal[string] % identifier[cidr] )
keyword[else] :
keyword[raise] | def create(self, label=None, name=None, cidr=None):
"""
Wraps the basic create() call to handle specific failures.
"""
try:
return super(CloudNetworkClient, self).create(label=label, name=name, cidr=cidr) # depends on [control=['try'], data=[]]
except exc.BadRequest as e:
msg = e.message
if 'too many networks' in msg:
raise exc.NetworkCountExceeded('Cannot create network; the maximum number of isolated networks already exist.') # depends on [control=['if'], data=[]]
elif 'does not contain enough' in msg:
raise exc.NetworkCIDRInvalid("Networks must contain two or more hosts; the CIDR '%s' is too restrictive." % cidr) # depends on [control=['if'], data=[]]
elif 'CIDR is malformed' in msg:
raise exc.NetworkCIDRMalformed("The CIDR '%s' is not valid." % cidr) # depends on [control=['if'], data=[]]
else:
# Something unexpected
raise # depends on [control=['except'], data=['e']] |
def get_live_league_games(self):
"""Returns a dictionary containing a list of ticked games in progress
:return: dictionary of live games, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url))
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) | def function[get_live_league_games, parameter[self]]:
constant[Returns a dictionary containing a list of ticketed games in progress
:return: dictionary of live games, see :doc:`responses </responses>`
]
variable[url] assign[=] call[name[self].__build_url, parameter[name[urls].GET_LIVE_LEAGUE_GAMES]]
variable[req] assign[=] call[name[self].executor, parameter[name[url]]]
if name[self].logger begin[:]
call[name[self].logger.info, parameter[call[constant[URL: {0}].format, parameter[name[url]]]]]
if <ast.UnaryOp object at 0x7da1b11d67a0> begin[:]
return[call[name[response].build, parameter[name[req], name[url], name[self].raw_mode]]] | keyword[def] identifier[get_live_league_games] ( identifier[self] ):
literal[string]
identifier[url] = identifier[self] . identifier[__build_url] ( identifier[urls] . identifier[GET_LIVE_LEAGUE_GAMES] )
identifier[req] = identifier[self] . identifier[executor] ( identifier[url] )
keyword[if] identifier[self] . identifier[logger] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[url] ))
keyword[if] keyword[not] identifier[self] . identifier[__check_http_err] ( identifier[req] . identifier[status_code] ):
keyword[return] identifier[response] . identifier[build] ( identifier[req] , identifier[url] , identifier[self] . identifier[raw_mode] ) | def get_live_league_games(self):
"""Returns a dictionary containing a list of ticked games in progress
:return: dictionary of live games, see :doc:`responses </responses>`
"""
url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
req = self.executor(url)
if self.logger:
self.logger.info('URL: {0}'.format(url)) # depends on [control=['if'], data=[]]
if not self.__check_http_err(req.status_code):
return response.build(req, url, self.raw_mode) # depends on [control=['if'], data=[]] |
def genes_by_alias(self, build='37', genes=None):
"""Return a dictionary with hgnc symbols as keys and a list of hgnc ids
as value.
If a gene symbol is listed as primary, the list of ids will only consist
of that entry; if not, the gene cannot be determined, so the result is a list
of hgnc_ids
Args:
build(str)
genes(iterable(scout.models.HgncGene)):
Returns:
alias_genes(dict): {<hgnc_alias>: {'true': <hgnc_id>, 'ids': {<hgnc_id_1>, <hgnc_id_2>, ...}}}
"""
LOG.info("Fetching all genes by alias")
# Collect one entry for each alias symbol that exists
alias_genes = {}
# Loop over all genes
if not genes:
genes = self.hgnc_collection.find({'build':build})
for gene in genes:
# Collect the hgnc_id
hgnc_id = gene['hgnc_id']
# Collect the true symbol given by hgnc
hgnc_symbol = gene['hgnc_symbol']
# Loop over all aliases
for alias in gene['aliases']:
true_id = None
# If the alias is the same as hgnc symbol we know the true id
if alias == hgnc_symbol:
true_id = hgnc_id
# If the alias is already in the list we add the id
if alias in alias_genes:
alias_genes[alias]['ids'].add(hgnc_id)
if true_id:
alias_genes[alias]['true'] = hgnc_id
else:
alias_genes[alias] = {
'true': hgnc_id,
'ids': set([hgnc_id])
}
return alias_genes | def function[genes_by_alias, parameter[self, build, genes]]:
constant[Return a dictionary with hgnc symbols as keys and a list of hgnc ids
as value.
If a gene symbol is listed as primary, the list of ids will only consist
of that entry; if not, the gene cannot be determined, so the result is a list
of hgnc_ids
Args:
build(str)
genes(iterable(scout.models.HgncGene)):
Returns:
alias_genes(dict): {<hgnc_alias>: {'true': <hgnc_id>, 'ids': {<hgnc_id_1>, <hgnc_id_2>, ...}}}
]
call[name[LOG].info, parameter[constant[Fetching all genes by alias]]]
variable[alias_genes] assign[=] dictionary[[], []]
if <ast.UnaryOp object at 0x7da2046230a0> begin[:]
variable[genes] assign[=] call[name[self].hgnc_collection.find, parameter[dictionary[[<ast.Constant object at 0x7da204621300>], [<ast.Name object at 0x7da2046210f0>]]]]
for taget[name[gene]] in starred[name[genes]] begin[:]
variable[hgnc_id] assign[=] call[name[gene]][constant[hgnc_id]]
variable[hgnc_symbol] assign[=] call[name[gene]][constant[hgnc_symbol]]
for taget[name[alias]] in starred[call[name[gene]][constant[aliases]]] begin[:]
variable[true_id] assign[=] constant[None]
if compare[name[alias] equal[==] name[hgnc_symbol]] begin[:]
variable[true_id] assign[=] name[hgnc_id]
if compare[name[alias] in name[alias_genes]] begin[:]
call[call[call[name[alias_genes]][name[alias]]][constant[ids]].add, parameter[name[hgnc_id]]]
if name[true_id] begin[:]
call[call[name[alias_genes]][name[alias]]][constant[true]] assign[=] name[hgnc_id]
return[name[alias_genes]] | keyword[def] identifier[genes_by_alias] ( identifier[self] , identifier[build] = literal[string] , identifier[genes] = keyword[None] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] )
identifier[alias_genes] ={}
keyword[if] keyword[not] identifier[genes] :
identifier[genes] = identifier[self] . identifier[hgnc_collection] . identifier[find] ({ literal[string] : identifier[build] })
keyword[for] identifier[gene] keyword[in] identifier[genes] :
identifier[hgnc_id] = identifier[gene] [ literal[string] ]
identifier[hgnc_symbol] = identifier[gene] [ literal[string] ]
keyword[for] identifier[alias] keyword[in] identifier[gene] [ literal[string] ]:
identifier[true_id] = keyword[None]
keyword[if] identifier[alias] == identifier[hgnc_symbol] :
identifier[true_id] = identifier[hgnc_id]
keyword[if] identifier[alias] keyword[in] identifier[alias_genes] :
identifier[alias_genes] [ identifier[alias] ][ literal[string] ]. identifier[add] ( identifier[hgnc_id] )
keyword[if] identifier[true_id] :
identifier[alias_genes] [ identifier[alias] ][ literal[string] ]= identifier[hgnc_id]
keyword[else] :
identifier[alias_genes] [ identifier[alias] ]={
literal[string] : identifier[hgnc_id] ,
literal[string] : identifier[set] ([ identifier[hgnc_id] ])
}
keyword[return] identifier[alias_genes] | def genes_by_alias(self, build='37', genes=None):
"""Return a dictionary with hgnc symbols as keys and a list of hgnc ids
as value.
If a gene symbol is listed as primary, the list of ids will only consist
of that entry; if not, the gene cannot be determined, so the result is a list
of hgnc_ids
Args:
build(str)
genes(iterable(scout.models.HgncGene)):
Returns:
alias_genes(dict): {<hgnc_alias>: {'true': <hgnc_id>, 'ids': {<hgnc_id_1>, <hgnc_id_2>, ...}}}
"""
LOG.info('Fetching all genes by alias')
# Collect one entry for each alias symbol that exists
alias_genes = {}
# Loop over all genes
if not genes:
genes = self.hgnc_collection.find({'build': build}) # depends on [control=['if'], data=[]]
for gene in genes:
# Collect the hgnc_id
hgnc_id = gene['hgnc_id']
# Collect the true symbol given by hgnc
hgnc_symbol = gene['hgnc_symbol']
# Loop over all aliases
for alias in gene['aliases']:
true_id = None
# If the alias is the same as hgnc symbol we know the true id
if alias == hgnc_symbol:
true_id = hgnc_id # depends on [control=['if'], data=[]]
# If the alias is already in the list we add the id
if alias in alias_genes:
alias_genes[alias]['ids'].add(hgnc_id)
if true_id:
alias_genes[alias]['true'] = hgnc_id # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['alias', 'alias_genes']]
else:
alias_genes[alias] = {'true': hgnc_id, 'ids': set([hgnc_id])} # depends on [control=['for'], data=['alias']] # depends on [control=['for'], data=['gene']]
return alias_genes |
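Stand-alone demo of the alias map above using hand-made gene dicts; since the method accepts any iterable, no Mongo collection is needed. LOG is stubbed because the real module defines it at import time.

import logging
LOG = logging.getLogger(__name__)

class _Demo:
    genes_by_alias = genes_by_alias  # reuse the method above
    hgnc_collection = None           # unused once genes are passed in

genes = [
    {'hgnc_id': 1, 'hgnc_symbol': 'AAA', 'aliases': ['AAA', 'A1']},
    {'hgnc_id': 2, 'hgnc_symbol': 'BBB', 'aliases': ['A1', 'BBB']},
]
print(_Demo().genes_by_alias(genes=genes))
# 'A1' is shared, so its 'ids' set accumulates both hgnc ids:
# {'AAA': {'true': 1, 'ids': {1}}, 'A1': {'true': 1, 'ids': {1, 2}}, 'BBB': {'true': 2, 'ids': {2}}}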
def format_script(sensor_graph):
"""Create a binary script containing this sensor graph.
This function produces a repeatable script by applying a known sorting
order to all constants and config variables when iterating over those
dictionaries.
Args:
sensor_graph (SensorGraph): the sensor graph that we want to format
Returns:
bytearray: The binary script data.
"""
records = []
records.append(SetGraphOnlineRecord(False, address=8))
records.append(ClearDataRecord(address=8))
records.append(ResetGraphRecord(address=8))
for node in sensor_graph.nodes:
records.append(AddNodeRecord(str(node), address=8))
for streamer in sensor_graph.streamers:
records.append(AddStreamerRecord(streamer, address=8))
for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
records.append(SetConstantRecord(stream, value, address=8))
records.append(PersistGraphRecord(address=8))
records.append(ClearConfigVariablesRecord())
for slot in sorted(sensor_graph.config_database, key=lambda x: x.encode()):
for config_id in sorted(sensor_graph.config_database[slot]):
config_type, value = sensor_graph.config_database[slot][config_id]
byte_value = _convert_to_bytes(config_type, value)
records.append(SetConfigRecord(slot, config_id, byte_value))
# If we have an app tag and version set, program them in
app_tag = sensor_graph.metadata_database.get('app_tag')
app_version = sensor_graph.metadata_database.get('app_version')
if app_tag is not None:
records.append(SetDeviceTagRecord(app_tag=app_tag, app_version=app_version))
script = UpdateScript(records)
return script.encode() | def function[format_script, parameter[sensor_graph]]:
constant[Create a binary script containing this sensor graph.
This function produces a repeatable script by applying a known sorting
order to all constants and config variables when iterating over those
dictionaries.
Args:
sensor_graph (SensorGraph): the sensor graph that we want to format
Returns:
bytearray: The binary script data.
]
variable[records] assign[=] list[[]]
call[name[records].append, parameter[call[name[SetGraphOnlineRecord], parameter[constant[False]]]]]
call[name[records].append, parameter[call[name[ClearDataRecord], parameter[]]]]
call[name[records].append, parameter[call[name[ResetGraphRecord], parameter[]]]]
for taget[name[node]] in starred[name[sensor_graph].nodes] begin[:]
call[name[records].append, parameter[call[name[AddNodeRecord], parameter[call[name[str], parameter[name[node]]]]]]]
for taget[name[streamer]] in starred[name[sensor_graph].streamers] begin[:]
call[name[records].append, parameter[call[name[AddStreamerRecord], parameter[name[streamer]]]]]
for taget[tuple[[<ast.Name object at 0x7da20e955e10>, <ast.Name object at 0x7da20e955360>]]] in starred[call[name[sorted], parameter[call[name[sensor_graph].constant_database.items, parameter[]]]]] begin[:]
call[name[records].append, parameter[call[name[SetConstantRecord], parameter[name[stream], name[value]]]]]
call[name[records].append, parameter[call[name[PersistGraphRecord], parameter[]]]]
call[name[records].append, parameter[call[name[ClearConfigVariablesRecord], parameter[]]]]
for taget[name[slot]] in starred[call[name[sorted], parameter[name[sensor_graph].config_database]]] begin[:]
for taget[name[config_id]] in starred[call[name[sorted], parameter[call[name[sensor_graph].config_database][name[slot]]]]] begin[:]
<ast.Tuple object at 0x7da20e956350> assign[=] call[call[name[sensor_graph].config_database][name[slot]]][name[config_id]]
variable[byte_value] assign[=] call[name[_convert_to_bytes], parameter[name[config_type], name[value]]]
call[name[records].append, parameter[call[name[SetConfigRecord], parameter[name[slot], name[config_id], name[byte_value]]]]]
variable[app_tag] assign[=] call[name[sensor_graph].metadata_database.get, parameter[constant[app_tag]]]
variable[app_version] assign[=] call[name[sensor_graph].metadata_database.get, parameter[constant[app_version]]]
if compare[name[app_tag] is_not constant[None]] begin[:]
call[name[records].append, parameter[call[name[SetDeviceTagRecord], parameter[]]]]
variable[script] assign[=] call[name[UpdateScript], parameter[name[records]]]
return[call[name[script].encode, parameter[]]] | keyword[def] identifier[format_script] ( identifier[sensor_graph] ):
literal[string]
identifier[records] =[]
identifier[records] . identifier[append] ( identifier[SetGraphOnlineRecord] ( keyword[False] , identifier[address] = literal[int] ))
identifier[records] . identifier[append] ( identifier[ClearDataRecord] ( identifier[address] = literal[int] ))
identifier[records] . identifier[append] ( identifier[ResetGraphRecord] ( identifier[address] = literal[int] ))
keyword[for] identifier[node] keyword[in] identifier[sensor_graph] . identifier[nodes] :
identifier[records] . identifier[append] ( identifier[AddNodeRecord] ( identifier[str] ( identifier[node] ), identifier[address] = literal[int] ))
keyword[for] identifier[streamer] keyword[in] identifier[sensor_graph] . identifier[streamers] :
identifier[records] . identifier[append] ( identifier[AddStreamerRecord] ( identifier[streamer] , identifier[address] = literal[int] ))
keyword[for] identifier[stream] , identifier[value] keyword[in] identifier[sorted] ( identifier[sensor_graph] . identifier[constant_database] . identifier[items] (), identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]. identifier[encode] ()):
identifier[records] . identifier[append] ( identifier[SetConstantRecord] ( identifier[stream] , identifier[value] , identifier[address] = literal[int] ))
identifier[records] . identifier[append] ( identifier[PersistGraphRecord] ( identifier[address] = literal[int] ))
identifier[records] . identifier[append] ( identifier[ClearConfigVariablesRecord] ())
keyword[for] identifier[slot] keyword[in] identifier[sorted] ( identifier[sensor_graph] . identifier[config_database] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] . identifier[encode] ()):
keyword[for] identifier[config_id] keyword[in] identifier[sorted] ( identifier[sensor_graph] . identifier[config_database] [ identifier[slot] ]):
identifier[config_type] , identifier[value] = identifier[sensor_graph] . identifier[config_database] [ identifier[slot] ][ identifier[config_id] ]
identifier[byte_value] = identifier[_convert_to_bytes] ( identifier[config_type] , identifier[value] )
identifier[records] . identifier[append] ( identifier[SetConfigRecord] ( identifier[slot] , identifier[config_id] , identifier[byte_value] ))
identifier[app_tag] = identifier[sensor_graph] . identifier[metadata_database] . identifier[get] ( literal[string] )
identifier[app_version] = identifier[sensor_graph] . identifier[metadata_database] . identifier[get] ( literal[string] )
keyword[if] identifier[app_tag] keyword[is] keyword[not] keyword[None] :
identifier[records] . identifier[append] ( identifier[SetDeviceTagRecord] ( identifier[app_tag] = identifier[app_tag] , identifier[app_version] = identifier[app_version] ))
identifier[script] = identifier[UpdateScript] ( identifier[records] )
keyword[return] identifier[script] . identifier[encode] () | def format_script(sensor_graph):
"""Create a binary script containing this sensor graph.
This function produces a repeatable script by applying a known sorting
order to all constants and config variables when iterating over those
dictionaries.
Args:
sensor_graph (SensorGraph): the sensor graph that we want to format
Returns:
bytearray: The binary script data.
"""
records = []
records.append(SetGraphOnlineRecord(False, address=8))
records.append(ClearDataRecord(address=8))
records.append(ResetGraphRecord(address=8))
for node in sensor_graph.nodes:
records.append(AddNodeRecord(str(node), address=8)) # depends on [control=['for'], data=['node']]
for streamer in sensor_graph.streamers:
records.append(AddStreamerRecord(streamer, address=8)) # depends on [control=['for'], data=['streamer']]
for (stream, value) in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
records.append(SetConstantRecord(stream, value, address=8)) # depends on [control=['for'], data=[]]
records.append(PersistGraphRecord(address=8))
records.append(ClearConfigVariablesRecord())
for slot in sorted(sensor_graph.config_database, key=lambda x: x.encode()):
for config_id in sorted(sensor_graph.config_database[slot]):
(config_type, value) = sensor_graph.config_database[slot][config_id]
byte_value = _convert_to_bytes(config_type, value)
records.append(SetConfigRecord(slot, config_id, byte_value)) # depends on [control=['for'], data=['config_id']] # depends on [control=['for'], data=['slot']]
# If we have an app tag and version set, program them in
app_tag = sensor_graph.metadata_database.get('app_tag')
app_version = sensor_graph.metadata_database.get('app_version')
if app_tag is not None:
records.append(SetDeviceTagRecord(app_tag=app_tag, app_version=app_version)) # depends on [control=['if'], data=['app_tag']]
script = UpdateScript(records)
return script.encode() |
def load_uint_b(buffer, width):
"""
Loads a fixed-size little-endian integer from the buffer
:param buffer:
:return:
"""
result = 0
for idx in range(width):
result += buffer[idx] << (8 * idx)
return result | def function[load_uint_b, parameter[buffer, width]]:
constant[
Loads a fixed-size little-endian integer from the buffer
:param buffer:
:return:
]
variable[result] assign[=] constant[0]
for taget[name[idx]] in starred[call[name[range], parameter[name[width]]]] begin[:]
<ast.AugAssign object at 0x7da1b2407eb0>
return[name[result]] | keyword[def] identifier[load_uint_b] ( identifier[buffer] , identifier[width] ):
literal[string]
identifier[result] = literal[int]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[width] ):
identifier[result] += identifier[buffer] [ identifier[idx] ]<<( literal[int] * identifier[idx] )
keyword[return] identifier[result] | def load_uint_b(buffer, width):
"""
Loads a fixed-size little-endian integer from the buffer
:param buffer:
:return:
"""
result = 0
for idx in range(width):
result += buffer[idx] << 8 * idx # depends on [control=['for'], data=['idx']]
return result |
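The byte shifts above imply little-endian order; a one-line cross-check against int.from_bytes, assuming the function above is in scope:

buf = bytes([0x78, 0x56, 0x34, 0x12])
assert load_uint_b(buf, 4) == int.from_bytes(buf, 'little') == 0x12345678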
def lock(self):
'''
Try to lock the file
- the function will wait until the file is unlocked if 'wait' was defined as locktype
- the function will raise an AlreadyLocked exception if 'lock' was defined as locktype
'''
# Open file
self.__fd = open(self.__lockfile, "w")
# Get it locked
if self.__locktype == "wait":
# Try to get it locked until ready
fcntl.flock(self.__fd.fileno(), fcntl.LOCK_EX)
elif self.__locktype == "lock":
# Try to acquire the lock; raise an exception if we cannot
try:
fcntl.flock(self.__fd.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
except IOError:
raise AlreadyLocked("File is already locked") | def function[lock, parameter[self]]:
constant[
Try to lock the file
- the function will wait until the file is unlocked if 'wait' was defined as locktype
- the function will raise an AlreadyLocked exception if 'lock' was defined as locktype
]
name[self].__fd assign[=] call[name[open], parameter[name[self].__lockfile, constant[w]]]
if compare[name[self].__locktype equal[==] constant[wait]] begin[:]
call[name[fcntl].flock, parameter[call[name[self].__fd.fileno, parameter[]], name[fcntl].LOCK_EX]] | keyword[def] identifier[lock] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__fd] = identifier[open] ( identifier[self] . identifier[__lockfile] , literal[string] )
keyword[if] identifier[self] . identifier[__locktype] == literal[string] :
identifier[fcntl] . identifier[flock] ( identifier[self] . identifier[__fd] . identifier[fileno] (), identifier[fcntl] . identifier[LOCK_EX] )
keyword[elif] identifier[self] . identifier[__locktype] == literal[string] :
keyword[try] :
identifier[fcntl] . identifier[flock] ( identifier[self] . identifier[__fd] . identifier[fileno] (), identifier[fcntl] . identifier[LOCK_EX] | identifier[fcntl] . identifier[LOCK_NB] )
keyword[except] identifier[IOError] :
keyword[raise] identifier[AlreadyLocked] ( literal[string] ) | def lock(self):
"""
Try to lock the file
- the function will wait until the file is unlocked if 'wait' was defined as locktype
- the function will raise an AlreadyLocked exception if 'lock' was defined as locktype
"""
# Open file
self.__fd = open(self.__lockfile, 'w')
# Get it locked
if self.__locktype == 'wait':
# Try to get it locked until ready
fcntl.flock(self.__fd.fileno(), fcntl.LOCK_EX) # depends on [control=['if'], data=[]]
elif self.__locktype == 'lock':
# Try to acquire the lock; raise an exception if we cannot
try:
fcntl.flock(self.__fd.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) # depends on [control=['try'], data=[]]
except IOError:
raise AlreadyLocked('File is already locked') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
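The 'lock' branch above reduces to a non-blocking flock; a bare POSIX sketch of the same idea, with an arbitrary lock path:

import fcntl
import os
import tempfile

fd = open(os.path.join(tempfile.gettempdir(), 'demo.lock'), 'w')
try:
    fcntl.flock(fd.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    print('lock acquired')
except IOError:  # alias of OSError; raised when another process holds the lock
    print('already locked elsewhere')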
def _valid_encoding(elt):
'''Does this node have a valid encoding?
'''
enc = _find_encstyle(elt)
if not enc or enc == _SOAP.ENC: return 1
for e in enc.split():
if e.startswith(_SOAP.ENC):
# XXX Is this correct? Once we find a Sec5 compatible
# XXX encoding, should we check that all the rest are from
# XXX that same base? Perhaps. But since the if test above
# XXX will surely get 99% of the cases, leave it for now.
return 1
return 0 | def function[_valid_encoding, parameter[elt]]:
constant[Does this node have a valid encoding?
]
variable[enc] assign[=] call[name[_find_encstyle], parameter[name[elt]]]
if <ast.BoolOp object at 0x7da1b1594280> begin[:]
return[constant[1]]
for taget[name[e]] in starred[call[name[enc].split, parameter[]]] begin[:]
if call[name[e].startswith, parameter[name[_SOAP].ENC]] begin[:]
return[constant[1]]
return[constant[0]] | keyword[def] identifier[_valid_encoding] ( identifier[elt] ):
literal[string]
identifier[enc] = identifier[_find_encstyle] ( identifier[elt] )
keyword[if] keyword[not] identifier[enc] keyword[or] identifier[enc] == identifier[_SOAP] . identifier[ENC] : keyword[return] literal[int]
keyword[for] identifier[e] keyword[in] identifier[enc] . identifier[split] ():
keyword[if] identifier[e] . identifier[startswith] ( identifier[_SOAP] . identifier[ENC] ):
keyword[return] literal[int]
keyword[return] literal[int] | def _valid_encoding(elt):
"""Does this node have a valid encoding?
"""
enc = _find_encstyle(elt)
if not enc or enc == _SOAP.ENC:
return 1 # depends on [control=['if'], data=[]]
for e in enc.split():
if e.startswith(_SOAP.ENC):
# XXX Is this correct? Once we find a Sec5 compatible
# XXX encoding, should we check that all the rest are from
# XXX that same base? Perhaps. But since the if test above
# XXX will surely get 99% of the cases, leave it for now.
return 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']]
return 0 |
def search_for_recipient(self, email, timeout=None, content_type=None):
"""
Get the content of emails sent to a specific email address.
@Params
email - the recipient email address to search for
timeout - seconds to try before timing out
content_type - type of email string to return
@Returns
Content of the matched email in the given content type
"""
return self.search(timeout=timeout,
content_type=content_type, TO=email) | def function[search_for_recipient, parameter[self, email, timeout, content_type]]:
constant[
Get the content of emails sent to a specific email address.
@Params
email - the recipient email address to search for
timeout - seconds to try before timing out
content_type - type of email string to return
@Returns
Content of the matched email in the given content type
]
return[call[name[self].search, parameter[]]] | keyword[def] identifier[search_for_recipient] ( identifier[self] , identifier[email] , identifier[timeout] = keyword[None] , identifier[content_type] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[search] ( identifier[timeout] = identifier[timeout] ,
identifier[content_type] = identifier[content_type] , identifier[TO] = identifier[email] ) | def search_for_recipient(self, email, timeout=None, content_type=None):
"""
Get the content of emails sent to a specific email address.
@Params
email - the recipient email address to search for
timeout - seconds to try before timing out
content_type - type of email string to return
@Returns
Content of the matched email in the given content type
"""
return self.search(timeout=timeout, content_type=content_type, TO=email) |
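The wrapper above simply forwards to self.search with a TO criterion; a fake mailbox (hypothetical names) makes that visible:

class FakeMailbox:
    search_for_recipient = search_for_recipient  # reuse the method above
    def search(self, timeout=None, content_type=None, **criteria):
        return timeout, content_type, criteria

print(FakeMailbox().search_for_recipient('dev@example.com', timeout=30))
# (30, None, {'TO': 'dev@example.com'})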
def paste_buffer(pymux, variables):
"""
Paste clipboard content into buffer.
"""
pane = pymux.arrangement.get_active_pane()
pane.process.write_input(get_app().clipboard.get_data().text, paste=True) | def function[paste_buffer, parameter[pymux, variables]]:
constant[
Paste clipboard content into buffer.
]
variable[pane] assign[=] call[name[pymux].arrangement.get_active_pane, parameter[]]
call[name[pane].process.write_input, parameter[call[call[name[get_app], parameter[]].clipboard.get_data, parameter[]].text]] | keyword[def] identifier[paste_buffer] ( identifier[pymux] , identifier[variables] ):
literal[string]
identifier[pane] = identifier[pymux] . identifier[arrangement] . identifier[get_active_pane] ()
identifier[pane] . identifier[process] . identifier[write_input] ( identifier[get_app] (). identifier[clipboard] . identifier[get_data] (). identifier[text] , identifier[paste] = keyword[True] ) | def paste_buffer(pymux, variables):
"""
Paste clipboard content into buffer.
"""
pane = pymux.arrangement.get_active_pane()
pane.process.write_input(get_app().clipboard.get_data().text, paste=True) |
def is_obsoleted_by_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletedBy field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as a replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(
obsoleted_by__did=pid
).exists() | def function[is_obsoleted_by_pid, parameter[pid]]:
constant[Return True if ``pid`` is referenced in the obsoletedBy field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as a replica that is in an incomplete chain.
]
return[call[call[name[d1_gmn].app.models.ScienceObject.objects.filter, parameter[]].exists, parameter[]]] | keyword[def] identifier[is_obsoleted_by_pid] ( identifier[pid] ):
literal[string]
keyword[return] identifier[d1_gmn] . identifier[app] . identifier[models] . identifier[ScienceObject] . identifier[objects] . identifier[filter] (
identifier[obsoleted_by__did] = identifier[pid]
). identifier[exists] () | def is_obsoleted_by_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletedBy field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as a replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(obsoleted_by__did=pid).exists() |
def _phir(self, tau, delta):
"""Residual contribution to the free Helmholtz energy
Parameters
----------
tau : float
Inverse reduced temperature Tc/T, [-]
delta : float
Reduced density rho/rhoc, [-]
Returns
-------
prop : dict
Dictionary with the residual adimensional Helmholtz energy and derivatives:
* fir
* firt: ∂fir/∂τ|δ,x
* fird: ∂fir/∂δ|τ,x
* firtt: ∂²fir/∂τ²|δ,x
* firdt: ∂²fir/∂τ∂δ|x
* firdd: ∂²fir/∂δ²|τ,x
References
----------
IAPWS, Revised Release on the IAPWS Formulation 1995 for the
Thermodynamic Properties of Ordinary Water Substance for General and
Scientific Use, September 2016, Table 5
http://www.iapws.org/relguide/IAPWS-95.html
"""
fir = fird = firdd = firt = firtt = firdt = 0
# Polynomial terms
nr1 = self._constants.get("nr1", [])
d1 = self._constants.get("d1", [])
t1 = self._constants.get("t1", [])
for n, d, t in zip(nr1, d1, t1):
fir += n*delta**d*tau**t
fird += n*d*delta**(d-1)*tau**t
firdd += n*d*(d-1)*delta**(d-2)*tau**t
firt += n*t*delta**d*tau**(t-1)
firtt += n*t*(t-1)*delta**d*tau**(t-2)
firdt += n*t*d*delta**(d-1)*tau**(t-1)
# Exponential terms
nr2 = self._constants.get("nr2", [])
d2 = self._constants.get("d2", [])
g2 = self._constants.get("gamma2", [])
t2 = self._constants.get("t2", [])
c2 = self._constants.get("c2", [])
for n, d, g, t, c in zip(nr2, d2, g2, t2, c2):
fir += n*delta**d*tau**t*exp(-g*delta**c)
fird += n*exp(-g*delta**c)*delta**(d-1)*tau**t*(d-g*c*delta**c)
firdd += n*exp(-g*delta**c)*delta**(d-2)*tau**t * \
((d-g*c*delta**c)*(d-1-g*c*delta**c)-g**2*c**2*delta**c)
firt += n*t*delta**d*tau**(t-1)*exp(-g*delta**c)
firtt += n*t*(t-1)*delta**d*tau**(t-2)*exp(-g*delta**c)
firdt += n*t*delta**(d-1)*tau**(t-1)*(d-g*c*delta**c)*exp(
-g*delta**c)
# Gaussian terms
nr3 = self._constants.get("nr3", [])
d3 = self._constants.get("d3", [])
t3 = self._constants.get("t3", [])
a3 = self._constants.get("alfa3", [])
e3 = self._constants.get("epsilon3", [])
b3 = self._constants.get("beta3", [])
g3 = self._constants.get("gamma3", [])
for n, d, t, a, e, b, g in zip(nr3, d3, t3, a3, e3, b3, g3):
fir += n*delta**d*tau**t*exp(-a*(delta-e)**2-b*(tau-g)**2)
fird += n*delta**d*tau**t*exp(-a*(delta-e)**2-b*(tau-g)**2)*(
d/delta-2*a*(delta-e))
firdd += n*tau**t*exp(-a*(delta-e)**2-b*(tau-g)**2)*(
-2*a*delta**d + 4*a**2*delta**d*(delta-e)**2 -
4*d*a*delta**(d-1)*(delta-e) + d*(d-1)*delta**(d-2))
firt += n*delta**d*tau**t*exp(-a*(delta-e)**2-b*(tau-g)**2)*(
t/tau-2*b*(tau-g))
firtt += n*delta**d*tau**t*exp(-a*(delta-e)**2-b*(tau-g)**2)*(
(t/tau-2*b*(tau-g))**2-t/tau**2-2*b)
firdt += n*delta**d*tau**t*exp(-a*(delta-e)**2-b*(tau-g)**2)*(
t/tau-2*b*(tau-g))*(d/delta-2*a*(delta-e))
# Non-analytic terms
nr4 = self._constants.get("nr4", [])
a4 = self._constants.get("a4", [])
b4 = self._constants.get("b4", [])
Ai = self._constants.get("A", [])
Bi = self._constants.get("B", [])
Ci = self._constants.get("C", [])
Di = self._constants.get("D", [])
bt4 = self._constants.get("beta4", [])
for n, a, b, A, B, C, D, bt in zip(nr4, a4, b4, Ai, Bi, Ci, Di, bt4):
Tita = (1-tau)+A*((delta-1)**2)**(0.5/bt)
F = exp(-C*(delta-1)**2-D*(tau-1)**2)
Fd = -2*C*F*(delta-1)
Fdd = 2*C*F*(2*C*(delta-1)**2-1)
Ft = -2*D*F*(tau-1)
Ftt = 2*D*F*(2*D*(tau-1)**2-1)
Fdt = 4*C*D*F*(delta-1)*(tau-1)
Delta = Tita**2+B*((delta-1)**2)**a
Deltad = (delta-1)*(A*Tita*2/bt*((delta-1)**2)**(0.5/bt-1) +
2*B*a*((delta-1)**2)**(a-1))
if delta == 1:
Deltadd = 0
else:
Deltadd = Deltad/(delta-1)+(delta-1)**2*(
4*B*a*(a-1)*((delta-1)**2)**(a-2) +
2*A**2/bt**2*(((delta-1)**2)**(0.5/bt-1))**2 +
A*Tita*4/bt*(0.5/bt-1)*((delta-1)**2)**(0.5/bt-2))
DeltaBd = b*Delta**(b-1)*Deltad
DeltaBdd = b*(Delta**(b-1)*Deltadd+(b-1)*Delta**(b-2)*Deltad**2)
DeltaBt = -2*Tita*b*Delta**(b-1)
DeltaBtt = 2*b*Delta**(b-1)+4*Tita**2*b*(b-1)*Delta**(b-2)
DeltaBdt = -A*b*2/bt*Delta**(b-1)*(delta-1)*((delta-1)**2)**(
0.5/bt-1)-2*Tita*b*(b-1)*Delta**(b-2)*Deltad
fir += n*Delta**b*delta*F
fird += n*(Delta**b*(F+delta*Fd)+DeltaBd*delta*F)
firdd += n*(Delta**b*(2*Fd+delta*Fdd) + 2*DeltaBd*(F+delta*Fd) +
DeltaBdd*delta*F)
firt += n*delta*(DeltaBt*F+Delta**b*Ft)
firtt += n*delta*(DeltaBtt*F+2*DeltaBt*Ft+Delta**b*Ftt)
firdt += n*(Delta**b*(Ft+delta*Fdt)+delta*DeltaBd*Ft +
DeltaBt*(F+delta*Fd)+DeltaBdt*delta*F)
prop = {}
prop["fir"] = fir
prop["firt"] = firt
prop["firtt"] = firtt
prop["fird"] = fird
prop["firdd"] = firdd
prop["firdt"] = firdt
return prop | def function[_phir, parameter[self, tau, delta]]:
constant[Residual contribution to the free Helmholtz energy
Parameters
----------
tau : float
Inverse reduced temperature Tc/T, [-]
delta : float
Reduced density rho/rhoc, [-]
Returns
-------
prop : dict
Dictionary with the residual adimensional Helmholtz energy and derivatives:
* fir
* firt: ∂fir/∂τ|δ,x
* fird: ∂fir/∂δ|τ,x
* firtt: ∂²fir/∂τ²|δ,x
* firdt: ∂²fir/∂τ∂δ|x
* firdd: ∂²fir/∂δ²|τ,x
References
----------
IAPWS, Revised Release on the IAPWS Formulation 1995 for the
Thermodynamic Properties of Ordinary Water Substance for General and
Scientific Use, September 2016, Table 5
http://www.iapws.org/relguide/IAPWS-95.html
]
variable[fir] assign[=] constant[0]
variable[nr1] assign[=] call[name[self]._constants.get, parameter[constant[nr1], list[[]]]]
variable[d1] assign[=] call[name[self]._constants.get, parameter[constant[d1], list[[]]]]
variable[t1] assign[=] call[name[self]._constants.get, parameter[constant[t1], list[[]]]]
for taget[tuple[[<ast.Name object at 0x7da1b23471f0>, <ast.Name object at 0x7da1b2344d00>, <ast.Name object at 0x7da1b2346d70>]]] in starred[call[name[zip], parameter[name[nr1], name[d1], name[t1]]]] begin[:]
<ast.AugAssign object at 0x7da1b2347f70>
<ast.AugAssign object at 0x7da1b2345900>
<ast.AugAssign object at 0x7da1b2345ae0>
<ast.AugAssign object at 0x7da1b2346560>
<ast.AugAssign object at 0x7da1b2347700>
<ast.AugAssign object at 0x7da1b2347190>
variable[nr2] assign[=] call[name[self]._constants.get, parameter[constant[nr2], list[[]]]]
variable[d2] assign[=] call[name[self]._constants.get, parameter[constant[d2], list[[]]]]
variable[g2] assign[=] call[name[self]._constants.get, parameter[constant[gamma2], list[[]]]]
variable[t2] assign[=] call[name[self]._constants.get, parameter[constant[t2], list[[]]]]
variable[c2] assign[=] call[name[self]._constants.get, parameter[constant[c2], list[[]]]]
for taget[tuple[[<ast.Name object at 0x7da1b2345960>, <ast.Name object at 0x7da1b2344c40>, <ast.Name object at 0x7da1b2344700>, <ast.Name object at 0x7da1b2347df0>, <ast.Name object at 0x7da1b2347100>]]] in starred[call[name[zip], parameter[name[nr2], name[d2], name[g2], name[t2], name[c2]]]] begin[:]
<ast.AugAssign object at 0x7da20e954820>
<ast.AugAssign object at 0x7da20e956ec0>
<ast.AugAssign object at 0x7da20e955420>
<ast.AugAssign object at 0x7da20e957670>
<ast.AugAssign object at 0x7da20e9548e0>
<ast.AugAssign object at 0x7da20e956c80>
variable[nr3] assign[=] call[name[self]._constants.get, parameter[constant[nr3], list[[]]]]
variable[d3] assign[=] call[name[self]._constants.get, parameter[constant[d3], list[[]]]]
variable[t3] assign[=] call[name[self]._constants.get, parameter[constant[t3], list[[]]]]
variable[a3] assign[=] call[name[self]._constants.get, parameter[constant[alfa3], list[[]]]]
variable[e3] assign[=] call[name[self]._constants.get, parameter[constant[epsilon3], list[[]]]]
variable[b3] assign[=] call[name[self]._constants.get, parameter[constant[beta3], list[[]]]]
variable[g3] assign[=] call[name[self]._constants.get, parameter[constant[gamma3], list[[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c7968f0>, <ast.Name object at 0x7da20c795c30>, <ast.Name object at 0x7da20c7950c0>, <ast.Name object at 0x7da20c7957e0>, <ast.Name object at 0x7da20c796920>, <ast.Name object at 0x7da20c794130>, <ast.Name object at 0x7da20c795690>]]] in starred[call[name[zip], parameter[name[nr3], name[d3], name[t3], name[a3], name[e3], name[b3], name[g3]]]] begin[:]
<ast.AugAssign object at 0x7da20c794a90>
<ast.AugAssign object at 0x7da20c796020>
<ast.AugAssign object at 0x7da20c795360>
<ast.AugAssign object at 0x7da204566e60>
<ast.AugAssign object at 0x7da204567490>
<ast.AugAssign object at 0x7da204567df0>
variable[nr4] assign[=] call[name[self]._constants.get, parameter[constant[nr4], list[[]]]]
variable[a4] assign[=] call[name[self]._constants.get, parameter[constant[a4], list[[]]]]
variable[b4] assign[=] call[name[self]._constants.get, parameter[constant[b4], list[[]]]]
variable[Ai] assign[=] call[name[self]._constants.get, parameter[constant[A], list[[]]]]
variable[Bi] assign[=] call[name[self]._constants.get, parameter[constant[B], list[[]]]]
variable[Ci] assign[=] call[name[self]._constants.get, parameter[constant[C], list[[]]]]
variable[Di] assign[=] call[name[self]._constants.get, parameter[constant[D], list[[]]]]
variable[bt4] assign[=] call[name[self]._constants.get, parameter[constant[beta4], list[[]]]]
for taget[tuple[[<ast.Name object at 0x7da204567a30>, <ast.Name object at 0x7da204566170>, <ast.Name object at 0x7da204567940>, <ast.Name object at 0x7da204564d00>, <ast.Name object at 0x7da204566b90>, <ast.Name object at 0x7da204564ca0>, <ast.Name object at 0x7da204564e20>, <ast.Name object at 0x7da2045652a0>]]] in starred[call[name[zip], parameter[name[nr4], name[a4], name[b4], name[Ai], name[Bi], name[Ci], name[Di], name[bt4]]]] begin[:]
variable[Tita] assign[=] binary_operation[binary_operation[constant[1] - name[tau]] + binary_operation[name[A] * binary_operation[binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]] ** binary_operation[constant[0.5] / name[bt]]]]]
variable[F] assign[=] call[name[exp], parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da2045650f0> * binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]]] - binary_operation[name[D] * binary_operation[binary_operation[name[tau] - constant[1]] ** constant[2]]]]]]
variable[Fd] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18eb55690> * name[C]] * name[F]] * binary_operation[name[delta] - constant[1]]]
variable[Fdd] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * name[C]] * name[F]] * binary_operation[binary_operation[binary_operation[constant[2] * name[C]] * binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]]] - constant[1]]]
variable[Ft] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18eb57d00> * name[D]] * name[F]] * binary_operation[name[tau] - constant[1]]]
variable[Ftt] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * name[D]] * name[F]] * binary_operation[binary_operation[binary_operation[constant[2] * name[D]] * binary_operation[binary_operation[name[tau] - constant[1]] ** constant[2]]] - constant[1]]]
variable[Fdt] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[4] * name[C]] * name[D]] * name[F]] * binary_operation[name[delta] - constant[1]]] * binary_operation[name[tau] - constant[1]]]
variable[Delta] assign[=] binary_operation[binary_operation[name[Tita] ** constant[2]] + binary_operation[name[B] * binary_operation[binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]] ** name[a]]]]
variable[Deltad] assign[=] binary_operation[binary_operation[name[delta] - constant[1]] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[A] * name[Tita]] * constant[2]] / name[bt]] * binary_operation[binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]] ** binary_operation[binary_operation[constant[0.5] / name[bt]] - constant[1]]]] + binary_operation[binary_operation[binary_operation[constant[2] * name[B]] * name[a]] * binary_operation[binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]] ** binary_operation[name[a] - constant[1]]]]]]
if compare[name[delta] equal[==] constant[1]] begin[:]
variable[Deltadd] assign[=] constant[0]
variable[DeltaBd] assign[=] binary_operation[binary_operation[name[b] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[1]]]] * name[Deltad]]
variable[DeltaBdd] assign[=] binary_operation[name[b] * binary_operation[binary_operation[binary_operation[name[Delta] ** binary_operation[name[b] - constant[1]]] * name[Deltadd]] + binary_operation[binary_operation[binary_operation[name[b] - constant[1]] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[2]]]] * binary_operation[name[Deltad] ** constant[2]]]]]
variable[DeltaBt] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18eb55b70> * name[Tita]] * name[b]] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[1]]]]
variable[DeltaBtt] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * name[b]] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[1]]]] + binary_operation[binary_operation[binary_operation[binary_operation[constant[4] * binary_operation[name[Tita] ** constant[2]]] * name[b]] * binary_operation[name[b] - constant[1]]] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[2]]]]]
variable[DeltaBdt] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0695d50> * name[b]] * constant[2]] / name[bt]] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[1]]]] * binary_operation[name[delta] - constant[1]]] * binary_operation[binary_operation[binary_operation[name[delta] - constant[1]] ** constant[2]] ** binary_operation[binary_operation[constant[0.5] / name[bt]] - constant[1]]]] - binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[Tita]] * name[b]] * binary_operation[name[b] - constant[1]]] * binary_operation[name[Delta] ** binary_operation[name[b] - constant[2]]]] * name[Deltad]]]
<ast.AugAssign object at 0x7da1b0694670>
<ast.AugAssign object at 0x7da1b06958a0>
<ast.AugAssign object at 0x7da1b0694a00>
<ast.AugAssign object at 0x7da1b0694fa0>
<ast.AugAssign object at 0x7da1b06942e0>
<ast.AugAssign object at 0x7da1b0695270>
variable[prop] assign[=] dictionary[[], []]
call[name[prop]][constant[fir]] assign[=] name[fir]
call[name[prop]][constant[firt]] assign[=] name[firt]
call[name[prop]][constant[firtt]] assign[=] name[firtt]
call[name[prop]][constant[fird]] assign[=] name[fird]
call[name[prop]][constant[firdd]] assign[=] name[firdd]
call[name[prop]][constant[firdt]] assign[=] name[firdt]
return[name[prop]] | keyword[def] identifier[_phir] ( identifier[self] , identifier[tau] , identifier[delta] ):
literal[string]
identifier[fir] = identifier[fird] = identifier[firdd] = identifier[firt] = identifier[firtt] = identifier[firdt] = literal[int]
identifier[nr1] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[d1] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[t1] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
keyword[for] identifier[n] , identifier[d] , identifier[t] keyword[in] identifier[zip] ( identifier[nr1] , identifier[d1] , identifier[t1] ):
identifier[fir] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t]
identifier[fird] += identifier[n] * identifier[d] * identifier[delta] **( identifier[d] - literal[int] )* identifier[tau] ** identifier[t]
identifier[firdd] += identifier[n] * identifier[d] *( identifier[d] - literal[int] )* identifier[delta] **( identifier[d] - literal[int] )* identifier[tau] ** identifier[t]
identifier[firt] += identifier[n] * identifier[t] * identifier[delta] ** identifier[d] * identifier[tau] **( identifier[t] - literal[int] )
identifier[firtt] += identifier[n] * identifier[t] *( identifier[t] - literal[int] )* identifier[delta] ** identifier[d] * identifier[tau] **( identifier[t] - literal[int] )
identifier[firdt] += identifier[n] * identifier[t] * identifier[d] * identifier[delta] **( identifier[d] - literal[int] )* identifier[tau] **( identifier[t] - literal[int] )
identifier[nr2] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[d2] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[g2] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[t2] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[c2] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
keyword[for] identifier[n] , identifier[d] , identifier[g] , identifier[t] , identifier[c] keyword[in] identifier[zip] ( identifier[nr2] , identifier[d2] , identifier[g2] , identifier[t2] , identifier[c2] ):
identifier[fir] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[g] * identifier[delta] ** identifier[c] )
identifier[fird] += identifier[n] * identifier[exp] (- identifier[g] * identifier[delta] ** identifier[c] )* identifier[delta] **( identifier[d] - literal[int] )* identifier[tau] ** identifier[t] *( identifier[d] - identifier[g] * identifier[c] * identifier[delta] ** identifier[c] )
identifier[firdd] += identifier[n] * identifier[exp] (- identifier[g] * identifier[delta] ** identifier[c] )* identifier[delta] **( identifier[d] - literal[int] )* identifier[tau] ** identifier[t] *(( identifier[d] - identifier[g] * identifier[c] * identifier[delta] ** identifier[c] )*( identifier[d] - literal[int] - identifier[g] * identifier[c] * identifier[delta] ** identifier[c] )- identifier[g] ** literal[int] * identifier[c] ** literal[int] * identifier[delta] ** identifier[c] )
identifier[firt] += identifier[n] * identifier[t] * identifier[delta] ** identifier[d] * identifier[tau] **( identifier[t] - literal[int] )* identifier[exp] (- identifier[g] * identifier[delta] ** identifier[c] )
identifier[firtt] += identifier[n] * identifier[t] *( identifier[t] - literal[int] )* identifier[delta] ** identifier[d] * identifier[tau] **( identifier[t] - literal[int] )* identifier[exp] (- identifier[g] * identifier[delta] ** identifier[c] )
identifier[firdt] += identifier[n] * identifier[t] * identifier[delta] **( identifier[d] - literal[int] )* identifier[tau] **( identifier[t] - literal[int] )*( identifier[d] - identifier[g] * identifier[c] * identifier[delta] ** identifier[c] )* identifier[exp] (
- identifier[g] * identifier[delta] ** identifier[c] )
identifier[nr3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[d3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[t3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[a3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[e3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[b3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[g3] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
keyword[for] identifier[n] , identifier[d] , identifier[t] , identifier[a] , identifier[e] , identifier[b] , identifier[g] keyword[in] identifier[zip] ( identifier[nr3] , identifier[d3] , identifier[t3] , identifier[a3] , identifier[e3] , identifier[b3] , identifier[g3] ):
identifier[fir] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[a] *( identifier[delta] - identifier[e] )** literal[int] - identifier[b] *( identifier[tau] - identifier[g] )** literal[int] )
identifier[fird] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[a] *( identifier[delta] - identifier[e] )** literal[int] - identifier[b] *( identifier[tau] - identifier[g] )** literal[int] )*(
identifier[d] / identifier[delta] - literal[int] * identifier[a] *( identifier[delta] - identifier[e] ))
identifier[firdd] += identifier[n] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[a] *( identifier[delta] - identifier[e] )** literal[int] - identifier[b] *( identifier[tau] - identifier[g] )** literal[int] )*(
- literal[int] * identifier[a] * identifier[delta] ** identifier[d] + literal[int] * identifier[a] ** literal[int] * identifier[delta] ** identifier[d] *( identifier[delta] - identifier[e] )** literal[int] -
literal[int] * identifier[d] * identifier[a] * identifier[delta] **( identifier[d] - literal[int] )*( identifier[delta] - identifier[e] )+ identifier[d] *( identifier[d] - literal[int] )* identifier[delta] **( identifier[d] - literal[int] ))
identifier[firt] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[a] *( identifier[delta] - identifier[e] )** literal[int] - identifier[b] *( identifier[tau] - identifier[g] )** literal[int] )*(
identifier[t] / identifier[tau] - literal[int] * identifier[b] *( identifier[tau] - identifier[g] ))
identifier[firtt] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[a] *( identifier[delta] - identifier[e] )** literal[int] - identifier[b] *( identifier[tau] - identifier[g] )** literal[int] )*(
( identifier[t] / identifier[tau] - literal[int] * identifier[b] *( identifier[tau] - identifier[g] ))** literal[int] - identifier[t] / identifier[tau] ** literal[int] - literal[int] * identifier[b] )
identifier[firdt] += identifier[n] * identifier[delta] ** identifier[d] * identifier[tau] ** identifier[t] * identifier[exp] (- identifier[a] *( identifier[delta] - identifier[e] )** literal[int] - identifier[b] *( identifier[tau] - identifier[g] )** literal[int] )*(
identifier[t] / identifier[tau] - literal[int] * identifier[b] *( identifier[tau] - identifier[g] ))*( identifier[d] / identifier[delta] - literal[int] * identifier[a] *( identifier[delta] - identifier[e] ))
identifier[nr4] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[a4] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[b4] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[Ai] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[Bi] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[Ci] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[Di] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
identifier[bt4] = identifier[self] . identifier[_constants] . identifier[get] ( literal[string] ,[])
keyword[for] identifier[n] , identifier[a] , identifier[b] , identifier[A] , identifier[B] , identifier[C] , identifier[D] , identifier[bt] keyword[in] identifier[zip] ( identifier[nr4] , identifier[a4] , identifier[b4] , identifier[Ai] , identifier[Bi] , identifier[Ci] , identifier[Di] , identifier[bt4] ):
identifier[Tita] =( literal[int] - identifier[tau] )+ identifier[A] *(( identifier[delta] - literal[int] )** literal[int] )**( literal[int] / identifier[bt] )
identifier[F] = identifier[exp] (- identifier[C] *( identifier[delta] - literal[int] )** literal[int] - identifier[D] *( identifier[tau] - literal[int] )** literal[int] )
identifier[Fd] =- literal[int] * identifier[C] * identifier[F] *( identifier[delta] - literal[int] )
identifier[Fdd] = literal[int] * identifier[C] * identifier[F] *( literal[int] * identifier[C] *( identifier[delta] - literal[int] )** literal[int] - literal[int] )
identifier[Ft] =- literal[int] * identifier[D] * identifier[F] *( identifier[tau] - literal[int] )
identifier[Ftt] = literal[int] * identifier[D] * identifier[F] *( literal[int] * identifier[D] *( identifier[tau] - literal[int] )** literal[int] - literal[int] )
identifier[Fdt] = literal[int] * identifier[C] * identifier[D] * identifier[F] *( identifier[delta] - literal[int] )*( identifier[tau] - literal[int] )
identifier[Delta] = identifier[Tita] ** literal[int] + identifier[B] *(( identifier[delta] - literal[int] )** literal[int] )** identifier[a]
identifier[Deltad] =( identifier[delta] - literal[int] )*( identifier[A] * identifier[Tita] * literal[int] / identifier[bt] *(( identifier[delta] - literal[int] )** literal[int] )**( literal[int] / identifier[bt] - literal[int] )+
literal[int] * identifier[B] * identifier[a] *(( identifier[delta] - literal[int] )** literal[int] )**( identifier[a] - literal[int] ))
keyword[if] identifier[delta] == literal[int] :
identifier[Deltadd] = literal[int]
keyword[else] :
identifier[Deltadd] = identifier[Deltad] /( identifier[delta] - literal[int] )+( identifier[delta] - literal[int] )** literal[int] *(
literal[int] * identifier[B] * identifier[a] *( identifier[a] - literal[int] )*(( identifier[delta] - literal[int] )** literal[int] )**( identifier[a] - literal[int] )+
literal[int] * identifier[A] ** literal[int] / identifier[bt] ** literal[int] *((( identifier[delta] - literal[int] )** literal[int] )**( literal[int] / identifier[bt] - literal[int] ))** literal[int] +
identifier[A] * identifier[Tita] * literal[int] / identifier[bt] *( literal[int] / identifier[bt] - literal[int] )*(( identifier[delta] - literal[int] )** literal[int] )**( literal[int] / identifier[bt] - literal[int] ))
identifier[DeltaBd] = identifier[b] * identifier[Delta] **( identifier[b] - literal[int] )* identifier[Deltad]
identifier[DeltaBdd] = identifier[b] *( identifier[Delta] **( identifier[b] - literal[int] )* identifier[Deltadd] +( identifier[b] - literal[int] )* identifier[Delta] **( identifier[b] - literal[int] )* identifier[Deltad] ** literal[int] )
identifier[DeltaBt] =- literal[int] * identifier[Tita] * identifier[b] * identifier[Delta] **( identifier[b] - literal[int] )
identifier[DeltaBtt] = literal[int] * identifier[b] * identifier[Delta] **( identifier[b] - literal[int] )+ literal[int] * identifier[Tita] ** literal[int] * identifier[b] *( identifier[b] - literal[int] )* identifier[Delta] **( identifier[b] - literal[int] )
identifier[DeltaBdt] =- identifier[A] * identifier[b] * literal[int] / identifier[bt] * identifier[Delta] **( identifier[b] - literal[int] )*( identifier[delta] - literal[int] )*(( identifier[delta] - literal[int] )** literal[int] )**(
literal[int] / identifier[bt] - literal[int] )- literal[int] * identifier[Tita] * identifier[b] *( identifier[b] - literal[int] )* identifier[Delta] **( identifier[b] - literal[int] )* identifier[Deltad]
identifier[fir] += identifier[n] * identifier[Delta] ** identifier[b] * identifier[delta] * identifier[F]
identifier[fird] += identifier[n] *( identifier[Delta] ** identifier[b] *( identifier[F] + identifier[delta] * identifier[Fd] )+ identifier[DeltaBd] * identifier[delta] * identifier[F] )
identifier[firdd] += identifier[n] *( identifier[Delta] ** identifier[b] *( literal[int] * identifier[Fd] + identifier[delta] * identifier[Fdd] )+ literal[int] * identifier[DeltaBd] *( identifier[F] + identifier[delta] * identifier[Fd] )+
identifier[DeltaBdd] * identifier[delta] * identifier[F] )
identifier[firt] += identifier[n] * identifier[delta] *( identifier[DeltaBt] * identifier[F] + identifier[Delta] ** identifier[b] * identifier[Ft] )
identifier[firtt] += identifier[n] * identifier[delta] *( identifier[DeltaBtt] * identifier[F] + literal[int] * identifier[DeltaBt] * identifier[Ft] + identifier[Delta] ** identifier[b] * identifier[Ftt] )
identifier[firdt] += identifier[n] *( identifier[Delta] ** identifier[b] *( identifier[Ft] + identifier[delta] * identifier[Fdt] )+ identifier[delta] * identifier[DeltaBd] * identifier[Ft] +
identifier[DeltaBt] *( identifier[F] + identifier[delta] * identifier[Fd] )+ identifier[DeltaBdt] * identifier[delta] * identifier[F] )
identifier[prop] ={}
identifier[prop] [ literal[string] ]= identifier[fir]
identifier[prop] [ literal[string] ]= identifier[firt]
identifier[prop] [ literal[string] ]= identifier[firtt]
identifier[prop] [ literal[string] ]= identifier[fird]
identifier[prop] [ literal[string] ]= identifier[firdd]
identifier[prop] [ literal[string] ]= identifier[firdt]
keyword[return] identifier[prop] | def _phir(self, tau, delta):
"""Residual contribution to the free Helmholtz energy
Parameters
----------
tau : float
Inverse reduced temperature Tc/T, [-]
delta : float
Reduced density rho/rhoc, [-]
Returns
-------
prop : dict
Dictionary with the residual dimensionless Helmholtz energy and its derivatives:
* fir
* firt: ∂fir/∂τ|δ,x
* fird: ∂fir/∂δ|τ,x
* firtt: ∂²fir/∂τ²|δ,x
* firdt: ∂²fir/∂τ∂δ|x
* firdd: ∂²fir/∂δ²|τ,x
References
----------
IAPWS, Revised Release on the IAPWS Formulation 1995 for the
Thermodynamic Properties of Ordinary Water Substance for General and
Scientific Use, September 2016, Table 5
http://www.iapws.org/relguide/IAPWS-95.html
"""
fir = fird = firdd = firt = firtt = firdt = 0
# Polynomial terms
nr1 = self._constants.get('nr1', [])
d1 = self._constants.get('d1', [])
t1 = self._constants.get('t1', [])
for (n, d, t) in zip(nr1, d1, t1):
fir += n * delta ** d * tau ** t
fird += n * d * delta ** (d - 1) * tau ** t
firdd += n * d * (d - 1) * delta ** (d - 2) * tau ** t
firt += n * t * delta ** d * tau ** (t - 1)
firtt += n * t * (t - 1) * delta ** d * tau ** (t - 2)
firdt += n * t * d * delta ** (d - 1) * tau ** (t - 1) # depends on [control=['for'], data=[]]
# Exponential terms
nr2 = self._constants.get('nr2', [])
d2 = self._constants.get('d2', [])
g2 = self._constants.get('gamma2', [])
t2 = self._constants.get('t2', [])
c2 = self._constants.get('c2', [])
for (n, d, g, t, c) in zip(nr2, d2, g2, t2, c2):
fir += n * delta ** d * tau ** t * exp(-g * delta ** c)
fird += n * exp(-g * delta ** c) * delta ** (d - 1) * tau ** t * (d - g * c * delta ** c)
firdd += n * exp(-g * delta ** c) * delta ** (d - 2) * tau ** t * ((d - g * c * delta ** c) * (d - 1 - g * c * delta ** c) - g ** 2 * c ** 2 * delta ** c)
firt += n * t * delta ** d * tau ** (t - 1) * exp(-g * delta ** c)
firtt += n * t * (t - 1) * delta ** d * tau ** (t - 2) * exp(-g * delta ** c)
firdt += n * t * delta ** (d - 1) * tau ** (t - 1) * (d - g * c * delta ** c) * exp(-g * delta ** c) # depends on [control=['for'], data=[]]
# Gaussian terms
nr3 = self._constants.get('nr3', [])
d3 = self._constants.get('d3', [])
t3 = self._constants.get('t3', [])
a3 = self._constants.get('alfa3', [])
e3 = self._constants.get('epsilon3', [])
b3 = self._constants.get('beta3', [])
g3 = self._constants.get('gamma3', [])
for (n, d, t, a, e, b, g) in zip(nr3, d3, t3, a3, e3, b3, g3):
fir += n * delta ** d * tau ** t * exp(-a * (delta - e) ** 2 - b * (tau - g) ** 2)
fird += n * delta ** d * tau ** t * exp(-a * (delta - e) ** 2 - b * (tau - g) ** 2) * (d / delta - 2 * a * (delta - e))
firdd += n * tau ** t * exp(-a * (delta - e) ** 2 - b * (tau - g) ** 2) * (-2 * a * delta ** d + 4 * a ** 2 * delta ** d * (delta - e) ** 2 - 4 * d * a * delta ** (d - 1) * (delta - e) + d * (d - 1) * delta ** (d - 2))
firt += n * delta ** d * tau ** t * exp(-a * (delta - e) ** 2 - b * (tau - g) ** 2) * (t / tau - 2 * b * (tau - g))
firtt += n * delta ** d * tau ** t * exp(-a * (delta - e) ** 2 - b * (tau - g) ** 2) * ((t / tau - 2 * b * (tau - g)) ** 2 - t / tau ** 2 - 2 * b)
firdt += n * delta ** d * tau ** t * exp(-a * (delta - e) ** 2 - b * (tau - g) ** 2) * (t / tau - 2 * b * (tau - g)) * (d / delta - 2 * a * (delta - e)) # depends on [control=['for'], data=[]]
# Non-analytic terms
nr4 = self._constants.get('nr4', [])
a4 = self._constants.get('a4', [])
b4 = self._constants.get('b4', [])
Ai = self._constants.get('A', [])
Bi = self._constants.get('B', [])
Ci = self._constants.get('C', [])
Di = self._constants.get('D', [])
bt4 = self._constants.get('beta4', [])
for (n, a, b, A, B, C, D, bt) in zip(nr4, a4, b4, Ai, Bi, Ci, Di, bt4):
Tita = 1 - tau + A * ((delta - 1) ** 2) ** (0.5 / bt)
F = exp(-C * (delta - 1) ** 2 - D * (tau - 1) ** 2)
Fd = -2 * C * F * (delta - 1)
Fdd = 2 * C * F * (2 * C * (delta - 1) ** 2 - 1)
Ft = -2 * D * F * (tau - 1)
Ftt = 2 * D * F * (2 * D * (tau - 1) ** 2 - 1)
Fdt = 4 * C * D * F * (delta - 1) * (tau - 1)
Delta = Tita ** 2 + B * ((delta - 1) ** 2) ** a
Deltad = (delta - 1) * (A * Tita * 2 / bt * ((delta - 1) ** 2) ** (0.5 / bt - 1) + 2 * B * a * ((delta - 1) ** 2) ** (a - 1))
if delta == 1:
Deltadd = 0 # depends on [control=['if'], data=[]]
else:
Deltadd = Deltad / (delta - 1) + (delta - 1) ** 2 * (4 * B * a * (a - 1) * ((delta - 1) ** 2) ** (a - 2) + 2 * A ** 2 / bt ** 2 * (((delta - 1) ** 2) ** (0.5 / bt - 1)) ** 2 + A * Tita * 4 / bt * (0.5 / bt - 1) * ((delta - 1) ** 2) ** (0.5 / bt - 2))
DeltaBd = b * Delta ** (b - 1) * Deltad
DeltaBdd = b * (Delta ** (b - 1) * Deltadd + (b - 1) * Delta ** (b - 2) * Deltad ** 2)
DeltaBt = -2 * Tita * b * Delta ** (b - 1)
DeltaBtt = 2 * b * Delta ** (b - 1) + 4 * Tita ** 2 * b * (b - 1) * Delta ** (b - 2)
DeltaBdt = -A * b * 2 / bt * Delta ** (b - 1) * (delta - 1) * ((delta - 1) ** 2) ** (0.5 / bt - 1) - 2 * Tita * b * (b - 1) * Delta ** (b - 2) * Deltad
fir += n * Delta ** b * delta * F
fird += n * (Delta ** b * (F + delta * Fd) + DeltaBd * delta * F)
firdd += n * (Delta ** b * (2 * Fd + delta * Fdd) + 2 * DeltaBd * (F + delta * Fd) + DeltaBdd * delta * F)
firt += n * delta * (DeltaBt * F + Delta ** b * Ft)
firtt += n * delta * (DeltaBtt * F + 2 * DeltaBt * Ft + Delta ** b * Ftt)
firdt += n * (Delta ** b * (Ft + delta * Fdt) + delta * DeltaBd * Ft + DeltaBt * (F + delta * Fd) + DeltaBdt * delta * F) # depends on [control=['for'], data=[]]
prop = {}
prop['fir'] = fir
prop['firt'] = firt
prop['firtt'] = firtt
prop['fird'] = fird
prop['firdd'] = firdd
prop['firdt'] = firdt
return prop |
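A minimal sketch of how the derivative dictionary returned above is typically consumed in an IAPWS-95-style Helmholtz formulation: the standard relation p = rho*R*T*(1 + delta*fird) turns the 'fird' entry into pressure. The standalone helper and constants below are illustrative assumptions, not part of the class this method belongs to.
# Illustrative sketch (not from the source class): pressure from the
# residual Helmholtz derivatives via p = rho*R*T*(1 + delta*fird).
R = 0.46151805              # kJ/(kg*K), specific gas constant of ordinary water
Tc, rhoc = 647.096, 322.0   # K and kg/m^3, critical parameters of water

def pressure_kPa(phir, T, rho):
    """Pressure in kPa, given any phir(tau, delta) callable returning the dict above."""
    tau, delta = Tc / T, rho / rhoc
    prop = phir(tau, delta)
    return rho * R * T * (1.0 + delta * prop['fird'])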
def orifice_expansibility(D, Do, P1, P2, k):
r'''Calculates the expansibility factor for orifice plate calculations
based on the geometry of the plate, measured pressures of the orifice, and
the isentropic exponent of the fluid.
.. math::
\epsilon = 1 - (0.351 + 0.256\beta^4 + 0.93\beta^8)
\left[1-\left(\frac{P_2}{P_1}\right)^{1/\kappa}\right]
Parameters
----------
D : float
Upstream internal pipe diameter, [m]
Do : float
Diameter of orifice at flow conditions, [m]
P1 : float
Static pressure of fluid upstream of orifice at the cross-section of
the pressure tap, [Pa]
P2 : float
Static pressure of fluid downstream of orifice at the cross-section of
the pressure tap, [Pa]
k : float
Isentropic exponent of fluid, [-]
Returns
-------
expansibility : float
Expansibility factor (1 for incompressible fluids, less than 1 for
real fluids), [-]
Notes
-----
This formula was determined for the range of P2/P1 >= 0.80, and for fluids
of air, steam, and natural gas. However, there is no objection to using
it for other fluids.
Examples
--------
>>> orifice_expansibility(D=0.0739, Do=0.0222, P1=1E5, P2=9.9E4, k=1.4)
0.9974739057343425
References
----------
.. [1] American Society of Mechanical Engineers. MFC-3M-2004 Measurement
of Fluid Flow in Pipes Using Orifice, Nozzle, and Venturi. ASME, 2001.
.. [2] ISO 5167-2:2003 - Measurement of Fluid Flow by Means of Pressure
Differential Devices Inserted in Circular Cross-Section Conduits Running
Full -- Part 2: Orifice Plates.
'''
beta = Do/D
beta2 = beta*beta
beta4 = beta2*beta2
return (1.0 - (0.351 + 0.256*beta4 + 0.93*beta4*beta4)*(
1.0 - (P2/P1)**(1./k))) | def function[orifice_expansibility, parameter[D, Do, P1, P2, k]]:
constant[Calculates the expansibility factor for orifice plate calculations
based on the geometry of the plate, measured pressures of the orifice, and
the isentropic exponent of the fluid.
.. math::
\epsilon = 1 - (0.351 + 0.256\beta^4 + 0.93\beta^8)
\left[1-\left(\frac{P_2}{P_1}\right)^{1/\kappa}\right]
Parameters
----------
D : float
Upstream internal pipe diameter, [m]
Do : float
Diameter of orifice at flow conditions, [m]
P1 : float
Static pressure of fluid upstream of orifice at the cross-section of
the pressure tap, [Pa]
P2 : float
Static pressure of fluid downstream of orifice at the cross-section of
the pressure tap, [Pa]
k : float
Isentropic exponent of fluid, [-]
Returns
-------
expansibility : float
Expansibility factor (1 for incompressible fluids, less than 1 for
real fluids), [-]
Notes
-----
This formula was determined for the range of P2/P1 >= 0.80, and for fluids
of air, steam, and natural gas. However, there is no objection to using
it for other fluids.
Examples
--------
>>> orifice_expansibility(D=0.0739, Do=0.0222, P1=1E5, P2=9.9E4, k=1.4)
0.9974739057343425
References
----------
.. [1] American Society of Mechanical Engineers. MFC-3M-2004 Measurement
of Fluid Flow in Pipes Using Orifice, Nozzle, and Venturi. ASME, 2001.
.. [2] ISO 5167-2:2003 - Measurement of Fluid Flow by Means of Pressure
Differential Devices Inserted in Circular Cross-Section Conduits Running
Full -- Part 2: Orifice Plates.
]
variable[beta] assign[=] binary_operation[name[Do] / name[D]]
variable[beta2] assign[=] binary_operation[name[beta] * name[beta]]
variable[beta4] assign[=] binary_operation[name[beta2] * name[beta2]]
return[binary_operation[constant[1.0] - binary_operation[binary_operation[binary_operation[constant[0.351] + binary_operation[constant[0.256] * name[beta4]]] + binary_operation[binary_operation[constant[0.93] * name[beta4]] * name[beta4]]] * binary_operation[constant[1.0] - binary_operation[binary_operation[name[P2] / name[P1]] ** binary_operation[constant[1.0] / name[k]]]]]]] | keyword[def] identifier[orifice_expansibility] ( identifier[D] , identifier[Do] , identifier[P1] , identifier[P2] , identifier[k] ):
literal[string]
identifier[beta] = identifier[Do] / identifier[D]
identifier[beta2] = identifier[beta] * identifier[beta]
identifier[beta4] = identifier[beta2] * identifier[beta2]
keyword[return] ( literal[int] -( literal[int] + literal[int] * identifier[beta4] + literal[int] * identifier[beta4] * identifier[beta4] )*(
literal[int] -( identifier[P2] / identifier[P1] )**( literal[int] / identifier[k] ))) | def orifice_expansibility(D, Do, P1, P2, k):
"""Calculates the expansibility factor for orifice plate calculations
based on the geometry of the plate, measured pressures of the orifice, and
the isentropic exponent of the fluid.
.. math::
\\epsilon = 1 - (0.351 + 0.256\\beta^4 + 0.93\\beta^8)
\\left[1-\\left(\\frac{P_2}{P_1}\\right)^{1/\\kappa}\\right]
Parameters
----------
D : float
Upstream internal pipe diameter, [m]
Do : float
Diameter of orifice at flow conditions, [m]
P1 : float
Static pressure of fluid upstream of orifice at the cross-section of
the pressure tap, [Pa]
P2 : float
Static pressure of fluid downstream of orifice at the cross-section of
the pressure tap, [Pa]
k : float
Isentropic exponent of fluid, [-]
Returns
-------
expansibility : float
Expansibility factor (1 for incompressible fluids, less than 1 for
real fluids), [-]
Notes
-----
This formula was determined for the range of P2/P1 >= 0.80, and for fluids
of air, steam, and natural gas. However, there is no objection to using
it for other fluids.
Examples
--------
>>> orifice_expansibility(D=0.0739, Do=0.0222, P1=1E5, P2=9.9E4, k=1.4)
0.9974739057343425
References
----------
.. [1] American Society of Mechanical Engineers. MFC-3M-2004 Measurement
of Fluid Flow in Pipes Using Orifice, Nozzle, and Venturi. ASME, 2001.
.. [2] ISO 5167-2:2003 - Measurement of Fluid Flow by Means of Pressure
Differential Devices Inserted in Circular Cross-Section Conduits Running
Full -- Part 2: Orifice Plates.
"""
beta = Do / D
beta2 = beta * beta
beta4 = beta2 * beta2
return 1.0 - (0.351 + 0.256 * beta4 + 0.93 * beta4 * beta4) * (1.0 - (P2 / P1) ** (1.0 / k)) |
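As a quick sanity check, the docstring example can be reproduced step by step; the intermediate values in the comments are rounded and shown only for orientation.
# Step-by-step check of the docstring example for orifice_expansibility.
D, Do, P1, P2, k = 0.0739, 0.0222, 1E5, 9.9E4, 1.4
beta = Do / D                                     # ~0.3004, diameter ratio
coeff = 0.351 + 0.256*beta**4 + 0.93*beta**8      # ~0.3531
epsilon = 1.0 - coeff*(1.0 - (P2/P1)**(1.0/k))    # 0.9974739057343425, as quoted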