| code | code_sememe | token_type | code_dependency |
|---|---|---|---|
| stringlengths 75–104k | stringlengths 47–309k | stringlengths 215–214k | stringlengths 75–155k |
def _marshal(self, newSerial=True, oobFDs=None):
"""
Encodes the message into binary format. The resulting binary message is
stored in C{self.rawMessage}
"""
flags = 0
if not self.expectReply:
flags |= 0x1
if not self.autoStart:
flags |= 0x2
    # may be overridden below, depending on oobFDs
    _headerAttrs = self._headerAttrs
    # marshal body before headers to know if the 'unix_fds' header is needed
if self.signature:
binBody = b''.join(
marshal.marshal(
self.signature,
self.body,
oobFDs=oobFDs
)[1]
)
if oobFDs:
# copy class based _headerAttrs to add a unix_fds header this
# time
_headerAttrs = list(self._headerAttrs)
_headerAttrs.append(('unix_fds', 9, False))
self.unix_fds = len(oobFDs)
else:
binBody = b''
self.headers = []
for attr_name, code, _ in _headerAttrs:
hval = getattr(self, attr_name, None)
if hval is not None:
if attr_name == 'path':
hval = marshal.ObjectPath(hval)
elif attr_name == 'signature':
hval = marshal.Signature(hval)
elif attr_name == 'unix_fds':
hval = marshal.UInt32(hval)
self.headers.append([code, hval])
self.bodyLength = len(binBody)
if newSerial:
self.serial = DBusMessage._nextSerial
DBusMessage._nextSerial += 1
binHeader = b''.join(marshal.marshal(
_headerFormat,
[
self.endian,
self._messageType,
flags,
self._protocolVersion,
self.bodyLength,
self.serial,
self.headers
],
lendian=self.endian == ord('l')
)[1])
headerPadding = marshal.pad['header'](len(binHeader))
self.rawHeader = binHeader
self.rawPadding = headerPadding
self.rawBody = binBody
self.rawMessage = b''.join([binHeader, headerPadding, binBody])
if len(self.rawMessage) > self._maxMsgLen:
raise error.MarshallingError(
'Marshalled message exceeds maximum message size of %d' %
(self._maxMsgLen,),
) | def function[_marshal, parameter[self, newSerial, oobFDs]]:
constant[
Encodes the message into binary format. The resulting binary message is
stored in C{self.rawMessage}
]
variable[flags] assign[=] constant[0]
if <ast.UnaryOp object at 0x7da18c4cf370> begin[:]
<ast.AugAssign object at 0x7da18c4cded0>
if <ast.UnaryOp object at 0x7da18c4cf6d0> begin[:]
<ast.AugAssign object at 0x7da18c4cc7c0>
variable[_headerAttrs] assign[=] name[self]._headerAttrs
if name[self].signature begin[:]
variable[binBody] assign[=] call[constant[b''].join, parameter[call[call[name[marshal].marshal, parameter[name[self].signature, name[self].body]]][constant[1]]]]
if name[oobFDs] begin[:]
variable[_headerAttrs] assign[=] call[name[list], parameter[name[self]._headerAttrs]]
call[name[_headerAttrs].append, parameter[tuple[[<ast.Constant object at 0x7da18c4cee30>, <ast.Constant object at 0x7da18c4cda20>, <ast.Constant object at 0x7da18c4cdde0>]]]]
name[self].unix_fds assign[=] call[name[len], parameter[name[oobFDs]]]
name[self].headers assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18c4cdba0>, <ast.Name object at 0x7da18c4cdab0>, <ast.Name object at 0x7da18c4cc040>]]] in starred[name[_headerAttrs]] begin[:]
variable[hval] assign[=] call[name[getattr], parameter[name[self], name[attr_name], constant[None]]]
if compare[name[hval] is_not constant[None]] begin[:]
if compare[name[attr_name] equal[==] constant[path]] begin[:]
variable[hval] assign[=] call[name[marshal].ObjectPath, parameter[name[hval]]]
call[name[self].headers.append, parameter[list[[<ast.Name object at 0x7da20c76ec20>, <ast.Name object at 0x7da20c76dd20>]]]]
name[self].bodyLength assign[=] call[name[len], parameter[name[binBody]]]
if name[newSerial] begin[:]
name[self].serial assign[=] name[DBusMessage]._nextSerial
<ast.AugAssign object at 0x7da20c76e6e0>
variable[binHeader] assign[=] call[constant[b''].join, parameter[call[call[name[marshal].marshal, parameter[name[_headerFormat], list[[<ast.Attribute object at 0x7da20c76cd30>, <ast.Attribute object at 0x7da20c76cdf0>, <ast.Name object at 0x7da20c76e8c0>, <ast.Attribute object at 0x7da20c76c2e0>, <ast.Attribute object at 0x7da20c76d8a0>, <ast.Attribute object at 0x7da20c76ee60>, <ast.Attribute object at 0x7da20c76d960>]]]]][constant[1]]]]
variable[headerPadding] assign[=] call[call[name[marshal].pad][constant[header]], parameter[call[name[len], parameter[name[binHeader]]]]]
name[self].rawHeader assign[=] name[binHeader]
name[self].rawPadding assign[=] name[headerPadding]
name[self].rawBody assign[=] name[binBody]
name[self].rawMessage assign[=] call[constant[b''].join, parameter[list[[<ast.Name object at 0x7da20c76ebf0>, <ast.Name object at 0x7da20c76f0a0>, <ast.Name object at 0x7da20c76e5c0>]]]]
if compare[call[name[len], parameter[name[self].rawMessage]] greater[>] name[self]._maxMsgLen] begin[:]
<ast.Raise object at 0x7da20c76cca0> | keyword[def] identifier[_marshal] ( identifier[self] , identifier[newSerial] = keyword[True] , identifier[oobFDs] = keyword[None] ):
literal[string]
identifier[flags] = literal[int]
keyword[if] keyword[not] identifier[self] . identifier[expectReply] :
identifier[flags] |= literal[int]
keyword[if] keyword[not] identifier[self] . identifier[autoStart] :
identifier[flags] |= literal[int]
identifier[_headerAttrs] = identifier[self] . identifier[_headerAttrs]
keyword[if] identifier[self] . identifier[signature] :
identifier[binBody] = literal[string] . identifier[join] (
identifier[marshal] . identifier[marshal] (
identifier[self] . identifier[signature] ,
identifier[self] . identifier[body] ,
identifier[oobFDs] = identifier[oobFDs]
)[ literal[int] ]
)
keyword[if] identifier[oobFDs] :
identifier[_headerAttrs] = identifier[list] ( identifier[self] . identifier[_headerAttrs] )
identifier[_headerAttrs] . identifier[append] (( literal[string] , literal[int] , keyword[False] ))
identifier[self] . identifier[unix_fds] = identifier[len] ( identifier[oobFDs] )
keyword[else] :
identifier[binBody] = literal[string]
identifier[self] . identifier[headers] =[]
keyword[for] identifier[attr_name] , identifier[code] , identifier[_] keyword[in] identifier[_headerAttrs] :
identifier[hval] = identifier[getattr] ( identifier[self] , identifier[attr_name] , keyword[None] )
keyword[if] identifier[hval] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[attr_name] == literal[string] :
identifier[hval] = identifier[marshal] . identifier[ObjectPath] ( identifier[hval] )
keyword[elif] identifier[attr_name] == literal[string] :
identifier[hval] = identifier[marshal] . identifier[Signature] ( identifier[hval] )
keyword[elif] identifier[attr_name] == literal[string] :
identifier[hval] = identifier[marshal] . identifier[UInt32] ( identifier[hval] )
identifier[self] . identifier[headers] . identifier[append] ([ identifier[code] , identifier[hval] ])
identifier[self] . identifier[bodyLength] = identifier[len] ( identifier[binBody] )
keyword[if] identifier[newSerial] :
identifier[self] . identifier[serial] = identifier[DBusMessage] . identifier[_nextSerial]
identifier[DBusMessage] . identifier[_nextSerial] += literal[int]
identifier[binHeader] = literal[string] . identifier[join] ( identifier[marshal] . identifier[marshal] (
identifier[_headerFormat] ,
[
identifier[self] . identifier[endian] ,
identifier[self] . identifier[_messageType] ,
identifier[flags] ,
identifier[self] . identifier[_protocolVersion] ,
identifier[self] . identifier[bodyLength] ,
identifier[self] . identifier[serial] ,
identifier[self] . identifier[headers]
],
identifier[lendian] = identifier[self] . identifier[endian] == identifier[ord] ( literal[string] )
)[ literal[int] ])
identifier[headerPadding] = identifier[marshal] . identifier[pad] [ literal[string] ]( identifier[len] ( identifier[binHeader] ))
identifier[self] . identifier[rawHeader] = identifier[binHeader]
identifier[self] . identifier[rawPadding] = identifier[headerPadding]
identifier[self] . identifier[rawBody] = identifier[binBody]
identifier[self] . identifier[rawMessage] = literal[string] . identifier[join] ([ identifier[binHeader] , identifier[headerPadding] , identifier[binBody] ])
keyword[if] identifier[len] ( identifier[self] . identifier[rawMessage] )> identifier[self] . identifier[_maxMsgLen] :
keyword[raise] identifier[error] . identifier[MarshallingError] (
literal[string] %
( identifier[self] . identifier[_maxMsgLen] ,),
) | def _marshal(self, newSerial=True, oobFDs=None):
"""
Encodes the message into binary format. The resulting binary message is
stored in C{self.rawMessage}
"""
flags = 0
if not self.expectReply:
flags |= 1 # depends on [control=['if'], data=[]]
if not self.autoStart:
flags |= 2 # depends on [control=['if'], data=[]]
    # may be overridden below, depending on oobFDs
    _headerAttrs = self._headerAttrs
    # marshal body before headers to know if the 'unix_fds' header is needed
if self.signature:
binBody = b''.join(marshal.marshal(self.signature, self.body, oobFDs=oobFDs)[1])
if oobFDs:
# copy class based _headerAttrs to add a unix_fds header this
# time
_headerAttrs = list(self._headerAttrs)
_headerAttrs.append(('unix_fds', 9, False))
self.unix_fds = len(oobFDs) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
binBody = b''
self.headers = []
for (attr_name, code, _) in _headerAttrs:
hval = getattr(self, attr_name, None)
if hval is not None:
if attr_name == 'path':
hval = marshal.ObjectPath(hval) # depends on [control=['if'], data=[]]
elif attr_name == 'signature':
hval = marshal.Signature(hval) # depends on [control=['if'], data=[]]
elif attr_name == 'unix_fds':
hval = marshal.UInt32(hval) # depends on [control=['if'], data=[]]
self.headers.append([code, hval]) # depends on [control=['if'], data=['hval']] # depends on [control=['for'], data=[]]
self.bodyLength = len(binBody)
if newSerial:
self.serial = DBusMessage._nextSerial
DBusMessage._nextSerial += 1 # depends on [control=['if'], data=[]]
binHeader = b''.join(marshal.marshal(_headerFormat, [self.endian, self._messageType, flags, self._protocolVersion, self.bodyLength, self.serial, self.headers], lendian=self.endian == ord('l'))[1])
headerPadding = marshal.pad['header'](len(binHeader))
self.rawHeader = binHeader
self.rawPadding = headerPadding
self.rawBody = binBody
self.rawMessage = b''.join([binHeader, headerPadding, binBody])
if len(self.rawMessage) > self._maxMsgLen:
raise error.MarshallingError('Marshalled message exceeds maximum message size of %d' % (self._maxMsgLen,)) # depends on [control=['if'], data=[]] |
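A note on the flags byte computed above: per the D-Bus specification, 0x1 is NO_REPLY_EXPECTED and 0x2 is NO_AUTO_START, and the fixed part of every message header is (endian flag, message type, flags, protocol version, body length, serial). Below is a minimal sketch of packing just that fixed 12-byte prefix with the standard library; the helper name is illustrative, not part of the library above.

```python
import struct

def pack_fixed_header(msg_type, flags, body_len, serial, little_endian=True):
    # Packs only the fixed 12-byte D-Bus header prefix: endian flag
    # ('l' or 'B'), message type, flags, protocol version (1),
    # body length, serial; the same field order marshalled above.
    endian_char = b'l' if little_endian else b'B'
    fmt = ('<' if little_endian else '>') + 'cBBBII'
    return struct.pack(fmt, endian_char, msg_type, flags, 1, body_len, serial)

# A METHOD_CALL (type 1) that expects no reply sets flags |= 0x1, as above.
header = pack_fixed_header(msg_type=1, flags=0x1, body_len=0, serial=42)
assert len(header) == 12
```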
def ceil(self):
"""Round `x` and `y` up to integers."""
return Point(int(math.ceil(self.x)), int(math.ceil(self.y))) | def function[ceil, parameter[self]]:
constant[Round `x` and `y` up to integers.]
return[call[name[Point], parameter[call[name[int], parameter[call[name[math].ceil, parameter[name[self].x]]]], call[name[int], parameter[call[name[math].ceil, parameter[name[self].y]]]]]]] | keyword[def] identifier[ceil] ( identifier[self] ):
literal[string]
keyword[return] identifier[Point] ( identifier[int] ( identifier[math] . identifier[ceil] ( identifier[self] . identifier[x] )), identifier[int] ( identifier[math] . identifier[ceil] ( identifier[self] . identifier[y] ))) | def ceil(self):
"""Round `x` and `y` up to integers."""
return Point(int(math.ceil(self.x)), int(math.ceil(self.y))) |
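A quick usage check for the ceil method above; the Point class here is a minimal stand-in, since the real class in the source is not shown.

```python
import math

class Point:
    # Minimal stand-in so the method can be exercised in isolation.
    def __init__(self, x, y):
        self.x, self.y = x, y

    def ceil(self):
        """Round `x` and `y` up to integers."""
        return Point(int(math.ceil(self.x)), int(math.ceil(self.y)))

p = Point(1.2, -0.5).ceil()
assert (p.x, p.y) == (2, 0)  # ceil rounds toward +infinity, so -0.5 -> 0
```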
def _get_template(t):
"""Return a single template *t*."""
if os.path.exists(t): # 1) Is it an accessible file?
pass
else:
_t = t
_t_found = False
for d in path: # 2) search config.path
p = os.path.join(d, _t)
if os.path.exists(p):
t = p
_t_found = True
break
_t = os.path.basename(t)
if not _t_found: # 3) try template dirs
for p in templates.values():
if _t == os.path.basename(p):
t = p
_t_found = True # NOTE: in principle this could match multiple
break # times if more than one template dir existed.
if not _t_found: # 4) try it as a key into templates
try:
t = templates[t]
except KeyError:
pass
else:
_t_found = True
if not _t_found: # 5) nothing else to try...
raise ValueError("Failed to locate the template file {t!r}.".format(**vars()))
return os.path.realpath(t) | def function[_get_template, parameter[t]]:
constant[Return a single template *t*.]
if call[name[os].path.exists, parameter[name[t]]] begin[:]
pass
return[call[name[os].path.realpath, parameter[name[t]]]] | keyword[def] identifier[_get_template] ( identifier[t] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[t] ):
keyword[pass]
keyword[else] :
identifier[_t] = identifier[t]
identifier[_t_found] = keyword[False]
keyword[for] identifier[d] keyword[in] identifier[path] :
identifier[p] = identifier[os] . identifier[path] . identifier[join] ( identifier[d] , identifier[_t] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[p] ):
identifier[t] = identifier[p]
identifier[_t_found] = keyword[True]
keyword[break]
identifier[_t] = identifier[os] . identifier[path] . identifier[basename] ( identifier[t] )
keyword[if] keyword[not] identifier[_t_found] :
keyword[for] identifier[p] keyword[in] identifier[templates] . identifier[values] ():
keyword[if] identifier[_t] == identifier[os] . identifier[path] . identifier[basename] ( identifier[p] ):
identifier[t] = identifier[p]
identifier[_t_found] = keyword[True]
keyword[break]
keyword[if] keyword[not] identifier[_t_found] :
keyword[try] :
identifier[t] = identifier[templates] [ identifier[t] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[else] :
identifier[_t_found] = keyword[True]
keyword[if] keyword[not] identifier[_t_found] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (** identifier[vars] ()))
keyword[return] identifier[os] . identifier[path] . identifier[realpath] ( identifier[t] ) | def _get_template(t):
"""Return a single template *t*."""
if os.path.exists(t): # 1) Is it an accessible file?
pass # depends on [control=['if'], data=[]]
else:
_t = t
_t_found = False
for d in path: # 2) search config.path
p = os.path.join(d, _t)
if os.path.exists(p):
t = p
_t_found = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
_t = os.path.basename(t)
if not _t_found: # 3) try template dirs
for p in templates.values():
if _t == os.path.basename(p):
t = p
_t_found = True # NOTE: in principle this could match multiple
break # times if more than one template dir existed. # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]]
if not _t_found: # 4) try it as a key into templates
try:
t = templates[t] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
else:
_t_found = True # depends on [control=['if'], data=[]]
if not _t_found: # 5) nothing else to try...
raise ValueError('Failed to locate the template file {t!r}.'.format(**vars())) # depends on [control=['if'], data=[]]
return os.path.realpath(t) |
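The lookup above cascades through four sources: a direct path, the directories in a module-level `path` list, a basename match against a module-level `templates` mapping, and finally the name as a key into that mapping. A self-contained sketch of the same cascade, with both globals passed in explicitly (an assumption made here for testability):

```python
import os

def find_template(name, search_path, templates):
    # Same four-step cascade as _get_template, with the globals injected.
    if os.path.exists(name):                       # 1) accessible file
        return os.path.realpath(name)
    for d in search_path:                          # 2) configured directories
        candidate = os.path.join(d, name)
        if os.path.exists(candidate):
            return os.path.realpath(candidate)
    base = os.path.basename(name)
    for p in templates.values():                   # 3) basename of a known template
        if base == os.path.basename(p):
            return os.path.realpath(p)
    if name in templates:                          # 4) key into the mapping
        return os.path.realpath(templates[name])
    raise ValueError("Failed to locate the template file {!r}.".format(name))
```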
def halt(self):
"""
halt: None -> None
If this instance has a separate thread running, it will be
halted. This method will wait until the thread has cleaned
up before returning.
"""
if self._callback:
self._thread_continue = False
self._thread.join() | def function[halt, parameter[self]]:
constant[
halt: None -> None
If this instance has a separate thread running, it will be
halted. This method will wait until the thread has cleaned
up before returning.
]
if name[self]._callback begin[:]
name[self]._thread_continue assign[=] constant[False]
call[name[self]._thread.join, parameter[]] | keyword[def] identifier[halt] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_callback] :
identifier[self] . identifier[_thread_continue] = keyword[False]
identifier[self] . identifier[_thread] . identifier[join] () | def halt(self):
"""
halt: None -> None
If this instance has a separate thread running, it will be
halted. This method will wait until the thread has cleaned
up before returning.
"""
if self._callback:
self._thread_continue = False
self._thread.join() # depends on [control=['if'], data=[]] |
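halt() is the classic stop-flag shutdown: flip a flag the worker loop checks, then join() so the caller blocks until cleanup finishes. A minimal runnable version of the pattern; the names are illustrative, and threading.Event replaces the bare boolean used above because it is explicitly thread-safe.

```python
import threading
import time

class Worker:
    def __init__(self):
        self._stop = threading.Event()
        self._thread = threading.Thread(target=self._run)
        self._thread.start()

    def _run(self):
        while not self._stop.is_set():  # loop re-checks the flag each pass
            time.sleep(0.01)            # stand-in for real work

    def halt(self):
        self._stop.set()      # ask the thread to finish...
        self._thread.join()   # ...then wait until it has cleaned up

w = Worker()
w.halt()
```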
def interpret(self, startpos, args, addr=None, simfd=None):
"""
implement scanf - extract formatted data from memory or a file according to the stored format
specifiers and store them into the pointers extracted from `args`.
:param startpos: The index of the first argument corresponding to the first format element
:param args: A function which, given the index of an argument to the function, returns that argument
:param addr: The address in the memory to extract data from, or...
:param simfd: A file descriptor to use for reading data from
:return: The number of arguments parsed
"""
if simfd is not None and isinstance(simfd.read_storage, SimPackets):
argnum = startpos
for component in self.components:
if type(component) is bytes:
sdata, _ = simfd.read_data(len(component), short_reads=False)
self.state.solver.add(sdata == component)
elif isinstance(component, claripy.Bits):
sdata, _ = simfd.read_data(len(component) // 8, short_reads=False)
self.state.solver.add(sdata == component)
elif component.spec_type == b's':
if component.length_spec is None:
sdata, slen = simfd.read_data(self.state.libc.buf_symbolic_bytes)
else:
sdata, slen = simfd.read_data(component.length_spec)
for byte in sdata.chop(8):
self.state.solver.add(claripy.And(*[byte != char for char in self.SCANF_DELIMITERS]))
self.state.memory.store(args(argnum), sdata, size=slen)
self.state.memory.store(args(argnum) + slen, claripy.BVV(0, 8))
argnum += 1
elif component.spec_type == b'c':
sdata, _ = simfd.read_data(1, short_reads=False)
self.state.memory.store(args(argnum), sdata)
argnum += 1
else:
bits = component.size * 8
if component.spec_type == b'x':
base = 16
elif component.spec_type == b'o':
base = 8
else:
base = 10
# here's the variable representing the result of the parsing
target_variable = self.state.solver.BVS('scanf_' + component.string.decode(), bits,
key=('api', 'scanf', argnum - startpos, component.string))
negative = claripy.SLT(target_variable, 0)
# how many digits does it take to represent this variable fully?
max_digits = int(math.ceil(math.log(2**bits, base)))
# how many digits does the format specify?
spec_digits = component.length_spec
# how many bits can we specify as input?
available_bits = float('inf') if spec_digits is None else spec_digits * math.log(base, 2)
not_enough_bits = available_bits < bits
# how many digits will we model this input as?
digits = max_digits if spec_digits is None else spec_digits
# constrain target variable range explicitly if it can't take on all possible values
if not_enough_bits:
self.state.solver.add(self.state.solver.And(
self.state.solver.SLE(target_variable, (base**digits) - 1),
self.state.solver.SGE(target_variable, -(base**(digits - 1) - 1))))
# perform the parsing in reverse - constrain the input digits to be the string version of the input
# this only works because we're reading from a packet stream and therefore nobody has the ability
# to add other constraints to this data!
# this makes z3's job EXTREMELY easy
sdata, _ = simfd.read_data(digits, short_reads=False)
for i, digit in enumerate(reversed(sdata.chop(8))):
digit_value = (target_variable // (base**i)) % base
digit_ascii = digit_value + ord('0')
if base > 10:
digit_ascii = claripy.If(digit_value >= 10, digit_value + (-10 + ord('a')), digit_ascii)
# if there aren't enough bits, we can increase the range by accounting for the possibility that
# the first digit is a minus sign
if not_enough_bits:
if i == digits - 1:
neg_digit_ascii = ord('-')
else:
neg_digit_value = (-target_variable // (base**i)) % base
neg_digit_ascii = neg_digit_value + ord('0')
if base > 10:
neg_digit_ascii = claripy.If(neg_digit_value >= 10, neg_digit_value + (-10 + ord('a')), neg_digit_ascii)
digit_ascii = claripy.If(negative, neg_digit_ascii, digit_ascii)
self.state.solver.add(digit == digit_ascii[7:0])
self.state.memory.store(args(argnum), target_variable, endness=self.state.arch.memory_endness)
argnum += 1
return argnum - startpos
# TODO: we only support one format specifier in interpretation for now
format_specifier_count = sum(1 for x in self.components if isinstance(x, FormatSpecifier))
if format_specifier_count > 1:
        l.warning("We don't support more than one format specifier in format strings.")
if simfd is not None:
region = simfd.read_storage
addr = simfd._pos if hasattr(simfd, '_pos') else simfd._read_pos # XXX THIS IS BAD
else:
region = self.parser.state.memory
bits = self.parser.state.arch.bits
failed = self.parser.state.solver.BVV(0, bits)
argpos = startpos
position = addr
for component in self.components:
if isinstance(component, bytes):
# TODO we skip non-format-specifiers in format string interpretation for now
# if the region doesn't match the concrete component, we need to return immediately
pass
else:
fmt_spec = component
try:
dest = args(argpos)
except SimProcedureArgumentError:
dest = None
if fmt_spec.spec_type == b's':
# set some limits for the find
max_str_len = self.parser.state.libc.max_str_len
max_sym_bytes = self.parser.state.libc.buf_symbolic_bytes
# has the length of the format been limited by the string itself?
if fmt_spec.length_spec is not None:
max_str_len = fmt_spec.length_spec
max_sym_bytes = fmt_spec.length_spec
# TODO: look for limits on other characters which scanf is sensitive to, '\x00', '\x20'
ohr, ohc, ohi = region.find(position, self.parser.state.solver.BVV(b'\n'), max_str_len, max_symbolic_bytes=max_sym_bytes)
                # if no newline is found, mm is position + max_str_len
# If-branch will really only happen for format specifiers with a length
mm = self.parser.state.solver.If(ohr == 0, position + max_str_len, ohr)
# we're just going to concretize the length, load will do this anyways
length = self.parser.state.solver.max_int(mm - position)
src_str = region.load(position, length)
# TODO all of these should be delimiters we search for above
# add that the contents of the string cannot be any scanf %s string delimiters
for delimiter in set(FormatString.SCANF_DELIMITERS):
delim_bvv = self.parser.state.solver.BVV(delimiter)
for i in range(length):
self.parser.state.add_constraints(region.load(position + i, 1) != delim_bvv)
# write it out to the pointer
self.parser.state.memory.store(dest, src_str)
# store the terminating null byte
self.parser.state.memory.store(dest + length, self.parser.state.solver.BVV(0, 8))
position += length
else:
# XXX: atoi only supports strings of one byte
if fmt_spec.spec_type in [b'd', b'i', b'u', b'x']:
base = 16 if fmt_spec.spec_type == b'x' else 10
status, i, num_bytes = self.parser._sim_atoi_inner(position, region, base=base, read_length=fmt_spec.length_spec)
# increase failed count if we were unable to parse it
failed = self.parser.state.solver.If(status, failed, failed + 1)
position += num_bytes
elif fmt_spec.spec_type == b'c':
i = region.load(position, 1)
i = i.zero_extend(bits - 8)
position += 1
else:
raise SimProcedureError("unsupported format spec '%s' in interpret" % fmt_spec.spec_type)
i = self.parser.state.solver.Extract(fmt_spec.size*8-1, 0, i)
self.parser.state.memory.store(dest, i, size=fmt_spec.size, endness=self.parser.state.arch.memory_endness)
argpos += 1
if simfd is not None:
simfd.read_data(position - addr)
return (argpos - startpos) - failed | def function[interpret, parameter[self, startpos, args, addr, simfd]]:
constant[
implement scanf - extract formatted data from memory or a file according to the stored format
specifiers and store them into the pointers extracted from `args`.
:param startpos: The index of the first argument corresponding to the first format element
:param args: A function which, given the index of an argument to the function, returns that argument
:param addr: The address in the memory to extract data from, or...
:param simfd: A file descriptor to use for reading data from
:return: The number of arguments parsed
]
if <ast.BoolOp object at 0x7da18f00f2e0> begin[:]
variable[argnum] assign[=] name[startpos]
for taget[name[component]] in starred[name[self].components] begin[:]
if compare[call[name[type], parameter[name[component]]] is name[bytes]] begin[:]
<ast.Tuple object at 0x7da18f00c0a0> assign[=] call[name[simfd].read_data, parameter[call[name[len], parameter[name[component]]]]]
call[name[self].state.solver.add, parameter[compare[name[sdata] equal[==] name[component]]]]
return[binary_operation[name[argnum] - name[startpos]]]
variable[format_specifier_count] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da20c6c5ed0>]]
if compare[name[format_specifier_count] greater[>] constant[1]] begin[:]
call[name[l].warning, parameter[constant[We don't support more than one format specifiers in format strings.]]]
if compare[name[simfd] is_not constant[None]] begin[:]
variable[region] assign[=] name[simfd].read_storage
variable[addr] assign[=] <ast.IfExp object at 0x7da20c6c4970>
variable[bits] assign[=] name[self].parser.state.arch.bits
variable[failed] assign[=] call[name[self].parser.state.solver.BVV, parameter[constant[0], name[bits]]]
variable[argpos] assign[=] name[startpos]
variable[position] assign[=] name[addr]
for taget[name[component]] in starred[name[self].components] begin[:]
if call[name[isinstance], parameter[name[component], name[bytes]]] begin[:]
pass
if compare[name[simfd] is_not constant[None]] begin[:]
call[name[simfd].read_data, parameter[binary_operation[name[position] - name[addr]]]]
return[binary_operation[binary_operation[name[argpos] - name[startpos]] - name[failed]]] | keyword[def] identifier[interpret] ( identifier[self] , identifier[startpos] , identifier[args] , identifier[addr] = keyword[None] , identifier[simfd] = keyword[None] ):
literal[string]
keyword[if] identifier[simfd] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[simfd] . identifier[read_storage] , identifier[SimPackets] ):
identifier[argnum] = identifier[startpos]
keyword[for] identifier[component] keyword[in] identifier[self] . identifier[components] :
keyword[if] identifier[type] ( identifier[component] ) keyword[is] identifier[bytes] :
identifier[sdata] , identifier[_] = identifier[simfd] . identifier[read_data] ( identifier[len] ( identifier[component] ), identifier[short_reads] = keyword[False] )
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[sdata] == identifier[component] )
keyword[elif] identifier[isinstance] ( identifier[component] , identifier[claripy] . identifier[Bits] ):
identifier[sdata] , identifier[_] = identifier[simfd] . identifier[read_data] ( identifier[len] ( identifier[component] )// literal[int] , identifier[short_reads] = keyword[False] )
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[sdata] == identifier[component] )
keyword[elif] identifier[component] . identifier[spec_type] == literal[string] :
keyword[if] identifier[component] . identifier[length_spec] keyword[is] keyword[None] :
identifier[sdata] , identifier[slen] = identifier[simfd] . identifier[read_data] ( identifier[self] . identifier[state] . identifier[libc] . identifier[buf_symbolic_bytes] )
keyword[else] :
identifier[sdata] , identifier[slen] = identifier[simfd] . identifier[read_data] ( identifier[component] . identifier[length_spec] )
keyword[for] identifier[byte] keyword[in] identifier[sdata] . identifier[chop] ( literal[int] ):
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[claripy] . identifier[And] (*[ identifier[byte] != identifier[char] keyword[for] identifier[char] keyword[in] identifier[self] . identifier[SCANF_DELIMITERS] ]))
identifier[self] . identifier[state] . identifier[memory] . identifier[store] ( identifier[args] ( identifier[argnum] ), identifier[sdata] , identifier[size] = identifier[slen] )
identifier[self] . identifier[state] . identifier[memory] . identifier[store] ( identifier[args] ( identifier[argnum] )+ identifier[slen] , identifier[claripy] . identifier[BVV] ( literal[int] , literal[int] ))
identifier[argnum] += literal[int]
keyword[elif] identifier[component] . identifier[spec_type] == literal[string] :
identifier[sdata] , identifier[_] = identifier[simfd] . identifier[read_data] ( literal[int] , identifier[short_reads] = keyword[False] )
identifier[self] . identifier[state] . identifier[memory] . identifier[store] ( identifier[args] ( identifier[argnum] ), identifier[sdata] )
identifier[argnum] += literal[int]
keyword[else] :
identifier[bits] = identifier[component] . identifier[size] * literal[int]
keyword[if] identifier[component] . identifier[spec_type] == literal[string] :
identifier[base] = literal[int]
keyword[elif] identifier[component] . identifier[spec_type] == literal[string] :
identifier[base] = literal[int]
keyword[else] :
identifier[base] = literal[int]
identifier[target_variable] = identifier[self] . identifier[state] . identifier[solver] . identifier[BVS] ( literal[string] + identifier[component] . identifier[string] . identifier[decode] (), identifier[bits] ,
identifier[key] =( literal[string] , literal[string] , identifier[argnum] - identifier[startpos] , identifier[component] . identifier[string] ))
identifier[negative] = identifier[claripy] . identifier[SLT] ( identifier[target_variable] , literal[int] )
identifier[max_digits] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[math] . identifier[log] ( literal[int] ** identifier[bits] , identifier[base] )))
identifier[spec_digits] = identifier[component] . identifier[length_spec]
identifier[available_bits] = identifier[float] ( literal[string] ) keyword[if] identifier[spec_digits] keyword[is] keyword[None] keyword[else] identifier[spec_digits] * identifier[math] . identifier[log] ( identifier[base] , literal[int] )
identifier[not_enough_bits] = identifier[available_bits] < identifier[bits]
identifier[digits] = identifier[max_digits] keyword[if] identifier[spec_digits] keyword[is] keyword[None] keyword[else] identifier[spec_digits]
keyword[if] identifier[not_enough_bits] :
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[self] . identifier[state] . identifier[solver] . identifier[And] (
identifier[self] . identifier[state] . identifier[solver] . identifier[SLE] ( identifier[target_variable] ,( identifier[base] ** identifier[digits] )- literal[int] ),
identifier[self] . identifier[state] . identifier[solver] . identifier[SGE] ( identifier[target_variable] ,-( identifier[base] **( identifier[digits] - literal[int] )- literal[int] ))))
identifier[sdata] , identifier[_] = identifier[simfd] . identifier[read_data] ( identifier[digits] , identifier[short_reads] = keyword[False] )
keyword[for] identifier[i] , identifier[digit] keyword[in] identifier[enumerate] ( identifier[reversed] ( identifier[sdata] . identifier[chop] ( literal[int] ))):
identifier[digit_value] =( identifier[target_variable] //( identifier[base] ** identifier[i] ))% identifier[base]
identifier[digit_ascii] = identifier[digit_value] + identifier[ord] ( literal[string] )
keyword[if] identifier[base] > literal[int] :
identifier[digit_ascii] = identifier[claripy] . identifier[If] ( identifier[digit_value] >= literal[int] , identifier[digit_value] +(- literal[int] + identifier[ord] ( literal[string] )), identifier[digit_ascii] )
keyword[if] identifier[not_enough_bits] :
keyword[if] identifier[i] == identifier[digits] - literal[int] :
identifier[neg_digit_ascii] = identifier[ord] ( literal[string] )
keyword[else] :
identifier[neg_digit_value] =(- identifier[target_variable] //( identifier[base] ** identifier[i] ))% identifier[base]
identifier[neg_digit_ascii] = identifier[neg_digit_value] + identifier[ord] ( literal[string] )
keyword[if] identifier[base] > literal[int] :
identifier[neg_digit_ascii] = identifier[claripy] . identifier[If] ( identifier[neg_digit_value] >= literal[int] , identifier[neg_digit_value] +(- literal[int] + identifier[ord] ( literal[string] )), identifier[neg_digit_ascii] )
identifier[digit_ascii] = identifier[claripy] . identifier[If] ( identifier[negative] , identifier[neg_digit_ascii] , identifier[digit_ascii] )
identifier[self] . identifier[state] . identifier[solver] . identifier[add] ( identifier[digit] == identifier[digit_ascii] [ literal[int] : literal[int] ])
identifier[self] . identifier[state] . identifier[memory] . identifier[store] ( identifier[args] ( identifier[argnum] ), identifier[target_variable] , identifier[endness] = identifier[self] . identifier[state] . identifier[arch] . identifier[memory_endness] )
identifier[argnum] += literal[int]
keyword[return] identifier[argnum] - identifier[startpos]
identifier[format_specifier_count] = identifier[sum] ( literal[int] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[components] keyword[if] identifier[isinstance] ( identifier[x] , identifier[FormatSpecifier] ))
keyword[if] identifier[format_specifier_count] > literal[int] :
identifier[l] . identifier[warning] ( literal[string] )
keyword[if] identifier[simfd] keyword[is] keyword[not] keyword[None] :
identifier[region] = identifier[simfd] . identifier[read_storage]
identifier[addr] = identifier[simfd] . identifier[_pos] keyword[if] identifier[hasattr] ( identifier[simfd] , literal[string] ) keyword[else] identifier[simfd] . identifier[_read_pos]
keyword[else] :
identifier[region] = identifier[self] . identifier[parser] . identifier[state] . identifier[memory]
identifier[bits] = identifier[self] . identifier[parser] . identifier[state] . identifier[arch] . identifier[bits]
identifier[failed] = identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[BVV] ( literal[int] , identifier[bits] )
identifier[argpos] = identifier[startpos]
identifier[position] = identifier[addr]
keyword[for] identifier[component] keyword[in] identifier[self] . identifier[components] :
keyword[if] identifier[isinstance] ( identifier[component] , identifier[bytes] ):
keyword[pass]
keyword[else] :
identifier[fmt_spec] = identifier[component]
keyword[try] :
identifier[dest] = identifier[args] ( identifier[argpos] )
keyword[except] identifier[SimProcedureArgumentError] :
identifier[dest] = keyword[None]
keyword[if] identifier[fmt_spec] . identifier[spec_type] == literal[string] :
identifier[max_str_len] = identifier[self] . identifier[parser] . identifier[state] . identifier[libc] . identifier[max_str_len]
identifier[max_sym_bytes] = identifier[self] . identifier[parser] . identifier[state] . identifier[libc] . identifier[buf_symbolic_bytes]
keyword[if] identifier[fmt_spec] . identifier[length_spec] keyword[is] keyword[not] keyword[None] :
identifier[max_str_len] = identifier[fmt_spec] . identifier[length_spec]
identifier[max_sym_bytes] = identifier[fmt_spec] . identifier[length_spec]
identifier[ohr] , identifier[ohc] , identifier[ohi] = identifier[region] . identifier[find] ( identifier[position] , identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[BVV] ( literal[string] ), identifier[max_str_len] , identifier[max_symbolic_bytes] = identifier[max_sym_bytes] )
identifier[mm] = identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[If] ( identifier[ohr] == literal[int] , identifier[position] + identifier[max_str_len] , identifier[ohr] )
identifier[length] = identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[max_int] ( identifier[mm] - identifier[position] )
identifier[src_str] = identifier[region] . identifier[load] ( identifier[position] , identifier[length] )
keyword[for] identifier[delimiter] keyword[in] identifier[set] ( identifier[FormatString] . identifier[SCANF_DELIMITERS] ):
identifier[delim_bvv] = identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[BVV] ( identifier[delimiter] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[length] ):
identifier[self] . identifier[parser] . identifier[state] . identifier[add_constraints] ( identifier[region] . identifier[load] ( identifier[position] + identifier[i] , literal[int] )!= identifier[delim_bvv] )
identifier[self] . identifier[parser] . identifier[state] . identifier[memory] . identifier[store] ( identifier[dest] , identifier[src_str] )
identifier[self] . identifier[parser] . identifier[state] . identifier[memory] . identifier[store] ( identifier[dest] + identifier[length] , identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[BVV] ( literal[int] , literal[int] ))
identifier[position] += identifier[length]
keyword[else] :
keyword[if] identifier[fmt_spec] . identifier[spec_type] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[base] = literal[int] keyword[if] identifier[fmt_spec] . identifier[spec_type] == literal[string] keyword[else] literal[int]
identifier[status] , identifier[i] , identifier[num_bytes] = identifier[self] . identifier[parser] . identifier[_sim_atoi_inner] ( identifier[position] , identifier[region] , identifier[base] = identifier[base] , identifier[read_length] = identifier[fmt_spec] . identifier[length_spec] )
identifier[failed] = identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[If] ( identifier[status] , identifier[failed] , identifier[failed] + literal[int] )
identifier[position] += identifier[num_bytes]
keyword[elif] identifier[fmt_spec] . identifier[spec_type] == literal[string] :
identifier[i] = identifier[region] . identifier[load] ( identifier[position] , literal[int] )
identifier[i] = identifier[i] . identifier[zero_extend] ( identifier[bits] - literal[int] )
identifier[position] += literal[int]
keyword[else] :
keyword[raise] identifier[SimProcedureError] ( literal[string] % identifier[fmt_spec] . identifier[spec_type] )
identifier[i] = identifier[self] . identifier[parser] . identifier[state] . identifier[solver] . identifier[Extract] ( identifier[fmt_spec] . identifier[size] * literal[int] - literal[int] , literal[int] , identifier[i] )
identifier[self] . identifier[parser] . identifier[state] . identifier[memory] . identifier[store] ( identifier[dest] , identifier[i] , identifier[size] = identifier[fmt_spec] . identifier[size] , identifier[endness] = identifier[self] . identifier[parser] . identifier[state] . identifier[arch] . identifier[memory_endness] )
identifier[argpos] += literal[int]
keyword[if] identifier[simfd] keyword[is] keyword[not] keyword[None] :
identifier[simfd] . identifier[read_data] ( identifier[position] - identifier[addr] )
keyword[return] ( identifier[argpos] - identifier[startpos] )- identifier[failed] | def interpret(self, startpos, args, addr=None, simfd=None):
"""
implement scanf - extract formatted data from memory or a file according to the stored format
specifiers and store them into the pointers extracted from `args`.
:param startpos: The index of the first argument corresponding to the first format element
:param args: A function which, given the index of an argument to the function, returns that argument
:param addr: The address in the memory to extract data from, or...
:param simfd: A file descriptor to use for reading data from
:return: The number of arguments parsed
"""
if simfd is not None and isinstance(simfd.read_storage, SimPackets):
argnum = startpos
for component in self.components:
if type(component) is bytes:
(sdata, _) = simfd.read_data(len(component), short_reads=False)
self.state.solver.add(sdata == component) # depends on [control=['if'], data=[]]
elif isinstance(component, claripy.Bits):
(sdata, _) = simfd.read_data(len(component) // 8, short_reads=False)
self.state.solver.add(sdata == component) # depends on [control=['if'], data=[]]
elif component.spec_type == b's':
if component.length_spec is None:
(sdata, slen) = simfd.read_data(self.state.libc.buf_symbolic_bytes) # depends on [control=['if'], data=[]]
else:
(sdata, slen) = simfd.read_data(component.length_spec)
for byte in sdata.chop(8):
self.state.solver.add(claripy.And(*[byte != char for char in self.SCANF_DELIMITERS])) # depends on [control=['for'], data=['byte']]
self.state.memory.store(args(argnum), sdata, size=slen)
self.state.memory.store(args(argnum) + slen, claripy.BVV(0, 8))
argnum += 1 # depends on [control=['if'], data=[]]
elif component.spec_type == b'c':
(sdata, _) = simfd.read_data(1, short_reads=False)
self.state.memory.store(args(argnum), sdata)
argnum += 1 # depends on [control=['if'], data=[]]
else:
bits = component.size * 8
if component.spec_type == b'x':
base = 16 # depends on [control=['if'], data=[]]
elif component.spec_type == b'o':
base = 8 # depends on [control=['if'], data=[]]
else:
base = 10
# here's the variable representing the result of the parsing
target_variable = self.state.solver.BVS('scanf_' + component.string.decode(), bits, key=('api', 'scanf', argnum - startpos, component.string))
negative = claripy.SLT(target_variable, 0)
# how many digits does it take to represent this variable fully?
max_digits = int(math.ceil(math.log(2 ** bits, base)))
# how many digits does the format specify?
spec_digits = component.length_spec
# how many bits can we specify as input?
available_bits = float('inf') if spec_digits is None else spec_digits * math.log(base, 2)
not_enough_bits = available_bits < bits
# how many digits will we model this input as?
digits = max_digits if spec_digits is None else spec_digits
# constrain target variable range explicitly if it can't take on all possible values
if not_enough_bits:
self.state.solver.add(self.state.solver.And(self.state.solver.SLE(target_variable, base ** digits - 1), self.state.solver.SGE(target_variable, -(base ** (digits - 1) - 1)))) # depends on [control=['if'], data=[]]
# perform the parsing in reverse - constrain the input digits to be the string version of the input
# this only works because we're reading from a packet stream and therefore nobody has the ability
# to add other constraints to this data!
# this makes z3's job EXTREMELY easy
(sdata, _) = simfd.read_data(digits, short_reads=False)
for (i, digit) in enumerate(reversed(sdata.chop(8))):
digit_value = target_variable // base ** i % base
digit_ascii = digit_value + ord('0')
if base > 10:
digit_ascii = claripy.If(digit_value >= 10, digit_value + (-10 + ord('a')), digit_ascii) # depends on [control=['if'], data=[]]
# if there aren't enough bits, we can increase the range by accounting for the possibility that
# the first digit is a minus sign
if not_enough_bits:
if i == digits - 1:
neg_digit_ascii = ord('-') # depends on [control=['if'], data=[]]
else:
neg_digit_value = -target_variable // base ** i % base
neg_digit_ascii = neg_digit_value + ord('0')
if base > 10:
neg_digit_ascii = claripy.If(neg_digit_value >= 10, neg_digit_value + (-10 + ord('a')), neg_digit_ascii) # depends on [control=['if'], data=[]]
digit_ascii = claripy.If(negative, neg_digit_ascii, digit_ascii) # depends on [control=['if'], data=[]]
self.state.solver.add(digit == digit_ascii[7:0]) # depends on [control=['for'], data=[]]
self.state.memory.store(args(argnum), target_variable, endness=self.state.arch.memory_endness)
argnum += 1 # depends on [control=['for'], data=['component']]
return argnum - startpos # depends on [control=['if'], data=[]]
# TODO: we only support one format specifier in interpretation for now
format_specifier_count = sum((1 for x in self.components if isinstance(x, FormatSpecifier)))
if format_specifier_count > 1:
        l.warning("We don't support more than one format specifier in format strings.") # depends on [control=['if'], data=[]]
if simfd is not None:
region = simfd.read_storage
addr = simfd._pos if hasattr(simfd, '_pos') else simfd._read_pos # XXX THIS IS BAD # depends on [control=['if'], data=['simfd']]
else:
region = self.parser.state.memory
bits = self.parser.state.arch.bits
failed = self.parser.state.solver.BVV(0, bits)
argpos = startpos
position = addr
for component in self.components:
if isinstance(component, bytes):
# TODO we skip non-format-specifiers in format string interpretation for now
# if the region doesn't match the concrete component, we need to return immediately
pass # depends on [control=['if'], data=[]]
else:
fmt_spec = component
try:
dest = args(argpos) # depends on [control=['try'], data=[]]
except SimProcedureArgumentError:
dest = None # depends on [control=['except'], data=[]]
if fmt_spec.spec_type == b's':
# set some limits for the find
max_str_len = self.parser.state.libc.max_str_len
max_sym_bytes = self.parser.state.libc.buf_symbolic_bytes
# has the length of the format been limited by the string itself?
if fmt_spec.length_spec is not None:
max_str_len = fmt_spec.length_spec
max_sym_bytes = fmt_spec.length_spec # depends on [control=['if'], data=[]]
# TODO: look for limits on other characters which scanf is sensitive to, '\x00', '\x20'
(ohr, ohc, ohi) = region.find(position, self.parser.state.solver.BVV(b'\n'), max_str_len, max_symbolic_bytes=max_sym_bytes)
                # if no newline is found, mm is position + max_str_len
# If-branch will really only happen for format specifiers with a length
mm = self.parser.state.solver.If(ohr == 0, position + max_str_len, ohr)
# we're just going to concretize the length, load will do this anyways
length = self.parser.state.solver.max_int(mm - position)
src_str = region.load(position, length)
# TODO all of these should be delimiters we search for above
# add that the contents of the string cannot be any scanf %s string delimiters
for delimiter in set(FormatString.SCANF_DELIMITERS):
delim_bvv = self.parser.state.solver.BVV(delimiter)
for i in range(length):
self.parser.state.add_constraints(region.load(position + i, 1) != delim_bvv) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['delimiter']]
# write it out to the pointer
self.parser.state.memory.store(dest, src_str)
# store the terminating null byte
self.parser.state.memory.store(dest + length, self.parser.state.solver.BVV(0, 8))
position += length # depends on [control=['if'], data=[]]
else:
# XXX: atoi only supports strings of one byte
if fmt_spec.spec_type in [b'd', b'i', b'u', b'x']:
base = 16 if fmt_spec.spec_type == b'x' else 10
(status, i, num_bytes) = self.parser._sim_atoi_inner(position, region, base=base, read_length=fmt_spec.length_spec)
# increase failed count if we were unable to parse it
failed = self.parser.state.solver.If(status, failed, failed + 1)
position += num_bytes # depends on [control=['if'], data=[]]
elif fmt_spec.spec_type == b'c':
i = region.load(position, 1)
i = i.zero_extend(bits - 8)
position += 1 # depends on [control=['if'], data=[]]
else:
raise SimProcedureError("unsupported format spec '%s' in interpret" % fmt_spec.spec_type)
i = self.parser.state.solver.Extract(fmt_spec.size * 8 - 1, 0, i)
self.parser.state.memory.store(dest, i, size=fmt_spec.size, endness=self.parser.state.arch.memory_endness)
argpos += 1 # depends on [control=['for'], data=['component']]
if simfd is not None:
simfd.read_data(position - addr) # depends on [control=['if'], data=['simfd']]
return argpos - startpos - failed |
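The notable trick in the SimPackets branch above is parsing an integer in reverse: rather than converting input digits to a value, it introduces a fresh symbolic value and constrains input byte i (counted from the least significant end) to equal the ASCII form of (value // base**i) % base. A concrete, non-symbolic version of that digit decomposition shows the arithmetic being relied on:

```python
def digits_of(value, base, width):
    # Digit i, counted from the least significant end, is
    # (value // base**i) % base: the exact expression the symbolic
    # parser constrains each input byte against.
    return [(value // base**i) % base for i in range(width)]

assert digits_of(1234, 10, 4) == [4, 3, 2, 1]  # '1234' read right to left
assert ''.join(str(d) for d in reversed(digits_of(1234, 10, 4))) == '1234'
```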
def remove_rm_na(dv=None, within=None, subject=None, data=None,
aggregate='mean'):
"""Remove missing values in long-format repeated-measures dataframe.
Parameters
----------
dv : string or list
Dependent variable(s), from which the missing values should be removed.
If ``dv`` is not specified, all the columns in the dataframe are
considered. ``dv`` must be numeric.
within : string or list
Within-subject factor(s).
subject : string
Subject identifier.
data : dataframe
Long-format dataframe.
aggregate : string
Aggregation method if there are more within-factors in the data than
specified in the ``within`` argument. Can be `mean`, `median`, `sum`,
`first`, `last`, or any other function accepted by
:py:meth:`pandas.DataFrame.groupby`.
Returns
-------
data : dataframe
Dataframe without the missing values.
Notes
-----
If multiple factors are specified, the missing values are removed on the
last factor, so the order of ``within`` is important.
In addition, if there are more within-factors in the data than specified in
the ``within`` argument, data will be aggregated using the function
specified in ``aggregate``. Note that in the default case (aggregation
using the mean), all the non-numeric column(s) will be dropped.
"""
# Safety checks
assert isinstance(aggregate, str), 'aggregate must be a str.'
assert isinstance(within, (str, list)), 'within must be str or list.'
assert isinstance(subject, str), 'subject must be a string.'
assert isinstance(data, pd.DataFrame), 'Data must be a DataFrame.'
idx_cols = _flatten_list([subject, within])
all_cols = data.columns
if data[idx_cols].isnull().any().any():
raise ValueError("NaN are present in the within-factors or in the "
"subject column. Please remove them manually.")
# Check if more within-factors are present and if so, aggregate
if (data.groupby(idx_cols).count() > 1).any().any():
# Make sure that we keep the non-numeric columns when aggregating
# This is disabled by default to avoid any confusion.
# all_others = all_cols.difference(idx_cols)
# all_num = data[all_others].select_dtypes(include='number').columns
# agg = {c: aggregate if c in all_num else 'first' for c in all_others}
data = data.groupby(idx_cols).agg(aggregate)
else:
# Set subject + within factors as index.
        # Sorting is done to avoid a performance warning when dropping.
data = data.set_index(idx_cols).sort_index()
# Find index with missing values
if dv is None:
iloc_nan = data.isnull().values.nonzero()[0]
else:
iloc_nan = data[dv].isnull().values.nonzero()[0]
# Drop the last within level
idx_nan = data.index[iloc_nan].droplevel(-1)
# Drop and re-order
data = data.drop(idx_nan).reset_index(drop=False)
return data.reindex(columns=all_cols).dropna(how='all', axis=1) | def function[remove_rm_na, parameter[dv, within, subject, data, aggregate]]:
constant[Remove missing values in long-format repeated-measures dataframe.
Parameters
----------
dv : string or list
Dependent variable(s), from which the missing values should be removed.
If ``dv`` is not specified, all the columns in the dataframe are
considered. ``dv`` must be numeric.
within : string or list
Within-subject factor(s).
subject : string
Subject identifier.
data : dataframe
Long-format dataframe.
aggregate : string
Aggregation method if there are more within-factors in the data than
specified in the ``within`` argument. Can be `mean`, `median`, `sum`,
`first`, `last`, or any other function accepted by
:py:meth:`pandas.DataFrame.groupby`.
Returns
-------
data : dataframe
Dataframe without the missing values.
Notes
-----
If multiple factors are specified, the missing values are removed on the
last factor, so the order of ``within`` is important.
In addition, if there are more within-factors in the data than specified in
the ``within`` argument, data will be aggregated using the function
specified in ``aggregate``. Note that in the default case (aggregation
using the mean), all the non-numeric column(s) will be dropped.
]
assert[call[name[isinstance], parameter[name[aggregate], name[str]]]]
assert[call[name[isinstance], parameter[name[within], tuple[[<ast.Name object at 0x7da18f58f040>, <ast.Name object at 0x7da18f58d840>]]]]]
assert[call[name[isinstance], parameter[name[subject], name[str]]]]
assert[call[name[isinstance], parameter[name[data], name[pd].DataFrame]]]
variable[idx_cols] assign[=] call[name[_flatten_list], parameter[list[[<ast.Name object at 0x7da18f58c7f0>, <ast.Name object at 0x7da18f58cbe0>]]]]
variable[all_cols] assign[=] name[data].columns
if call[call[call[call[name[data]][name[idx_cols]].isnull, parameter[]].any, parameter[]].any, parameter[]] begin[:]
<ast.Raise object at 0x7da18f58d6c0>
if call[call[compare[call[call[name[data].groupby, parameter[name[idx_cols]]].count, parameter[]] greater[>] constant[1]].any, parameter[]].any, parameter[]] begin[:]
variable[data] assign[=] call[call[name[data].groupby, parameter[name[idx_cols]]].agg, parameter[name[aggregate]]]
if compare[name[dv] is constant[None]] begin[:]
variable[iloc_nan] assign[=] call[call[call[name[data].isnull, parameter[]].values.nonzero, parameter[]]][constant[0]]
variable[idx_nan] assign[=] call[call[name[data].index][name[iloc_nan]].droplevel, parameter[<ast.UnaryOp object at 0x7da18f58f8e0>]]
variable[data] assign[=] call[call[name[data].drop, parameter[name[idx_nan]]].reset_index, parameter[]]
return[call[call[name[data].reindex, parameter[]].dropna, parameter[]]] | keyword[def] identifier[remove_rm_na] ( identifier[dv] = keyword[None] , identifier[within] = keyword[None] , identifier[subject] = keyword[None] , identifier[data] = keyword[None] ,
identifier[aggregate] = literal[string] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[aggregate] , identifier[str] ), literal[string]
keyword[assert] identifier[isinstance] ( identifier[within] ,( identifier[str] , identifier[list] )), literal[string]
keyword[assert] identifier[isinstance] ( identifier[subject] , identifier[str] ), literal[string]
keyword[assert] identifier[isinstance] ( identifier[data] , identifier[pd] . identifier[DataFrame] ), literal[string]
identifier[idx_cols] = identifier[_flatten_list] ([ identifier[subject] , identifier[within] ])
identifier[all_cols] = identifier[data] . identifier[columns]
keyword[if] identifier[data] [ identifier[idx_cols] ]. identifier[isnull] (). identifier[any] (). identifier[any] ():
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] ( identifier[data] . identifier[groupby] ( identifier[idx_cols] ). identifier[count] ()> literal[int] ). identifier[any] (). identifier[any] ():
identifier[data] = identifier[data] . identifier[groupby] ( identifier[idx_cols] ). identifier[agg] ( identifier[aggregate] )
keyword[else] :
identifier[data] = identifier[data] . identifier[set_index] ( identifier[idx_cols] ). identifier[sort_index] ()
keyword[if] identifier[dv] keyword[is] keyword[None] :
identifier[iloc_nan] = identifier[data] . identifier[isnull] (). identifier[values] . identifier[nonzero] ()[ literal[int] ]
keyword[else] :
identifier[iloc_nan] = identifier[data] [ identifier[dv] ]. identifier[isnull] (). identifier[values] . identifier[nonzero] ()[ literal[int] ]
identifier[idx_nan] = identifier[data] . identifier[index] [ identifier[iloc_nan] ]. identifier[droplevel] (- literal[int] )
identifier[data] = identifier[data] . identifier[drop] ( identifier[idx_nan] ). identifier[reset_index] ( identifier[drop] = keyword[False] )
keyword[return] identifier[data] . identifier[reindex] ( identifier[columns] = identifier[all_cols] ). identifier[dropna] ( identifier[how] = literal[string] , identifier[axis] = literal[int] ) | def remove_rm_na(dv=None, within=None, subject=None, data=None, aggregate='mean'):
"""Remove missing values in long-format repeated-measures dataframe.
Parameters
----------
dv : string or list
Dependent variable(s), from which the missing values should be removed.
If ``dv`` is not specified, all the columns in the dataframe are
considered. ``dv`` must be numeric.
within : string or list
Within-subject factor(s).
subject : string
Subject identifier.
data : dataframe
Long-format dataframe.
aggregate : string
Aggregation method if there are more within-factors in the data than
specified in the ``within`` argument. Can be `mean`, `median`, `sum`,
`first`, `last`, or any other function accepted by
:py:meth:`pandas.DataFrame.groupby`.
Returns
-------
data : dataframe
Dataframe without the missing values.
Notes
-----
If multiple factors are specified, the missing values are removed on the
last factor, so the order of ``within`` is important.
In addition, if there are more within-factors in the data than specified in
the ``within`` argument, data will be aggregated using the function
specified in ``aggregate``. Note that in the default case (aggregation
using the mean), all the non-numeric column(s) will be dropped.
"""
# Safety checks
assert isinstance(aggregate, str), 'aggregate must be a str.'
assert isinstance(within, (str, list)), 'within must be str or list.'
assert isinstance(subject, str), 'subject must be a string.'
assert isinstance(data, pd.DataFrame), 'Data must be a DataFrame.'
idx_cols = _flatten_list([subject, within])
all_cols = data.columns
if data[idx_cols].isnull().any().any():
raise ValueError('NaN are present in the within-factors or in the subject column. Please remove them manually.') # depends on [control=['if'], data=[]]
# Check if more within-factors are present and if so, aggregate
if (data.groupby(idx_cols).count() > 1).any().any():
# Make sure that we keep the non-numeric columns when aggregating
# This is disabled by default to avoid any confusion.
# all_others = all_cols.difference(idx_cols)
# all_num = data[all_others].select_dtypes(include='number').columns
# agg = {c: aggregate if c in all_num else 'first' for c in all_others}
data = data.groupby(idx_cols).agg(aggregate) # depends on [control=['if'], data=[]]
else:
# Set subject + within factors as index.
# Sorting is done to avoid performance warning when dropping.
data = data.set_index(idx_cols).sort_index()
# Find index with missing values
if dv is None:
iloc_nan = data.isnull().values.nonzero()[0] # depends on [control=['if'], data=[]]
else:
iloc_nan = data[dv].isnull().values.nonzero()[0]
# Drop the last within level
idx_nan = data.index[iloc_nan].droplevel(-1)
# Drop and re-order
data = data.drop(idx_nan).reset_index(drop=False)
return data.reindex(columns=all_cols).dropna(how='all', axis=1) |
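A minimal usage sketch for remove_rm_na above (the pandas/numpy imports and the toy long-format frame are illustrative additions; the function itself is assumed to be in scope):
import numpy as np
import pandas as pd
df = pd.DataFrame({
    'subject': [1, 1, 2, 2],
    'time': ['pre', 'post', 'pre', 'post'],
    'score': [4.0, 5.0, np.nan, 6.0],
})
# Subject 2 has a NaN at time 'pre'; because the last index level is dropped
# before the .drop() call, *both* of subject 2's rows are removed, keeping
# the repeated-measures design balanced.
clean = remove_rm_na(dv='score', within='time', subject='subject', data=df)
print(clean)  # two rows, subject 1 only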
def vat_id(self):
"""
:return: Swiss UID number
"""
def _checksum(digits):
code = ['8', '6', '4', '2', '3', '5', '9', '7']
remainder = 11-(sum(map(lambda x, y: int(x) * int(y), code, digits)) % 11)
if remainder == 10:
return 0
elif remainder == 11:
return 5
return remainder
vat_id = self.bothify('########')
return 'CHE' + vat_id + str(_checksum(vat_id)) | def function[vat_id, parameter[self]]:
constant[
:return: Swiss UID number
]
def function[_checksum, parameter[digits]]:
variable[code] assign[=] list[[<ast.Constant object at 0x7da18dc9ae90>, <ast.Constant object at 0x7da18dc99390>, <ast.Constant object at 0x7da18dc98280>, <ast.Constant object at 0x7da18dc9b4c0>, <ast.Constant object at 0x7da18dc9bd60>, <ast.Constant object at 0x7da18dc9a830>, <ast.Constant object at 0x7da18dc98c10>, <ast.Constant object at 0x7da18dc9afe0>]]
variable[remainder] assign[=] binary_operation[constant[11] - binary_operation[call[name[sum], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da18dc9bbe0>, name[code], name[digits]]]]] <ast.Mod object at 0x7da2590d6920> constant[11]]]
if compare[name[remainder] equal[==] constant[10]] begin[:]
return[constant[0]]
return[name[remainder]]
variable[vat_id] assign[=] call[name[self].bothify, parameter[constant[########]]]
return[binary_operation[binary_operation[constant[CHE] + name[vat_id]] + call[name[str], parameter[call[name[_checksum], parameter[name[vat_id]]]]]]] | keyword[def] identifier[vat_id] ( identifier[self] ):
literal[string]
keyword[def] identifier[_checksum] ( identifier[digits] ):
identifier[code] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
identifier[remainder] = literal[int] -( identifier[sum] ( identifier[map] ( keyword[lambda] identifier[x] , identifier[y] : identifier[int] ( identifier[x] )* identifier[int] ( identifier[y] ), identifier[code] , identifier[digits] ))% literal[int] )
keyword[if] identifier[remainder] == literal[int] :
keyword[return] literal[int]
keyword[elif] identifier[remainder] == literal[int] :
keyword[return] literal[int]
keyword[return] identifier[remainder]
identifier[vat_id] = identifier[self] . identifier[bothify] ( literal[string] )
keyword[return] literal[string] + identifier[vat_id] + identifier[str] ( identifier[_checksum] ( identifier[vat_id] )) | def vat_id(self):
"""
:return: Swiss UID number
"""
def _checksum(digits):
code = ['8', '6', '4', '2', '3', '5', '9', '7']
remainder = 11 - sum(map(lambda x, y: int(x) * int(y), code, digits)) % 11
if remainder == 10:
return 0 # depends on [control=['if'], data=[]]
elif remainder == 11:
return 5 # depends on [control=['if'], data=[]]
return remainder
vat_id = self.bothify('########')
return 'CHE' + vat_id + str(_checksum(vat_id)) |
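The mod-11 check digit above can be traced by hand; a standalone re-derivation (illustrative only, mirroring _checksum with integer weights):
def swiss_uid_check_digit(digits):
    weights = [8, 6, 4, 2, 3, 5, 9, 7]
    remainder = 11 - sum(w * int(d) for w, d in zip(weights, digits)) % 11
    if remainder == 10:
        return 0
    if remainder == 11:
        return 5
    return remainder
# Worked example: the weighted sum of '12345678' is 204, 204 % 11 == 6,
# and 11 - 6 == 5, so the full identifier would be 'CHE123456785'.
assert swiss_uid_check_digit('12345678') == 5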
def getVals(self):
"""Returns value list for Munin Graph
@return: List of name-value pairs.
"""
return [(name, self._fieldValDict.get(name))
for name in self._fieldNameList] | def function[getVals, parameter[self]]:
constant[Returns value list for Munin Graph
@return: List of name-value pairs.
]
return[<ast.ListComp object at 0x7da1b10c05b0>] | keyword[def] identifier[getVals] ( identifier[self] ):
literal[string]
keyword[return] [( identifier[name] , identifier[self] . identifier[_fieldValDict] . identifier[get] ( identifier[name] ))
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[_fieldNameList] ] | def getVals(self):
"""Returns value list for Munin Graph
@return: List of name-value pairs.
"""
return [(name, self._fieldValDict.get(name)) for name in self._fieldNameList] |
def linear_rref(A, b, Matrix=None, S=None):
""" Transform a linear system to reduced row-echelon form
Transforms both the matrix and right-hand side of a linear
system of equations to reduced row echelon form
Parameters
----------
A : Matrix-like
Iterable of rows.
b : iterable
Returns
-------
A', b' - transformed versions
"""
if Matrix is None:
from sympy import Matrix
if S is None:
from sympy import S
mat_rows = [_map2l(S, list(row) + [v]) for row, v in zip(A, b)]
aug = Matrix(mat_rows)
raug, pivot = aug.rref()
nindep = len(pivot)
return raug[:nindep, :-1], raug[:nindep, -1] | def function[linear_rref, parameter[A, b, Matrix, S]]:
constant[ Transform a linear system to reduced row-echelon form
Transforms both the matrix and right-hand side of a linear
system of equations to reduced row echelon form
Parameters
----------
A : Matrix-like
Iterable of rows.
b : iterable
Returns
-------
A', b' - transformed versions
]
if compare[name[Matrix] is constant[None]] begin[:]
from relative_module[sympy] import module[Matrix]
if compare[name[S] is constant[None]] begin[:]
from relative_module[sympy] import module[S]
variable[mat_rows] assign[=] <ast.ListComp object at 0x7da18eb57ac0>
variable[aug] assign[=] call[name[Matrix], parameter[name[mat_rows]]]
<ast.Tuple object at 0x7da207f98760> assign[=] call[name[aug].rref, parameter[]]
variable[nindep] assign[=] call[name[len], parameter[name[pivot]]]
return[tuple[[<ast.Subscript object at 0x7da207f9a590>, <ast.Subscript object at 0x7da207f9bb50>]]] | keyword[def] identifier[linear_rref] ( identifier[A] , identifier[b] , identifier[Matrix] = keyword[None] , identifier[S] = keyword[None] ):
literal[string]
keyword[if] identifier[Matrix] keyword[is] keyword[None] :
keyword[from] identifier[sympy] keyword[import] identifier[Matrix]
keyword[if] identifier[S] keyword[is] keyword[None] :
keyword[from] identifier[sympy] keyword[import] identifier[S]
identifier[mat_rows] =[ identifier[_map2l] ( identifier[S] , identifier[list] ( identifier[row] )+[ identifier[v] ]) keyword[for] identifier[row] , identifier[v] keyword[in] identifier[zip] ( identifier[A] , identifier[b] )]
identifier[aug] = identifier[Matrix] ( identifier[mat_rows] )
identifier[raug] , identifier[pivot] = identifier[aug] . identifier[rref] ()
identifier[nindep] = identifier[len] ( identifier[pivot] )
keyword[return] identifier[raug] [: identifier[nindep] ,:- literal[int] ], identifier[raug] [: identifier[nindep] ,- literal[int] ] | def linear_rref(A, b, Matrix=None, S=None):
""" Transform a linear system to reduced row-echelon form
Transforms both the matrix and right-hand side of a linear
system of equations to reduced row echelon form
Parameters
----------
A : Matrix-like
Iterable of rows.
b : iterable
Returns
-------
A', b' - transformed versions
"""
if Matrix is None:
from sympy import Matrix # depends on [control=['if'], data=[]]
if S is None:
from sympy import S # depends on [control=['if'], data=[]]
mat_rows = [_map2l(S, list(row) + [v]) for (row, v) in zip(A, b)]
aug = Matrix(mat_rows)
(raug, pivot) = aug.rref()
nindep = len(pivot)
return (raug[:nindep, :-1], raug[:nindep, -1]) |
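A usage sketch, assuming linear_rref (and its module-level helper _map2l) is importable; the redundant middle equation is eliminated by the rref step:
# System: x + y = 3, 2x + 2y = 6 (redundant), x - y = 1
A = [[1, 1], [2, 2], [1, -1]]
b = [3, 6, 1]
A2, b2 = linear_rref(A, b)
# A2 is the 2x2 identity and b2 == Matrix([2, 1]), i.e. x = 2, y = 1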
def port(self):
"""The port in the URL as an integer if it was present, `None`
otherwise. This does not fill in default ports.
"""
try:
rv = int(to_native(self._split_host()[1]))
if 0 <= rv <= 65535:
return rv
except (ValueError, TypeError):
pass | def function[port, parameter[self]]:
constant[The port in the URL as an integer if it was present, `None`
otherwise. This does not fill in default ports.
]
<ast.Try object at 0x7da204622800> | keyword[def] identifier[port] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[rv] = identifier[int] ( identifier[to_native] ( identifier[self] . identifier[_split_host] ()[ literal[int] ]))
keyword[if] literal[int] <= identifier[rv] <= literal[int] :
keyword[return] identifier[rv]
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
keyword[pass] | def port(self):
"""The port in the URL as an integer if it was present, `None`
otherwise. This does not fill in default ports.
"""
try:
rv = int(to_native(self._split_host()[1]))
if 0 <= rv <= 65535:
return rv # depends on [control=['if'], data=['rv']] # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
pass # depends on [control=['except'], data=[]] |
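The same parsing idea as the property above, sketched without the werkzeug internals (_split_host and to_native); the names here are hypothetical:
def parse_port(netloc):
    try:
        rv = int(netloc.rpartition(':')[2])
        if 0 <= rv <= 65535:
            return rv
    except (ValueError, TypeError):
        pass
    return None  # no port, unparsable, or out of range
assert parse_port('example.com:8080') == 8080
assert parse_port('example.com:99999') is None
assert parse_port('example.com') is None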
def handle_json_GET_stoptrips(self, params):
"""Given a stop_id and time in seconds since midnight return the next
trips to visit the stop."""
schedule = self.server.schedule
stop = schedule.GetStop(params.get('stop', None))
time = int(params.get('time', 0))
date = params.get('date', "")
time_trips = stop.GetStopTimeTrips(schedule)
time_trips.sort() # OPT: use bisect.insort to make this O(N*ln(N)) -> O(N)
# Keep the first 5 after param 'time'.
# Need make a tuple to find correct bisect point
time_trips = time_trips[bisect.bisect_left(time_trips, (time, 0)):]
time_trips = time_trips[:5]
# TODO: combine times for a route to show next 2 departure times
result = []
for time, (trip, index), tp in time_trips:
service_id = trip.service_id
service_period = schedule.GetServicePeriod(service_id)
if date and not service_period.IsActiveOn(date):
continue
headsign = None
# Find the most recent headsign from the StopTime objects
for stoptime in trip.GetStopTimes()[index::-1]:
if stoptime.stop_headsign:
headsign = stoptime.stop_headsign
break
# If stop_headsign isn't found, look for a trip_headsign
if not headsign:
headsign = trip.trip_headsign
route = schedule.GetRoute(trip.route_id)
trip_name = ''
if route.route_short_name:
trip_name += route.route_short_name
if route.route_long_name:
if len(trip_name):
trip_name += " - "
trip_name += route.route_long_name
if headsign:
trip_name += " (Direction: %s)" % headsign
result.append((time, (trip.trip_id, trip_name, trip.service_id), tp))
return result | def function[handle_json_GET_stoptrips, parameter[self, params]]:
constant[Given a stop_id and time in seconds since midnight return the next
trips to visit the stop.]
variable[schedule] assign[=] name[self].server.schedule
variable[stop] assign[=] call[name[schedule].GetStop, parameter[call[name[params].get, parameter[constant[stop], constant[None]]]]]
variable[time] assign[=] call[name[int], parameter[call[name[params].get, parameter[constant[time], constant[0]]]]]
variable[date] assign[=] call[name[params].get, parameter[constant[date], constant[]]]
variable[time_trips] assign[=] call[name[stop].GetStopTimeTrips, parameter[name[schedule]]]
call[name[time_trips].sort, parameter[]]
variable[time_trips] assign[=] call[name[time_trips]][<ast.Slice object at 0x7da20c76e500>]
variable[time_trips] assign[=] call[name[time_trips]][<ast.Slice object at 0x7da20c76f520>]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c76e6e0>, <ast.Tuple object at 0x7da20c76f940>, <ast.Name object at 0x7da20c76e950>]]] in starred[name[time_trips]] begin[:]
variable[service_id] assign[=] name[trip].service_id
variable[service_period] assign[=] call[name[schedule].GetServicePeriod, parameter[name[service_id]]]
if <ast.BoolOp object at 0x7da20c76f070> begin[:]
continue
variable[headsign] assign[=] constant[None]
for taget[name[stoptime]] in starred[call[call[name[trip].GetStopTimes, parameter[]]][<ast.Slice object at 0x7da20c76d240>]] begin[:]
if name[stoptime].stop_headsign begin[:]
variable[headsign] assign[=] name[stoptime].stop_headsign
break
if <ast.UnaryOp object at 0x7da20c76f730> begin[:]
variable[headsign] assign[=] name[trip].trip_headsign
variable[route] assign[=] call[name[schedule].GetRoute, parameter[name[trip].route_id]]
variable[trip_name] assign[=] constant[]
if name[route].route_short_name begin[:]
<ast.AugAssign object at 0x7da20c76d990>
if name[route].route_long_name begin[:]
if call[name[len], parameter[name[trip_name]]] begin[:]
<ast.AugAssign object at 0x7da20c76dd20>
<ast.AugAssign object at 0x7da20c76fac0>
if name[headsign] begin[:]
<ast.AugAssign object at 0x7da20c76c4f0>
call[name[result].append, parameter[tuple[[<ast.Name object at 0x7da20c76e3e0>, <ast.Tuple object at 0x7da20c76df60>, <ast.Name object at 0x7da20c76f5e0>]]]]
return[name[result]] | keyword[def] identifier[handle_json_GET_stoptrips] ( identifier[self] , identifier[params] ):
literal[string]
identifier[schedule] = identifier[self] . identifier[server] . identifier[schedule]
identifier[stop] = identifier[schedule] . identifier[GetStop] ( identifier[params] . identifier[get] ( literal[string] , keyword[None] ))
identifier[time] = identifier[int] ( identifier[params] . identifier[get] ( literal[string] , literal[int] ))
identifier[date] = identifier[params] . identifier[get] ( literal[string] , literal[string] )
identifier[time_trips] = identifier[stop] . identifier[GetStopTimeTrips] ( identifier[schedule] )
identifier[time_trips] . identifier[sort] ()
identifier[time_trips] = identifier[time_trips] [ identifier[bisect] . identifier[bisect_left] ( identifier[time_trips] ,( identifier[time] , literal[int] )):]
identifier[time_trips] = identifier[time_trips] [: literal[int] ]
identifier[result] =[]
keyword[for] identifier[time] ,( identifier[trip] , identifier[index] ), identifier[tp] keyword[in] identifier[time_trips] :
identifier[service_id] = identifier[trip] . identifier[service_id]
identifier[service_period] = identifier[schedule] . identifier[GetServicePeriod] ( identifier[service_id] )
keyword[if] identifier[date] keyword[and] keyword[not] identifier[service_period] . identifier[IsActiveOn] ( identifier[date] ):
keyword[continue]
identifier[headsign] = keyword[None]
keyword[for] identifier[stoptime] keyword[in] identifier[trip] . identifier[GetStopTimes] ()[ identifier[index] ::- literal[int] ]:
keyword[if] identifier[stoptime] . identifier[stop_headsign] :
identifier[headsign] = identifier[stoptime] . identifier[stop_headsign]
keyword[break]
keyword[if] keyword[not] identifier[headsign] :
identifier[headsign] = identifier[trip] . identifier[trip_headsign]
identifier[route] = identifier[schedule] . identifier[GetRoute] ( identifier[trip] . identifier[route_id] )
identifier[trip_name] = literal[string]
keyword[if] identifier[route] . identifier[route_short_name] :
identifier[trip_name] += identifier[route] . identifier[route_short_name]
keyword[if] identifier[route] . identifier[route_long_name] :
keyword[if] identifier[len] ( identifier[trip_name] ):
identifier[trip_name] += literal[string]
identifier[trip_name] += identifier[route] . identifier[route_long_name]
keyword[if] identifier[headsign] :
identifier[trip_name] += literal[string] % identifier[headsign]
identifier[result] . identifier[append] (( identifier[time] ,( identifier[trip] . identifier[trip_id] , identifier[trip_name] , identifier[trip] . identifier[service_id] ), identifier[tp] ))
keyword[return] identifier[result] | def handle_json_GET_stoptrips(self, params):
"""Given a stop_id and time in seconds since midnight return the next
trips to visit the stop."""
schedule = self.server.schedule
stop = schedule.GetStop(params.get('stop', None))
time = int(params.get('time', 0))
date = params.get('date', '')
time_trips = stop.GetStopTimeTrips(schedule)
time_trips.sort() # OPT: use bisect.insort to make this O(N*ln(N)) -> O(N)
# Keep the first 5 after param 'time'.
# Need make a tuple to find correct bisect point
time_trips = time_trips[bisect.bisect_left(time_trips, (time, 0)):]
time_trips = time_trips[:5]
# TODO: combine times for a route to show next 2 departure times
result = []
for (time, (trip, index), tp) in time_trips:
service_id = trip.service_id
service_period = schedule.GetServicePeriod(service_id)
if date and (not service_period.IsActiveOn(date)):
continue # depends on [control=['if'], data=[]]
headsign = None
# Find the most recent headsign from the StopTime objects
for stoptime in trip.GetStopTimes()[index::-1]:
if stoptime.stop_headsign:
headsign = stoptime.stop_headsign
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stoptime']]
# If stop_headsign isn't found, look for a trip_headsign
if not headsign:
headsign = trip.trip_headsign # depends on [control=['if'], data=[]]
route = schedule.GetRoute(trip.route_id)
trip_name = ''
if route.route_short_name:
trip_name += route.route_short_name # depends on [control=['if'], data=[]]
if route.route_long_name:
if len(trip_name):
trip_name += ' - ' # depends on [control=['if'], data=[]]
trip_name += route.route_long_name # depends on [control=['if'], data=[]]
if headsign:
trip_name += ' (Direction: %s)' % headsign # depends on [control=['if'], data=[]]
result.append((time, (trip.trip_id, trip_name, trip.service_id), tp)) # depends on [control=['for'], data=[]]
return result |
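The (time, 0) probe in the handler relies on lexicographic tuple comparison; a minimal illustration follows. Note this idiom assumes Python 2-style mixed comparisons on exact time ties: in Python 3, a tie would compare 0 against the (trip, index) tuple and raise TypeError.
import bisect
time_trips = [(100, ('t1', 0), None), (200, ('t2', 1), None), (300, ('t3', 0), None)]
i = bisect.bisect_left(time_trips, (150, 0))  # first departure at/after t=150
assert time_trips[i][0] == 200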
def _load_meta(self, meta):
'''Load data from meta.yaml to a dictionary'''
meta = yaml.load(meta, Loader=Loader)
# Versions are often specified in a format that is convertible to an
# int or a float, so we want to make sure it is interpreted as a str.
# Fix for the bug #300.
if 'version' in meta:
meta['version'] = str(meta['version'])
return meta | def function[_load_meta, parameter[self, meta]]:
constant[Load data from meta.yaml to a dictionary]
variable[meta] assign[=] call[name[yaml].load, parameter[name[meta]]]
if compare[constant[version] in name[meta]] begin[:]
call[name[meta]][constant[version]] assign[=] call[name[str], parameter[call[name[meta]][constant[version]]]]
return[name[meta]] | keyword[def] identifier[_load_meta] ( identifier[self] , identifier[meta] ):
literal[string]
identifier[meta] = identifier[yaml] . identifier[load] ( identifier[meta] , identifier[Loader] = identifier[Loader] )
keyword[if] literal[string] keyword[in] identifier[meta] :
identifier[meta] [ literal[string] ]= identifier[str] ( identifier[meta] [ literal[string] ])
keyword[return] identifier[meta] | def _load_meta(self, meta):
"""Load data from meta.yaml to a dictionary"""
meta = yaml.load(meta, Loader=Loader)
# Versions are often specified in a format that is convertible to an
# int or a float, so we want to make sure it is interpreted as a str.
# Fix for the bug #300.
if 'version' in meta:
meta['version'] = str(meta['version']) # depends on [control=['if'], data=['meta']]
return meta |
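Why the str() cast above matters: YAML resolves bare numeric versions to numbers, so without it `version: 1.0` flows through as a float. A quick check (safe_load used here for brevity):
import yaml
meta = yaml.safe_load('version: 1.0')
assert meta['version'] == 1.0            # parsed as float, not text
assert str(meta['version']) == '1.0'     # the cast restores a string
# Caveat: trailing zeros are still lost (e.g. `version: 1.10` becomes '1.1');
# quoting the value in meta.yaml sidesteps that entirely.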
def preprocess(train_dataset, output_dir, eval_dataset, checkpoint, pipeline_option):
"""Preprocess data in Cloud with DataFlow."""
import apache_beam as beam
import google.datalab.utils
from . import _preprocess
if checkpoint is None:
checkpoint = _util._DEFAULT_CHECKPOINT_GSURL
job_name = ('preprocess-image-classification-' +
datetime.datetime.now().strftime('%y%m%d-%H%M%S'))
staging_package_url = _util.repackage_to_staging(output_dir)
tmpdir = tempfile.mkdtemp()
# suppress DataFlow warnings about wheel package as extra package.
original_level = logging.getLogger().getEffectiveLevel()
logging.getLogger().setLevel(logging.ERROR)
try:
# Workaround for DataFlow 2.0, which doesn't work well with extra packages in GCS.
# Remove when the issue is fixed and new version of DataFlow is included in Datalab.
extra_packages = [staging_package_url, _TF_GS_URL, _PROTOBUF_GS_URL]
local_packages = [os.path.join(tmpdir, os.path.basename(p))
for p in extra_packages]
for source, dest in zip(extra_packages, local_packages):
file_io.copy(source, dest, overwrite=True)
options = {
'staging_location': os.path.join(output_dir, 'tmp', 'staging'),
'temp_location': os.path.join(output_dir, 'tmp'),
'job_name': job_name,
'project': _util.default_project(),
'extra_packages': local_packages,
'teardown_policy': 'TEARDOWN_ALWAYS',
'no_save_main_session': True
}
if pipeline_option is not None:
options.update(pipeline_option)
opts = beam.pipeline.PipelineOptions(flags=[], **options)
p = beam.Pipeline('DataflowRunner', options=opts)
_preprocess.configure_pipeline(p, train_dataset, eval_dataset,
checkpoint, output_dir, job_name)
job_results = p.run()
finally:
shutil.rmtree(tmpdir)
logging.getLogger().setLevel(original_level)
if (_util.is_in_IPython()):
import IPython
dataflow_url = 'https://console.developers.google.com/dataflow?project=%s' % \
_util.default_project()
html = 'Job "%s" submitted.' % job_name
html += '<p>Click <a href="%s" target="_blank">here</a> to track preprocessing job. <br/>' \
% dataflow_url
IPython.display.display_html(html, raw=True)
return google.datalab.utils.DataflowJob(job_results) | def function[preprocess, parameter[train_dataset, output_dir, eval_dataset, checkpoint, pipeline_option]]:
constant[Preprocess data in Cloud with DataFlow.]
import module[apache_beam] as alias[beam]
import module[google.datalab.utils]
from relative_module[None] import module[_preprocess]
if compare[name[checkpoint] is constant[None]] begin[:]
variable[checkpoint] assign[=] name[_util]._DEFAULT_CHECKPOINT_GSURL
variable[job_name] assign[=] binary_operation[constant[preprocess-image-classification-] + call[call[name[datetime].datetime.now, parameter[]].strftime, parameter[constant[%y%m%d-%H%M%S]]]]
variable[staging_package_url] assign[=] call[name[_util].repackage_to_staging, parameter[name[output_dir]]]
variable[tmpdir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
variable[original_level] assign[=] call[call[name[logging].getLogger, parameter[]].getEffectiveLevel, parameter[]]
call[call[name[logging].getLogger, parameter[]].setLevel, parameter[name[logging].ERROR]]
<ast.Try object at 0x7da20e955b10>
if call[name[_util].is_in_IPython, parameter[]] begin[:]
import module[IPython]
variable[dataflow_url] assign[=] binary_operation[constant[https://console.developers.google.com/dataflow?project=%s] <ast.Mod object at 0x7da2590d6920> call[name[_util].default_project, parameter[]]]
variable[html] assign[=] binary_operation[constant[Job "%s" submitted.] <ast.Mod object at 0x7da2590d6920> name[job_name]]
<ast.AugAssign object at 0x7da20e957fa0>
call[name[IPython].display.display_html, parameter[name[html]]]
return[call[name[google].datalab.utils.DataflowJob, parameter[name[job_results]]]] | keyword[def] identifier[preprocess] ( identifier[train_dataset] , identifier[output_dir] , identifier[eval_dataset] , identifier[checkpoint] , identifier[pipeline_option] ):
literal[string]
keyword[import] identifier[apache_beam] keyword[as] identifier[beam]
keyword[import] identifier[google] . identifier[datalab] . identifier[utils]
keyword[from] . keyword[import] identifier[_preprocess]
keyword[if] identifier[checkpoint] keyword[is] keyword[None] :
identifier[checkpoint] = identifier[_util] . identifier[_DEFAULT_CHECKPOINT_GSURL]
identifier[job_name] =( literal[string] +
identifier[datetime] . identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] ))
identifier[staging_package_url] = identifier[_util] . identifier[repackage_to_staging] ( identifier[output_dir] )
identifier[tmpdir] = identifier[tempfile] . identifier[mkdtemp] ()
identifier[original_level] = identifier[logging] . identifier[getLogger] (). identifier[getEffectiveLevel] ()
identifier[logging] . identifier[getLogger] (). identifier[setLevel] ( identifier[logging] . identifier[ERROR] )
keyword[try] :
identifier[extra_packages] =[ identifier[staging_package_url] , identifier[_TF_GS_URL] , identifier[_PROTOBUF_GS_URL] ]
identifier[local_packages] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[tmpdir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[p] ))
keyword[for] identifier[p] keyword[in] identifier[extra_packages] ]
keyword[for] identifier[source] , identifier[dest] keyword[in] identifier[zip] ( identifier[extra_packages] , identifier[local_packages] ):
identifier[file_io] . identifier[copy] ( identifier[source] , identifier[dest] , identifier[overwrite] = keyword[True] )
identifier[options] ={
literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , literal[string] , literal[string] ),
literal[string] : identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , literal[string] ),
literal[string] : identifier[job_name] ,
literal[string] : identifier[_util] . identifier[default_project] (),
literal[string] : identifier[local_packages] ,
literal[string] : literal[string] ,
literal[string] : keyword[True]
}
keyword[if] identifier[pipeline_option] keyword[is] keyword[not] keyword[None] :
identifier[options] . identifier[update] ( identifier[pipeline_option] )
identifier[opts] = identifier[beam] . identifier[pipeline] . identifier[PipelineOptions] ( identifier[flags] =[],** identifier[options] )
identifier[p] = identifier[beam] . identifier[Pipeline] ( literal[string] , identifier[options] = identifier[opts] )
identifier[_preprocess] . identifier[configure_pipeline] ( identifier[p] , identifier[train_dataset] , identifier[eval_dataset] ,
identifier[checkpoint] , identifier[output_dir] , identifier[job_name] )
identifier[job_results] = identifier[p] . identifier[run] ()
keyword[finally] :
identifier[shutil] . identifier[rmtree] ( identifier[tmpdir] )
identifier[logging] . identifier[getLogger] (). identifier[setLevel] ( identifier[original_level] )
keyword[if] ( identifier[_util] . identifier[is_in_IPython] ()):
keyword[import] identifier[IPython]
identifier[dataflow_url] = literal[string] % identifier[_util] . identifier[default_project] ()
identifier[html] = literal[string] % identifier[job_name]
identifier[html] += literal[string] % identifier[dataflow_url]
identifier[IPython] . identifier[display] . identifier[display_html] ( identifier[html] , identifier[raw] = keyword[True] )
keyword[return] identifier[google] . identifier[datalab] . identifier[utils] . identifier[DataflowJob] ( identifier[job_results] ) | def preprocess(train_dataset, output_dir, eval_dataset, checkpoint, pipeline_option):
"""Preprocess data in Cloud with DataFlow."""
import apache_beam as beam
import google.datalab.utils
from . import _preprocess
if checkpoint is None:
checkpoint = _util._DEFAULT_CHECKPOINT_GSURL # depends on [control=['if'], data=['checkpoint']]
job_name = 'preprocess-image-classification-' + datetime.datetime.now().strftime('%y%m%d-%H%M%S')
staging_package_url = _util.repackage_to_staging(output_dir)
tmpdir = tempfile.mkdtemp()
# suppress DataFlow warnings about wheel package as extra package.
original_level = logging.getLogger().getEffectiveLevel()
logging.getLogger().setLevel(logging.ERROR)
try:
# Workaround for DataFlow 2.0, which doesn't work well with extra packages in GCS.
# Remove when the issue is fixed and new version of DataFlow is included in Datalab.
extra_packages = [staging_package_url, _TF_GS_URL, _PROTOBUF_GS_URL]
local_packages = [os.path.join(tmpdir, os.path.basename(p)) for p in extra_packages]
for (source, dest) in zip(extra_packages, local_packages):
file_io.copy(source, dest, overwrite=True) # depends on [control=['for'], data=[]]
options = {'staging_location': os.path.join(output_dir, 'tmp', 'staging'), 'temp_location': os.path.join(output_dir, 'tmp'), 'job_name': job_name, 'project': _util.default_project(), 'extra_packages': local_packages, 'teardown_policy': 'TEARDOWN_ALWAYS', 'no_save_main_session': True}
if pipeline_option is not None:
options.update(pipeline_option) # depends on [control=['if'], data=['pipeline_option']]
opts = beam.pipeline.PipelineOptions(flags=[], **options)
p = beam.Pipeline('DataflowRunner', options=opts)
_preprocess.configure_pipeline(p, train_dataset, eval_dataset, checkpoint, output_dir, job_name)
job_results = p.run() # depends on [control=['try'], data=[]]
finally:
shutil.rmtree(tmpdir)
logging.getLogger().setLevel(original_level)
if _util.is_in_IPython():
import IPython
dataflow_url = 'https://console.developers.google.com/dataflow?project=%s' % _util.default_project()
html = 'Job "%s" submitted.' % job_name
html += '<p>Click <a href="%s" target="_blank">here</a> to track preprocessing job. <br/>' % dataflow_url
IPython.display.display_html(html, raw=True) # depends on [control=['if'], data=[]]
return google.datalab.utils.DataflowJob(job_results) |
def create_data_figs(self):
"""
Generate the data and figs files for the report
:return:
"""
logger.info("Generating the report data and figs from %s to %s",
self.start, self.end)
for section in self.sections():
logger.info("Generating %s", section)
self.sections()[section]()
logger.info("Data and figs done") | def function[create_data_figs, parameter[self]]:
constant[
Generate the data and figs files for the report
:return:
]
call[name[logger].info, parameter[constant[Generating the report data and figs from %s to %s], name[self].start, name[self].end]]
for taget[name[section]] in starred[call[name[self].sections, parameter[]]] begin[:]
call[name[logger].info, parameter[constant[Generating %s], name[section]]]
call[call[call[name[self].sections, parameter[]]][name[section]], parameter[]]
call[name[logger].info, parameter[constant[Data and figs done]]] | keyword[def] identifier[create_data_figs] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[self] . identifier[start] , identifier[self] . identifier[end] )
keyword[for] identifier[section] keyword[in] identifier[self] . identifier[sections] ():
identifier[logger] . identifier[info] ( literal[string] , identifier[section] )
identifier[self] . identifier[sections] ()[ identifier[section] ]()
identifier[logger] . identifier[info] ( literal[string] ) | def create_data_figs(self):
"""
Generate the data and figs files for the report
:return:
"""
logger.info('Generating the report data and figs from %s to %s', self.start, self.end)
for section in self.sections():
logger.info('Generating %s', section)
self.sections()[section]() # depends on [control=['for'], data=['section']]
logger.info('Data and figs done') |
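The loop above treats sections() as a dispatch table mapping section name to a zero-argument callable; a toy stand-in for that contract (class and method names hypothetical):
class Report:
    def sections(self):
        return {'overview': self._overview, 'activity': self._activity}
    def _overview(self):
        print('building overview data/figs')
    def _activity(self):
        print('building activity data/figs')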
def convert_random_normal(node, **kwargs):
"""Map MXNet's random_normal operator attributes to onnx's RandomNormal
operator and return the created node.
"""
name, input_nodes, attrs = get_inputs(node, kwargs)
# Converting to float32
mean = float(attrs.get("loc", 0))
scale = float(attrs.get("scale", 1.0))
shape = convert_string_to_list(attrs.get('shape', '[]'))
dtype = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype(attrs.get('dtype', 'float32'))]
node = onnx.helper.make_node(
'RandomNormal',
input_nodes,
[name],
mean=mean,
scale=scale,
dtype=dtype,
shape=shape,
name=name
)
return [node] | def function[convert_random_normal, parameter[node]]:
constant[Map MXNet's random_normal operator attributes to onnx's RandomNormal
operator and return the created node.
]
<ast.Tuple object at 0x7da1b204f100> assign[=] call[name[get_inputs], parameter[name[node], name[kwargs]]]
variable[mean] assign[=] call[name[float], parameter[call[name[attrs].get, parameter[constant[loc], constant[0]]]]]
variable[scale] assign[=] call[name[float], parameter[call[name[attrs].get, parameter[constant[scale], constant[1.0]]]]]
variable[shape] assign[=] call[name[convert_string_to_list], parameter[call[name[attrs].get, parameter[constant[shape], constant[[]]]]]]
variable[dtype] assign[=] call[name[onnx].mapping.NP_TYPE_TO_TENSOR_TYPE][call[name[np].dtype, parameter[call[name[attrs].get, parameter[constant[dtype], constant[float32]]]]]]
variable[node] assign[=] call[name[onnx].helper.make_node, parameter[constant[RandomNormal], name[input_nodes], list[[<ast.Name object at 0x7da1b200de70>]]]]
return[list[[<ast.Name object at 0x7da1b200c340>]]] | keyword[def] identifier[convert_random_normal] ( identifier[node] ,** identifier[kwargs] ):
literal[string]
identifier[name] , identifier[input_nodes] , identifier[attrs] = identifier[get_inputs] ( identifier[node] , identifier[kwargs] )
identifier[mean] = identifier[float] ( identifier[attrs] . identifier[get] ( literal[string] , literal[int] ))
identifier[scale] = identifier[float] ( identifier[attrs] . identifier[get] ( literal[string] , literal[int] ))
identifier[shape] = identifier[convert_string_to_list] ( identifier[attrs] . identifier[get] ( literal[string] , literal[string] ))
identifier[dtype] = identifier[onnx] . identifier[mapping] . identifier[NP_TYPE_TO_TENSOR_TYPE] [ identifier[np] . identifier[dtype] ( identifier[attrs] . identifier[get] ( literal[string] , literal[string] ))]
identifier[node] = identifier[onnx] . identifier[helper] . identifier[make_node] (
literal[string] ,
identifier[input_nodes] ,
[ identifier[name] ],
identifier[mean] = identifier[mean] ,
identifier[scale] = identifier[scale] ,
identifier[dtype] = identifier[dtype] ,
identifier[shape] = identifier[shape] ,
identifier[name] = identifier[name]
)
keyword[return] [ identifier[node] ] | def convert_random_normal(node, **kwargs):
"""Map MXNet's random_normal operator attributes to onnx's RandomNormal
operator and return the created node.
"""
(name, input_nodes, attrs) = get_inputs(node, kwargs)
# Converting to float32
mean = float(attrs.get('loc', 0))
scale = float(attrs.get('scale', 1.0))
shape = convert_string_to_list(attrs.get('shape', '[]'))
dtype = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype(attrs.get('dtype', 'float32'))]
node = onnx.helper.make_node('RandomNormal', input_nodes, [name], mean=mean, scale=scale, dtype=dtype, shape=shape, name=name)
return [node] |
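convert_string_to_list above parses MXNet's stringified shape attribute (e.g. '(2, 3)'); a plausible minimal implementation, offered as an assumption since the real helper lives elsewhere in the converter:
def convert_string_to_list(string):
    # strip surrounding brackets/parens, split on commas, keep non-empty tokens
    return [int(tok) for tok in string.strip('[]() ').split(',') if tok.strip()]
assert convert_string_to_list('(2, 3)') == [2, 3]
assert convert_string_to_list('[]') == []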
def total_timer(msg):
""" A context which add the time spent inside to TotalTimer. """
start = timer()
yield
t = timer() - start
_TOTAL_TIMER_DATA[msg].feed(t) | def function[total_timer, parameter[msg]]:
    constant[ A context manager which adds the time spent inside to TotalTimer. ]
variable[start] assign[=] call[name[timer], parameter[]]
<ast.Yield object at 0x7da18f09dfc0>
variable[t] assign[=] binary_operation[call[name[timer], parameter[]] - name[start]]
call[call[name[_TOTAL_TIMER_DATA]][name[msg]].feed, parameter[name[t]]] | keyword[def] identifier[total_timer] ( identifier[msg] ):
literal[string]
identifier[start] = identifier[timer] ()
keyword[yield]
identifier[t] = identifier[timer] ()- identifier[start]
identifier[_TOTAL_TIMER_DATA] [ identifier[msg] ]. identifier[feed] ( identifier[t] ) | def total_timer(msg):
""" A context which add the time spent inside to TotalTimer. """
start = timer()
yield
t = timer() - start
_TOTAL_TIMER_DATA[msg].feed(t) |
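As written, total_timer is a plain generator; it only behaves as a context manager once wrapped with contextlib.contextmanager, which the original module presumably does at the def site (an assumption here). Intended usage:
from contextlib import contextmanager
total_timer = contextmanager(total_timer)  # assumed equivalent of decorating the def
with total_timer('matmul'):
    sum(i * i for i in range(10 ** 6))  # stand-in workload
# the elapsed time is fed into _TOTAL_TIMER_DATA['matmul']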
def extra(self, value, extra_name, default=None):
"""
Get the additional enumeration value for ``extra_name``.
:param unicode value: Enumeration value.
:param str extra_name: Extra name.
    :param default: Default value to return when ``extra_name`` doesn't exist.
"""
try:
return self.get(value).get(extra_name, default)
except InvalidEnumItem:
return default | def function[extra, parameter[self, value, extra_name, default]]:
constant[
Get the additional enumeration value for ``extra_name``.
:param unicode value: Enumeration value.
:param str extra_name: Extra name.
    :param default: Default value to return when ``extra_name`` doesn't exist.
]
<ast.Try object at 0x7da18f813820> | keyword[def] identifier[extra] ( identifier[self] , identifier[value] , identifier[extra_name] , identifier[default] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[get] ( identifier[value] ). identifier[get] ( identifier[extra_name] , identifier[default] )
keyword[except] identifier[InvalidEnumItem] :
keyword[return] identifier[default] | def extra(self, value, extra_name, default=None):
"""
Get the additional enumeration value for ``extra_name``.
:param unicode value: Enumeration value.
:param str extra_name: Extra name.
    :param default: Default value to return when ``extra_name`` doesn't exist.
"""
try:
return self.get(value).get(extra_name, default) # depends on [control=['try'], data=[]]
except InvalidEnumItem:
return default # depends on [control=['except'], data=[]] |
def start(self):
"""Starts the advertise loop.
Returns the result of the first ad request.
"""
if self.running:
raise Exception('Advertiser is already running')
if self.io_loop is None:
self.io_loop = tornado.ioloop.IOLoop.current()
self.running = True
answer = tornado.gen.Future()
self._schedule_ad(0, answer)
return answer | def function[start, parameter[self]]:
constant[Starts the advertise loop.
Returns the result of the first ad request.
]
if name[self].running begin[:]
<ast.Raise object at 0x7da18dc99990>
if compare[name[self].io_loop is constant[None]] begin[:]
name[self].io_loop assign[=] call[name[tornado].ioloop.IOLoop.current, parameter[]]
name[self].running assign[=] constant[True]
variable[answer] assign[=] call[name[tornado].gen.Future, parameter[]]
call[name[self]._schedule_ad, parameter[constant[0], name[answer]]]
return[name[answer]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[running] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] identifier[self] . identifier[io_loop] keyword[is] keyword[None] :
identifier[self] . identifier[io_loop] = identifier[tornado] . identifier[ioloop] . identifier[IOLoop] . identifier[current] ()
identifier[self] . identifier[running] = keyword[True]
identifier[answer] = identifier[tornado] . identifier[gen] . identifier[Future] ()
identifier[self] . identifier[_schedule_ad] ( literal[int] , identifier[answer] )
keyword[return] identifier[answer] | def start(self):
"""Starts the advertise loop.
Returns the result of the first ad request.
"""
if self.running:
raise Exception('Advertiser is already running') # depends on [control=['if'], data=[]]
if self.io_loop is None:
self.io_loop = tornado.ioloop.IOLoop.current() # depends on [control=['if'], data=[]]
self.running = True
answer = tornado.gen.Future()
self._schedule_ad(0, answer)
return answer |
def _calculate(world, seed, elevation, mountain_level):
width = world.width
height = world.height
rng = numpy.random.RandomState(seed) # create our own random generator
base = rng.randint(0, 4096)
temp = numpy.zeros((height, width), dtype=float)
'''
Set up variables to take care of some orbital parameters:
distance_to_sun: -Earth-like planet = 1.0
-valid range between ~0.7 and ~1.3
see https://en.wikipedia.org/wiki/Circumstellar_habitable_zone
-random value chosen via Gaussian distribution
see https://en.wikipedia.org/wiki/Gaussian_function
-width of distribution around 1.0 is determined by HWHM (half width at half maximum)
-HWHM is used to calculate the second parameter passed to random.gauss():
sigma = HWHM / sqrt(2*ln(2))
-*only HWHM* should be considered a parameter here
-most likely outcomes can be estimated:
HWHM * sqrt(2*ln(10)) / sqrt(2*ln(2)) = HWHM * 1.822615728;
e.g. for HWHM = 0.12: 0.78 < distance_to_sun < 1.22
axial_tilt: -the world/planet may move around its star at an angle
see https://en.wikipedia.org/wiki/Axial_tilt
-a value of 0.5 here would refer to an angle of 90 degrees, Uranus-style
see https://en.wikipedia.org/wiki/Uranus
-this value should usually be in the range -0.15 < axial_tilt < 0.15 for a habitable planet
'''
distance_to_sun_hwhm = 0.12
axial_tilt_hwhm = 0.07
    # derive parameters
distance_to_sun = rng.normal(loc=1.0, scale=distance_to_sun_hwhm / 1.177410023)
distance_to_sun = max(0.1, distance_to_sun) # clamp value; no planets inside the star allowed
distance_to_sun *= distance_to_sun # prepare for later usage; use inverse-square law
    # TODO: an atmosphere would soften the effect of distance_to_sun by *some* factor
axial_tilt = rng.normal(scale=axial_tilt_hwhm / 1.177410023)
axial_tilt = min(max(-0.5, axial_tilt), 0.5) # cut off Gaussian
border = width / 4
octaves = 8 # number of passes of snoise2
freq = 16.0 * octaves
n_scale = 1024 / float(height)
for y in range(0, height): # TODO: Check for possible numpy optimizations.
y_scaled = float(y) / height - 0.5 # -0.5...0.5
#map/linearly interpolate y_scaled to latitude measured from where the most sunlight hits the world:
#1.0 = hottest zone, 0.0 = coldest zone
latitude_factor = numpy.interp(y_scaled, [axial_tilt - 0.5, axial_tilt, axial_tilt + 0.5],
[0.0, 1.0, 0.0], left=0.0, right=0.0)
for x in range(0, width):
n = snoise2((x * n_scale) / freq, (y * n_scale) / freq, octaves, base=base)
# Added to allow noise pattern to wrap around right and left.
if x <= border:
n = (snoise2((x * n_scale) / freq, (y * n_scale) / freq, octaves,
base=base) * x / border) \
+ (snoise2(((x * n_scale) + width) / freq, (y * n_scale) / freq, octaves,
base=base) * (border - x) / border)
t = (latitude_factor * 12 + n * 1) / 13.0 / distance_to_sun
if elevation[y, x] > mountain_level: # vary temperature based on height
if elevation[y, x] > (mountain_level + 29):
altitude_factor = 0.033
else:
altitude_factor = 1.00 - (
float(elevation[y, x] - mountain_level) / 30)
t *= altitude_factor
temp[y, x] = t
return temp | def function[_calculate, parameter[world, seed, elevation, mountain_level]]:
variable[width] assign[=] name[world].width
variable[height] assign[=] name[world].height
variable[rng] assign[=] call[name[numpy].random.RandomState, parameter[name[seed]]]
variable[base] assign[=] call[name[rng].randint, parameter[constant[0], constant[4096]]]
variable[temp] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0654310>, <ast.Name object at 0x7da1b0657280>]]]]
constant[
Set up variables to take care of some orbital parameters:
distance_to_sun: -Earth-like planet = 1.0
-valid range between ~0.7 and ~1.3
see https://en.wikipedia.org/wiki/Circumstellar_habitable_zone
-random value chosen via Gaussian distribution
see https://en.wikipedia.org/wiki/Gaussian_function
-width of distribution around 1.0 is determined by HWHM (half width at half maximum)
-HWHM is used to calculate the second parameter passed to random.gauss():
sigma = HWHM / sqrt(2*ln(2))
-*only HWHM* should be considered a parameter here
-most likely outcomes can be estimated:
HWHM * sqrt(2*ln(10)) / sqrt(2*ln(2)) = HWHM * 1.822615728;
e.g. for HWHM = 0.12: 0.78 < distance_to_sun < 1.22
axial_tilt: -the world/planet may move around its star at an angle
see https://en.wikipedia.org/wiki/Axial_tilt
-a value of 0.5 here would refer to an angle of 90 degrees, Uranus-style
see https://en.wikipedia.org/wiki/Uranus
-this value should usually be in the range -0.15 < axial_tilt < 0.15 for a habitable planet
]
variable[distance_to_sun_hwhm] assign[=] constant[0.12]
variable[axial_tilt_hwhm] assign[=] constant[0.07]
variable[distance_to_sun] assign[=] call[name[rng].normal, parameter[]]
variable[distance_to_sun] assign[=] call[name[max], parameter[constant[0.1], name[distance_to_sun]]]
<ast.AugAssign object at 0x7da1b0657700>
variable[axial_tilt] assign[=] call[name[rng].normal, parameter[]]
variable[axial_tilt] assign[=] call[name[min], parameter[call[name[max], parameter[<ast.UnaryOp object at 0x7da1b0656290>, name[axial_tilt]]], constant[0.5]]]
variable[border] assign[=] binary_operation[name[width] / constant[4]]
variable[octaves] assign[=] constant[8]
variable[freq] assign[=] binary_operation[constant[16.0] * name[octaves]]
variable[n_scale] assign[=] binary_operation[constant[1024] / call[name[float], parameter[name[height]]]]
for taget[name[y]] in starred[call[name[range], parameter[constant[0], name[height]]]] begin[:]
variable[y_scaled] assign[=] binary_operation[binary_operation[call[name[float], parameter[name[y]]] / name[height]] - constant[0.5]]
variable[latitude_factor] assign[=] call[name[numpy].interp, parameter[name[y_scaled], list[[<ast.BinOp object at 0x7da1b0656b00>, <ast.Name object at 0x7da1b0656680>, <ast.BinOp object at 0x7da1b0656920>]], list[[<ast.Constant object at 0x7da1b0657010>, <ast.Constant object at 0x7da1b0657eb0>, <ast.Constant object at 0x7da1b06545e0>]]]]
for taget[name[x]] in starred[call[name[range], parameter[constant[0], name[width]]]] begin[:]
variable[n] assign[=] call[name[snoise2], parameter[binary_operation[binary_operation[name[x] * name[n_scale]] / name[freq]], binary_operation[binary_operation[name[y] * name[n_scale]] / name[freq]], name[octaves]]]
if compare[name[x] less_or_equal[<=] name[border]] begin[:]
variable[n] assign[=] binary_operation[binary_operation[binary_operation[call[name[snoise2], parameter[binary_operation[binary_operation[name[x] * name[n_scale]] / name[freq]], binary_operation[binary_operation[name[y] * name[n_scale]] / name[freq]], name[octaves]]] * name[x]] / name[border]] + binary_operation[binary_operation[call[name[snoise2], parameter[binary_operation[binary_operation[binary_operation[name[x] * name[n_scale]] + name[width]] / name[freq]], binary_operation[binary_operation[name[y] * name[n_scale]] / name[freq]], name[octaves]]] * binary_operation[name[border] - name[x]]] / name[border]]]
variable[t] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[latitude_factor] * constant[12]] + binary_operation[name[n] * constant[1]]] / constant[13.0]] / name[distance_to_sun]]
if compare[call[name[elevation]][tuple[[<ast.Name object at 0x7da1b06666b0>, <ast.Name object at 0x7da1b0665780>]]] greater[>] name[mountain_level]] begin[:]
if compare[call[name[elevation]][tuple[[<ast.Name object at 0x7da1b0667100>, <ast.Name object at 0x7da1b0665150>]]] greater[>] binary_operation[name[mountain_level] + constant[29]]] begin[:]
variable[altitude_factor] assign[=] constant[0.033]
<ast.AugAssign object at 0x7da1b0667670>
call[name[temp]][tuple[[<ast.Name object at 0x7da1b0666aa0>, <ast.Name object at 0x7da1b0665ea0>]]] assign[=] name[t]
return[name[temp]] | keyword[def] identifier[_calculate] ( identifier[world] , identifier[seed] , identifier[elevation] , identifier[mountain_level] ):
identifier[width] = identifier[world] . identifier[width]
identifier[height] = identifier[world] . identifier[height]
identifier[rng] = identifier[numpy] . identifier[random] . identifier[RandomState] ( identifier[seed] )
identifier[base] = identifier[rng] . identifier[randint] ( literal[int] , literal[int] )
identifier[temp] = identifier[numpy] . identifier[zeros] (( identifier[height] , identifier[width] ), identifier[dtype] = identifier[float] )
literal[string]
identifier[distance_to_sun_hwhm] = literal[int]
identifier[axial_tilt_hwhm] = literal[int]
identifier[distance_to_sun] = identifier[rng] . identifier[normal] ( identifier[loc] = literal[int] , identifier[scale] = identifier[distance_to_sun_hwhm] / literal[int] )
identifier[distance_to_sun] = identifier[max] ( literal[int] , identifier[distance_to_sun] )
identifier[distance_to_sun] *= identifier[distance_to_sun]
identifier[axial_tilt] = identifier[rng] . identifier[normal] ( identifier[scale] = identifier[axial_tilt_hwhm] / literal[int] )
identifier[axial_tilt] = identifier[min] ( identifier[max] (- literal[int] , identifier[axial_tilt] ), literal[int] )
identifier[border] = identifier[width] / literal[int]
identifier[octaves] = literal[int]
identifier[freq] = literal[int] * identifier[octaves]
identifier[n_scale] = literal[int] / identifier[float] ( identifier[height] )
keyword[for] identifier[y] keyword[in] identifier[range] ( literal[int] , identifier[height] ):
identifier[y_scaled] = identifier[float] ( identifier[y] )/ identifier[height] - literal[int]
identifier[latitude_factor] = identifier[numpy] . identifier[interp] ( identifier[y_scaled] ,[ identifier[axial_tilt] - literal[int] , identifier[axial_tilt] , identifier[axial_tilt] + literal[int] ],
[ literal[int] , literal[int] , literal[int] ], identifier[left] = literal[int] , identifier[right] = literal[int] )
keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[width] ):
identifier[n] = identifier[snoise2] (( identifier[x] * identifier[n_scale] )/ identifier[freq] ,( identifier[y] * identifier[n_scale] )/ identifier[freq] , identifier[octaves] , identifier[base] = identifier[base] )
keyword[if] identifier[x] <= identifier[border] :
identifier[n] =( identifier[snoise2] (( identifier[x] * identifier[n_scale] )/ identifier[freq] ,( identifier[y] * identifier[n_scale] )/ identifier[freq] , identifier[octaves] ,
identifier[base] = identifier[base] )* identifier[x] / identifier[border] )+( identifier[snoise2] ((( identifier[x] * identifier[n_scale] )+ identifier[width] )/ identifier[freq] ,( identifier[y] * identifier[n_scale] )/ identifier[freq] , identifier[octaves] ,
identifier[base] = identifier[base] )*( identifier[border] - identifier[x] )/ identifier[border] )
identifier[t] =( identifier[latitude_factor] * literal[int] + identifier[n] * literal[int] )/ literal[int] / identifier[distance_to_sun]
keyword[if] identifier[elevation] [ identifier[y] , identifier[x] ]> identifier[mountain_level] :
keyword[if] identifier[elevation] [ identifier[y] , identifier[x] ]>( identifier[mountain_level] + literal[int] ):
identifier[altitude_factor] = literal[int]
keyword[else] :
identifier[altitude_factor] = literal[int] -(
identifier[float] ( identifier[elevation] [ identifier[y] , identifier[x] ]- identifier[mountain_level] )/ literal[int] )
identifier[t] *= identifier[altitude_factor]
identifier[temp] [ identifier[y] , identifier[x] ]= identifier[t]
keyword[return] identifier[temp] | def _calculate(world, seed, elevation, mountain_level):
width = world.width
height = world.height
rng = numpy.random.RandomState(seed) # create our own random generator
base = rng.randint(0, 4096)
temp = numpy.zeros((height, width), dtype=float)
'\n Set up variables to take care of some orbital parameters:\n distance_to_sun: -Earth-like planet = 1.0\n -valid range between ~0.7 and ~1.3\n see https://en.wikipedia.org/wiki/Circumstellar_habitable_zone\n -random value chosen via Gaussian distribution\n see https://en.wikipedia.org/wiki/Gaussian_function\n -width of distribution around 1.0 is determined by HWHM (half width at half maximum)\n -HWHM is used to calculate the second parameter passed to random.gauss():\n sigma = HWHM / sqrt(2*ln(2))\n -*only HWHM* should be considered a parameter here\n -most likely outcomes can be estimated:\n HWHM * sqrt(2*ln(10)) / sqrt(2*ln(2)) = HWHM * 1.822615728;\n e.g. for HWHM = 0.12: 0.78 < distance_to_sun < 1.22\n axial_tilt: -the world/planet may move around its star at an angle\n see https://en.wikipedia.org/wiki/Axial_tilt\n -a value of 0.5 here would refer to an angle of 90 degrees, Uranus-style\n see https://en.wikipedia.org/wiki/Uranus\n -this value should usually be in the range -0.15 < axial_tilt < 0.15 for a habitable planet\n '
distance_to_sun_hwhm = 0.12
axial_tilt_hwhm = 0.07
    # derive parameters
distance_to_sun = rng.normal(loc=1.0, scale=distance_to_sun_hwhm / 1.177410023)
distance_to_sun = max(0.1, distance_to_sun) # clamp value; no planets inside the star allowed
distance_to_sun *= distance_to_sun # prepare for later usage; use inverse-square law
    # TODO: an atmosphere would soften the effect of distance_to_sun by *some* factor
axial_tilt = rng.normal(scale=axial_tilt_hwhm / 1.177410023)
axial_tilt = min(max(-0.5, axial_tilt), 0.5) # cut off Gaussian
border = width / 4
octaves = 8 # number of passes of snoise2
freq = 16.0 * octaves
n_scale = 1024 / float(height)
for y in range(0, height): # TODO: Check for possible numpy optimizations.
y_scaled = float(y) / height - 0.5 # -0.5...0.5
#map/linearly interpolate y_scaled to latitude measured from where the most sunlight hits the world:
#1.0 = hottest zone, 0.0 = coldest zone
latitude_factor = numpy.interp(y_scaled, [axial_tilt - 0.5, axial_tilt, axial_tilt + 0.5], [0.0, 1.0, 0.0], left=0.0, right=0.0)
for x in range(0, width):
n = snoise2(x * n_scale / freq, y * n_scale / freq, octaves, base=base)
# Added to allow noise pattern to wrap around right and left.
if x <= border:
n = snoise2(x * n_scale / freq, y * n_scale / freq, octaves, base=base) * x / border + snoise2((x * n_scale + width) / freq, y * n_scale / freq, octaves, base=base) * (border - x) / border # depends on [control=['if'], data=['x', 'border']]
t = (latitude_factor * 12 + n * 1) / 13.0 / distance_to_sun
if elevation[y, x] > mountain_level: # vary temperature based on height
if elevation[y, x] > mountain_level + 29:
altitude_factor = 0.033 # depends on [control=['if'], data=[]]
else:
altitude_factor = 1.0 - float(elevation[y, x] - mountain_level) / 30
t *= altitude_factor # depends on [control=['if'], data=['mountain_level']]
temp[y, x] = t # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['y']]
return temp |
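The magic constants in the docstring and body check out numerically: sigma = HWHM / sqrt(2*ln 2), and the "most likely outcome" multiplier is sqrt(2*ln 10) / sqrt(2*ln 2).
import math
assert abs(math.sqrt(2 * math.log(2)) - 1.177410023) < 1e-6
assert abs(math.sqrt(2 * math.log(10)) / math.sqrt(2 * math.log(2)) - 1.822615728) < 1e-6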
def lsst_doc_shortlink_titlecase_display_role(
name, rawtext, text, lineno, inliner, options=None, content=None):
"""Link to LSST documents given their handle using LSST's ls.st link
shortener with the document handle displayed in title case.
This role is useful for Document, Report, Minutes, and Collection
DocuShare handles.
Example::
:document:`1`
"""
options = options or {}
content = content or []
node = nodes.reference(
text='{0}-{1}'.format(name.title(), text),
refuri='https://ls.st/{0}-{1}'.format(name, text),
**options)
return [node], [] | def function[lsst_doc_shortlink_titlecase_display_role, parameter[name, rawtext, text, lineno, inliner, options, content]]:
constant[Link to LSST documents given their handle using LSST's ls.st link
shortener with the document handle displayed in title case.
This role is useful for Document, Report, Minutes, and Collection
DocuShare handles.
Example::
:document:`1`
]
variable[options] assign[=] <ast.BoolOp object at 0x7da1b23923b0>
variable[content] assign[=] <ast.BoolOp object at 0x7da1b23920e0>
variable[node] assign[=] call[name[nodes].reference, parameter[]]
return[tuple[[<ast.List object at 0x7da1b23ec910>, <ast.List object at 0x7da1b23ec970>]]] | keyword[def] identifier[lsst_doc_shortlink_titlecase_display_role] (
identifier[name] , identifier[rawtext] , identifier[text] , identifier[lineno] , identifier[inliner] , identifier[options] = keyword[None] , identifier[content] = keyword[None] ):
literal[string]
identifier[options] = identifier[options] keyword[or] {}
identifier[content] = identifier[content] keyword[or] []
identifier[node] = identifier[nodes] . identifier[reference] (
identifier[text] = literal[string] . identifier[format] ( identifier[name] . identifier[title] (), identifier[text] ),
identifier[refuri] = literal[string] . identifier[format] ( identifier[name] , identifier[text] ),
** identifier[options] )
keyword[return] [ identifier[node] ],[] | def lsst_doc_shortlink_titlecase_display_role(name, rawtext, text, lineno, inliner, options=None, content=None):
"""Link to LSST documents given their handle using LSST's ls.st link
shortener with the document handle displayed in title case.
This role is useful for Document, Report, Minutes, and Collection
DocuShare handles.
Example::
:document:`1`
"""
options = options or {}
content = content or []
node = nodes.reference(text='{0}-{1}'.format(name.title(), text), refuri='https://ls.st/{0}-{1}'.format(name, text), **options)
return ([node], []) |
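What the role emits, traced through the string formatting alone (pure string operations, no docutils needed), for the docstring's :document:`1` example:
name, text = 'document', '1'
assert '{0}-{1}'.format(name.title(), text) == 'Document-1'                       # link text
assert 'https://ls.st/{0}-{1}'.format(name, text) == 'https://ls.st/document-1'   # refuri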
def get_input_list(self):
"""
Description:
Get input list
Returns an ordered list of all available input keys and names
"""
inputs = [' '] * len(self.command['input'])
for key in self.command['input']:
inputs[self.command['input'][key]['order']] = {"key":key, "name":self.command['input'][key]['name']}
return inputs | def function[get_input_list, parameter[self]]:
constant[
Description:
Get input list
Returns an ordered list of all available input keys and names
]
variable[inputs] assign[=] binary_operation[list[[<ast.Constant object at 0x7da20e960c40>]] * call[name[len], parameter[call[name[self].command][constant[input]]]]]
for taget[name[key]] in starred[call[name[self].command][constant[input]]] begin[:]
call[name[inputs]][call[call[call[name[self].command][constant[input]]][name[key]]][constant[order]]] assign[=] dictionary[[<ast.Constant object at 0x7da2041d9d50>, <ast.Constant object at 0x7da2041db8e0>], [<ast.Name object at 0x7da2041d8e50>, <ast.Subscript object at 0x7da2041dbe50>]]
return[name[inputs]] | keyword[def] identifier[get_input_list] ( identifier[self] ):
literal[string]
identifier[inputs] =[ literal[string] ]* identifier[len] ( identifier[self] . identifier[command] [ literal[string] ])
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[command] [ literal[string] ]:
identifier[inputs] [ identifier[self] . identifier[command] [ literal[string] ][ identifier[key] ][ literal[string] ]]={ literal[string] : identifier[key] , literal[string] : identifier[self] . identifier[command] [ literal[string] ][ identifier[key] ][ literal[string] ]}
keyword[return] identifier[inputs] | def get_input_list(self):
"""
Description:
Get input list
Returns an ordered list of all available input keys and names
"""
inputs = [' '] * len(self.command['input'])
for key in self.command['input']:
inputs[self.command['input'][key]['order']] = {'key': key, 'name': self.command['input'][key]['name']} # depends on [control=['for'], data=['key']]
return inputs |
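The method pre-sizes the list and slots each entry at its declared 'order' index; the same trick in isolation, with a toy command dict (hypothetical shape):
command = {'input': {
    'hdmi1': {'name': 'HDMI 1', 'order': 1},
    'tv': {'name': 'TV', 'order': 0},
}}
inputs = [' '] * len(command['input'])
for key in command['input']:
    inputs[command['input'][key]['order']] = {'key': key, 'name': command['input'][key]['name']}
assert inputs == [{'key': 'tv', 'name': 'TV'}, {'key': 'hdmi1', 'name': 'HDMI 1'}]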
def _get_synsets(synset_offsets):
"""Given synset offsets in the WordNet file, parses synset object for every offset.
Notes
-----
Internal function. Do not call directly.
Stores every parsed synset into global synset dictionary under two keys:
synset's name lemma.pos.sense_no and synset's id (unique integer).
Parameters
----------
synset_offsets : list of ints
Lists pointer offsets from which synset objects will be parsed.
Returns
-------
list of Synsets
Lists synset objects which synset_offsets point to.
"""
global parser
if parser is None:
parser = Parser(_WN_FILE)
synsets = []
for offset in synset_offsets:
raw_synset = parser.parse_synset(offset)
synset = Synset(raw_synset)
SYNSETS_DICT[_get_key_from_raw_synset(raw_synset)] = synset
SYNSETS_DICT[synset.id] = synset
synsets.append(synset)
return synsets | def function[_get_synsets, parameter[synset_offsets]]:
constant[Given synset offsets in the WordNet file, parses synset object for every offset.
Notes
-----
Internal function. Do not call directly.
Stores every parsed synset into global synset dictionary under two keys:
synset's name lemma.pos.sense_no and synset's id (unique integer).
Parameters
----------
synset_offsets : list of ints
Lists pointer offsets from which synset objects will be parsed.
Returns
-------
list of Synsets
Lists synset objects which synset_offsets point to.
]
<ast.Global object at 0x7da18f58e5f0>
if compare[name[parser] is constant[None]] begin[:]
variable[parser] assign[=] call[name[Parser], parameter[name[_WN_FILE]]]
variable[synsets] assign[=] list[[]]
for taget[name[offset]] in starred[name[synset_offsets]] begin[:]
variable[raw_synset] assign[=] call[name[parser].parse_synset, parameter[name[offset]]]
variable[synset] assign[=] call[name[Synset], parameter[name[raw_synset]]]
call[name[SYNSETS_DICT]][call[name[_get_key_from_raw_synset], parameter[name[raw_synset]]]] assign[=] name[synset]
call[name[SYNSETS_DICT]][name[synset].id] assign[=] name[synset]
call[name[synsets].append, parameter[name[synset]]]
return[name[synsets]] | keyword[def] identifier[_get_synsets] ( identifier[synset_offsets] ):
literal[string]
keyword[global] identifier[parser]
keyword[if] identifier[parser] keyword[is] keyword[None] :
identifier[parser] = identifier[Parser] ( identifier[_WN_FILE] )
identifier[synsets] =[]
keyword[for] identifier[offset] keyword[in] identifier[synset_offsets] :
identifier[raw_synset] = identifier[parser] . identifier[parse_synset] ( identifier[offset] )
identifier[synset] = identifier[Synset] ( identifier[raw_synset] )
identifier[SYNSETS_DICT] [ identifier[_get_key_from_raw_synset] ( identifier[raw_synset] )]= identifier[synset]
identifier[SYNSETS_DICT] [ identifier[synset] . identifier[id] ]= identifier[synset]
identifier[synsets] . identifier[append] ( identifier[synset] )
keyword[return] identifier[synsets] | def _get_synsets(synset_offsets):
"""Given synset offsets in the WordNet file, parses synset object for every offset.
Notes
-----
Internal function. Do not call directly.
Stores every parsed synset into global synset dictionary under two keys:
synset's name lemma.pos.sense_no and synset's id (unique integer).
Parameters
----------
synset_offsets : list of ints
Lists pointer offsets from which synset objects will be parsed.
Returns
-------
list of Synsets
Lists synset objects which synset_offsets point to.
"""
global parser
if parser is None:
parser = Parser(_WN_FILE) # depends on [control=['if'], data=['parser']]
synsets = []
for offset in synset_offsets:
raw_synset = parser.parse_synset(offset)
synset = Synset(raw_synset)
SYNSETS_DICT[_get_key_from_raw_synset(raw_synset)] = synset
SYNSETS_DICT[synset.id] = synset
synsets.append(synset) # depends on [control=['for'], data=['offset']]
return synsets |
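A sketch of the dual-key cache convention the docstring describes: the same synset object is stored once under its name and once under its integer id. FakeSynset is a hypothetical stand-in for the real class.
class FakeSynset:
    def __init__(self, id, name):
        self.id, self.name = id, name

SYNSETS = {}
syn = FakeSynset(42, 'dog.n.01')
SYNSETS[syn.name] = syn   # lemma.pos.sense_no key
SYNSETS[syn.id] = syn     # unique integer key
assert SYNSETS['dog.n.01'] is SYNSETS[42]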
def wait_for_import(self, connection_id, wait_interval):
"""
Wait until connection state is no longer ``IMPORT_CONFIGURATION``.
Args:
connection_id (str): Heroku Connect connection to monitor.
wait_interval (int): How frequently to poll in seconds.
Raises:
CommandError: If fetch connection information fails.
"""
self.stdout.write(self.style.NOTICE('Waiting for import'), ending='')
state = utils.ConnectionStates.IMPORT_CONFIGURATION
while state == utils.ConnectionStates.IMPORT_CONFIGURATION:
# before you get the first state, the API can be a bit behind
self.stdout.write(self.style.NOTICE('.'), ending='')
time.sleep(wait_interval) # take a breath
try:
connection = utils.get_connection(connection_id)
except requests.HTTPError as e:
raise CommandError("Failed to fetch connection information.") from e
else:
state = connection['state']
self.stdout.write(self.style.NOTICE(' Done!')) | def function[wait_for_import, parameter[self, connection_id, wait_interval]]:
constant[
Wait until connection state is no longer ``IMPORT_CONFIGURATION``.
Args:
connection_id (str): Heroku Connect connection to monitor.
wait_interval (int): How frequently to poll in seconds.
Raises:
CommandError: If fetch connection information fails.
]
call[name[self].stdout.write, parameter[call[name[self].style.NOTICE, parameter[constant[Waiting for import]]]]]
variable[state] assign[=] name[utils].ConnectionStates.IMPORT_CONFIGURATION
while compare[name[state] equal[==] name[utils].ConnectionStates.IMPORT_CONFIGURATION] begin[:]
call[name[self].stdout.write, parameter[call[name[self].style.NOTICE, parameter[constant[.]]]]]
call[name[time].sleep, parameter[name[wait_interval]]]
<ast.Try object at 0x7da18f8137c0>
call[name[self].stdout.write, parameter[call[name[self].style.NOTICE, parameter[constant[ Done!]]]]] | keyword[def] identifier[wait_for_import] ( identifier[self] , identifier[connection_id] , identifier[wait_interval] ):
literal[string]
identifier[self] . identifier[stdout] . identifier[write] ( identifier[self] . identifier[style] . identifier[NOTICE] ( literal[string] ), identifier[ending] = literal[string] )
identifier[state] = identifier[utils] . identifier[ConnectionStates] . identifier[IMPORT_CONFIGURATION]
keyword[while] identifier[state] == identifier[utils] . identifier[ConnectionStates] . identifier[IMPORT_CONFIGURATION] :
identifier[self] . identifier[stdout] . identifier[write] ( identifier[self] . identifier[style] . identifier[NOTICE] ( literal[string] ), identifier[ending] = literal[string] )
identifier[time] . identifier[sleep] ( identifier[wait_interval] )
keyword[try] :
identifier[connection] = identifier[utils] . identifier[get_connection] ( identifier[connection_id] )
keyword[except] identifier[requests] . identifier[HTTPError] keyword[as] identifier[e] :
keyword[raise] identifier[CommandError] ( literal[string] ) keyword[from] identifier[e]
keyword[else] :
identifier[state] = identifier[connection] [ literal[string] ]
identifier[self] . identifier[stdout] . identifier[write] ( identifier[self] . identifier[style] . identifier[NOTICE] ( literal[string] )) | def wait_for_import(self, connection_id, wait_interval):
"""
Wait until connection state is no longer ``IMPORT_CONFIGURATION``.
Args:
connection_id (str): Heroku Connect connection to monitor.
wait_interval (int): How frequently to poll in seconds.
Raises:
CommandError: If fetch connection information fails.
"""
self.stdout.write(self.style.NOTICE('Waiting for import'), ending='')
state = utils.ConnectionStates.IMPORT_CONFIGURATION
while state == utils.ConnectionStates.IMPORT_CONFIGURATION:
# before you get the first state, the API can be a bit behind
self.stdout.write(self.style.NOTICE('.'), ending='')
time.sleep(wait_interval) # take a breath
try:
connection = utils.get_connection(connection_id) # depends on [control=['try'], data=[]]
except requests.HTTPError as e:
raise CommandError('Failed to fetch connection information.') from e # depends on [control=['except'], data=['e']]
else:
state = connection['state'] # depends on [control=['while'], data=['state']]
self.stdout.write(self.style.NOTICE(' Done!')) |
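The polling pattern in isolation, as a hedged sketch: loop while the state still equals the value we are waiting to leave, sleeping between attempts. Here get_state is a placeholder standing in for utils.get_connection plus the ['state'] lookup.
import time

def poll_until_left(get_state, leaving, wait_interval):
    state = leaving
    while state == leaving:
        time.sleep(wait_interval)  # take a breath before each poll
        state = get_state()
    return state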
def create_gh_pr(self, base_branch, head_branch, *, commit_message, gh_auth):
"""
Create PR in GitHub
"""
request_headers = sansio.create_headers(self.username, oauth_token=gh_auth)
title, body = normalize_commit_message(commit_message)
if not self.prefix_commit:
title = f"[{base_branch}] {title}"
data = {
"title": title,
"body": body,
"head": f"{self.username}:{head_branch}",
"base": base_branch,
"maintainer_can_modify": True,
}
url = CREATE_PR_URL_TEMPLATE.format(config=self.config)
response = requests.post(url, headers=request_headers, json=data)
if response.status_code == requests.codes.created:
click.echo(f"Backport PR created at {response.json()['html_url']}")
else:
click.echo(response.status_code)
click.echo(response.text) | def function[create_gh_pr, parameter[self, base_branch, head_branch]]:
constant[
Create PR in GitHub
]
variable[request_headers] assign[=] call[name[sansio].create_headers, parameter[name[self].username]]
<ast.Tuple object at 0x7da1b230b2e0> assign[=] call[name[normalize_commit_message], parameter[name[commit_message]]]
if <ast.UnaryOp object at 0x7da1b230a7d0> begin[:]
variable[title] assign[=] <ast.JoinedStr object at 0x7da1b2309cf0>
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2309330>, <ast.Constant object at 0x7da1b23092a0>, <ast.Constant object at 0x7da1b230beb0>, <ast.Constant object at 0x7da1b230b670>, <ast.Constant object at 0x7da1b2308ee0>], [<ast.Name object at 0x7da1b23099c0>, <ast.Name object at 0x7da1b230b130>, <ast.JoinedStr object at 0x7da1b2309210>, <ast.Name object at 0x7da1b230a680>, <ast.Constant object at 0x7da1b2309810>]]
variable[url] assign[=] call[name[CREATE_PR_URL_TEMPLATE].format, parameter[]]
variable[response] assign[=] call[name[requests].post, parameter[name[url]]]
if compare[name[response].status_code equal[==] name[requests].codes.created] begin[:]
call[name[click].echo, parameter[<ast.JoinedStr object at 0x7da1b230bd30>]] | keyword[def] identifier[create_gh_pr] ( identifier[self] , identifier[base_branch] , identifier[head_branch] ,*, identifier[commit_message] , identifier[gh_auth] ):
literal[string]
identifier[request_headers] = identifier[sansio] . identifier[create_headers] ( identifier[self] . identifier[username] , identifier[oauth_token] = identifier[gh_auth] )
identifier[title] , identifier[body] = identifier[normalize_commit_message] ( identifier[commit_message] )
keyword[if] keyword[not] identifier[self] . identifier[prefix_commit] :
identifier[title] = literal[string]
identifier[data] ={
literal[string] : identifier[title] ,
literal[string] : identifier[body] ,
literal[string] : literal[string] ,
literal[string] : identifier[base_branch] ,
literal[string] : keyword[True] ,
}
identifier[url] = identifier[CREATE_PR_URL_TEMPLATE] . identifier[format] ( identifier[config] = identifier[self] . identifier[config] )
identifier[response] = identifier[requests] . identifier[post] ( identifier[url] , identifier[headers] = identifier[request_headers] , identifier[json] = identifier[data] )
keyword[if] identifier[response] . identifier[status_code] == identifier[requests] . identifier[codes] . identifier[created] :
identifier[click] . identifier[echo] ( literal[string] )
keyword[else] :
identifier[click] . identifier[echo] ( identifier[response] . identifier[status_code] )
identifier[click] . identifier[echo] ( identifier[response] . identifier[text] ) | def create_gh_pr(self, base_branch, head_branch, *, commit_message, gh_auth):
"""
Create PR in GitHub
"""
request_headers = sansio.create_headers(self.username, oauth_token=gh_auth)
(title, body) = normalize_commit_message(commit_message)
if not self.prefix_commit:
title = f'[{base_branch}] {title}' # depends on [control=['if'], data=[]]
data = {'title': title, 'body': body, 'head': f'{self.username}:{head_branch}', 'base': base_branch, 'maintainer_can_modify': True}
url = CREATE_PR_URL_TEMPLATE.format(config=self.config)
response = requests.post(url, headers=request_headers, json=data)
if response.status_code == requests.codes.created:
click.echo(f"Backport PR created at {response.json()['html_url']}") # depends on [control=['if'], data=[]]
else:
click.echo(response.status_code)
click.echo(response.text) |
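For reference, a hedged sketch of the HTTP request this method ends up issuing, written with plain requests against GitHub's documented pulls endpoint. The URL shape and token header here are assumptions standing in for whatever CREATE_PR_URL_TEMPLATE and sansio.create_headers produce.
import requests

def create_pr_sketch(owner, repo, token, title, body, head, base):
    # POST /repos/{owner}/{repo}/pulls is GitHub's PR-creation endpoint.
    url = 'https://api.github.com/repos/{}/{}/pulls'.format(owner, repo)
    headers = {'Authorization': 'token {}'.format(token),
               'Accept': 'application/vnd.github+json'}
    data = {'title': title, 'body': body, 'head': head, 'base': base,
            'maintainer_can_modify': True}
    return requests.post(url, headers=headers, json=data)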
def get_parameters_by_path(self, path, with_decryption, recursive, filters=None):
"""Implement the get-parameters-by-path-API in the backend."""
result = []
# path could be with or without a trailing /. we handle this
# difference here.
path = path.rstrip('/') + '/'
for param in self._parameters:
if path != '/' and not param.startswith(path):
continue
if '/' in param[len(path) + 1:] and not recursive:
continue
if not self._match_filters(self._parameters[param], filters):
continue
result.append(self._parameters[param])
return result | def function[get_parameters_by_path, parameter[self, path, with_decryption, recursive, filters]]:
constant[Implement the get-parameters-by-path-API in the backend.]
variable[result] assign[=] list[[]]
variable[path] assign[=] binary_operation[call[name[path].rstrip, parameter[constant[/]]] + constant[/]]
for taget[name[param]] in starred[name[self]._parameters] begin[:]
if <ast.BoolOp object at 0x7da18dc05780> begin[:]
continue
if <ast.BoolOp object at 0x7da18dc04130> begin[:]
continue
if <ast.UnaryOp object at 0x7da18dc06290> begin[:]
continue
call[name[result].append, parameter[call[name[self]._parameters][name[param]]]]
return[name[result]] | keyword[def] identifier[get_parameters_by_path] ( identifier[self] , identifier[path] , identifier[with_decryption] , identifier[recursive] , identifier[filters] = keyword[None] ):
literal[string]
identifier[result] =[]
identifier[path] = identifier[path] . identifier[rstrip] ( literal[string] )+ literal[string]
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[_parameters] :
keyword[if] identifier[path] != literal[string] keyword[and] keyword[not] identifier[param] . identifier[startswith] ( identifier[path] ):
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[param] [ identifier[len] ( identifier[path] )+ literal[int] :] keyword[and] keyword[not] identifier[recursive] :
keyword[continue]
keyword[if] keyword[not] identifier[self] . identifier[_match_filters] ( identifier[self] . identifier[_parameters] [ identifier[param] ], identifier[filters] ):
keyword[continue]
identifier[result] . identifier[append] ( identifier[self] . identifier[_parameters] [ identifier[param] ])
keyword[return] identifier[result] | def get_parameters_by_path(self, path, with_decryption, recursive, filters=None):
"""Implement the get-parameters-by-path-API in the backend."""
result = []
# path could be with or without a trailing /. we handle this
# difference here.
path = path.rstrip('/') + '/'
for param in self._parameters:
if path != '/' and (not param.startswith(path)):
continue # depends on [control=['if'], data=[]]
if '/' in param[len(path) + 1:] and (not recursive):
continue # depends on [control=['if'], data=[]]
if not self._match_filters(self._parameters[param], filters):
continue # depends on [control=['if'], data=[]]
result.append(self._parameters[param]) # depends on [control=['for'], data=['param']]
return result |
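The path-matching rules pulled out into a self-contained predicate: a non-recursive query under /app matches /app/db but not /app/db/user.
def matches(path, param, recursive):
    path = path.rstrip('/') + '/'
    if path != '/' and not param.startswith(path):
        return False
    if '/' in param[len(path) + 1:] and not recursive:
        return False  # more than one level below the prefix
    return True

assert matches('/app', '/app/db', recursive=False)
assert not matches('/app', '/app/db/user', recursive=False)
assert matches('/app', '/app/db/user', recursive=True)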
def installedUniqueRequirements(self, target):
"""
Return an iterable of things installed on the target that this item
requires and are not required by anything else.
"""
myDepends = dependentsOf(self.__class__)
#XXX optimize?
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target==target):
if dc.installee is self:
#we're checking all the others not ourself
continue
depends = dependentsOf(dc.installee.__class__)
if self.__class__ in depends:
raise DependencyError(
"%r cannot be uninstalled from %r, "
"%r still depends on it" % (self, target, dc.installee))
for cls in myDepends[:]:
#If one of my dependencies is required by somebody
#else, leave it alone
if cls in depends:
myDepends.remove(cls)
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target==target):
if (dc.installee.__class__ in myDepends
and not dc.explicitlyInstalled):
yield dc.installee | def function[installedUniqueRequirements, parameter[self, target]]:
constant[
Return an iterable of things installed on the target that this item
requires and are not required by anything else.
]
variable[myDepends] assign[=] call[name[dependentsOf], parameter[name[self].__class__]]
for taget[name[dc]] in starred[call[name[self].store.query, parameter[name[_DependencyConnector], compare[name[_DependencyConnector].target equal[==] name[target]]]]] begin[:]
if compare[name[dc].installee is name[self]] begin[:]
continue
variable[depends] assign[=] call[name[dependentsOf], parameter[name[dc].installee.__class__]]
if compare[name[self].__class__ in name[depends]] begin[:]
<ast.Raise object at 0x7da1b0d523e0>
for taget[name[cls]] in starred[call[name[myDepends]][<ast.Slice object at 0x7da1b0d522f0>]] begin[:]
if compare[name[cls] in name[depends]] begin[:]
call[name[myDepends].remove, parameter[name[cls]]]
for taget[name[dc]] in starred[call[name[self].store.query, parameter[name[_DependencyConnector], compare[name[_DependencyConnector].target equal[==] name[target]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0d52590> begin[:]
<ast.Yield object at 0x7da1b0d50610> | keyword[def] identifier[installedUniqueRequirements] ( identifier[self] , identifier[target] ):
literal[string]
identifier[myDepends] = identifier[dependentsOf] ( identifier[self] . identifier[__class__] )
keyword[for] identifier[dc] keyword[in] identifier[self] . identifier[store] . identifier[query] ( identifier[_DependencyConnector] ,
identifier[_DependencyConnector] . identifier[target] == identifier[target] ):
keyword[if] identifier[dc] . identifier[installee] keyword[is] identifier[self] :
keyword[continue]
identifier[depends] = identifier[dependentsOf] ( identifier[dc] . identifier[installee] . identifier[__class__] )
keyword[if] identifier[self] . identifier[__class__] keyword[in] identifier[depends] :
keyword[raise] identifier[DependencyError] (
literal[string]
literal[string] %( identifier[self] , identifier[target] , identifier[dc] . identifier[installee] ))
keyword[for] identifier[cls] keyword[in] identifier[myDepends] [:]:
keyword[if] identifier[cls] keyword[in] identifier[depends] :
identifier[myDepends] . identifier[remove] ( identifier[cls] )
keyword[for] identifier[dc] keyword[in] identifier[self] . identifier[store] . identifier[query] ( identifier[_DependencyConnector] ,
identifier[_DependencyConnector] . identifier[target] == identifier[target] ):
keyword[if] ( identifier[dc] . identifier[installee] . identifier[__class__] keyword[in] identifier[myDepends]
keyword[and] keyword[not] identifier[dc] . identifier[explicitlyInstalled] ):
keyword[yield] identifier[dc] . identifier[installee] | def installedUniqueRequirements(self, target):
"""
Return an iterable of things installed on the target that this item
requires and are not required by anything else.
"""
myDepends = dependentsOf(self.__class__)
#XXX optimize?
for dc in self.store.query(_DependencyConnector, _DependencyConnector.target == target):
if dc.installee is self:
#we're checking all the others not ourself
continue # depends on [control=['if'], data=[]]
depends = dependentsOf(dc.installee.__class__)
if self.__class__ in depends:
raise DependencyError('%r cannot be uninstalled from %r, %r still depends on it' % (self, target, dc.installee)) # depends on [control=['if'], data=[]]
for cls in myDepends[:]:
#If one of my dependencies is required by somebody
#else, leave it alone
if cls in depends:
myDepends.remove(cls) # depends on [control=['if'], data=['cls']] # depends on [control=['for'], data=['cls']] # depends on [control=['for'], data=['dc']]
for dc in self.store.query(_DependencyConnector, _DependencyConnector.target == target):
if dc.installee.__class__ in myDepends and (not dc.explicitlyInstalled):
yield dc.installee # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dc']] |
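A sketch of the set arithmetic behind this method, with a plain dependency table instead of an Axiom store. The item names are hypothetical, and the explicitlyInstalled and reverse-dependency checks are omitted for brevity.
DEPENDS = {
    'Mailbox': ['Scheduler', 'Indexer'],
    'Calendar': ['Scheduler'],
}

def unique_requirements(item, installed):
    mine = set(DEPENDS.get(item, ()))
    for other in installed:
        if other != item:
            mine -= set(DEPENDS.get(other, ()))  # shared deps stay installed
    return mine

# Scheduler is shared with Calendar, so only Indexer is uniquely required.
assert unique_requirements('Mailbox', ['Mailbox', 'Calendar']) == {'Indexer'}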
def dicom_to_nifti(dicom_input, output_file):
"""
This function will convert an anatomical dicom series to a nifti
Examples: See unit test
:param output_file: filepath to the output nifti
:param dicom_input: directory with the dicom files for a single scan, or list of read in dicoms
"""
if len(dicom_input) <= 0:
raise ConversionError('NO_DICOM_FILES_FOUND')
# remove duplicate slices based on position and data
dicom_input = _remove_duplicate_slices(dicom_input)
# remove localizers based on image type
dicom_input = _remove_localizers_by_imagetype(dicom_input)
if settings.validate_slicecount:
# remove_localizers based on image orientation (only valid if slicecount is validated)
dicom_input = _remove_localizers_by_orientation(dicom_input)
# validate all the dicom files for correct orientations
common.validate_slicecount(dicom_input)
if settings.validate_orientation:
# validate that all slices have the same orientation
common.validate_orientation(dicom_input)
if settings.validate_orthogonal:
# validate that we have an orthogonal image (to detect gantry tilting etc)
common.validate_orthogonal(dicom_input)
# sort the dicoms
dicom_input = common.sort_dicoms(dicom_input)
    # check for an inconsistent slice increment
slice_increment_inconsistent = False
if settings.validate_slice_increment:
# validate that all slices have a consistent slice increment
common.validate_slice_increment(dicom_input)
elif common.is_slice_increment_inconsistent(dicom_input):
slice_increment_inconsistent = True
# if inconsistent increment and we allow resampling then do the resampling based conversion to maintain the correct geometric shape
if slice_increment_inconsistent and settings.resample:
nii_image, max_slice_increment = _convert_slice_incement_inconsistencies(dicom_input)
# do the normal conversion
else:
# Get data; originally z,y,x, transposed to x,y,z
data = common.get_volume_pixeldata(dicom_input)
affine, max_slice_increment = common.create_affine(dicom_input)
# Convert to nifti
nii_image = nibabel.Nifti1Image(data, affine)
# Set TR and TE if available
        if Tag(0x0018, 0x0080) in dicom_input[0] and Tag(0x0018, 0x0081) in dicom_input[0]:
common.set_tr_te(nii_image, float(dicom_input[0].RepetitionTime), float(dicom_input[0].EchoTime))
# Save to disk
if output_file is not None:
logger.info('Saving nifti to disk %s' % output_file)
nii_image.to_filename(output_file)
return {'NII_FILE': output_file,
'NII': nii_image,
'MAX_SLICE_INCREMENT': max_slice_increment} | def function[dicom_to_nifti, parameter[dicom_input, output_file]]:
constant[
This function will convert an anatomical dicom series to a nifti
Examples: See unit test
:param output_file: filepath to the output nifti
:param dicom_input: directory with the dicom files for a single scan, or list of read in dicoms
]
if compare[call[name[len], parameter[name[dicom_input]]] less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b15d2440>
variable[dicom_input] assign[=] call[name[_remove_duplicate_slices], parameter[name[dicom_input]]]
variable[dicom_input] assign[=] call[name[_remove_localizers_by_imagetype], parameter[name[dicom_input]]]
if name[settings].validate_slicecount begin[:]
variable[dicom_input] assign[=] call[name[_remove_localizers_by_orientation], parameter[name[dicom_input]]]
call[name[common].validate_slicecount, parameter[name[dicom_input]]]
if name[settings].validate_orientation begin[:]
call[name[common].validate_orientation, parameter[name[dicom_input]]]
if name[settings].validate_orthogonal begin[:]
call[name[common].validate_orthogonal, parameter[name[dicom_input]]]
variable[dicom_input] assign[=] call[name[common].sort_dicoms, parameter[name[dicom_input]]]
variable[slice_increment_inconsistent] assign[=] constant[False]
if name[settings].validate_slice_increment begin[:]
call[name[common].validate_slice_increment, parameter[name[dicom_input]]]
if <ast.BoolOp object at 0x7da1b15d1fc0> begin[:]
<ast.Tuple object at 0x7da1b15d1f90> assign[=] call[name[_convert_slice_incement_inconsistencies], parameter[name[dicom_input]]]
if <ast.BoolOp object at 0x7da1b15d0df0> begin[:]
call[name[common].set_tr_te, parameter[name[nii_image], call[name[float], parameter[call[name[dicom_input]][constant[0]].RepetitionTime]], call[name[float], parameter[call[name[dicom_input]][constant[0]].EchoTime]]]]
if compare[name[output_file] is_not constant[None]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[Saving nifti to disk %s] <ast.Mod object at 0x7da2590d6920> name[output_file]]]]
call[name[nii_image].to_filename, parameter[name[output_file]]]
return[dictionary[[<ast.Constant object at 0x7da1b15d2530>, <ast.Constant object at 0x7da1b15d2590>, <ast.Constant object at 0x7da1b15d23e0>], [<ast.Name object at 0x7da1b15d11b0>, <ast.Name object at 0x7da1b15d2cb0>, <ast.Name object at 0x7da1b15d1360>]]] | keyword[def] identifier[dicom_to_nifti] ( identifier[dicom_input] , identifier[output_file] ):
literal[string]
keyword[if] identifier[len] ( identifier[dicom_input] )<= literal[int] :
keyword[raise] identifier[ConversionError] ( literal[string] )
identifier[dicom_input] = identifier[_remove_duplicate_slices] ( identifier[dicom_input] )
identifier[dicom_input] = identifier[_remove_localizers_by_imagetype] ( identifier[dicom_input] )
keyword[if] identifier[settings] . identifier[validate_slicecount] :
identifier[dicom_input] = identifier[_remove_localizers_by_orientation] ( identifier[dicom_input] )
identifier[common] . identifier[validate_slicecount] ( identifier[dicom_input] )
keyword[if] identifier[settings] . identifier[validate_orientation] :
identifier[common] . identifier[validate_orientation] ( identifier[dicom_input] )
keyword[if] identifier[settings] . identifier[validate_orthogonal] :
identifier[common] . identifier[validate_orthogonal] ( identifier[dicom_input] )
identifier[dicom_input] = identifier[common] . identifier[sort_dicoms] ( identifier[dicom_input] )
identifier[slice_increment_inconsistent] = keyword[False]
keyword[if] identifier[settings] . identifier[validate_slice_increment] :
identifier[common] . identifier[validate_slice_increment] ( identifier[dicom_input] )
keyword[elif] identifier[common] . identifier[is_slice_increment_inconsistent] ( identifier[dicom_input] ):
identifier[slice_increment_inconsistent] = keyword[True]
keyword[if] identifier[slice_increment_inconsistent] keyword[and] identifier[settings] . identifier[resample] :
identifier[nii_image] , identifier[max_slice_increment] = identifier[_convert_slice_incement_inconsistencies] ( identifier[dicom_input] )
keyword[else] :
identifier[data] = identifier[common] . identifier[get_volume_pixeldata] ( identifier[dicom_input] )
identifier[affine] , identifier[max_slice_increment] = identifier[common] . identifier[create_affine] ( identifier[dicom_input] )
identifier[nii_image] = identifier[nibabel] . identifier[Nifti1Image] ( identifier[data] , identifier[affine] )
keyword[if] identifier[Tag] ( literal[int] , literal[int] ) keyword[in] identifier[dicom_input] [ literal[int] ] keyword[and] identifier[Tag] ( literal[int] , literal[int] ) keyword[in] identifier[dicom_input] [ literal[int] ]:
identifier[common] . identifier[set_tr_te] ( identifier[nii_image] , identifier[float] ( identifier[dicom_input] [ literal[int] ]. identifier[RepetitionTime] ), identifier[float] ( identifier[dicom_input] [ literal[int] ]. identifier[EchoTime] ))
keyword[if] identifier[output_file] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[info] ( literal[string] % identifier[output_file] )
identifier[nii_image] . identifier[to_filename] ( identifier[output_file] )
keyword[return] { literal[string] : identifier[output_file] ,
literal[string] : identifier[nii_image] ,
literal[string] : identifier[max_slice_increment] } | def dicom_to_nifti(dicom_input, output_file):
"""
This function will convert an anatomical dicom series to a nifti
Examples: See unit test
:param output_file: filepath to the output nifti
:param dicom_input: directory with the dicom files for a single scan, or list of read in dicoms
"""
if len(dicom_input) <= 0:
raise ConversionError('NO_DICOM_FILES_FOUND') # depends on [control=['if'], data=[]]
# remove duplicate slices based on position and data
dicom_input = _remove_duplicate_slices(dicom_input)
# remove localizers based on image type
dicom_input = _remove_localizers_by_imagetype(dicom_input)
if settings.validate_slicecount:
# remove_localizers based on image orientation (only valid if slicecount is validated)
dicom_input = _remove_localizers_by_orientation(dicom_input)
# validate all the dicom files for correct orientations
common.validate_slicecount(dicom_input) # depends on [control=['if'], data=[]]
if settings.validate_orientation:
# validate that all slices have the same orientation
common.validate_orientation(dicom_input) # depends on [control=['if'], data=[]]
if settings.validate_orthogonal:
# validate that we have an orthogonal image (to detect gantry tilting etc)
common.validate_orthogonal(dicom_input) # depends on [control=['if'], data=[]]
# sort the dicoms
dicom_input = common.sort_dicoms(dicom_input)
    # check for an inconsistent slice increment
slice_increment_inconsistent = False
if settings.validate_slice_increment:
# validate that all slices have a consistent slice increment
common.validate_slice_increment(dicom_input) # depends on [control=['if'], data=[]]
elif common.is_slice_increment_inconsistent(dicom_input):
slice_increment_inconsistent = True # depends on [control=['if'], data=[]]
# if inconsistent increment and we allow resampling then do the resampling based conversion to maintain the correct geometric shape
if slice_increment_inconsistent and settings.resample:
(nii_image, max_slice_increment) = _convert_slice_incement_inconsistencies(dicom_input) # depends on [control=['if'], data=[]]
else:
# do the normal conversion
# Get data; originally z,y,x, transposed to x,y,z
data = common.get_volume_pixeldata(dicom_input)
(affine, max_slice_increment) = common.create_affine(dicom_input)
# Convert to nifti
nii_image = nibabel.Nifti1Image(data, affine)
# Set TR and TE if available
        if Tag(24, 128) in dicom_input[0] and Tag(24, 129) in dicom_input[0]:
common.set_tr_te(nii_image, float(dicom_input[0].RepetitionTime), float(dicom_input[0].EchoTime)) # depends on [control=['if'], data=[]]
# Save to disk
if output_file is not None:
logger.info('Saving nifti to disk %s' % output_file)
nii_image.to_filename(output_file) # depends on [control=['if'], data=['output_file']]
return {'NII_FILE': output_file, 'NII': nii_image, 'MAX_SLICE_INCREMENT': max_slice_increment} |
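A sketch of what a "consistent slice increment" means, with numpy: positions projected along the slice normal must step by the same amount within a tolerance (the tolerance value here is an assumption).
import numpy as np

def slice_increments_consistent(positions, tol=1e-4):
    increments = np.diff(np.asarray(positions, dtype=float))
    return bool(np.all(np.abs(increments - increments[0]) < tol))

assert slice_increments_consistent([0.0, 2.5, 5.0, 7.5])
assert not slice_increments_consistent([0.0, 2.5, 6.0])  # gap triggers resampling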
async def _handle_bad_notification(self, message):
"""
Adjusts the current state to be correct based on the
received bad message notification whenever possible:
bad_msg_notification#a7eff811 bad_msg_id:long bad_msg_seqno:int
error_code:int = BadMsgNotification;
"""
bad_msg = message.obj
states = self._pop_states(bad_msg.bad_msg_id)
self._log.debug('Handling bad msg %s', bad_msg)
if bad_msg.error_code in (16, 17):
# Sent msg_id too low or too high (respectively).
# Use the current msg_id to determine the right time offset.
to = self._state.update_time_offset(
correct_msg_id=message.msg_id)
self._log.info('System clock is wrong, set time offset to %ds', to)
elif bad_msg.error_code == 32:
# msg_seqno too low, so just pump it up by some "large" amount
# TODO A better fix would be to start with a new fresh session ID
self._state._sequence += 64
elif bad_msg.error_code == 33:
# msg_seqno too high never seems to happen but just in case
self._state._sequence -= 16
else:
for state in states:
state.future.set_exception(
BadMessageError(state.request, bad_msg.error_code))
return
# Messages are to be re-sent once we've corrected the issue
self._send_queue.extend(states)
self._log.debug('%d messages will be resent due to bad msg',
len(states)) | <ast.AsyncFunctionDef object at 0x7da1b21db5b0> | keyword[async] keyword[def] identifier[_handle_bad_notification] ( identifier[self] , identifier[message] ):
literal[string]
identifier[bad_msg] = identifier[message] . identifier[obj]
identifier[states] = identifier[self] . identifier[_pop_states] ( identifier[bad_msg] . identifier[bad_msg_id] )
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] , identifier[bad_msg] )
keyword[if] identifier[bad_msg] . identifier[error_code] keyword[in] ( literal[int] , literal[int] ):
identifier[to] = identifier[self] . identifier[_state] . identifier[update_time_offset] (
identifier[correct_msg_id] = identifier[message] . identifier[msg_id] )
identifier[self] . identifier[_log] . identifier[info] ( literal[string] , identifier[to] )
keyword[elif] identifier[bad_msg] . identifier[error_code] == literal[int] :
identifier[self] . identifier[_state] . identifier[_sequence] += literal[int]
keyword[elif] identifier[bad_msg] . identifier[error_code] == literal[int] :
identifier[self] . identifier[_state] . identifier[_sequence] -= literal[int]
keyword[else] :
keyword[for] identifier[state] keyword[in] identifier[states] :
identifier[state] . identifier[future] . identifier[set_exception] (
identifier[BadMessageError] ( identifier[state] . identifier[request] , identifier[bad_msg] . identifier[error_code] ))
keyword[return]
identifier[self] . identifier[_send_queue] . identifier[extend] ( identifier[states] )
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] ,
identifier[len] ( identifier[states] )) | async def _handle_bad_notification(self, message):
"""
Adjusts the current state to be correct based on the
received bad message notification whenever possible:
bad_msg_notification#a7eff811 bad_msg_id:long bad_msg_seqno:int
error_code:int = BadMsgNotification;
"""
bad_msg = message.obj
states = self._pop_states(bad_msg.bad_msg_id)
self._log.debug('Handling bad msg %s', bad_msg)
if bad_msg.error_code in (16, 17):
# Sent msg_id too low or too high (respectively).
# Use the current msg_id to determine the right time offset.
to = self._state.update_time_offset(correct_msg_id=message.msg_id)
self._log.info('System clock is wrong, set time offset to %ds', to) # depends on [control=['if'], data=[]]
elif bad_msg.error_code == 32:
# msg_seqno too low, so just pump it up by some "large" amount
# TODO A better fix would be to start with a new fresh session ID
self._state._sequence += 64 # depends on [control=['if'], data=[]]
elif bad_msg.error_code == 33:
# msg_seqno too high never seems to happen but just in case
self._state._sequence -= 16 # depends on [control=['if'], data=[]]
else:
for state in states:
state.future.set_exception(BadMessageError(state.request, bad_msg.error_code)) # depends on [control=['for'], data=['state']]
return
# Messages are to be re-sent once we've corrected the issue
self._send_queue.extend(states)
self._log.debug('%d messages will be resent due to bad msg', len(states)) |
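For error codes 16/17, the correction works because an MTProto msg_id carries the server-acceptable unix time in its upper 32 bits. A hedged sketch of the idea behind update_time_offset:
import time

def time_offset_from_msg_id(correct_msg_id):
    server_time = correct_msg_id >> 32  # upper 32 bits are unix seconds
    return int(server_time - time.time())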
def context_changed(self, context):
""" :type context: dict """
self._image.set_cmap(context['colormap'])
self._image.set_clim(context['min'], context['max'])
self._image.set_interpolation(context['interpolation'])
self._update_indicators(context)
self._set_view_limits()
if self._model.index_direction is not SliceDirection.depth:
self._image.axes.set_ylabel(context['samples_unit']) | def function[context_changed, parameter[self, context]]:
constant[ :type context: dict ]
call[name[self]._image.set_cmap, parameter[call[name[context]][constant[colormap]]]]
call[name[self]._image.set_clim, parameter[call[name[context]][constant[min]], call[name[context]][constant[max]]]]
call[name[self]._image.set_interpolation, parameter[call[name[context]][constant[interpolation]]]]
call[name[self]._update_indicators, parameter[name[context]]]
call[name[self]._set_view_limits, parameter[]]
if compare[name[self]._model.index_direction is_not name[SliceDirection].depth] begin[:]
call[name[self]._image.axes.set_ylabel, parameter[call[name[context]][constant[samples_unit]]]] | keyword[def] identifier[context_changed] ( identifier[self] , identifier[context] ):
literal[string]
identifier[self] . identifier[_image] . identifier[set_cmap] ( identifier[context] [ literal[string] ])
identifier[self] . identifier[_image] . identifier[set_clim] ( identifier[context] [ literal[string] ], identifier[context] [ literal[string] ])
identifier[self] . identifier[_image] . identifier[set_interpolation] ( identifier[context] [ literal[string] ])
identifier[self] . identifier[_update_indicators] ( identifier[context] )
identifier[self] . identifier[_set_view_limits] ()
keyword[if] identifier[self] . identifier[_model] . identifier[index_direction] keyword[is] keyword[not] identifier[SliceDirection] . identifier[depth] :
identifier[self] . identifier[_image] . identifier[axes] . identifier[set_ylabel] ( identifier[context] [ literal[string] ]) | def context_changed(self, context):
""" :type context: dict """
self._image.set_cmap(context['colormap'])
self._image.set_clim(context['min'], context['max'])
self._image.set_interpolation(context['interpolation'])
self._update_indicators(context)
self._set_view_limits()
if self._model.index_direction is not SliceDirection.depth:
self._image.axes.set_ylabel(context['samples_unit']) # depends on [control=['if'], data=[]] |
def _retrieve_resources(self, url, class_, full):
'''Retrieve HTTP resources, return related objects (with pagination)'''
objects_to_return = []
response = self.session.get(url)
if response.status_code == 200:
result = response.json()
resources = result['results']
objects_to_return.extend([class_(session=self.session, **resource)
for resource in resources])
while full and result['next'] is not None:
response = self.session.get(result['next'])
if response.status_code == 200:
result = response.json()
resources = result['results']
objects_to_return.extend([class_(session=self.session,
**resource)
for resource in resources])
else:
raise RuntimeError("Failed downloading data with status {}"
". The response was: '{}'"
.format(response.status_code, response.text))
return objects_to_return
else:
raise RuntimeError("Failed downloading data with status {}"
". The response was: '{}'"
.format(response.status_code, response.text)) | def function[_retrieve_resources, parameter[self, url, class_, full]]:
constant[Retrieve HTTP resources, return related objects (with pagination)]
variable[objects_to_return] assign[=] list[[]]
variable[response] assign[=] call[name[self].session.get, parameter[name[url]]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[result] assign[=] call[name[response].json, parameter[]]
variable[resources] assign[=] call[name[result]][constant[results]]
call[name[objects_to_return].extend, parameter[<ast.ListComp object at 0x7da1b1306140>]]
while <ast.BoolOp object at 0x7da1b1307c40> begin[:]
variable[response] assign[=] call[name[self].session.get, parameter[call[name[result]][constant[next]]]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[result] assign[=] call[name[response].json, parameter[]]
variable[resources] assign[=] call[name[result]][constant[results]]
call[name[objects_to_return].extend, parameter[<ast.ListComp object at 0x7da1b133c130>]]
return[name[objects_to_return]] | keyword[def] identifier[_retrieve_resources] ( identifier[self] , identifier[url] , identifier[class_] , identifier[full] ):
literal[string]
identifier[objects_to_return] =[]
identifier[response] = identifier[self] . identifier[session] . identifier[get] ( identifier[url] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[result] = identifier[response] . identifier[json] ()
identifier[resources] = identifier[result] [ literal[string] ]
identifier[objects_to_return] . identifier[extend] ([ identifier[class_] ( identifier[session] = identifier[self] . identifier[session] ,** identifier[resource] )
keyword[for] identifier[resource] keyword[in] identifier[resources] ])
keyword[while] identifier[full] keyword[and] identifier[result] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[response] = identifier[self] . identifier[session] . identifier[get] ( identifier[result] [ literal[string] ])
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[result] = identifier[response] . identifier[json] ()
identifier[resources] = identifier[result] [ literal[string] ]
identifier[objects_to_return] . identifier[extend] ([ identifier[class_] ( identifier[session] = identifier[self] . identifier[session] ,
** identifier[resource] )
keyword[for] identifier[resource] keyword[in] identifier[resources] ])
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
. identifier[format] ( identifier[response] . identifier[status_code] , identifier[response] . identifier[text] ))
keyword[return] identifier[objects_to_return]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
. identifier[format] ( identifier[response] . identifier[status_code] , identifier[response] . identifier[text] )) | def _retrieve_resources(self, url, class_, full):
"""Retrieve HTTP resources, return related objects (with pagination)"""
objects_to_return = []
response = self.session.get(url)
if response.status_code == 200:
result = response.json()
resources = result['results']
objects_to_return.extend([class_(session=self.session, **resource) for resource in resources])
while full and result['next'] is not None:
response = self.session.get(result['next'])
if response.status_code == 200:
result = response.json()
resources = result['results']
objects_to_return.extend([class_(session=self.session, **resource) for resource in resources]) # depends on [control=['if'], data=[]]
else:
raise RuntimeError("Failed downloading data with status {}. The response was: '{}'".format(response.status_code, response.text)) # depends on [control=['while'], data=[]]
return objects_to_return # depends on [control=['if'], data=[]]
else:
raise RuntimeError("Failed downloading data with status {}. The response was: '{}'".format(response.status_code, response.text)) |
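The same cursor-style pagination as a generator, a hedged alternative sketch that streams results instead of accumulating a list; raise_for_status stands in for the manual status-code check.
def iter_resources(session, url):
    while url:
        response = session.get(url)
        response.raise_for_status()
        payload = response.json()
        yield from payload['results']
        url = payload.get('next')  # None ends the loop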
def get_revoked(self):
"""
Return the revocations in this certificate revocation list.
These revocations will be provided by value, not by reference.
That means it's okay to mutate them: it won't affect this CRL.
:return: The revocations in this CRL.
:rtype: :class:`tuple` of :class:`Revocation`
"""
results = []
revoked_stack = _lib.X509_CRL_get_REVOKED(self._crl)
for i in range(_lib.sk_X509_REVOKED_num(revoked_stack)):
revoked = _lib.sk_X509_REVOKED_value(revoked_stack, i)
revoked_copy = _lib.Cryptography_X509_REVOKED_dup(revoked)
pyrev = Revoked.__new__(Revoked)
pyrev._revoked = _ffi.gc(revoked_copy, _lib.X509_REVOKED_free)
results.append(pyrev)
if results:
return tuple(results) | def function[get_revoked, parameter[self]]:
constant[
Return the revocations in this certificate revocation list.
These revocations will be provided by value, not by reference.
That means it's okay to mutate them: it won't affect this CRL.
:return: The revocations in this CRL.
:rtype: :class:`tuple` of :class:`Revocation`
]
variable[results] assign[=] list[[]]
variable[revoked_stack] assign[=] call[name[_lib].X509_CRL_get_REVOKED, parameter[name[self]._crl]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[_lib].sk_X509_REVOKED_num, parameter[name[revoked_stack]]]]]] begin[:]
variable[revoked] assign[=] call[name[_lib].sk_X509_REVOKED_value, parameter[name[revoked_stack], name[i]]]
variable[revoked_copy] assign[=] call[name[_lib].Cryptography_X509_REVOKED_dup, parameter[name[revoked]]]
variable[pyrev] assign[=] call[name[Revoked].__new__, parameter[name[Revoked]]]
name[pyrev]._revoked assign[=] call[name[_ffi].gc, parameter[name[revoked_copy], name[_lib].X509_REVOKED_free]]
call[name[results].append, parameter[name[pyrev]]]
if name[results] begin[:]
return[call[name[tuple], parameter[name[results]]]] | keyword[def] identifier[get_revoked] ( identifier[self] ):
literal[string]
identifier[results] =[]
identifier[revoked_stack] = identifier[_lib] . identifier[X509_CRL_get_REVOKED] ( identifier[self] . identifier[_crl] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[_lib] . identifier[sk_X509_REVOKED_num] ( identifier[revoked_stack] )):
identifier[revoked] = identifier[_lib] . identifier[sk_X509_REVOKED_value] ( identifier[revoked_stack] , identifier[i] )
identifier[revoked_copy] = identifier[_lib] . identifier[Cryptography_X509_REVOKED_dup] ( identifier[revoked] )
identifier[pyrev] = identifier[Revoked] . identifier[__new__] ( identifier[Revoked] )
identifier[pyrev] . identifier[_revoked] = identifier[_ffi] . identifier[gc] ( identifier[revoked_copy] , identifier[_lib] . identifier[X509_REVOKED_free] )
identifier[results] . identifier[append] ( identifier[pyrev] )
keyword[if] identifier[results] :
keyword[return] identifier[tuple] ( identifier[results] ) | def get_revoked(self):
"""
Return the revocations in this certificate revocation list.
These revocations will be provided by value, not by reference.
That means it's okay to mutate them: it won't affect this CRL.
:return: The revocations in this CRL.
:rtype: :class:`tuple` of :class:`Revocation`
"""
results = []
revoked_stack = _lib.X509_CRL_get_REVOKED(self._crl)
for i in range(_lib.sk_X509_REVOKED_num(revoked_stack)):
revoked = _lib.sk_X509_REVOKED_value(revoked_stack, i)
revoked_copy = _lib.Cryptography_X509_REVOKED_dup(revoked)
pyrev = Revoked.__new__(Revoked)
pyrev._revoked = _ffi.gc(revoked_copy, _lib.X509_REVOKED_free)
results.append(pyrev) # depends on [control=['for'], data=['i']]
if results:
return tuple(results) # depends on [control=['if'], data=[]] |
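A hedged usage sketch with pyOpenSSL's public API (the file path is a placeholder). Note the `or ()` guard: this method returns None rather than an empty tuple when the CRL carries no revocations.
from OpenSSL import crypto

with open('list.crl', 'rb') as f:
    crl = crypto.load_crl(crypto.FILETYPE_PEM, f.read())
for rev in crl.get_revoked() or ():
    print(rev.get_serial(), rev.get_rev_date())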
def clear_first_angle_projection(self):
"""stub"""
if (self.get_first_angle_projection_metadata().is_read_only() or
self.get_first_angle_projection_metadata().is_required()):
raise NoAccess()
self.my_osid_object_form._my_map['firstAngle'] = \
self._first_angle_metadata['default_boolean_values'][0] | def function[clear_first_angle_projection, parameter[self]]:
constant[stub]
if <ast.BoolOp object at 0x7da20c6a9b10> begin[:]
<ast.Raise object at 0x7da20e955bd0>
call[name[self].my_osid_object_form._my_map][constant[firstAngle]] assign[=] call[call[name[self]._first_angle_metadata][constant[default_boolean_values]]][constant[0]] | keyword[def] identifier[clear_first_angle_projection] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[get_first_angle_projection_metadata] (). identifier[is_read_only] () keyword[or]
identifier[self] . identifier[get_first_angle_projection_metadata] (). identifier[is_required] ()):
keyword[raise] identifier[NoAccess] ()
identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[self] . identifier[_first_angle_metadata] [ literal[string] ][ literal[int] ] | def clear_first_angle_projection(self):
"""stub"""
if self.get_first_angle_projection_metadata().is_read_only() or self.get_first_angle_projection_metadata().is_required():
raise NoAccess() # depends on [control=['if'], data=[]]
self.my_osid_object_form._my_map['firstAngle'] = self._first_angle_metadata['default_boolean_values'][0] |
def userinfo_claims_only_specified_when_access_token_is_issued(authentication_request):
"""
According to <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter">
"OpenID Connect Core 1.0", Section 5.5</a>: "When the userinfo member is used, the request MUST
also use a response_type value that results in an Access Token being issued to the Client for
use at the UserInfo Endpoint."
:param authentication_request: the authentication request to verify
:raise InvalidAuthenticationRequest: if the requested claims can not be returned according to the request
"""
will_issue_access_token = authentication_request['response_type'] != ['id_token']
contains_userinfo_claims_request = 'claims' in authentication_request and 'userinfo' in authentication_request[
'claims']
if not will_issue_access_token and contains_userinfo_claims_request:
raise InvalidAuthenticationRequest('Userinfo claims cannot be requested, when response_type=\'id_token\'',
authentication_request,
oauth_error='invalid_request') | def function[userinfo_claims_only_specified_when_access_token_is_issued, parameter[authentication_request]]:
constant[
According to <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter">
"OpenID Connect Core 1.0", Section 5.5</a>: "When the userinfo member is used, the request MUST
also use a response_type value that results in an Access Token being issued to the Client for
use at the UserInfo Endpoint."
:param authentication_request: the authentication request to verify
:raise InvalidAuthenticationRequest: if the requested claims can not be returned according to the request
]
variable[will_issue_access_token] assign[=] compare[call[name[authentication_request]][constant[response_type]] not_equal[!=] list[[<ast.Constant object at 0x7da1b1a12770>]]]
variable[contains_userinfo_claims_request] assign[=] <ast.BoolOp object at 0x7da1b1a12e30>
if <ast.BoolOp object at 0x7da1b1a12710> begin[:]
<ast.Raise object at 0x7da1b1a128f0> | keyword[def] identifier[userinfo_claims_only_specified_when_access_token_is_issued] ( identifier[authentication_request] ):
literal[string]
identifier[will_issue_access_token] = identifier[authentication_request] [ literal[string] ]!=[ literal[string] ]
identifier[contains_userinfo_claims_request] = literal[string] keyword[in] identifier[authentication_request] keyword[and] literal[string] keyword[in] identifier[authentication_request] [
literal[string] ]
keyword[if] keyword[not] identifier[will_issue_access_token] keyword[and] identifier[contains_userinfo_claims_request] :
keyword[raise] identifier[InvalidAuthenticationRequest] ( literal[string] ,
identifier[authentication_request] ,
identifier[oauth_error] = literal[string] ) | def userinfo_claims_only_specified_when_access_token_is_issued(authentication_request):
"""
According to <a href="http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter">
"OpenID Connect Core 1.0", Section 5.5</a>: "When the userinfo member is used, the request MUST
also use a response_type value that results in an Access Token being issued to the Client for
use at the UserInfo Endpoint."
:param authentication_request: the authentication request to verify
:raise InvalidAuthenticationRequest: if the requested claims can not be returned according to the request
"""
will_issue_access_token = authentication_request['response_type'] != ['id_token']
contains_userinfo_claims_request = 'claims' in authentication_request and 'userinfo' in authentication_request['claims']
if not will_issue_access_token and contains_userinfo_claims_request:
raise InvalidAuthenticationRequest("Userinfo claims cannot be requested, when response_type='id_token'", authentication_request, oauth_error='invalid_request') # depends on [control=['if'], data=[]] |
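A hedged sketch of the rule in action, assuming the validator above and its InvalidAuthenticationRequest exception are in scope:
ok = {'response_type': ['code'],
      'claims': {'userinfo': {'email': None}}}
bad = {'response_type': ['id_token'],
       'claims': {'userinfo': {'email': None}}}
userinfo_claims_only_specified_when_access_token_is_issued(ok)  # accepted
try:
    userinfo_claims_only_specified_when_access_token_is_issued(bad)
except InvalidAuthenticationRequest:
    pass  # rejected: an id_token-only flow never issues an access token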
def _get_session(self):
"""
S3 Boto3 Session.
Returns:
boto3.session.Session: session
"""
if self._session is None:
self._session = _boto3.session.Session(
**self._storage_parameters.get('session', dict()))
return self._session | def function[_get_session, parameter[self]]:
constant[
S3 Boto3 Session.
Returns:
boto3.session.Session: session
]
if compare[name[self]._session is constant[None]] begin[:]
name[self]._session assign[=] call[name[_boto3].session.Session, parameter[]]
return[name[self]._session] | keyword[def] identifier[_get_session] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_session] keyword[is] keyword[None] :
identifier[self] . identifier[_session] = identifier[_boto3] . identifier[session] . identifier[Session] (
** identifier[self] . identifier[_storage_parameters] . identifier[get] ( literal[string] , identifier[dict] ()))
keyword[return] identifier[self] . identifier[_session] | def _get_session(self):
"""
S3 Boto3 Session.
Returns:
boto3.session.Session: session
"""
if self._session is None:
self._session = _boto3.session.Session(**self._storage_parameters.get('session', dict())) # depends on [control=['if'], data=[]]
return self._session |
def decrypt(self, data):
"""
Decrypt the data. Also, update the cipher iv. This is needed for SSLv3
and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.pre_dissect().
If we lack the key, we raise a CipherError which contains the input.
"""
if False in six.itervalues(self.ready):
raise CipherError(data)
decryptor = self._cipher.decryptor()
tmp = decryptor.update(data) + decryptor.finalize()
self.iv = data[-self.block_size:]
return tmp | def function[decrypt, parameter[self, data]]:
constant[
Decrypt the data. Also, update the cipher iv. This is needed for SSLv3
and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.pre_dissect().
If we lack the key, we raise a CipherError which contains the input.
]
if compare[constant[False] in call[name[six].itervalues, parameter[name[self].ready]]] begin[:]
<ast.Raise object at 0x7da1b21a0d00>
variable[decryptor] assign[=] call[name[self]._cipher.decryptor, parameter[]]
variable[tmp] assign[=] binary_operation[call[name[decryptor].update, parameter[name[data]]] + call[name[decryptor].finalize, parameter[]]]
name[self].iv assign[=] call[name[data]][<ast.Slice object at 0x7da1b21bd4b0>]
return[name[tmp]] | keyword[def] identifier[decrypt] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] keyword[False] keyword[in] identifier[six] . identifier[itervalues] ( identifier[self] . identifier[ready] ):
keyword[raise] identifier[CipherError] ( identifier[data] )
identifier[decryptor] = identifier[self] . identifier[_cipher] . identifier[decryptor] ()
identifier[tmp] = identifier[decryptor] . identifier[update] ( identifier[data] )+ identifier[decryptor] . identifier[finalize] ()
identifier[self] . identifier[iv] = identifier[data] [- identifier[self] . identifier[block_size] :]
keyword[return] identifier[tmp] | def decrypt(self, data):
"""
Decrypt the data. Also, update the cipher iv. This is needed for SSLv3
and TLS 1.0. For TLS 1.1/1.2, it is overwritten in TLS.pre_dissect().
If we lack the key, we raise a CipherError which contains the input.
"""
if False in six.itervalues(self.ready):
raise CipherError(data) # depends on [control=['if'], data=[]]
decryptor = self._cipher.decryptor()
tmp = decryptor.update(data) + decryptor.finalize()
self.iv = data[-self.block_size:]
return tmp |
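The IV bookkeeping in isolation, as a sketch with the cryptography package (assuming a recent version where the backend argument is optional). Key, IV, and the record contents are placeholders, and the ciphertext length must be a multiple of the block size.
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = bytes(16)
iv = bytes(16)
BLOCK = 16

def decrypt_record(ciphertext):
    global iv
    decryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).decryptor()
    plaintext = decryptor.update(ciphertext) + decryptor.finalize()
    iv = ciphertext[-BLOCK:]  # SSLv3/TLS 1.0: last block seeds the next record
    return plaintext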
def _custom_rdd_reduce(self, reduce_func):
"""Provides a custom RDD reduce which preserves ordering if the RDD has
been sorted. This is useful for us because we need this functionality
as many pandas operations support sorting the results. The standard
reduce in PySpark does not have this property. Note that when PySpark
no longer does partition reduces locally this code will also need to
be updated."""
def accumulating_iter(iterator):
acc = None
for obj in iterator:
if acc is None:
acc = obj
else:
acc = reduce_func(acc, obj)
if acc is not None:
yield acc
vals = self._rdd.mapPartitions(accumulating_iter).collect()
        return reduce(reduce_func, vals)
constant[Provides a custom RDD reduce which preserves ordering if the RDD has
been sorted. This is useful for us because we need this functionality
as many pandas operations support sorting the results. The standard
reduce in PySpark does not have this property. Note that when PySpark
no longer does partition reduces locally this code will also need to
be updated.]
def function[accumulating_iter, parameter[iterator]]:
variable[acc] assign[=] constant[None]
for taget[name[obj]] in starred[name[iterator]] begin[:]
if compare[name[acc] is constant[None]] begin[:]
variable[acc] assign[=] name[obj]
if compare[name[acc] is_not constant[None]] begin[:]
<ast.Yield object at 0x7da1b031e950>
variable[vals] assign[=] call[call[name[self]._rdd.mapPartitions, parameter[name[accumulating_iter]]].collect, parameter[]]
return[call[name[reduce], parameter[name[accumulating_iter], name[vals]]]] | keyword[def] identifier[_custom_rdd_reduce] ( identifier[self] , identifier[reduce_func] ):
literal[string]
keyword[def] identifier[accumulating_iter] ( identifier[iterator] ):
identifier[acc] = keyword[None]
keyword[for] identifier[obj] keyword[in] identifier[iterator] :
keyword[if] identifier[acc] keyword[is] keyword[None] :
identifier[acc] = identifier[obj]
keyword[else] :
identifier[acc] = identifier[reduce_func] ( identifier[acc] , identifier[obj] )
keyword[if] identifier[acc] keyword[is] keyword[not] keyword[None] :
keyword[yield] identifier[acc]
identifier[vals] = identifier[self] . identifier[_rdd] . identifier[mapPartitions] ( identifier[accumulating_iter] ). identifier[collect] ()
keyword[return] identifier[reduce] ( identifier[reduce_func] , identifier[vals] )
"""Provides a custom RDD reduce which preserves ordering if the RDD has
been sorted. This is useful for us because we need this functionality
as many pandas operations support sorting the results. The standard
reduce in PySpark does not have this property. Note that when PySpark
no longer does partition reduces locally this code will also need to
be updated."""
def accumulating_iter(iterator):
acc = None
for obj in iterator:
if acc is None:
acc = obj # depends on [control=['if'], data=['acc']]
else:
acc = reduce_func(acc, obj) # depends on [control=['for'], data=['obj']]
if acc is not None:
yield acc # depends on [control=['if'], data=['acc']]
vals = self._rdd.mapPartitions(accumulating_iter).collect()
    return reduce(reduce_func, vals) |
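
# The accumulate-then-reduce pattern above can be exercised without Spark.
# A minimal pure-Python stand-in (names are illustrative only): fold each
# "partition" in order, then fold the partial results in the same order,
# so a sorted input stays sorted through the reduction.
from functools import reduce

def reduce_partitions(partitions, reduce_func):
    partials = []
    for part in partitions:
        acc = None
        for obj in part:
            acc = obj if acc is None else reduce_func(acc, obj)
        if acc is not None:
            partials.append(acc)
    return reduce(reduce_func, partials)

print(reduce_partitions([[1, 2], [3], [4, 5]], lambda a, b: a + b))  # 15
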
def _compute_linear_weights_edge(idcs, ndist):
"""Helper for linear interpolation."""
# Get out-of-bounds indices from the norm_distances. Negative
# means "too low", larger than or equal to 1 means "too high"
lo = np.where(ndist < 0)
hi = np.where(ndist > 1)
# For "too low" nodes, the lower neighbor gets weight zero;
# "too high" gets 2 - yi (since yi >= 1)
w_lo = (1 - ndist)
w_lo[lo] = 0
w_lo[hi] += 1
# For "too high" nodes, the upper neighbor gets weight zero;
# "too low" gets 1 + yi (since yi < 0)
w_hi = np.copy(ndist)
w_hi[lo] += 1
w_hi[hi] = 0
# For upper/lower out-of-bounds nodes, we need to set the
# lower/upper neighbors to the last/first grid point
edge = [idcs, idcs + 1]
edge[0][hi] = -1
edge[1][lo] = 0
return w_lo, w_hi, edge | def function[_compute_linear_weights_edge, parameter[idcs, ndist]]:
constant[Helper for linear interpolation.]
variable[lo] assign[=] call[name[np].where, parameter[compare[name[ndist] less[<] constant[0]]]]
variable[hi] assign[=] call[name[np].where, parameter[compare[name[ndist] greater[>] constant[1]]]]
variable[w_lo] assign[=] binary_operation[constant[1] - name[ndist]]
call[name[w_lo]][name[lo]] assign[=] constant[0]
<ast.AugAssign object at 0x7da18bc700d0>
variable[w_hi] assign[=] call[name[np].copy, parameter[name[ndist]]]
<ast.AugAssign object at 0x7da18bc71d80>
call[name[w_hi]][name[hi]] assign[=] constant[0]
variable[edge] assign[=] list[[<ast.Name object at 0x7da18bc73ac0>, <ast.BinOp object at 0x7da18bc70d60>]]
call[call[name[edge]][constant[0]]][name[hi]] assign[=] <ast.UnaryOp object at 0x7da18bc71e10>
call[call[name[edge]][constant[1]]][name[lo]] assign[=] constant[0]
return[tuple[[<ast.Name object at 0x7da18bc73f10>, <ast.Name object at 0x7da18bc719c0>, <ast.Name object at 0x7da18bc724a0>]]] | keyword[def] identifier[_compute_linear_weights_edge] ( identifier[idcs] , identifier[ndist] ):
literal[string]
identifier[lo] = identifier[np] . identifier[where] ( identifier[ndist] < literal[int] )
identifier[hi] = identifier[np] . identifier[where] ( identifier[ndist] > literal[int] )
identifier[w_lo] =( literal[int] - identifier[ndist] )
identifier[w_lo] [ identifier[lo] ]= literal[int]
identifier[w_lo] [ identifier[hi] ]+= literal[int]
identifier[w_hi] = identifier[np] . identifier[copy] ( identifier[ndist] )
identifier[w_hi] [ identifier[lo] ]+= literal[int]
identifier[w_hi] [ identifier[hi] ]= literal[int]
identifier[edge] =[ identifier[idcs] , identifier[idcs] + literal[int] ]
identifier[edge] [ literal[int] ][ identifier[hi] ]=- literal[int]
identifier[edge] [ literal[int] ][ identifier[lo] ]= literal[int]
keyword[return] identifier[w_lo] , identifier[w_hi] , identifier[edge] | def _compute_linear_weights_edge(idcs, ndist):
"""Helper for linear interpolation."""
# Get out-of-bounds indices from the norm_distances. Negative
# means "too low", larger than or equal to 1 means "too high"
lo = np.where(ndist < 0)
hi = np.where(ndist > 1)
# For "too low" nodes, the lower neighbor gets weight zero;
# "too high" gets 2 - yi (since yi >= 1)
w_lo = 1 - ndist
w_lo[lo] = 0
w_lo[hi] += 1
# For "too high" nodes, the upper neighbor gets weight zero;
# "too low" gets 1 + yi (since yi < 0)
w_hi = np.copy(ndist)
w_hi[lo] += 1
w_hi[hi] = 0
# For upper/lower out-of-bounds nodes, we need to set the
# lower/upper neighbors to the last/first grid point
edge = [idcs, idcs + 1]
edge[0][hi] = -1
edge[1][lo] = 0
return (w_lo, w_hi, edge) |
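
# A small, self-contained check of the clamping behaviour (sample values are
# made up): three positions on a 5-node grid, one below, one inside and one
# above the grid. Calls the helper defined above.
import numpy as np

idcs = np.array([0, 2, 3])            # lower-neighbor index per sample
ndist = np.array([-0.25, 0.4, 1.5])   # normalized distance; <0 / >1 is out of bounds

w_lo, w_hi, edge = _compute_linear_weights_edge(idcs, ndist)
print(w_lo)     # [0.  0.6 0.5] -> 2 - ndist for the "too high" sample
print(w_hi)     # [0.75 0.4 0. ] -> 1 + ndist for the "too low" sample
print(edge[0])  # [ 0  2 -1]    -> upper overflow snaps to the last node
print(edge[1])  # [0 3 4]       -> lower overflow snaps to the first node
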
def positiveints(value):
"""
>>> positiveints('1, -1')
Traceback (most recent call last):
...
ValueError: -1 is negative in '1, -1'
"""
ints = integers(value)
for val in ints:
if val < 0:
raise ValueError('%d is negative in %r' % (val, value))
return ints | def function[positiveints, parameter[value]]:
constant[
>>> positiveints('1, -1')
Traceback (most recent call last):
...
ValueError: -1 is negative in '1, -1'
]
variable[ints] assign[=] call[name[integers], parameter[name[value]]]
for taget[name[val]] in starred[name[ints]] begin[:]
if compare[name[val] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b26ae200>
return[name[ints]] | keyword[def] identifier[positiveints] ( identifier[value] ):
literal[string]
identifier[ints] = identifier[integers] ( identifier[value] )
keyword[for] identifier[val] keyword[in] identifier[ints] :
keyword[if] identifier[val] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[val] , identifier[value] ))
keyword[return] identifier[ints] | def positiveints(value):
"""
>>> positiveints('1, -1')
Traceback (most recent call last):
...
ValueError: -1 is negative in '1, -1'
"""
ints = integers(value)
for val in ints:
if val < 0:
raise ValueError('%d is negative in %r' % (val, value)) # depends on [control=['if'], data=['val']] # depends on [control=['for'], data=['val']]
return ints |
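
# `positiveints` delegates to an `integers` helper defined elsewhere in the
# library. A hypothetical sketch of that helper, mirroring the doctest's
# comma-separated input format (an assumption, not the original code):
def integers(value):
    """Parse a comma-separated string such as '1, -1' into a list of ints."""
    return [int(tok) for tok in value.split(',')]

print(positiveints('1, 2, 3'))  # [1, 2, 3]
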
def activate_crontab(self):
"""Activate polling function and register first crontab
"""
self._crontab = []
if hasattr(self, 'CRONTAB'):
for crontab_spec in self.CRONTAB:
args = cronjob.parse_crontab(crontab_spec)
job = cronjob.CronJob()
if args['_timer'] == 'datetime':
job.set_triggers(args['trigger_format'], args['trigger_time'])
if args['_timer'] == 'crontab':
job.set_crontab(args['crontab'])
if args['action'].startswith('.'):
action_name = args['action'][1:]
action_ = getattr(self.__class__, action_name)
else:
action_ = args['action']
job.set_action(action_, *args['args'])
self._crontab.append(job)
self.start_poller(30, self.poll_crontab) | def function[activate_crontab, parameter[self]]:
constant[Activate polling function and register first crontab
]
name[self]._crontab assign[=] list[[]]
if call[name[hasattr], parameter[name[self], constant[CRONTAB]]] begin[:]
for taget[name[crontab_spec]] in starred[name[self].CRONTAB] begin[:]
variable[args] assign[=] call[name[cronjob].parse_crontab, parameter[name[crontab_spec]]]
variable[job] assign[=] call[name[cronjob].CronJob, parameter[]]
if compare[call[name[args]][constant[_timer]] equal[==] constant[datetime]] begin[:]
call[name[job].set_triggers, parameter[call[name[args]][constant[trigger_format]], call[name[args]][constant[trigger_time]]]]
if compare[call[name[args]][constant[_timer]] equal[==] constant[crontab]] begin[:]
call[name[job].set_crontab, parameter[call[name[args]][constant[crontab]]]]
if call[call[name[args]][constant[action]].startswith, parameter[constant[.]]] begin[:]
variable[action_name] assign[=] call[call[name[args]][constant[action]]][<ast.Slice object at 0x7da1b0aa0700>]
variable[action_] assign[=] call[name[getattr], parameter[name[self].__class__, name[action_name]]]
call[name[job].set_action, parameter[name[action_], <ast.Starred object at 0x7da1b0aa0d60>]]
call[name[self]._crontab.append, parameter[name[job]]]
call[name[self].start_poller, parameter[constant[30], name[self].poll_crontab]] | keyword[def] identifier[activate_crontab] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_crontab] =[]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[for] identifier[crontab_spec] keyword[in] identifier[self] . identifier[CRONTAB] :
identifier[args] = identifier[cronjob] . identifier[parse_crontab] ( identifier[crontab_spec] )
identifier[job] = identifier[cronjob] . identifier[CronJob] ()
keyword[if] identifier[args] [ literal[string] ]== literal[string] :
identifier[job] . identifier[set_triggers] ( identifier[args] [ literal[string] ], identifier[args] [ literal[string] ])
keyword[if] identifier[args] [ literal[string] ]== literal[string] :
identifier[job] . identifier[set_crontab] ( identifier[args] [ literal[string] ])
keyword[if] identifier[args] [ literal[string] ]. identifier[startswith] ( literal[string] ):
identifier[action_name] = identifier[args] [ literal[string] ][ literal[int] :]
identifier[action_] = identifier[getattr] ( identifier[self] . identifier[__class__] , identifier[action_name] )
keyword[else] :
identifier[action_] = identifier[args] [ literal[string] ]
identifier[job] . identifier[set_action] ( identifier[action_] ,* identifier[args] [ literal[string] ])
identifier[self] . identifier[_crontab] . identifier[append] ( identifier[job] )
identifier[self] . identifier[start_poller] ( literal[int] , identifier[self] . identifier[poll_crontab] ) | def activate_crontab(self):
"""Activate polling function and register first crontab
"""
self._crontab = []
if hasattr(self, 'CRONTAB'):
for crontab_spec in self.CRONTAB:
args = cronjob.parse_crontab(crontab_spec)
job = cronjob.CronJob()
if args['_timer'] == 'datetime':
job.set_triggers(args['trigger_format'], args['trigger_time']) # depends on [control=['if'], data=[]]
if args['_timer'] == 'crontab':
job.set_crontab(args['crontab']) # depends on [control=['if'], data=[]]
if args['action'].startswith('.'):
action_name = args['action'][1:]
action_ = getattr(self.__class__, action_name) # depends on [control=['if'], data=[]]
else:
action_ = args['action']
job.set_action(action_, *args['args'])
self._crontab.append(job) # depends on [control=['for'], data=['crontab_spec']] # depends on [control=['if'], data=[]]
self.start_poller(30, self.poll_crontab) |
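
# The '.'-prefix rule above binds actions to methods of the owning class.
# A minimal, self-contained illustration of that dispatch (not the cronjob
# API itself):
class Service:
    def tick(self):
        return 'tick'

def resolve_action(cls, action):
    return getattr(cls, action[1:]) if action.startswith('.') else action

print(resolve_action(Service, '.tick'))         # <function Service.tick ...>
print(resolve_action(Service, 'external_job'))  # 'external_job'
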
def until_not_synced(self, timeout=None):
"""Return a tornado Future; resolves when any subordinate client is not synced"""
yield until_any(*[r.until_not_synced() for r in dict.values(self.children)],
timeout=timeout) | def function[until_not_synced, parameter[self, timeout]]:
constant[Return a tornado Future; resolves when any subordinate client is not synced]
<ast.Yield object at 0x7da1b05db3a0> | keyword[def] identifier[until_not_synced] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[yield] identifier[until_any] (*[ identifier[r] . identifier[until_not_synced] () keyword[for] identifier[r] keyword[in] identifier[dict] . identifier[values] ( identifier[self] . identifier[children] )],
identifier[timeout] = identifier[timeout] ) | def until_not_synced(self, timeout=None):
"""Return a tornado Future; resolves when any subordinate client is not synced"""
yield until_any(*[r.until_not_synced() for r in dict.values(self.children)], timeout=timeout) |
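
# `until_any` is assumed by the code above: a tornado helper that resolves as
# soon as any child future resolves. A rough asyncio analogue of its contract,
# for illustration only:
import asyncio

async def until_any(*aws, timeout=None):
    tasks = [asyncio.ensure_future(a) for a in aws]
    done, pending = await asyncio.wait(tasks, timeout=timeout,
                                       return_when=asyncio.FIRST_COMPLETED)
    for task in pending:
        task.cancel()
    return done
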
def gfdist(target, abcorr, obsrvr, relate, refval, adjust, step, nintvls,
cnfine, result=None):
"""
Return the time window over which a specified constraint on
observer-target distance is met.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfdist_c.html
:param target: Name of the target body.
:type target: str
:param abcorr: Aberration correction flag.
:type abcorr: str
:param obsrvr: Name of the observing body.
:type obsrvr: str
:param relate: Relational operator.
:type relate: str
:param refval: Reference value.
:type refval: float
:param adjust: Adjustment value for absolute extrema searches.
:type adjust: float
:param step: Step size used for locating extrema and roots.
:type step: float
:param nintvls: Workspace window interval count.
:type nintvls: int
:param cnfine: SPICE window to which the search is confined.
:type cnfine: spiceypy.utils.support_types.SpiceCell
:param result: Optional SPICE window containing results.
:type result: spiceypy.utils.support_types.SpiceCell
"""
assert isinstance(cnfine, stypes.SpiceCell)
assert cnfine.is_double()
if result is None:
result = stypes.SPICEDOUBLE_CELL(2000)
else:
assert isinstance(result, stypes.SpiceCell)
assert result.is_double()
target = stypes.stringToCharP(target)
abcorr = stypes.stringToCharP(abcorr)
obsrvr = stypes.stringToCharP(obsrvr)
relate = stypes.stringToCharP(relate)
refval = ctypes.c_double(refval)
adjust = ctypes.c_double(adjust)
step = ctypes.c_double(step)
nintvls = ctypes.c_int(nintvls)
libspice.gfdist_c(target, abcorr, obsrvr, relate, refval, adjust,
step, nintvls, ctypes.byref(cnfine), ctypes.byref(result))
return result | def function[gfdist, parameter[target, abcorr, obsrvr, relate, refval, adjust, step, nintvls, cnfine, result]]:
constant[
Return the time window over which a specified constraint on
observer-target distance is met.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfdist_c.html
:param target: Name of the target body.
:type target: str
:param abcorr: Aberration correction flag.
:type abcorr: str
:param obsrvr: Name of the observing body.
:type obsrvr: str
:param relate: Relational operator.
:type relate: str
:param refval: Reference value.
:type refval: float
:param adjust: Adjustment value for absolute extrema searches.
:type adjust: float
:param step: Step size used for locating extrema and roots.
:type step: float
:param nintvls: Workspace window interval count.
:type nintvls: int
:param cnfine: SPICE window to which the search is confined.
:type cnfine: spiceypy.utils.support_types.SpiceCell
:param result: Optional SPICE window containing results.
:type result: spiceypy.utils.support_types.SpiceCell
]
assert[call[name[isinstance], parameter[name[cnfine], name[stypes].SpiceCell]]]
assert[call[name[cnfine].is_double, parameter[]]]
if compare[name[result] is constant[None]] begin[:]
variable[result] assign[=] call[name[stypes].SPICEDOUBLE_CELL, parameter[constant[2000]]]
variable[target] assign[=] call[name[stypes].stringToCharP, parameter[name[target]]]
variable[abcorr] assign[=] call[name[stypes].stringToCharP, parameter[name[abcorr]]]
variable[obsrvr] assign[=] call[name[stypes].stringToCharP, parameter[name[obsrvr]]]
variable[relate] assign[=] call[name[stypes].stringToCharP, parameter[name[relate]]]
variable[refval] assign[=] call[name[ctypes].c_double, parameter[name[refval]]]
variable[adjust] assign[=] call[name[ctypes].c_double, parameter[name[adjust]]]
variable[step] assign[=] call[name[ctypes].c_double, parameter[name[step]]]
variable[nintvls] assign[=] call[name[ctypes].c_int, parameter[name[nintvls]]]
call[name[libspice].gfdist_c, parameter[name[target], name[abcorr], name[obsrvr], name[relate], name[refval], name[adjust], name[step], name[nintvls], call[name[ctypes].byref, parameter[name[cnfine]]], call[name[ctypes].byref, parameter[name[result]]]]]
return[name[result]] | keyword[def] identifier[gfdist] ( identifier[target] , identifier[abcorr] , identifier[obsrvr] , identifier[relate] , identifier[refval] , identifier[adjust] , identifier[step] , identifier[nintvls] ,
identifier[cnfine] , identifier[result] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[cnfine] , identifier[stypes] . identifier[SpiceCell] )
keyword[assert] identifier[cnfine] . identifier[is_double] ()
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[result] = identifier[stypes] . identifier[SPICEDOUBLE_CELL] ( literal[int] )
keyword[else] :
keyword[assert] identifier[isinstance] ( identifier[result] , identifier[stypes] . identifier[SpiceCell] )
keyword[assert] identifier[result] . identifier[is_double] ()
identifier[target] = identifier[stypes] . identifier[stringToCharP] ( identifier[target] )
identifier[abcorr] = identifier[stypes] . identifier[stringToCharP] ( identifier[abcorr] )
identifier[obsrvr] = identifier[stypes] . identifier[stringToCharP] ( identifier[obsrvr] )
identifier[relate] = identifier[stypes] . identifier[stringToCharP] ( identifier[relate] )
identifier[refval] = identifier[ctypes] . identifier[c_double] ( identifier[refval] )
identifier[adjust] = identifier[ctypes] . identifier[c_double] ( identifier[adjust] )
identifier[step] = identifier[ctypes] . identifier[c_double] ( identifier[step] )
identifier[nintvls] = identifier[ctypes] . identifier[c_int] ( identifier[nintvls] )
identifier[libspice] . identifier[gfdist_c] ( identifier[target] , identifier[abcorr] , identifier[obsrvr] , identifier[relate] , identifier[refval] , identifier[adjust] ,
identifier[step] , identifier[nintvls] , identifier[ctypes] . identifier[byref] ( identifier[cnfine] ), identifier[ctypes] . identifier[byref] ( identifier[result] ))
keyword[return] identifier[result] | def gfdist(target, abcorr, obsrvr, relate, refval, adjust, step, nintvls, cnfine, result=None):
"""
Return the time window over which a specified constraint on
observer-target distance is met.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/gfdist_c.html
:param target: Name of the target body.
:type target: str
:param abcorr: Aberration correction flag.
:type abcorr: str
:param obsrvr: Name of the observing body.
:type obsrvr: str
:param relate: Relational operator.
:type relate: str
:param refval: Reference value.
:type refval: float
:param adjust: Adjustment value for absolute extrema searches.
:type adjust: float
:param step: Step size used for locating extrema and roots.
:type step: float
:param nintvls: Workspace window interval count.
:type nintvls: int
:param cnfine: SPICE window to which the search is confined.
:type cnfine: spiceypy.utils.support_types.SpiceCell
:param result: Optional SPICE window containing results.
:type result: spiceypy.utils.support_types.SpiceCell
"""
assert isinstance(cnfine, stypes.SpiceCell)
assert cnfine.is_double()
if result is None:
result = stypes.SPICEDOUBLE_CELL(2000) # depends on [control=['if'], data=['result']]
else:
assert isinstance(result, stypes.SpiceCell)
assert result.is_double()
target = stypes.stringToCharP(target)
abcorr = stypes.stringToCharP(abcorr)
obsrvr = stypes.stringToCharP(obsrvr)
relate = stypes.stringToCharP(relate)
refval = ctypes.c_double(refval)
adjust = ctypes.c_double(adjust)
step = ctypes.c_double(step)
nintvls = ctypes.c_int(nintvls)
libspice.gfdist_c(target, abcorr, obsrvr, relate, refval, adjust, step, nintvls, ctypes.byref(cnfine), ctypes.byref(result))
return result |
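
# A hedged usage sketch of the wrapper above, mirroring the NAIF gfdist
# example: find times in January 2007 when the Earth-Moon distance drops
# below 400,000 km. The meta-kernel path is a placeholder you must supply.
import spiceypy as spice
from spiceypy.utils import support_types as stypes

spice.furnsh('standard.tm')  # placeholder meta-kernel (LSK + planetary SPK)
cnfine = stypes.SPICEDOUBLE_CELL(2)
spice.wninsd(spice.str2et('2007 JAN 01'), spice.str2et('2007 FEB 01'), cnfine)

result = spice.gfdist('MOON', 'NONE', 'EARTH', '<', 400000.0, 0.0,
                      spice.spd(), 750, cnfine)
for i in range(spice.wncard(result)):
    beg, end = spice.wnfetd(result, i)
    print(spice.timout(beg, 'YYYY MON DD HR:MN:SC'), '->',
          spice.timout(end, 'YYYY MON DD HR:MN:SC'))
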
def get_map(self):
"""
        Collects all the point coordinates from this ``pyny.Space``
instance.
In order to keep the reference, it returns an index with the
following key:
* The first column is the Place.
* The second column is the body (-1: points, 0: surface,
n: polyhedron)
* The third column is the polygon (-n: holes)
* The fourth column is the point.
:returns: [index, points]
:rtype: list of ndarray
.. note:: This method automatically stores the solution in order
            to avoid repeating calculations if the user needs to call it
more than once.
.. seealso::
* :func:`get_seed`
* :func:`map2pyny`
* :func:`map2seed`
* :func:`explode_map`
"""
seed = self.get_seed()['places'] # template
points = []
index = []
for i, place in enumerate(seed):
# Set of points [_, -1, 0, _]
n_points = place['set_of_points'].shape[0]
if n_points != 0: # It can be False (no set_of_points)
points.append(place['set_of_points'])
index.append(np.vstack((np.tile(np.array([[i], [-1], [0]]),
n_points),
np.arange(n_points))))
#Holes [_, 0, -N, _]
for ii, hole in enumerate(place['surface']['holes']):
n_points = hole.shape[0]
points.append(hole)
index.append(np.vstack((np.tile(np.array([[i], [0], [-ii-1]]),
n_points),
np.arange(n_points))))
#Surface [_, 0, N, _]
for ii, polygon in enumerate(place['surface']['polygons']):
n_points = polygon.shape[0]
points.append(polygon)
index.append(np.vstack((np.tile(np.array([[i], [0], [ii]]),
n_points),
np.arange(n_points))))
            #Polyhedra [_, N, _, _]
if len(place['polyhedra']) != 0: # It can be False (no obstacles)
for iii, polygon_list in enumerate(place['polyhedra']):
for iv, polygon in enumerate(polygon_list):
n_points = polygon.shape[0]
points.append(polygon)
index.append(np.vstack((np.tile(np.array([[i], [1+iii],
[iv]]), n_points),
np.arange(n_points))))
index = np.concatenate(index, axis=1).T
points = np.concatenate(points)
self.map = [index, points]
return self.map | def function[get_map, parameter[self]]:
constant[
    Collects all the point coordinates from this ``pyny.Space``
instance.
In order to keep the reference, it returns an index with the
following key:
* The first column is the Place.
* The second column is the body (-1: points, 0: surface,
n: polyhedron)
* The third column is the polygon (-n: holes)
* The fourth column is the point.
:returns: [index, points]
:rtype: list of ndarray
.. note:: This method automatically stores the solution in order
        to avoid repeating calculations if the user needs to call it
more than once.
.. seealso::
* :func:`get_seed`
* :func:`map2pyny`
* :func:`map2seed`
* :func:`explode_map`
]
variable[seed] assign[=] call[call[name[self].get_seed, parameter[]]][constant[places]]
variable[points] assign[=] list[[]]
variable[index] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b246b580>, <ast.Name object at 0x7da1b246ba60>]]] in starred[call[name[enumerate], parameter[name[seed]]]] begin[:]
variable[n_points] assign[=] call[call[name[place]][constant[set_of_points]].shape][constant[0]]
if compare[name[n_points] not_equal[!=] constant[0]] begin[:]
call[name[points].append, parameter[call[name[place]][constant[set_of_points]]]]
call[name[index].append, parameter[call[name[np].vstack, parameter[tuple[[<ast.Call object at 0x7da20e9625c0>, <ast.Call object at 0x7da20e961f00>]]]]]]
for taget[tuple[[<ast.Name object at 0x7da20e961390>, <ast.Name object at 0x7da20e961f30>]]] in starred[call[name[enumerate], parameter[call[call[name[place]][constant[surface]]][constant[holes]]]]] begin[:]
variable[n_points] assign[=] call[name[hole].shape][constant[0]]
call[name[points].append, parameter[name[hole]]]
call[name[index].append, parameter[call[name[np].vstack, parameter[tuple[[<ast.Call object at 0x7da20e962ce0>, <ast.Call object at 0x7da20e963e20>]]]]]]
for taget[tuple[[<ast.Name object at 0x7da20e963640>, <ast.Name object at 0x7da20e962f80>]]] in starred[call[name[enumerate], parameter[call[call[name[place]][constant[surface]]][constant[polygons]]]]] begin[:]
variable[n_points] assign[=] call[name[polygon].shape][constant[0]]
call[name[points].append, parameter[name[polygon]]]
call[name[index].append, parameter[call[name[np].vstack, parameter[tuple[[<ast.Call object at 0x7da20e962320>, <ast.Call object at 0x7da20e960550>]]]]]]
if compare[call[name[len], parameter[call[name[place]][constant[polyhedra]]]] not_equal[!=] constant[0]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b24af7c0>, <ast.Name object at 0x7da1b24af070>]]] in starred[call[name[enumerate], parameter[call[name[place]][constant[polyhedra]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b24adb10>, <ast.Name object at 0x7da1b24ae6e0>]]] in starred[call[name[enumerate], parameter[name[polygon_list]]]] begin[:]
variable[n_points] assign[=] call[name[polygon].shape][constant[0]]
call[name[points].append, parameter[name[polygon]]]
call[name[index].append, parameter[call[name[np].vstack, parameter[tuple[[<ast.Call object at 0x7da1b24afc40>, <ast.Call object at 0x7da1b24af1c0>]]]]]]
variable[index] assign[=] call[name[np].concatenate, parameter[name[index]]].T
variable[points] assign[=] call[name[np].concatenate, parameter[name[points]]]
name[self].map assign[=] list[[<ast.Name object at 0x7da1b24af280>, <ast.Name object at 0x7da1b24afd60>]]
return[name[self].map] | keyword[def] identifier[get_map] ( identifier[self] ):
literal[string]
identifier[seed] = identifier[self] . identifier[get_seed] ()[ literal[string] ]
identifier[points] =[]
identifier[index] =[]
keyword[for] identifier[i] , identifier[place] keyword[in] identifier[enumerate] ( identifier[seed] ):
identifier[n_points] = identifier[place] [ literal[string] ]. identifier[shape] [ literal[int] ]
keyword[if] identifier[n_points] != literal[int] :
identifier[points] . identifier[append] ( identifier[place] [ literal[string] ])
identifier[index] . identifier[append] ( identifier[np] . identifier[vstack] (( identifier[np] . identifier[tile] ( identifier[np] . identifier[array] ([[ identifier[i] ],[- literal[int] ],[ literal[int] ]]),
identifier[n_points] ),
identifier[np] . identifier[arange] ( identifier[n_points] ))))
keyword[for] identifier[ii] , identifier[hole] keyword[in] identifier[enumerate] ( identifier[place] [ literal[string] ][ literal[string] ]):
identifier[n_points] = identifier[hole] . identifier[shape] [ literal[int] ]
identifier[points] . identifier[append] ( identifier[hole] )
identifier[index] . identifier[append] ( identifier[np] . identifier[vstack] (( identifier[np] . identifier[tile] ( identifier[np] . identifier[array] ([[ identifier[i] ],[ literal[int] ],[- identifier[ii] - literal[int] ]]),
identifier[n_points] ),
identifier[np] . identifier[arange] ( identifier[n_points] ))))
keyword[for] identifier[ii] , identifier[polygon] keyword[in] identifier[enumerate] ( identifier[place] [ literal[string] ][ literal[string] ]):
identifier[n_points] = identifier[polygon] . identifier[shape] [ literal[int] ]
identifier[points] . identifier[append] ( identifier[polygon] )
identifier[index] . identifier[append] ( identifier[np] . identifier[vstack] (( identifier[np] . identifier[tile] ( identifier[np] . identifier[array] ([[ identifier[i] ],[ literal[int] ],[ identifier[ii] ]]),
identifier[n_points] ),
identifier[np] . identifier[arange] ( identifier[n_points] ))))
keyword[if] identifier[len] ( identifier[place] [ literal[string] ])!= literal[int] :
keyword[for] identifier[iii] , identifier[polygon_list] keyword[in] identifier[enumerate] ( identifier[place] [ literal[string] ]):
keyword[for] identifier[iv] , identifier[polygon] keyword[in] identifier[enumerate] ( identifier[polygon_list] ):
identifier[n_points] = identifier[polygon] . identifier[shape] [ literal[int] ]
identifier[points] . identifier[append] ( identifier[polygon] )
identifier[index] . identifier[append] ( identifier[np] . identifier[vstack] (( identifier[np] . identifier[tile] ( identifier[np] . identifier[array] ([[ identifier[i] ],[ literal[int] + identifier[iii] ],
[ identifier[iv] ]]), identifier[n_points] ),
identifier[np] . identifier[arange] ( identifier[n_points] ))))
identifier[index] = identifier[np] . identifier[concatenate] ( identifier[index] , identifier[axis] = literal[int] ). identifier[T]
identifier[points] = identifier[np] . identifier[concatenate] ( identifier[points] )
identifier[self] . identifier[map] =[ identifier[index] , identifier[points] ]
keyword[return] identifier[self] . identifier[map] | def get_map(self):
"""
    Collects all the point coordinates from this ``pyny.Space``
instance.
In order to keep the reference, it returns an index with the
following key:
* The first column is the Place.
* The second column is the body (-1: points, 0: surface,
n: polyhedron)
* The third column is the polygon (-n: holes)
* The fourth column is the point.
:returns: [index, points]
:rtype: list of ndarray
.. note:: This method automatically stores the solution in order
        to avoid repeating calculations if the user needs to call it
more than once.
.. seealso::
* :func:`get_seed`
* :func:`map2pyny`
* :func:`map2seed`
* :func:`explode_map`
"""
seed = self.get_seed()['places'] # template
points = []
index = []
for (i, place) in enumerate(seed): # Set of points [_, -1, 0, _]
n_points = place['set_of_points'].shape[0]
if n_points != 0: # It can be False (no set_of_points)
points.append(place['set_of_points'])
index.append(np.vstack((np.tile(np.array([[i], [-1], [0]]), n_points), np.arange(n_points)))) # depends on [control=['if'], data=['n_points']] #Holes [_, 0, -N, _]
for (ii, hole) in enumerate(place['surface']['holes']):
n_points = hole.shape[0]
points.append(hole)
index.append(np.vstack((np.tile(np.array([[i], [0], [-ii - 1]]), n_points), np.arange(n_points)))) # depends on [control=['for'], data=[]] #Surface [_, 0, N, _]
for (ii, polygon) in enumerate(place['surface']['polygons']):
n_points = polygon.shape[0]
points.append(polygon)
            index.append(np.vstack((np.tile(np.array([[i], [0], [ii]]), n_points), np.arange(n_points)))) # depends on [control=['for'], data=[]] #Polyhedra [_, N, _, _]
if len(place['polyhedra']) != 0: # It can be False (no obstacles)
for (iii, polygon_list) in enumerate(place['polyhedra']):
for (iv, polygon) in enumerate(polygon_list):
n_points = polygon.shape[0]
points.append(polygon)
index.append(np.vstack((np.tile(np.array([[i], [1 + iii], [iv]]), n_points), np.arange(n_points)))) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
index = np.concatenate(index, axis=1).T
points = np.concatenate(points)
self.map = [index, points]
return self.map |
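
# A self-contained sketch of the indexing idiom used in get_map: for a body
# keyed (place=0, body=-1, polygon=0) with 3 points, np.tile repeats the key
# per point and np.arange appends the point number.
import numpy as np

i, body, poly, n_points = 0, -1, 0, 3
block = np.vstack((np.tile(np.array([[i], [body], [poly]]), n_points),
                   np.arange(n_points)))
print(block.T)
# [[ 0 -1  0  0]
#  [ 0 -1  0  1]
#  [ 0 -1  0  2]]
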
def _add_signal_history(self, df, symbol):
""" Initilize signal history """
if symbol not in self.signals.keys() or len(self.signals[symbol]) == 0:
self.signals[symbol] = [nan] * len(df.index)
else:
self.signals[symbol].append(nan)
self.signals[symbol] = self.signals[symbol][-len(df.index):]
signal_count = len(self.signals[symbol])
df.loc[-signal_count:, 'signal'] = self.signals[symbol][-signal_count:]
return df | def function[_add_signal_history, parameter[self, df, symbol]]:
    constant[ Initialize signal history ]
if <ast.BoolOp object at 0x7da204622230> begin[:]
call[name[self].signals][name[symbol]] assign[=] binary_operation[list[[<ast.Name object at 0x7da20c7c9a50>]] * call[name[len], parameter[name[df].index]]]
call[name[self].signals][name[symbol]] assign[=] call[call[name[self].signals][name[symbol]]][<ast.Slice object at 0x7da1b1c1a620>]
variable[signal_count] assign[=] call[name[len], parameter[call[name[self].signals][name[symbol]]]]
call[name[df].loc][tuple[[<ast.Slice object at 0x7da1b1c19240>, <ast.Constant object at 0x7da1b1c18280>]]] assign[=] call[call[name[self].signals][name[symbol]]][<ast.Slice object at 0x7da1b1c1b640>]
return[name[df]] | keyword[def] identifier[_add_signal_history] ( identifier[self] , identifier[df] , identifier[symbol] ):
literal[string]
keyword[if] identifier[symbol] keyword[not] keyword[in] identifier[self] . identifier[signals] . identifier[keys] () keyword[or] identifier[len] ( identifier[self] . identifier[signals] [ identifier[symbol] ])== literal[int] :
identifier[self] . identifier[signals] [ identifier[symbol] ]=[ identifier[nan] ]* identifier[len] ( identifier[df] . identifier[index] )
keyword[else] :
identifier[self] . identifier[signals] [ identifier[symbol] ]. identifier[append] ( identifier[nan] )
identifier[self] . identifier[signals] [ identifier[symbol] ]= identifier[self] . identifier[signals] [ identifier[symbol] ][- identifier[len] ( identifier[df] . identifier[index] ):]
identifier[signal_count] = identifier[len] ( identifier[self] . identifier[signals] [ identifier[symbol] ])
identifier[df] . identifier[loc] [- identifier[signal_count] :, literal[string] ]= identifier[self] . identifier[signals] [ identifier[symbol] ][- identifier[signal_count] :]
keyword[return] identifier[df] | def _add_signal_history(self, df, symbol):
""" Initilize signal history """
if symbol not in self.signals.keys() or len(self.signals[symbol]) == 0:
self.signals[symbol] = [nan] * len(df.index) # depends on [control=['if'], data=[]]
else:
self.signals[symbol].append(nan)
self.signals[symbol] = self.signals[symbol][-len(df.index):]
signal_count = len(self.signals[symbol])
df.loc[-signal_count:, 'signal'] = self.signals[symbol][-signal_count:]
return df |
def check_file_size(file_size, path):
"""
Raise an error if we didn't get all of the file.
:param file_size: int: size of this file
:param path: str path where we downloaded the file to
"""
stat_info = os.stat(path)
if stat_info.st_size != file_size:
format_str = "Error occurred downloading {}. Got a file size {}. Expected file size:{}"
msg = format_str.format(path, stat_info.st_size, file_size)
raise ValueError(msg) | def function[check_file_size, parameter[file_size, path]]:
constant[
Raise an error if we didn't get all of the file.
:param file_size: int: size of this file
:param path: str path where we downloaded the file to
]
variable[stat_info] assign[=] call[name[os].stat, parameter[name[path]]]
if compare[name[stat_info].st_size not_equal[!=] name[file_size]] begin[:]
variable[format_str] assign[=] constant[Error occurred downloading {}. Got a file size {}. Expected file size:{}]
variable[msg] assign[=] call[name[format_str].format, parameter[name[path], name[stat_info].st_size, name[file_size]]]
<ast.Raise object at 0x7da1b1a5d540> | keyword[def] identifier[check_file_size] ( identifier[file_size] , identifier[path] ):
literal[string]
identifier[stat_info] = identifier[os] . identifier[stat] ( identifier[path] )
keyword[if] identifier[stat_info] . identifier[st_size] != identifier[file_size] :
identifier[format_str] = literal[string]
identifier[msg] = identifier[format_str] . identifier[format] ( identifier[path] , identifier[stat_info] . identifier[st_size] , identifier[file_size] )
keyword[raise] identifier[ValueError] ( identifier[msg] ) | def check_file_size(file_size, path):
"""
Raise an error if we didn't get all of the file.
:param file_size: int: size of this file
:param path: str path where we downloaded the file to
"""
stat_info = os.stat(path)
if stat_info.st_size != file_size:
format_str = 'Error occurred downloading {}. Got a file size {}. Expected file size:{}'
msg = format_str.format(path, stat_info.st_size, file_size)
raise ValueError(msg) # depends on [control=['if'], data=['file_size']] |
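
# A quick, self-contained exercise of the size check above using a temp file.
import os
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b'hello')
check_file_size(5, tmp.name)      # matches: returns silently
try:
    check_file_size(6, tmp.name)  # mismatch: raises ValueError
except ValueError as err:
    print(err)
os.remove(tmp.name)
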
def get_environments():
"""
    :return: all known environments
"""
LOGGER.debug("EnvironmentService.get_environments")
args = {'http_operation': 'GET', 'operation_path': ''}
response = EnvironmentService.requester.call(args)
ret = None
if response.rc == 0:
ret = []
for environment in response.response_content['environments']:
ret.append(Environment.json_2_environment(environment))
elif response.rc != 404:
err_msg = 'EnvironmentService.get_environments - Problem while getting environments. ' \
'Reason: ' + str(response.response_content) + '-' + str(response.error_message) + \
" (" + str(response.rc) + ")"
LOGGER.warning(err_msg)
return ret | def function[get_environments, parameter[]]:
constant[
    :return: all known environments
]
call[name[LOGGER].debug, parameter[constant[EnvironmentService.get_environments]]]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b1450b80>, <ast.Constant object at 0x7da1b1450eb0>], [<ast.Constant object at 0x7da1b14504c0>, <ast.Constant object at 0x7da1b1451ff0>]]
variable[response] assign[=] call[name[EnvironmentService].requester.call, parameter[name[args]]]
variable[ret] assign[=] constant[None]
if compare[name[response].rc equal[==] constant[0]] begin[:]
variable[ret] assign[=] list[[]]
for taget[name[environment]] in starred[call[name[response].response_content][constant[environments]]] begin[:]
call[name[ret].append, parameter[call[name[Environment].json_2_environment, parameter[name[environment]]]]]
return[name[ret]] | keyword[def] identifier[get_environments] ():
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[response] = identifier[EnvironmentService] . identifier[requester] . identifier[call] ( identifier[args] )
identifier[ret] = keyword[None]
keyword[if] identifier[response] . identifier[rc] == literal[int] :
identifier[ret] =[]
keyword[for] identifier[environment] keyword[in] identifier[response] . identifier[response_content] [ literal[string] ]:
identifier[ret] . identifier[append] ( identifier[Environment] . identifier[json_2_environment] ( identifier[environment] ))
keyword[elif] identifier[response] . identifier[rc] != literal[int] :
identifier[err_msg] = literal[string] literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+ literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
identifier[LOGGER] . identifier[warning] ( identifier[err_msg] )
keyword[return] identifier[ret] | def get_environments():
"""
    :return: all known environments
"""
LOGGER.debug('EnvironmentService.get_environments')
args = {'http_operation': 'GET', 'operation_path': ''}
response = EnvironmentService.requester.call(args)
ret = None
if response.rc == 0:
ret = []
for environment in response.response_content['environments']:
ret.append(Environment.json_2_environment(environment)) # depends on [control=['for'], data=['environment']] # depends on [control=['if'], data=[]]
elif response.rc != 404:
err_msg = 'EnvironmentService.get_environments - Problem while getting environments. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + ' (' + str(response.rc) + ')'
LOGGER.warning(err_msg) # depends on [control=['if'], data=[]]
return ret |
def _handle_end_node(self):
"""
Handle closing node element
"""
self._result.append(Node(result=self._result, **self._curr))
self._curr = {} | def function[_handle_end_node, parameter[self]]:
constant[
Handle closing node element
]
call[name[self]._result.append, parameter[call[name[Node], parameter[]]]]
name[self]._curr assign[=] dictionary[[], []] | keyword[def] identifier[_handle_end_node] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_result] . identifier[append] ( identifier[Node] ( identifier[result] = identifier[self] . identifier[_result] ,** identifier[self] . identifier[_curr] ))
identifier[self] . identifier[_curr] ={} | def _handle_end_node(self):
"""
Handle closing node element
"""
self._result.append(Node(result=self._result, **self._curr))
self._curr = {} |
def _gen_identity(self, key, param=None):
"""generate identity according to key and param given"""
if self.identity_generator and param is not None:
if self.serializer:
param = self.serializer.serialize(param)
if self.compressor:
param = self.compressor.compress(param)
identity = self.identity_generator.generate(key, param)
else:
identity = key
return identity | def function[_gen_identity, parameter[self, key, param]]:
    constant[Generate an identity from the given key and param.]
if <ast.BoolOp object at 0x7da1b07cee00> begin[:]
if name[self].serializer begin[:]
variable[param] assign[=] call[name[self].serializer.serialize, parameter[name[param]]]
if name[self].compressor begin[:]
variable[param] assign[=] call[name[self].compressor.compress, parameter[name[param]]]
variable[identity] assign[=] call[name[self].identity_generator.generate, parameter[name[key], name[param]]]
return[name[identity]] | keyword[def] identifier[_gen_identity] ( identifier[self] , identifier[key] , identifier[param] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[identity_generator] keyword[and] identifier[param] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[serializer] :
identifier[param] = identifier[self] . identifier[serializer] . identifier[serialize] ( identifier[param] )
keyword[if] identifier[self] . identifier[compressor] :
identifier[param] = identifier[self] . identifier[compressor] . identifier[compress] ( identifier[param] )
identifier[identity] = identifier[self] . identifier[identity_generator] . identifier[generate] ( identifier[key] , identifier[param] )
keyword[else] :
identifier[identity] = identifier[key]
keyword[return] identifier[identity] | def _gen_identity(self, key, param=None):
"""generate identity according to key and param given"""
if self.identity_generator and param is not None:
if self.serializer:
param = self.serializer.serialize(param) # depends on [control=['if'], data=[]]
if self.compressor:
param = self.compressor.compress(param) # depends on [control=['if'], data=[]]
identity = self.identity_generator.generate(key, param) # depends on [control=['if'], data=[]]
else:
identity = key
return identity |
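
# The identity_generator above only needs a generate(key, param) method.
# A hypothetical generator (an assumption, not the library's class) that
# suffixes the key with a hash of the serialized/compressed param bytes:
import hashlib

class HashIdentityGenerator:
    def generate(self, key, param):
        digest = hashlib.sha1(param).hexdigest()[:12]
        return '{}:{}'.format(key, digest)

print(HashIdentityGenerator().generate('user', b'{"id": 1}'))  # 'user:' + 12 hex chars
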
def attach_hardware(self, ticket_id=None, hardware_id=None):
"""Attach hardware to a ticket.
:param integer ticket_id: the id of the ticket to attach to
:param integer hardware_id: the id of the hardware to attach
:returns: dict -- The new ticket attachment
"""
return self.ticket.addAttachedHardware(hardware_id, id=ticket_id) | def function[attach_hardware, parameter[self, ticket_id, hardware_id]]:
constant[Attach hardware to a ticket.
:param integer ticket_id: the id of the ticket to attach to
:param integer hardware_id: the id of the hardware to attach
:returns: dict -- The new ticket attachment
]
return[call[name[self].ticket.addAttachedHardware, parameter[name[hardware_id]]]] | keyword[def] identifier[attach_hardware] ( identifier[self] , identifier[ticket_id] = keyword[None] , identifier[hardware_id] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[ticket] . identifier[addAttachedHardware] ( identifier[hardware_id] , identifier[id] = identifier[ticket_id] ) | def attach_hardware(self, ticket_id=None, hardware_id=None):
"""Attach hardware to a ticket.
:param integer ticket_id: the id of the ticket to attach to
:param integer hardware_id: the id of the hardware to attach
:returns: dict -- The new ticket attachment
"""
return self.ticket.addAttachedHardware(hardware_id, id=ticket_id) |
def update_instance(InstanceId=None, LayerIds=None, InstanceType=None, AutoScalingType=None, Hostname=None, Os=None, AmiId=None, SshKeyName=None, Architecture=None, InstallUpdatesOnBoot=None, EbsOptimized=None, AgentVersion=None):
"""
Updates a specified instance.
See also: AWS API Documentation
:example: response = client.update_instance(
InstanceId='string',
LayerIds=[
'string',
],
InstanceType='string',
AutoScalingType='load'|'timer',
Hostname='string',
Os='string',
AmiId='string',
SshKeyName='string',
Architecture='x86_64'|'i386',
InstallUpdatesOnBoot=True|False,
EbsOptimized=True|False,
AgentVersion='string'
)
:type InstanceId: string
:param InstanceId: [REQUIRED]
The instance ID.
:type LayerIds: list
:param LayerIds: The instance's layer IDs.
(string) --
:type InstanceType: string
:param InstanceType: The instance type, such as t2.micro . For a list of supported instance types, open the stack in the console, choose Instances , and choose + Instance . The Size list contains the currently supported types. For more information, see Instance Families and Types . The parameter values that you use to specify the various types are in the API Name column of the Available Instance Types table.
:type AutoScalingType: string
:param AutoScalingType: For load-based or time-based instances, the type. Windows stacks can use only time-based instances.
:type Hostname: string
:param Hostname: The instance host name.
:type Os: string
:param Os: The instance's operating system, which must be set to one of the following. You cannot update an instance that is using a custom AMI.
A supported Linux operating system: An Amazon Linux version, such as Amazon Linux 2016.09 , Amazon Linux 2016.03 , Amazon Linux 2015.09 , or Amazon Linux 2015.03 .
A supported Ubuntu operating system, such as Ubuntu 16.04 LTS , Ubuntu 14.04 LTS , or Ubuntu 12.04 LTS .
CentOS Linux 7
Red Hat Enterprise Linux 7
A supported Windows operating system, such as Microsoft Windows Server 2012 R2 Base , Microsoft Windows Server 2012 R2 with SQL Server Express , Microsoft Windows Server 2012 R2 with SQL Server Standard , or Microsoft Windows Server 2012 R2 with SQL Server Web .
For more information on the supported operating systems, see AWS OpsWorks Stacks Operating Systems .
The default option is the current Amazon Linux version. If you set this parameter to Custom , you must use the AmiId parameter to specify the custom AMI that you want to use. For more information on the supported operating systems, see Operating Systems . For more information on how to use custom AMIs with OpsWorks, see Using Custom AMIs .
Note
You can specify a different Linux operating system for the updated stack, but you cannot change from Linux to Windows or Windows to Linux.
:type AmiId: string
:param AmiId: The ID of the AMI that was used to create the instance. The value of this parameter must be the same AMI ID that the instance is already using. You cannot apply a new AMI to an instance by running UpdateInstance. UpdateInstance does not work on instances that are using custom AMIs.
:type SshKeyName: string
:param SshKeyName: The instance's Amazon EC2 key name.
:type Architecture: string
:param Architecture: The instance architecture. Instance types do not necessarily support both architectures. For a list of the architectures that are supported by the different instance types, see Instance Families and Types .
:type InstallUpdatesOnBoot: boolean
:param InstallUpdatesOnBoot: Whether to install operating system and package updates when the instance boots. The default value is true . To control when updates are installed, set this value to false . You must then update your instances manually by using CreateDeployment to run the update_dependencies stack command or by manually running yum (Amazon Linux) or apt-get (Ubuntu) on the instances.
Note
We strongly recommend using the default value of true , to ensure that your instances have the latest security updates.
:type EbsOptimized: boolean
:param EbsOptimized: This property cannot be updated.
:type AgentVersion: string
:param AgentVersion: The default AWS OpsWorks Stacks agent version. You have the following options:
INHERIT - Use the stack's default agent version setting.
version_number - Use the specified agent version. This value overrides the stack's default setting. To update the agent version, you must edit the instance configuration and specify a new version. AWS OpsWorks Stacks then automatically installs that version on the instance.
The default setting is INHERIT . To specify an agent version, you must use the complete version number, not the abbreviated number shown on the console. For a list of available agent version numbers, call DescribeAgentVersions .
AgentVersion cannot be set to Chef 12.2.
"""
pass | def function[update_instance, parameter[InstanceId, LayerIds, InstanceType, AutoScalingType, Hostname, Os, AmiId, SshKeyName, Architecture, InstallUpdatesOnBoot, EbsOptimized, AgentVersion]]:
constant[
Updates a specified instance.
See also: AWS API Documentation
:example: response = client.update_instance(
InstanceId='string',
LayerIds=[
'string',
],
InstanceType='string',
AutoScalingType='load'|'timer',
Hostname='string',
Os='string',
AmiId='string',
SshKeyName='string',
Architecture='x86_64'|'i386',
InstallUpdatesOnBoot=True|False,
EbsOptimized=True|False,
AgentVersion='string'
)
:type InstanceId: string
:param InstanceId: [REQUIRED]
The instance ID.
:type LayerIds: list
:param LayerIds: The instance's layer IDs.
(string) --
:type InstanceType: string
:param InstanceType: The instance type, such as t2.micro . For a list of supported instance types, open the stack in the console, choose Instances , and choose + Instance . The Size list contains the currently supported types. For more information, see Instance Families and Types . The parameter values that you use to specify the various types are in the API Name column of the Available Instance Types table.
:type AutoScalingType: string
:param AutoScalingType: For load-based or time-based instances, the type. Windows stacks can use only time-based instances.
:type Hostname: string
:param Hostname: The instance host name.
:type Os: string
:param Os: The instance's operating system, which must be set to one of the following. You cannot update an instance that is using a custom AMI.
A supported Linux operating system: An Amazon Linux version, such as Amazon Linux 2016.09 , Amazon Linux 2016.03 , Amazon Linux 2015.09 , or Amazon Linux 2015.03 .
A supported Ubuntu operating system, such as Ubuntu 16.04 LTS , Ubuntu 14.04 LTS , or Ubuntu 12.04 LTS .
CentOS Linux 7
Red Hat Enterprise Linux 7
A supported Windows operating system, such as Microsoft Windows Server 2012 R2 Base , Microsoft Windows Server 2012 R2 with SQL Server Express , Microsoft Windows Server 2012 R2 with SQL Server Standard , or Microsoft Windows Server 2012 R2 with SQL Server Web .
For more information on the supported operating systems, see AWS OpsWorks Stacks Operating Systems .
The default option is the current Amazon Linux version. If you set this parameter to Custom , you must use the AmiId parameter to specify the custom AMI that you want to use. For more information on the supported operating systems, see Operating Systems . For more information on how to use custom AMIs with OpsWorks, see Using Custom AMIs .
Note
You can specify a different Linux operating system for the updated stack, but you cannot change from Linux to Windows or Windows to Linux.
:type AmiId: string
:param AmiId: The ID of the AMI that was used to create the instance. The value of this parameter must be the same AMI ID that the instance is already using. You cannot apply a new AMI to an instance by running UpdateInstance. UpdateInstance does not work on instances that are using custom AMIs.
:type SshKeyName: string
:param SshKeyName: The instance's Amazon EC2 key name.
:type Architecture: string
:param Architecture: The instance architecture. Instance types do not necessarily support both architectures. For a list of the architectures that are supported by the different instance types, see Instance Families and Types .
:type InstallUpdatesOnBoot: boolean
:param InstallUpdatesOnBoot: Whether to install operating system and package updates when the instance boots. The default value is true . To control when updates are installed, set this value to false . You must then update your instances manually by using CreateDeployment to run the update_dependencies stack command or by manually running yum (Amazon Linux) or apt-get (Ubuntu) on the instances.
Note
We strongly recommend using the default value of true , to ensure that your instances have the latest security updates.
:type EbsOptimized: boolean
:param EbsOptimized: This property cannot be updated.
:type AgentVersion: string
:param AgentVersion: The default AWS OpsWorks Stacks agent version. You have the following options:
INHERIT - Use the stack's default agent version setting.
version_number - Use the specified agent version. This value overrides the stack's default setting. To update the agent version, you must edit the instance configuration and specify a new version. AWS OpsWorks Stacks then automatically installs that version on the instance.
The default setting is INHERIT . To specify an agent version, you must use the complete version number, not the abbreviated number shown on the console. For a list of available agent version numbers, call DescribeAgentVersions .
AgentVersion cannot be set to Chef 12.2.
]
pass | keyword[def] identifier[update_instance] ( identifier[InstanceId] = keyword[None] , identifier[LayerIds] = keyword[None] , identifier[InstanceType] = keyword[None] , identifier[AutoScalingType] = keyword[None] , identifier[Hostname] = keyword[None] , identifier[Os] = keyword[None] , identifier[AmiId] = keyword[None] , identifier[SshKeyName] = keyword[None] , identifier[Architecture] = keyword[None] , identifier[InstallUpdatesOnBoot] = keyword[None] , identifier[EbsOptimized] = keyword[None] , identifier[AgentVersion] = keyword[None] ):
literal[string]
keyword[pass] | def update_instance(InstanceId=None, LayerIds=None, InstanceType=None, AutoScalingType=None, Hostname=None, Os=None, AmiId=None, SshKeyName=None, Architecture=None, InstallUpdatesOnBoot=None, EbsOptimized=None, AgentVersion=None):
"""
Updates a specified instance.
See also: AWS API Documentation
:example: response = client.update_instance(
InstanceId='string',
LayerIds=[
'string',
],
InstanceType='string',
AutoScalingType='load'|'timer',
Hostname='string',
Os='string',
AmiId='string',
SshKeyName='string',
Architecture='x86_64'|'i386',
InstallUpdatesOnBoot=True|False,
EbsOptimized=True|False,
AgentVersion='string'
)
:type InstanceId: string
:param InstanceId: [REQUIRED]
The instance ID.
:type LayerIds: list
:param LayerIds: The instance's layer IDs.
(string) --
:type InstanceType: string
:param InstanceType: The instance type, such as t2.micro . For a list of supported instance types, open the stack in the console, choose Instances , and choose + Instance . The Size list contains the currently supported types. For more information, see Instance Families and Types . The parameter values that you use to specify the various types are in the API Name column of the Available Instance Types table.
:type AutoScalingType: string
:param AutoScalingType: For load-based or time-based instances, the type. Windows stacks can use only time-based instances.
:type Hostname: string
:param Hostname: The instance host name.
:type Os: string
:param Os: The instance's operating system, which must be set to one of the following. You cannot update an instance that is using a custom AMI.
A supported Linux operating system: An Amazon Linux version, such as Amazon Linux 2016.09 , Amazon Linux 2016.03 , Amazon Linux 2015.09 , or Amazon Linux 2015.03 .
A supported Ubuntu operating system, such as Ubuntu 16.04 LTS , Ubuntu 14.04 LTS , or Ubuntu 12.04 LTS .
CentOS Linux 7
Red Hat Enterprise Linux 7
A supported Windows operating system, such as Microsoft Windows Server 2012 R2 Base , Microsoft Windows Server 2012 R2 with SQL Server Express , Microsoft Windows Server 2012 R2 with SQL Server Standard , or Microsoft Windows Server 2012 R2 with SQL Server Web .
For more information on the supported operating systems, see AWS OpsWorks Stacks Operating Systems .
The default option is the current Amazon Linux version. If you set this parameter to Custom , you must use the AmiId parameter to specify the custom AMI that you want to use. For more information on the supported operating systems, see Operating Systems . For more information on how to use custom AMIs with OpsWorks, see Using Custom AMIs .
Note
You can specify a different Linux operating system for the updated stack, but you cannot change from Linux to Windows or Windows to Linux.
:type AmiId: string
:param AmiId: The ID of the AMI that was used to create the instance. The value of this parameter must be the same AMI ID that the instance is already using. You cannot apply a new AMI to an instance by running UpdateInstance. UpdateInstance does not work on instances that are using custom AMIs.
:type SshKeyName: string
:param SshKeyName: The instance's Amazon EC2 key name.
:type Architecture: string
:param Architecture: The instance architecture. Instance types do not necessarily support both architectures. For a list of the architectures that are supported by the different instance types, see Instance Families and Types .
:type InstallUpdatesOnBoot: boolean
:param InstallUpdatesOnBoot: Whether to install operating system and package updates when the instance boots. The default value is true . To control when updates are installed, set this value to false . You must then update your instances manually by using CreateDeployment to run the update_dependencies stack command or by manually running yum (Amazon Linux) or apt-get (Ubuntu) on the instances.
Note
We strongly recommend using the default value of true , to ensure that your instances have the latest security updates.
:type EbsOptimized: boolean
:param EbsOptimized: This property cannot be updated.
:type AgentVersion: string
:param AgentVersion: The default AWS OpsWorks Stacks agent version. You have the following options:
INHERIT - Use the stack's default agent version setting.
version_number - Use the specified agent version. This value overrides the stack's default setting. To update the agent version, you must edit the instance configuration and specify a new version. AWS OpsWorks Stacks then automatically installs that version on the instance.
The default setting is INHERIT . To specify an agent version, you must use the complete version number, not the abbreviated number shown on the console. For a list of available agent version numbers, call DescribeAgentVersions .
AgentVersion cannot be set to Chef 12.2.
"""
pass |
def report_exception(self, filename, exc):
"""
This method is used when self.parser raises an Exception so that
    we can report a customized :class:`EventReport` object with info about the exception.
"""
# Build fake event.
event = AbinitError(src_file="Unknown", src_line=0, message=str(exc))
return EventReport(filename, events=[event]) | def function[report_exception, parameter[self, filename, exc]]:
constant[
This method is used when self.parser raises an Exception so that
    we can report a customized :class:`EventReport` object with info about the exception.
]
variable[event] assign[=] call[name[AbinitError], parameter[]]
return[call[name[EventReport], parameter[name[filename]]]] | keyword[def] identifier[report_exception] ( identifier[self] , identifier[filename] , identifier[exc] ):
literal[string]
identifier[event] = identifier[AbinitError] ( identifier[src_file] = literal[string] , identifier[src_line] = literal[int] , identifier[message] = identifier[str] ( identifier[exc] ))
keyword[return] identifier[EventReport] ( identifier[filename] , identifier[events] =[ identifier[event] ]) | def report_exception(self, filename, exc):
"""
This method is used when self.parser raises an Exception so that
we can report a customized :class:`EventReport` object with info about the exception.
"""
# Build fake event.
event = AbinitError(src_file='Unknown', src_line=0, message=str(exc))
return EventReport(filename, events=[event]) |
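Usage sketch for report_exception above — a minimal, runnable stand-in. AbinitError and EventReport here are hypothetical shims with just enough surface to execute, not the real classes from the source package.

class AbinitError:
    # Hypothetical shim mirroring the constructor call used above.
    def __init__(self, src_file, src_line, message):
        self.src_file, self.src_line, self.message = src_file, src_line, message

class EventReport:
    # Hypothetical shim: just stores the filename and event list.
    def __init__(self, filename, events):
        self.filename, self.events = filename, events

class FakeParserHost:
    def report_exception(self, filename, exc):
        # Same shape as the method above: wrap the exception in a fake event.
        event = AbinitError(src_file="Unknown", src_line=0, message=str(exc))
        return EventReport(filename, events=[event])

report = FakeParserHost().report_exception("run.abo", ValueError("bad token"))
assert report.events[0].message == "bad token"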
def get_goone2ntletter(self, go2dcnt, depth2goobjs):
"""Assign letters to depth-01 GO terms ordered using descendants cnt."""
# 1. Group level-01/depth-01 GO terms by namespace
ns2dcntgoobj = cx.defaultdict(list)
for goobj in depth2goobjs[1]:
dcnt = go2dcnt[goobj.id]
ns2dcntgoobj[goobj.namespace].append((dcnt, goobj))
# 2. Assign letters to level-01/depth-01 GO terms
go2nt = {}
ntobj = cx.namedtuple("NtGoLetters", "D1 dcnt goobj")
_go2abc = self.go2letter
letters = list(chain(range(ord('A'), ord('Z') + 1), range(ord('a'), ord('z') + 1)))
for list_dcnt_goobj in ns2dcntgoobj.values():
letter_idx = 0
for dcnt, goobj in sorted(list_dcnt_goobj, key=lambda t: t[0], reverse=True):
letter = chr(letters[letter_idx]) if _go2abc is None else _go2abc.get(goobj.id, '')
go2nt[goobj.id] = ntobj._make([letter, dcnt, goobj])
letter_idx += 1
return go2nt | def function[get_goone2ntletter, parameter[self, go2dcnt, depth2goobjs]]:
constant[Assign letters to depth-01 GO terms, ordered by descendant count.]
variable[ns2dcntgoobj] assign[=] call[name[cx].defaultdict, parameter[name[list]]]
for taget[name[goobj]] in starred[call[name[depth2goobjs]][constant[1]]] begin[:]
variable[dcnt] assign[=] call[name[go2dcnt]][name[goobj].id]
call[call[name[ns2dcntgoobj]][name[goobj].namespace].append, parameter[tuple[[<ast.Name object at 0x7da20e9b1330>, <ast.Name object at 0x7da20e9b1870>]]]]
variable[go2nt] assign[=] dictionary[[], []]
variable[ntobj] assign[=] call[name[cx].namedtuple, parameter[constant[NtGoLetters], constant[D1 dcnt goobj]]]
variable[_go2abc] assign[=] name[self].go2letter
variable[letters] assign[=] call[name[list], parameter[call[name[chain], parameter[call[name[range], parameter[call[name[ord], parameter[constant[A]]], binary_operation[call[name[ord], parameter[constant[Z]]] + constant[1]]]], call[name[range], parameter[call[name[ord], parameter[constant[a]]], binary_operation[call[name[ord], parameter[constant[z]]] + constant[1]]]]]]]]
for taget[name[list_dcnt_goobj]] in starred[call[name[ns2dcntgoobj].values, parameter[]]] begin[:]
variable[letter_idx] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da20e9b2cb0>, <ast.Name object at 0x7da20e9b1930>]]] in starred[call[name[sorted], parameter[name[list_dcnt_goobj]]]] begin[:]
variable[letter] assign[=] <ast.IfExp object at 0x7da20e9b1e10>
call[name[go2nt]][name[goobj].id] assign[=] call[name[ntobj]._make, parameter[list[[<ast.Name object at 0x7da20e9b3820>, <ast.Name object at 0x7da20e9b1780>, <ast.Name object at 0x7da18bc707c0>]]]]
<ast.AugAssign object at 0x7da18bc70460>
return[name[go2nt]] | keyword[def] identifier[get_goone2ntletter] ( identifier[self] , identifier[go2dcnt] , identifier[depth2goobjs] ):
literal[string]
identifier[ns2dcntgoobj] = identifier[cx] . identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[goobj] keyword[in] identifier[depth2goobjs] [ literal[int] ]:
identifier[dcnt] = identifier[go2dcnt] [ identifier[goobj] . identifier[id] ]
identifier[ns2dcntgoobj] [ identifier[goobj] . identifier[namespace] ]. identifier[append] (( identifier[dcnt] , identifier[goobj] ))
identifier[go2nt] ={}
identifier[ntobj] = identifier[cx] . identifier[namedtuple] ( literal[string] , literal[string] )
identifier[_go2abc] = identifier[self] . identifier[go2letter]
identifier[letters] = identifier[list] ( identifier[chain] ( identifier[range] ( identifier[ord] ( literal[string] ), identifier[ord] ( literal[string] )+ literal[int] ), identifier[range] ( identifier[ord] ( literal[string] ), identifier[ord] ( literal[string] )+ literal[int] )))
keyword[for] identifier[list_dcnt_goobj] keyword[in] identifier[ns2dcntgoobj] . identifier[values] ():
identifier[letter_idx] = literal[int]
keyword[for] identifier[dcnt] , identifier[goobj] keyword[in] identifier[sorted] ( identifier[list_dcnt_goobj] , identifier[key] = keyword[lambda] identifier[t] : identifier[t] [ literal[int] ], identifier[reverse] = keyword[True] ):
identifier[letter] = identifier[chr] ( identifier[letters] [ identifier[letter_idx] ]) keyword[if] identifier[_go2abc] keyword[is] keyword[None] keyword[else] identifier[_go2abc] . identifier[get] ( identifier[goobj] . identifier[id] , literal[string] )
identifier[go2nt] [ identifier[goobj] . identifier[id] ]= identifier[ntobj] . identifier[_make] ([ identifier[letter] , identifier[dcnt] , identifier[goobj] ])
identifier[letter_idx] += literal[int]
keyword[return] identifier[go2nt] | def get_goone2ntletter(self, go2dcnt, depth2goobjs):
"""Assign letters to depth-01 GO terms ordered using descendants cnt."""
# 1. Group level-01/depth-01 GO terms by namespace
ns2dcntgoobj = cx.defaultdict(list)
for goobj in depth2goobjs[1]:
dcnt = go2dcnt[goobj.id]
ns2dcntgoobj[goobj.namespace].append((dcnt, goobj)) # depends on [control=['for'], data=['goobj']]
# 2. Assign letters to level-01/depth-01 GO terms
go2nt = {}
ntobj = cx.namedtuple('NtGoLetters', 'D1 dcnt goobj')
_go2abc = self.go2letter
letters = list(chain(range(ord('A'), ord('Z') + 1), range(ord('a'), ord('z') + 1)))
for list_dcnt_goobj in ns2dcntgoobj.values():
letter_idx = 0
for (dcnt, goobj) in sorted(list_dcnt_goobj, key=lambda t: t[0], reverse=True):
letter = chr(letters[letter_idx]) if _go2abc is None else _go2abc.get(goobj.id, '')
go2nt[goobj.id] = ntobj._make([letter, dcnt, goobj])
letter_idx += 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['list_dcnt_goobj']]
return go2nt |
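A dependency-free sketch of the letter-assignment idea used above: build the 52-character 'A'..'Z', 'a'..'z' pool from ord() ranges and hand letters out in descending count order. The toy counts and term names stand in for GO descendant counts.

from itertools import chain

# 52 single-character labels: 'A'..'Z' then 'a'..'z', as in the method above.
letters = [chr(c) for c in chain(range(ord('A'), ord('Z') + 1),
                                 range(ord('a'), ord('z') + 1))]

# Toy (descendant_count, term) pairs; the largest count gets 'A', and so on.
terms = [(5, 'GO:x'), (42, 'GO:y'), (17, 'GO:z')]
assigned = {term: letters[i]
            for i, (_, term) in enumerate(sorted(terms, key=lambda t: t[0], reverse=True))}
assert assigned == {'GO:y': 'A', 'GO:z': 'B', 'GO:x': 'C'}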
def config_value_changed(option):
"""
Determine if config value changed since last call to this function.
"""
hook_data = unitdata.HookData()
with hook_data():
db = unitdata.kv()
current = config(option)
saved = db.get(option)
db.set(option, current)
if saved is None:
return False
return current != saved | def function[config_value_changed, parameter[option]]:
constant[
Determine if config value changed since last call to this function.
]
variable[hook_data] assign[=] call[name[unitdata].HookData, parameter[]]
with call[name[hook_data], parameter[]] begin[:]
variable[db] assign[=] call[name[unitdata].kv, parameter[]]
variable[current] assign[=] call[name[config], parameter[name[option]]]
variable[saved] assign[=] call[name[db].get, parameter[name[option]]]
call[name[db].set, parameter[name[option], name[current]]]
if compare[name[saved] is constant[None]] begin[:]
return[constant[False]]
return[compare[name[current] not_equal[!=] name[saved]]] | keyword[def] identifier[config_value_changed] ( identifier[option] ):
literal[string]
identifier[hook_data] = identifier[unitdata] . identifier[HookData] ()
keyword[with] identifier[hook_data] ():
identifier[db] = identifier[unitdata] . identifier[kv] ()
identifier[current] = identifier[config] ( identifier[option] )
identifier[saved] = identifier[db] . identifier[get] ( identifier[option] )
identifier[db] . identifier[set] ( identifier[option] , identifier[current] )
keyword[if] identifier[saved] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[return] identifier[current] != identifier[saved] | def config_value_changed(option):
"""
Determine if config value changed since last call to this function.
"""
hook_data = unitdata.HookData()
with hook_data():
db = unitdata.kv()
current = config(option)
saved = db.get(option)
db.set(option, current)
if saved is None:
return False # depends on [control=['if'], data=[]]
return current != saved # depends on [control=['with'], data=[]] |
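A sketch of the same changed-since-last-call pattern with a plain dict standing in for charmhelpers' unitdata key-value store; the real function persists values across hook invocations, which this toy version does not.

_kv = {}  # stands in for the charm's persistent unitdata store

def value_changed(key, current):
    # First call records the value and reports "no change", as above.
    saved = _kv.get(key)
    _kv[key] = current
    if saved is None:
        return False
    return current != saved

assert value_changed('port', 80) is False   # first observation
assert value_changed('port', 80) is False   # unchanged
assert value_changed('port', 8080) is True  # changed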
def site_info(request):
'''Expose the site's info to templates'''
site = get_current_site(request)
context = {
'WAFER_CONFERENCE_NAME': site.name,
'WAFER_CONFERENCE_DOMAIN': site.domain,
}
return context | def function[site_info, parameter[request]]:
constant[Expose the site's info to templates]
variable[site] assign[=] call[name[get_current_site], parameter[name[request]]]
variable[context] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ebee90>, <ast.Constant object at 0x7da1b0ebdea0>], [<ast.Attribute object at 0x7da1b0ebef20>, <ast.Attribute object at 0x7da1b0ebe410>]]
return[name[context]] | keyword[def] identifier[site_info] ( identifier[request] ):
literal[string]
identifier[site] = identifier[get_current_site] ( identifier[request] )
identifier[context] ={
literal[string] : identifier[site] . identifier[name] ,
literal[string] : identifier[site] . identifier[domain] ,
}
keyword[return] identifier[context] | def site_info(request):
"""Expose the site's info to templates"""
site = get_current_site(request)
context = {'WAFER_CONFERENCE_NAME': site.name, 'WAFER_CONFERENCE_DOMAIN': site.domain}
return context |
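To take effect, a context processor like site_info has to be listed in Django's TEMPLATES setting; a settings fragment sketch, where the dotted path wafer.context_processors.site_info is an assumption about where the function lives.

# settings.py (fragment); the dotted path below is an assumption.
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            'wafer.context_processors.site_info',  # exposes the two WAFER_* keys
        ],
    },
}]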
def _enforce_space(self, item):
"""Enforce a space in certain situations.
There are cases where we will want a space where normally we
wouldn't put one. This just enforces the addition of a space.
"""
if isinstance(self._lines[-1],
(self._Space, self._LineBreak, self._Indent)):
return
if not self._prev_item:
return
item_text = unicode(item)
prev_text = unicode(self._prev_item)
# Prefer a space around a '.' in an import statement, and between the
# 'import' and '('.
if (
(item_text == '.' and prev_text == 'from') or
(item_text == 'import' and prev_text == '.') or
(item_text == '(' and prev_text == 'import')
):
self._lines.append(self._Space()) | def function[_enforce_space, parameter[self, item]]:
constant[Enforce a space in certain situations.
There are cases where we will want a space where normally we
wouldn't put one. This just enforces the addition of a space.
]
if call[name[isinstance], parameter[call[name[self]._lines][<ast.UnaryOp object at 0x7da18ede6020>], tuple[[<ast.Attribute object at 0x7da18ede7820>, <ast.Attribute object at 0x7da18ede4cd0>, <ast.Attribute object at 0x7da18ede4d90>]]]] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da18ede4100> begin[:]
return[None]
variable[item_text] assign[=] call[name[unicode], parameter[name[item]]]
variable[prev_text] assign[=] call[name[unicode], parameter[name[self]._prev_item]]
if <ast.BoolOp object at 0x7da18fe90d30> begin[:]
call[name[self]._lines.append, parameter[call[name[self]._Space, parameter[]]]] | keyword[def] identifier[_enforce_space] ( identifier[self] , identifier[item] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_lines] [- literal[int] ],
( identifier[self] . identifier[_Space] , identifier[self] . identifier[_LineBreak] , identifier[self] . identifier[_Indent] )):
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[_prev_item] :
keyword[return]
identifier[item_text] = identifier[unicode] ( identifier[item] )
identifier[prev_text] = identifier[unicode] ( identifier[self] . identifier[_prev_item] )
keyword[if] (
( identifier[item_text] == literal[string] keyword[and] identifier[prev_text] == literal[string] ) keyword[or]
( identifier[item_text] == literal[string] keyword[and] identifier[prev_text] == literal[string] ) keyword[or]
( identifier[item_text] == literal[string] keyword[and] identifier[prev_text] == literal[string] )
):
identifier[self] . identifier[_lines] . identifier[append] ( identifier[self] . identifier[_Space] ()) | def _enforce_space(self, item):
"""Enforce a space in certain situations.
There are cases where we will want a space where normally we
wouldn't put one. This just enforces the addition of a space.
"""
if isinstance(self._lines[-1], (self._Space, self._LineBreak, self._Indent)):
return # depends on [control=['if'], data=[]]
if not self._prev_item:
return # depends on [control=['if'], data=[]]
item_text = unicode(item)
prev_text = unicode(self._prev_item)
# Prefer a space around a '.' in an import statement, and between the
# 'import' and '('.
if item_text == '.' and prev_text == 'from' or (item_text == 'import' and prev_text == '.') or (item_text == '(' and prev_text == 'import'):
self._lines.append(self._Space()) # depends on [control=['if'], data=[]] |
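The method boils down to three (previous, current) token pairs that force a space; a standalone distillation with a quick check:

# (prev_text, item_text) pairs that force a space, per the conditions above.
FORCED_SPACE_PAIRS = {('from', '.'), ('.', 'import'), ('import', '(')}

def needs_forced_space(prev_text, item_text):
    return (prev_text, item_text) in FORCED_SPACE_PAIRS

assert needs_forced_space('from', '.')
assert needs_forced_space('import', '(')
assert not needs_forced_space('foo', '.')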
def get_template(template_name,fields=None):
'''get_template will return a template in the template folder,
with some substitutions (e.g., {'{{ graph | safe }}': "fill this in!"})
'''
template = None
if not template_name.endswith('.html'):
template_name = "%s.html" %(template_name)
here = "%s/cli/app/templates" %(get_installdir())
template_path = "%s/%s" %(here,template_name)
if os.path.exists(template_path):
template = ''.join(read_file(template_path))
if fields is not None:
for tag,sub in fields.items():
template = template.replace(tag,sub)
return template | def function[get_template, parameter[template_name, fields]]:
constant[get_template will return a template in the template folder,
with some substitutions (e.g., {'{{ graph | safe }}': "fill this in!"})
]
variable[template] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da1b2344850> begin[:]
variable[template_name] assign[=] binary_operation[constant[%s.html] <ast.Mod object at 0x7da2590d6920> name[template_name]]
variable[here] assign[=] binary_operation[constant[%s/cli/app/templates] <ast.Mod object at 0x7da2590d6920> call[name[get_installdir], parameter[]]]
variable[template_path] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2347580>, <ast.Name object at 0x7da1b23448b0>]]]
if call[name[os].path.exists, parameter[name[template_path]]] begin[:]
variable[template] assign[=] call[constant[].join, parameter[call[name[read_file], parameter[name[template_path]]]]]
if compare[name[fields] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b23452a0>, <ast.Name object at 0x7da1b23444f0>]]] in starred[call[name[fields].items, parameter[]]] begin[:]
variable[template] assign[=] call[name[template].replace, parameter[name[tag], name[sub]]]
return[name[template]] | keyword[def] identifier[get_template] ( identifier[template_name] , identifier[fields] = keyword[None] ):
literal[string]
identifier[template] = keyword[None]
keyword[if] keyword[not] identifier[template_name] . identifier[endswith] ( literal[string] ):
identifier[template_name] = literal[string] %( identifier[template_name] )
identifier[here] = literal[string] %( identifier[get_installdir] ())
identifier[template_path] = literal[string] %( identifier[here] , identifier[template_name] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[template_path] ):
identifier[template] = literal[string] . identifier[join] ( identifier[read_file] ( identifier[template_path] ))
keyword[if] identifier[fields] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[tag] , identifier[sub] keyword[in] identifier[fields] . identifier[items] ():
identifier[template] = identifier[template] . identifier[replace] ( identifier[tag] , identifier[sub] )
keyword[return] identifier[template] | def get_template(template_name, fields=None):
"""get_template will return a template in the template folder,
with some substitutions (e.g., {'{{ graph | safe }}': "fill this in!"})
"""
template = None
if not template_name.endswith('.html'):
template_name = '%s.html' % template_name # depends on [control=['if'], data=[]]
here = '%s/cli/app/templates' % get_installdir()
template_path = '%s/%s' % (here, template_name)
if os.path.exists(template_path):
template = ''.join(read_file(template_path)) # depends on [control=['if'], data=[]]
if fields is not None:
for (tag, sub) in fields.items():
template = template.replace(tag, sub) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['fields']]
return template |
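Usage sketch for get_template: the template name 'graph' and the substitution tag are hypothetical, and the function silently returns None when no such file exists under <installdir>/cli/app/templates.

html = get_template('graph', fields={'{{ graph | safe }}': '<svg>...</svg>'})
if html is None:
    print('no graph.html under cli/app/templates')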
def instantiate(self, params, auth=None):
"""
Allows you to fetch the map tiles of a created map
:param params: The json with the styling info for the named map
:param auth: The auth client
:type params: dict
:type auth: :class:`carto.auth.APIKeyAuthClient`
:return:
:raise: CartoException
"""
try:
endpoint = (self.Meta.collection_endpoint
+ "{template_id}"). \
format(template_id=self.template_id)
if (auth is not None):
endpoint = (endpoint + "?auth_token={auth_token}"). \
format(auth_token=auth)
self.send(endpoint, "POST", json=params)
except CartoRateLimitException as e:
raise e
except Exception as e:
raise CartoException(e) | def function[instantiate, parameter[self, params, auth]]:
constant[
Allows you to fetch the map tiles of a created map
:param params: The json with the styling info for the named map
:param auth: The auth client
:type params: dict
:type auth: :class:`carto.auth.APIKeyAuthClient`
:return:
:raise: CartoException
]
<ast.Try object at 0x7da1b0f18160> | keyword[def] identifier[instantiate] ( identifier[self] , identifier[params] , identifier[auth] = keyword[None] ):
literal[string]
keyword[try] :
identifier[endpoint] =( identifier[self] . identifier[Meta] . identifier[collection_endpoint]
+ literal[string] ). identifier[format] ( identifier[template_id] = identifier[self] . identifier[template_id] )
keyword[if] ( identifier[auth] keyword[is] keyword[not] keyword[None] ):
identifier[endpoint] =( identifier[endpoint] + literal[string] ). identifier[format] ( identifier[auth_token] = identifier[auth] )
identifier[self] . identifier[send] ( identifier[endpoint] , literal[string] , identifier[json] = identifier[params] )
keyword[except] identifier[CartoRateLimitException] keyword[as] identifier[e] :
keyword[raise] identifier[e]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[CartoException] ( identifier[e] ) | def instantiate(self, params, auth=None):
"""
Allows you to fetch the map tiles of a created map
:param params: The json with the styling info for the named map
:param auth: The auth client
:type params: dict
:type auth: :class:`carto.auth.APIKeyAuthClient`
:return:
:raise: CartoException
"""
try:
endpoint = (self.Meta.collection_endpoint + '{template_id}').format(template_id=self.template_id)
if auth is not None:
endpoint = (endpoint + '?auth_token={auth_token}').format(auth_token=auth) # depends on [control=['if'], data=['auth']]
self.send(endpoint, 'POST', json=params) # depends on [control=['try'], data=[]]
except CartoRateLimitException as e:
raise e # depends on [control=['except'], data=['e']]
except Exception as e:
raise CartoException(e) # depends on [control=['except'], data=['e']] |
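A standalone check of the endpoint string the method assembles; the collection endpoint, template id, and token values are placeholders, not the real CARTO API paths.

collection_endpoint = 'api/v1/map/named/'   # placeholder value
template_id = 'tpl_abc123'                  # placeholder value
auth = 'my_auth_token'                      # placeholder value

endpoint = (collection_endpoint + '{template_id}').format(template_id=template_id)
if auth is not None:
    endpoint = (endpoint + '?auth_token={auth_token}').format(auth_token=auth)
assert endpoint == 'api/v1/map/named/tpl_abc123?auth_token=my_auth_token'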
def recv_match(self, condition=None, type=None, blocking=False):
'''recv the next message that matches the given condition
type can be a string or a list of strings'''
if type is not None and not isinstance(type, list):
type = [type]
while True:
m = self.recv_msg()
if m is None:
return None
if type is not None and not m.get_type() in type:
continue
if not mavutil.evaluate_condition(condition, self.messages):
continue
return m | def function[recv_match, parameter[self, condition, type, blocking]]:
constant[recv the next message that matches the given condition
type can be a string or a list of strings]
if <ast.BoolOp object at 0x7da1b2345720> begin[:]
variable[type] assign[=] list[[<ast.Name object at 0x7da1b2344bb0>]]
while constant[True] begin[:]
variable[m] assign[=] call[name[self].recv_msg, parameter[]]
if compare[name[m] is constant[None]] begin[:]
return[constant[None]]
if <ast.BoolOp object at 0x7da1b2344820> begin[:]
continue
if <ast.UnaryOp object at 0x7da1b2346170> begin[:]
continue
return[name[m]] | keyword[def] identifier[recv_match] ( identifier[self] , identifier[condition] = keyword[None] , identifier[type] = keyword[None] , identifier[blocking] = keyword[False] ):
literal[string]
keyword[if] identifier[type] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[type] , identifier[list] ):
identifier[type] =[ identifier[type] ]
keyword[while] keyword[True] :
identifier[m] = identifier[self] . identifier[recv_msg] ()
keyword[if] identifier[m] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[type] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[m] . identifier[get_type] () keyword[in] identifier[type] :
keyword[continue]
keyword[if] keyword[not] identifier[mavutil] . identifier[evaluate_condition] ( identifier[condition] , identifier[self] . identifier[messages] ):
keyword[continue]
keyword[return] identifier[m] | def recv_match(self, condition=None, type=None, blocking=False):
"""recv the next message that matches the given condition
type can be a string or a list of strings"""
if type is not None and (not isinstance(type, list)):
type = [type] # depends on [control=['if'], data=[]]
while True:
m = self.recv_msg()
if m is None:
return None # depends on [control=['if'], data=[]]
if type is not None and (not m.get_type() in type):
continue # depends on [control=['if'], data=[]]
if not mavutil.evaluate_condition(condition, self.messages):
continue # depends on [control=['if'], data=[]]
return m # depends on [control=['while'], data=[]] |
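A dependency-free sketch of the same type-filtering loop over canned messages; the condition evaluation via mavutil is omitted here.

class FakeMsg:
    def __init__(self, mtype):
        self._mtype = mtype
    def get_type(self):
        return self._mtype

inbox = iter([FakeMsg('HEARTBEAT'), FakeMsg('ATTITUDE'), FakeMsg('HEARTBEAT')])

def recv_match_sketch(type=None):
    # Mirrors the loop above: skip messages whose type is not wanted.
    for m in inbox:
        if type is not None and m.get_type() not in type:
            continue
        return m
    return None

assert recv_match_sketch(type=['ATTITUDE']).get_type() == 'ATTITUDE'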
def _getOutputElegant(self, **kws):
""" get results from elegant output according to the given keywords,
input parameter format: key = sdds field name tuple, e.g.:
available keywords are:
- 'file': sdds filename, file = test.sig
- 'data': data array, data = ('s','Sx')
- 'dump': h5file name, if defined, dump data to hdf5 format
"""
datascript = "sddsprintdata.sh"
datapath = self.sim_path
trajparam_list = kws['data']
sddsfile = os.path.expanduser(os.path.join(self.sim_path, kws['file']))
dh = datautils.DataExtracter(sddsfile, *trajparam_list)
dh.setDataScript(datascript)
dh.setDataPath(datapath)
if 'dump' in kws:
dh.setH5file(kws['dump'])
dh.extractData().dump()
data = dh.extractData().getH5Data()
return data | def function[_getOutputElegant, parameter[self]]:
constant[ get results from elegant output according to the given keywords,
input parameter format: key = sdds field name tuple, e.g.:
available keywords are:
- 'file': sdds filename, file = test.sig
- 'data': data array, data = ('s','Sx')
- 'dump': h5file name, if defined, dump data to hdf5 format
]
variable[datascript] assign[=] constant[sddsprintdata.sh]
variable[datapath] assign[=] name[self].sim_path
variable[trajparam_list] assign[=] call[name[kws]][constant[data]]
variable[sddsfile] assign[=] call[name[os].path.expanduser, parameter[call[name[os].path.join, parameter[name[self].sim_path, call[name[kws]][constant[file]]]]]]
variable[dh] assign[=] call[name[datautils].DataExtracter, parameter[name[sddsfile], <ast.Starred object at 0x7da1b094b040>]]
call[name[dh].setDataScript, parameter[name[datascript]]]
call[name[dh].setDataPath, parameter[name[datapath]]]
if compare[constant[dump] in name[kws]] begin[:]
call[name[dh].setH5file, parameter[call[name[kws]][constant[dump]]]]
call[call[name[dh].extractData, parameter[]].dump, parameter[]]
variable[data] assign[=] call[call[name[dh].extractData, parameter[]].getH5Data, parameter[]]
return[name[data]] | keyword[def] identifier[_getOutputElegant] ( identifier[self] ,** identifier[kws] ):
literal[string]
identifier[datascript] = literal[string]
identifier[datapath] = identifier[self] . identifier[sim_path]
identifier[trajparam_list] = identifier[kws] [ literal[string] ]
identifier[sddsfile] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[sim_path] , identifier[kws] [ literal[string] ]))
identifier[dh] = identifier[datautils] . identifier[DataExtracter] ( identifier[sddsfile] ,* identifier[trajparam_list] )
identifier[dh] . identifier[setDataScript] ( identifier[datascript] )
identifier[dh] . identifier[setDataPath] ( identifier[datapath] )
keyword[if] literal[string] keyword[in] identifier[kws] :
identifier[dh] . identifier[setH5file] ( identifier[kws] [ literal[string] ])
identifier[dh] . identifier[extractData] (). identifier[dump] ()
identifier[data] = identifier[dh] . identifier[extractData] (). identifier[getH5Data] ()
keyword[return] identifier[data] | def _getOutputElegant(self, **kws):
""" get results from elegant output according to the given keywords,
input parameter format: key = sdds field name tuple, e.g.:
available keywords are:
- 'file': sdds filename, file = test.sig
- 'data': data array, data = ('s','Sx')
- 'dump': h5file name, if defined, dump data to hdf5 format
"""
datascript = 'sddsprintdata.sh'
datapath = self.sim_path
trajparam_list = kws['data']
sddsfile = os.path.expanduser(os.path.join(self.sim_path, kws['file']))
dh = datautils.DataExtracter(sddsfile, *trajparam_list)
dh.setDataScript(datascript)
dh.setDataPath(datapath)
if 'dump' in kws:
dh.setH5file(kws['dump'])
dh.extractData().dump() # depends on [control=['if'], data=['kws']]
data = dh.extractData().getH5Data()
return data |
def hash(self, *args, **kwargs):
"""
:param args:
:param kwargs:
joiner - string to join values (args)
as_bytes - bool to return hash bytes instead of default int
:rtype: int|bytes
"""
joiner = kwargs.get('joiner', '').encode('utf-8')
as_bytes = kwargs.get('as_bytes', False)
def conv(arg):
if isinstance(arg, integer_types):
arg = int_to_bytes(arg)
if PY3:
if isinstance(arg, str):
arg = arg.encode('utf-8')
return arg
return str(arg)
digest = joiner.join(map(conv, args))
hash_obj = self._hash_func(digest)
if as_bytes:
return hash_obj.digest()
return int_from_hex(hash_obj.hexdigest()) | def function[hash, parameter[self]]:
constant[
:param args:
:param kwargs:
joiner - string to join values (args)
as_bytes - bool to return hash bytes instead of default int
:rtype: int|bytes
]
variable[joiner] assign[=] call[call[name[kwargs].get, parameter[constant[joiner], constant[]]].encode, parameter[constant[utf-8]]]
variable[as_bytes] assign[=] call[name[kwargs].get, parameter[constant[as_bytes], constant[False]]]
def function[conv, parameter[arg]]:
if call[name[isinstance], parameter[name[arg], name[integer_types]]] begin[:]
variable[arg] assign[=] call[name[int_to_bytes], parameter[name[arg]]]
if name[PY3] begin[:]
if call[name[isinstance], parameter[name[arg], name[str]]] begin[:]
variable[arg] assign[=] call[name[arg].encode, parameter[constant[utf-8]]]
return[name[arg]]
return[call[name[str], parameter[name[arg]]]]
variable[digest] assign[=] call[name[joiner].join, parameter[call[name[map], parameter[name[conv], name[args]]]]]
variable[hash_obj] assign[=] call[name[self]._hash_func, parameter[name[digest]]]
if name[as_bytes] begin[:]
return[call[name[hash_obj].digest, parameter[]]]
return[call[name[int_from_hex], parameter[call[name[hash_obj].hexdigest, parameter[]]]]] | keyword[def] identifier[hash] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[joiner] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ). identifier[encode] ( literal[string] )
identifier[as_bytes] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
keyword[def] identifier[conv] ( identifier[arg] ):
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[integer_types] ):
identifier[arg] = identifier[int_to_bytes] ( identifier[arg] )
keyword[if] identifier[PY3] :
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[str] ):
identifier[arg] = identifier[arg] . identifier[encode] ( literal[string] )
keyword[return] identifier[arg]
keyword[return] identifier[str] ( identifier[arg] )
identifier[digest] = identifier[joiner] . identifier[join] ( identifier[map] ( identifier[conv] , identifier[args] ))
identifier[hash_obj] = identifier[self] . identifier[_hash_func] ( identifier[digest] )
keyword[if] identifier[as_bytes] :
keyword[return] identifier[hash_obj] . identifier[digest] ()
keyword[return] identifier[int_from_hex] ( identifier[hash_obj] . identifier[hexdigest] ()) | def hash(self, *args, **kwargs):
"""
:param args:
:param kwargs:
joiner - string to join values (args)
as_bytes - bool to return hash bytes instead of default int
:rtype: int|bytes
"""
joiner = kwargs.get('joiner', '').encode('utf-8')
as_bytes = kwargs.get('as_bytes', False)
def conv(arg):
if isinstance(arg, integer_types):
arg = int_to_bytes(arg) # depends on [control=['if'], data=[]]
if PY3:
if isinstance(arg, str):
arg = arg.encode('utf-8') # depends on [control=['if'], data=[]]
return arg # depends on [control=['if'], data=[]]
return str(arg)
digest = joiner.join(map(conv, args))
hash_obj = self._hash_func(digest)
if as_bytes:
return hash_obj.digest() # depends on [control=['if'], data=[]]
return int_from_hex(hash_obj.hexdigest()) |
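A standalone sketch of the same joiner/digest scheme, with hashlib.sha256 standing in for self._hash_func; the class's int_to_bytes/int_from_hex helpers are approximated with int.to_bytes and int(..., 16), which assumes non-negative integer arguments.

import hashlib

def hash_values(*args, joiner='', as_bytes=False):
    def conv(arg):
        if isinstance(arg, int):
            # int_to_bytes stand-in: big-endian, minimal width
            return arg.to_bytes(max(1, (arg.bit_length() + 7) // 8), 'big')
        if isinstance(arg, str):
            return arg.encode('utf-8')
        return arg
    digest = joiner.encode('utf-8').join(map(conv, args))
    h = hashlib.sha256(digest)  # stands in for self._hash_func
    return h.digest() if as_bytes else int(h.hexdigest(), 16)

assert isinstance(hash_values('generator', 7, joiner='|'), int)
assert len(hash_values(b'raw', as_bytes=True)) == 32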
def show_event_analysis_dialog(self):
"""Create the event analysis dialog."""
self.event_analysis_dialog.update_types()
self.event_analysis_dialog.update_groups()
self.event_analysis_dialog.update_cycles()
self.event_analysis_dialog.show() | def function[show_event_analysis_dialog, parameter[self]]:
constant[Create the event analysis dialog.]
call[name[self].event_analysis_dialog.update_types, parameter[]]
call[name[self].event_analysis_dialog.update_groups, parameter[]]
call[name[self].event_analysis_dialog.update_cycles, parameter[]]
call[name[self].event_analysis_dialog.show, parameter[]] | keyword[def] identifier[show_event_analysis_dialog] ( identifier[self] ):
literal[string]
identifier[self] . identifier[event_analysis_dialog] . identifier[update_types] ()
identifier[self] . identifier[event_analysis_dialog] . identifier[update_groups] ()
identifier[self] . identifier[event_analysis_dialog] . identifier[update_cycles] ()
identifier[self] . identifier[event_analysis_dialog] . identifier[show] () | def show_event_analysis_dialog(self):
"""Create the event analysis dialog."""
self.event_analysis_dialog.update_types()
self.event_analysis_dialog.update_groups()
self.event_analysis_dialog.update_cycles()
self.event_analysis_dialog.show() |
def address(cls, address, bits = None):
"""
@type address: int
@param address: Memory address.
@type bits: int
@param bits:
(Optional) Number of bits of the target architecture.
The default is platform dependent. See: L{HexDump.address_size}
@rtype: str
@return: Text output.
"""
if bits is None:
address_size = cls.address_size
bits = win32.bits
else:
address_size = bits / 4
if address < 0:
address = ((2 ** bits) - 1) ^ ~address
return ('%%.%dX' % address_size) % address | def function[address, parameter[cls, address, bits]]:
constant[
@type address: int
@param address: Memory address.
@type bits: int
@param bits:
(Optional) Number of bits of the target architecture.
The default is platform dependent. See: L{HexDump.address_size}
@rtype: str
@return: Text output.
]
if compare[name[bits] is constant[None]] begin[:]
variable[address_size] assign[=] name[cls].address_size
variable[bits] assign[=] name[win32].bits
if compare[name[address] less[<] constant[0]] begin[:]
variable[address] assign[=] binary_operation[binary_operation[binary_operation[constant[2] ** name[bits]] - constant[1]] <ast.BitXor object at 0x7da2590d6b00> <ast.UnaryOp object at 0x7da18dc986d0>]
return[binary_operation[binary_operation[constant[%%.%dX] <ast.Mod object at 0x7da2590d6920> name[address_size]] <ast.Mod object at 0x7da2590d6920> name[address]]] | keyword[def] identifier[address] ( identifier[cls] , identifier[address] , identifier[bits] = keyword[None] ):
literal[string]
keyword[if] identifier[bits] keyword[is] keyword[None] :
identifier[address_size] = identifier[cls] . identifier[address_size]
identifier[bits] = identifier[win32] . identifier[bits]
keyword[else] :
identifier[address_size] = identifier[bits] / literal[int]
keyword[if] identifier[address] < literal[int] :
identifier[address] =(( literal[int] ** identifier[bits] )- literal[int] )^~ identifier[address]
keyword[return] ( literal[string] % identifier[address_size] )% identifier[address] | def address(cls, address, bits=None):
"""
@type address: int
@param address: Memory address.
@type bits: int
@param bits:
(Optional) Number of bits of the target architecture.
The default is platform dependent. See: L{HexDump.address_size}
@rtype: str
@return: Text output.
"""
if bits is None:
address_size = cls.address_size
bits = win32.bits # depends on [control=['if'], data=['bits']]
else:
address_size = bits / 4
if address < 0:
address = 2 ** bits - 1 ^ ~address # depends on [control=['if'], data=['address']]
return '%%.%dX' % address_size % address |
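A worked check of the negative-address branch: with bits=32, -1 wraps to FFFFFFFF via ((2**bits)-1) ^ ~address. A runnable inline version of the same arithmetic (using // where the original uses /, so address_size stays an int on Python 3):

def fmt_address(address, bits=32):
    address_size = bits // 4                     # number of hex digits
    if address < 0:
        address = ((2 ** bits) - 1) ^ ~address   # two's-complement wrap
    return ('%%.%dX' % address_size) % address

assert fmt_address(-1) == 'FFFFFFFF'
assert fmt_address(0x41414141) == '41414141'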
def HEAD(self, *args, **kwargs):
""" HEAD request """
return self._handle_api(self.API_HEAD, args, kwargs) | def function[HEAD, parameter[self]]:
constant[ HEAD request ]
return[call[name[self]._handle_api, parameter[name[self].API_HEAD, name[args], name[kwargs]]]] | keyword[def] identifier[HEAD] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_handle_api] ( identifier[self] . identifier[API_HEAD] , identifier[args] , identifier[kwargs] ) | def HEAD(self, *args, **kwargs):
""" HEAD request """
return self._handle_api(self.API_HEAD, args, kwargs) |
def wr_txt_section_hdrgos(self, fout_txt, sortby=None, prt_section=True):
"""Write high GO IDs that are actually used to group current set of GO IDs."""
sec2d_go = self.grprobj.get_sections_2d() # lists of GO IDs
sec2d_nt = self.get_sections_2dnt(sec2d_go) # lists of GO Grouper namedtuples
if sortby is None:
sortby = self.fncsortnt
with open(fout_txt, 'w') as prt:
self.prt_ver(prt)
prt.write("# GROUP NAME: {NAME}\n".format(NAME=self.grprobj.grpname))
for section_name, nthdrgos_actual in sec2d_nt:
if prt_section:
prt.write("# SECTION: {SECTION}\n".format(SECTION=section_name))
self.prt_ntgos(prt, nthdrgos_actual)
if prt_section:
prt.write("\n")
dat = SummarySec2dHdrGos().summarize_sec2hdrgos(sec2d_go)
sys.stdout.write(self.grprobj.fmtsum.format(
GO_DESC='hdr', SECs=len(dat['S']), GOs=len(dat['G']),
UNGRP=len(dat['U']), undesc="unused",
ACTION="WROTE:", FILE=fout_txt))
return sec2d_nt | def function[wr_txt_section_hdrgos, parameter[self, fout_txt, sortby, prt_section]]:
constant[Write high GO IDs that are actually used to group current set of GO IDs.]
variable[sec2d_go] assign[=] call[name[self].grprobj.get_sections_2d, parameter[]]
variable[sec2d_nt] assign[=] call[name[self].get_sections_2dnt, parameter[name[sec2d_go]]]
if compare[name[sortby] is constant[None]] begin[:]
variable[sortby] assign[=] name[self].fncsortnt
with call[name[open], parameter[name[fout_txt], constant[w]]] begin[:]
call[name[self].prt_ver, parameter[name[prt]]]
call[name[prt].write, parameter[call[constant[# GROUP NAME: {NAME}
].format, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a9b10>, <ast.Name object at 0x7da20c6a88b0>]]] in starred[name[sec2d_nt]] begin[:]
if name[prt_section] begin[:]
call[name[prt].write, parameter[call[constant[# SECTION: {SECTION}
].format, parameter[]]]]
call[name[self].prt_ntgos, parameter[name[prt], name[nthdrgos_actual]]]
if name[prt_section] begin[:]
call[name[prt].write, parameter[constant[
]]]
variable[dat] assign[=] call[call[name[SummarySec2dHdrGos], parameter[]].summarize_sec2hdrgos, parameter[name[sec2d_go]]]
call[name[sys].stdout.write, parameter[call[name[self].grprobj.fmtsum.format, parameter[]]]]
return[name[sec2d_nt]] | keyword[def] identifier[wr_txt_section_hdrgos] ( identifier[self] , identifier[fout_txt] , identifier[sortby] = keyword[None] , identifier[prt_section] = keyword[True] ):
literal[string]
identifier[sec2d_go] = identifier[self] . identifier[grprobj] . identifier[get_sections_2d] ()
identifier[sec2d_nt] = identifier[self] . identifier[get_sections_2dnt] ( identifier[sec2d_go] )
keyword[if] identifier[sortby] keyword[is] keyword[None] :
identifier[sortby] = identifier[self] . identifier[fncsortnt]
keyword[with] identifier[open] ( identifier[fout_txt] , literal[string] ) keyword[as] identifier[prt] :
identifier[self] . identifier[prt_ver] ( identifier[prt] )
identifier[prt] . identifier[write] ( literal[string] . identifier[format] ( identifier[NAME] = identifier[self] . identifier[grprobj] . identifier[grpname] ))
keyword[for] identifier[section_name] , identifier[nthdrgos_actual] keyword[in] identifier[sec2d_nt] :
keyword[if] identifier[prt_section] :
identifier[prt] . identifier[write] ( literal[string] . identifier[format] ( identifier[SECTION] = identifier[section_name] ))
identifier[self] . identifier[prt_ntgos] ( identifier[prt] , identifier[nthdrgos_actual] )
keyword[if] identifier[prt_section] :
identifier[prt] . identifier[write] ( literal[string] )
identifier[dat] = identifier[SummarySec2dHdrGos] (). identifier[summarize_sec2hdrgos] ( identifier[sec2d_go] )
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[self] . identifier[grprobj] . identifier[fmtsum] . identifier[format] (
identifier[GO_DESC] = literal[string] , identifier[SECs] = identifier[len] ( identifier[dat] [ literal[string] ]), identifier[GOs] = identifier[len] ( identifier[dat] [ literal[string] ]),
identifier[UNGRP] = identifier[len] ( identifier[dat] [ literal[string] ]), identifier[undesc] = literal[string] ,
identifier[ACTION] = literal[string] , identifier[FILE] = identifier[fout_txt] ))
keyword[return] identifier[sec2d_nt] | def wr_txt_section_hdrgos(self, fout_txt, sortby=None, prt_section=True):
"""Write high GO IDs that are actually used to group current set of GO IDs."""
sec2d_go = self.grprobj.get_sections_2d() # lists of GO IDs
sec2d_nt = self.get_sections_2dnt(sec2d_go) # lists of GO Grouper namedtuples
if sortby is None:
sortby = self.fncsortnt # depends on [control=['if'], data=['sortby']]
with open(fout_txt, 'w') as prt:
self.prt_ver(prt)
prt.write('# GROUP NAME: {NAME}\n'.format(NAME=self.grprobj.grpname))
for (section_name, nthdrgos_actual) in sec2d_nt:
if prt_section:
prt.write('# SECTION: {SECTION}\n'.format(SECTION=section_name)) # depends on [control=['if'], data=[]]
self.prt_ntgos(prt, nthdrgos_actual)
if prt_section:
prt.write('\n') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
dat = SummarySec2dHdrGos().summarize_sec2hdrgos(sec2d_go)
sys.stdout.write(self.grprobj.fmtsum.format(GO_DESC='hdr', SECs=len(dat['S']), GOs=len(dat['G']), UNGRP=len(dat['U']), undesc='unused', ACTION='WROTE:', FILE=fout_txt)) # depends on [control=['with'], data=['prt']]
return sec2d_nt |
def parse(text, encoding='utf-8', handler=None, **defaults):
"""
Parse text with frontmatter, return metadata and content.
Pass in optional metadata defaults as keyword args.
If frontmatter is not found, returns an empty metadata dictionary
(or defaults) and original text content.
::
>>> with open('tests/hello-world.markdown') as f:
... metadata, content = frontmatter.parse(f.read())
>>> print(metadata['title'])
Hello, world!
"""
# ensure unicode first
text = u(text, encoding).strip()
# metadata starts with defaults
metadata = defaults.copy()
# this will only run if a handler hasn't been set higher up
handler = handler or detect_format(text, handlers)
if handler is None:
return metadata, text
# split on the delimiters
try:
fm, content = handler.split(text)
except ValueError:
# if we can't split, bail
return metadata, text
# parse, now that we have frontmatter
fm = handler.load(fm)
if isinstance(fm, dict):
metadata.update(fm)
return metadata, content.strip() | def function[parse, parameter[text, encoding, handler]]:
constant[
Parse text with frontmatter, return metadata and content.
Pass in optional metadata defaults as keyword args.
If frontmatter is not found, returns an empty metadata dictionary
(or defaults) and original text content.
::
>>> with open('tests/hello-world.markdown') as f:
... metadata, content = frontmatter.parse(f.read())
>>> print(metadata['title'])
Hello, world!
]
variable[text] assign[=] call[call[name[u], parameter[name[text], name[encoding]]].strip, parameter[]]
variable[metadata] assign[=] call[name[defaults].copy, parameter[]]
variable[handler] assign[=] <ast.BoolOp object at 0x7da1b03a5870>
if compare[name[handler] is constant[None]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b03a5db0>, <ast.Name object at 0x7da1b03a7e50>]]]
<ast.Try object at 0x7da1b03a7eb0>
variable[fm] assign[=] call[name[handler].load, parameter[name[fm]]]
if call[name[isinstance], parameter[name[fm], name[dict]]] begin[:]
call[name[metadata].update, parameter[name[fm]]]
return[tuple[[<ast.Name object at 0x7da1b03a40d0>, <ast.Call object at 0x7da1b03a4670>]]] | keyword[def] identifier[parse] ( identifier[text] , identifier[encoding] = literal[string] , identifier[handler] = keyword[None] ,** identifier[defaults] ):
literal[string]
identifier[text] = identifier[u] ( identifier[text] , identifier[encoding] ). identifier[strip] ()
identifier[metadata] = identifier[defaults] . identifier[copy] ()
identifier[handler] = identifier[handler] keyword[or] identifier[detect_format] ( identifier[text] , identifier[handlers] )
keyword[if] identifier[handler] keyword[is] keyword[None] :
keyword[return] identifier[metadata] , identifier[text]
keyword[try] :
identifier[fm] , identifier[content] = identifier[handler] . identifier[split] ( identifier[text] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[metadata] , identifier[text]
identifier[fm] = identifier[handler] . identifier[load] ( identifier[fm] )
keyword[if] identifier[isinstance] ( identifier[fm] , identifier[dict] ):
identifier[metadata] . identifier[update] ( identifier[fm] )
keyword[return] identifier[metadata] , identifier[content] . identifier[strip] () | def parse(text, encoding='utf-8', handler=None, **defaults):
"""
Parse text with frontmatter, return metadata and content.
Pass in optional metadata defaults as keyword args.
If frontmatter is not found, returns an empty metadata dictionary
(or defaults) and original text content.
::
>>> with open('tests/hello-world.markdown') as f:
... metadata, content = frontmatter.parse(f.read())
>>> print(metadata['title'])
Hello, world!
"""
# ensure unicode first
text = u(text, encoding).strip()
# metadata starts with defaults
metadata = defaults.copy()
# this will only run if a handler hasn't been set higher up
handler = handler or detect_format(text, handlers)
if handler is None:
return (metadata, text) # depends on [control=['if'], data=[]]
# split on the delimiters
try:
(fm, content) = handler.split(text) # depends on [control=['try'], data=[]]
except ValueError:
# if we can't split, bail
return (metadata, text) # depends on [control=['except'], data=[]]
# parse, now that we have frontmatter
fm = handler.load(fm)
if isinstance(fm, dict):
metadata.update(fm) # depends on [control=['if'], data=[]]
return (metadata, content.strip()) |
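A dependency-free sketch of the split-then-load step for '---'-delimited YAML-style frontmatter; the naive 'key: value' parsing below stands in for the handler's real YAML loader and is an assumption, not the library's behavior.

text = """---
title: Hello, world!
layout: post
---
Body starts here.
"""

_, fm, content = text.split('---', 2)
metadata = dict(line.split(': ', 1) for line in fm.strip().splitlines())
assert metadata['title'] == 'Hello, world!'
assert content.strip() == 'Body starts here.'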
def get(api_code=None):
"""Get network statistics.
:param str api_code: Blockchain.info API code (optional)
:return: an instance of :class:`Stats` class
"""
resource = 'stats?format=json'
if api_code is not None:
resource += '&api_code=' + api_code
response = util.call_api(resource)
json_response = json.loads(response)
return Stats(json_response) | def function[get, parameter[api_code]]:
constant[Get network statistics.
:param str api_code: Blockchain.info API code (optional)
:return: an instance of :class:`Stats` class
]
variable[resource] assign[=] constant[stats?format=json]
if compare[name[api_code] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b1d39510>
variable[response] assign[=] call[name[util].call_api, parameter[name[resource]]]
variable[json_response] assign[=] call[name[json].loads, parameter[name[response]]]
return[call[name[Stats], parameter[name[json_response]]]] | keyword[def] identifier[get] ( identifier[api_code] = keyword[None] ):
literal[string]
identifier[resource] = literal[string]
keyword[if] identifier[api_code] keyword[is] keyword[not] keyword[None] :
identifier[resource] += literal[string] + identifier[api_code]
identifier[response] = identifier[util] . identifier[call_api] ( identifier[resource] )
identifier[json_response] = identifier[json] . identifier[loads] ( identifier[response] )
keyword[return] identifier[Stats] ( identifier[json_response] ) | def get(api_code=None):
"""Get network statistics.
:param str api_code: Blockchain.info API code (optional)
:return: an instance of :class:`Stats` class
"""
resource = 'stats?format=json'
if api_code is not None:
resource += '&api_code=' + api_code # depends on [control=['if'], data=['api_code']]
response = util.call_api(resource)
json_response = json.loads(response)
return Stats(json_response) |
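A standalone check of the resource string the function builds before calling the API; the key value is a placeholder.

api_code = 'demo-key'  # placeholder
resource = 'stats?format=json'
if api_code is not None:
    resource += '&api_code=' + api_code
assert resource == 'stats?format=json&api_code=demo-key'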
def observed(obj=None, **kwds):
"""
Decorator function to instantiate data objects.
If given a Stochastic, sets the observed flag to True.
Can be used as
@observed
def A(value = ., parent_name = ., ...):
return foo(value, parent_name, ...)
or as
@stochastic(observed=True)
def A(value = ., parent_name = ., ...):
return foo(value, parent_name, ...)
:SeeAlso:
stochastic, Stochastic, dtrm, Deterministic, potential, Potential, Model,
distributions
"""
if obj is not None:
if isinstance(obj, Stochastic):
obj._observed = True
return obj
else:
p = stochastic(__func__=obj, observed=True, **kwds)
return p
kwds['observed'] = True
def instantiate_observed(func):
return stochastic(func, **kwds)
return instantiate_observed | def function[observed, parameter[obj]]:
constant[
Decorator function to instantiate data objects.
If given a Stochastic, sets the observed flag to True.
Can be used as
@observed
def A(value = ., parent_name = ., ...):
return foo(value, parent_name, ...)
or as
@stochastic(observed=True)
def A(value = ., parent_name = ., ...):
return foo(value, parent_name, ...)
:SeeAlso:
stochastic, Stochastic, dtrm, Deterministic, potential, Potential, Model,
distributions
]
if compare[name[obj] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[obj], name[Stochastic]]] begin[:]
name[obj]._observed assign[=] constant[True]
return[name[obj]]
call[name[kwds]][constant[observed]] assign[=] constant[True]
def function[instantiate_observed, parameter[func]]:
return[call[name[stochastic], parameter[name[func]]]]
return[name[instantiate_observed]] | keyword[def] identifier[observed] ( identifier[obj] = keyword[None] ,** identifier[kwds] ):
literal[string]
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Stochastic] ):
identifier[obj] . identifier[_observed] = keyword[True]
keyword[return] identifier[obj]
keyword[else] :
identifier[p] = identifier[stochastic] ( identifier[__func__] = identifier[obj] , identifier[observed] = keyword[True] ,** identifier[kwds] )
keyword[return] identifier[p]
identifier[kwds] [ literal[string] ]= keyword[True]
keyword[def] identifier[instantiate_observed] ( identifier[func] ):
keyword[return] identifier[stochastic] ( identifier[func] ,** identifier[kwds] )
keyword[return] identifier[instantiate_observed] | def observed(obj=None, **kwds):
"""
Decorator function to instantiate data objects.
If given a Stochastic, sets the observed flag to True.
Can be used as
@observed
def A(value = ., parent_name = ., ...):
return foo(value, parent_name, ...)
or as
@stochastic(observed=True)
def A(value = ., parent_name = ., ...):
return foo(value, parent_name, ...)
:SeeAlso:
stochastic, Stochastic, dtrm, Deterministic, potential, Potential, Model,
distributions
"""
if obj is not None:
if isinstance(obj, Stochastic):
obj._observed = True
return obj # depends on [control=['if'], data=[]]
else:
p = stochastic(__func__=obj, observed=True, **kwds)
return p # depends on [control=['if'], data=['obj']]
kwds['observed'] = True
def instantiate_observed(func):
return stochastic(func, **kwds)
return instantiate_observed |
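Both decorator spellings from the docstring, sketched with a toy log-probability; this assumes observed and stochastic from the surrounding PyMC module are in scope, and the value/mu numbers are placeholders.

@observed
def A(value=1.0, mu=0.0):
    return -0.5 * (value - mu) ** 2   # toy log-probability

@stochastic(observed=True)
def B(value=2.0, mu=0.0):
    return -0.5 * (value - mu) ** 2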
def _event_func_names(self, event: str) -> List[str]:
""" Returns string name of each function subscribed to an event.
:param event: Name of the event.
:type event: str
:return: Names of functions subscribed to a specific event.
:rtype: list
"""
return [func.__name__ for func in self._events[event]] | def function[_event_func_names, parameter[self, event]]:
constant[ Returns string name of each function subscribed to an event.
:param event: Name of the event.
:type event: str
:return: Names of functions subscribed to a specific event.
:rtype: list
]
return[<ast.ListComp object at 0x7da2044c3a60>] | keyword[def] identifier[_event_func_names] ( identifier[self] , identifier[event] : identifier[str] )-> identifier[List] [ identifier[str] ]:
literal[string]
keyword[return] [ identifier[func] . identifier[__name__] keyword[for] identifier[func] keyword[in] identifier[self] . identifier[_events] [ identifier[event] ]] | def _event_func_names(self, event: str) -> List[str]:
""" Returns string name of each function subscribed to an event.
:param event: Name of the event.
:type event: str
:return: Names of functions subscribed to a specific event.
:rtype: list
"""
return [func.__name__ for func in self._events[event]] |
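A quick standalone illustration of the __name__ lookup this helper performs:

from collections import defaultdict

events = defaultdict(list)          # stands in for self._events
def on_save(): pass
def on_close(): pass
events['save'] += [on_save, on_close]

assert [f.__name__ for f in events['save']] == ['on_save', 'on_close']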
def generate_zip_data(M, L, n_cells, cluster_probs=None):
"""
Generates zero-inflated poisson-distributed data, given a set of means and zero probs for each cluster.
Args:
M (array): genes x clusters matrix
L (array): genes x clusters matrix - zero-inflation parameters
n_cells (int): number of output cells
cluster_probs (array): prior probability for each cluster.
Default: uniform.
Returns:
output - array with shape genes x n_cells
labels - array of cluster labels
"""
genes, clusters = M.shape
output = np.zeros((genes, n_cells))
if cluster_probs is None:
cluster_probs = np.ones(clusters)/clusters
zip_p = np.random.random((genes, n_cells))
labels = []
for i in range(n_cells):
c = np.random.choice(range(clusters), p=cluster_probs)
labels.append(c)
output[:,i] = np.where(zip_p[:,i] < L[:,c], 0, np.random.poisson(M[:,c]))
return output, np.array(labels) | def function[generate_zip_data, parameter[M, L, n_cells, cluster_probs]]:
constant[
Generates zero-inflated poisson-distributed data, given a set of means and zero probs for each cluster.
Args:
M (array): genes x clusters matrix
L (array): genes x clusters matrix - zero-inflation parameters
n_cells (int): number of output cells
cluster_probs (array): prior probability for each cluster.
Default: uniform.
Returns:
output - array with shape genes x n_cells
labels - array of cluster labels
]
<ast.Tuple object at 0x7da1b1bb88b0> assign[=] name[M].shape
variable[output] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1bb94e0>, <ast.Name object at 0x7da1b1bb9b70>]]]]
if compare[name[cluster_probs] is constant[None]] begin[:]
variable[cluster_probs] assign[=] binary_operation[call[name[np].ones, parameter[name[clusters]]] / name[clusters]]
variable[zip_p] assign[=] call[name[np].random.random, parameter[tuple[[<ast.Name object at 0x7da1b1bb9c00>, <ast.Name object at 0x7da1b1bba7a0>]]]]
variable[labels] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_cells]]]] begin[:]
variable[c] assign[=] call[name[np].random.choice, parameter[call[name[range], parameter[name[clusters]]]]]
call[name[labels].append, parameter[name[c]]]
call[name[output]][tuple[[<ast.Slice object at 0x7da1b1bbb430>, <ast.Name object at 0x7da1b1bbbc10>]]] assign[=] call[name[np].where, parameter[compare[call[name[zip_p]][tuple[[<ast.Slice object at 0x7da1b1bbbac0>, <ast.Name object at 0x7da1b1bbbe50>]]] less[<] call[name[L]][tuple[[<ast.Slice object at 0x7da1b1bbbb50>, <ast.Name object at 0x7da1b1bbb310>]]]], constant[0], call[name[np].random.poisson, parameter[call[name[M]][tuple[[<ast.Slice object at 0x7da1b1bbabf0>, <ast.Name object at 0x7da1b1bbacb0>]]]]]]]
return[tuple[[<ast.Name object at 0x7da1b1bba890>, <ast.Call object at 0x7da1b1bbb640>]]] | keyword[def] identifier[generate_zip_data] ( identifier[M] , identifier[L] , identifier[n_cells] , identifier[cluster_probs] = keyword[None] ):
literal[string]
identifier[genes] , identifier[clusters] = identifier[M] . identifier[shape]
identifier[output] = identifier[np] . identifier[zeros] (( identifier[genes] , identifier[n_cells] ))
keyword[if] identifier[cluster_probs] keyword[is] keyword[None] :
identifier[cluster_probs] = identifier[np] . identifier[ones] ( identifier[clusters] )/ identifier[clusters]
identifier[zip_p] = identifier[np] . identifier[random] . identifier[random] (( identifier[genes] , identifier[n_cells] ))
identifier[labels] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_cells] ):
identifier[c] = identifier[np] . identifier[random] . identifier[choice] ( identifier[range] ( identifier[clusters] ), identifier[p] = identifier[cluster_probs] )
identifier[labels] . identifier[append] ( identifier[c] )
identifier[output] [:, identifier[i] ]= identifier[np] . identifier[where] ( identifier[zip_p] [:, identifier[i] ]< identifier[L] [:, identifier[c] ], literal[int] , identifier[np] . identifier[random] . identifier[poisson] ( identifier[M] [:, identifier[c] ]))
keyword[return] identifier[output] , identifier[np] . identifier[array] ( identifier[labels] ) | def generate_zip_data(M, L, n_cells, cluster_probs=None):
"""
Generates zero-inflated poisson-distributed data, given a set of means and zero probs for each cluster.
Args:
M (array): genes x clusters matrix
L (array): genes x clusters matrix - zero-inflation parameters
n_cells (int): number of output cells
cluster_probs (array): prior probability for each cluster.
Default: uniform.
Returns:
output - array with shape genes x n_cells
labels - array of cluster labels
"""
(genes, clusters) = M.shape
output = np.zeros((genes, n_cells))
if cluster_probs is None:
cluster_probs = np.ones(clusters) / clusters # depends on [control=['if'], data=['cluster_probs']]
zip_p = np.random.random((genes, n_cells))
labels = []
for i in range(n_cells):
c = np.random.choice(range(clusters), p=cluster_probs)
labels.append(c)
output[:, i] = np.where(zip_p[:, i] < L[:, c], 0, np.random.poisson(M[:, c])) # depends on [control=['for'], data=['i']]
return (output, np.array(labels)) |
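Usage sketch for generate_zip_data above, with toy dimensions; the parameter magnitudes are arbitrary, chosen only so the output shapes check out.

import numpy as np

np.random.seed(0)
genes, clusters, n_cells = 100, 3, 50
M = np.random.uniform(1.0, 10.0, (genes, clusters))   # per-cluster Poisson means
L = np.random.uniform(0.0, 0.5, (genes, clusters))    # per-cluster zero-inflation probs
data, labels = generate_zip_data(M, L, n_cells)
assert data.shape == (genes, n_cells) and labels.shape == (n_cells,)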
def get_free_voice(self, item):
""" Free voice
Returns the js menu compatible voice dict if the user
can see it, None otherwise
"""
view = True
if item.get('perms', None):
view = self.check_user_permission(item.get('perms', []))
elif item.get('apps', None):
view = self.check_apps_permission(item.get('apps', []))
if view:
return {
'type': 'free',
'label': item.get('label', ''),
'icon': item.get('icon', None),
'url': item.get('url', None)
}
return None | def function[get_free_voice, parameter[self, item]]:
constant[ Free voice
Returns the js menu compatible voice dict if the user
can see it, None otherwise
]
variable[view] assign[=] constant[True]
if call[name[item].get, parameter[constant[perms], constant[None]]] begin[:]
variable[view] assign[=] call[name[self].check_user_permission, parameter[call[name[item].get, parameter[constant[perms], list[[]]]]]]
if name[view] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1b077a320>, <ast.Constant object at 0x7da1b077abf0>, <ast.Constant object at 0x7da1b0778850>, <ast.Constant object at 0x7da1b0778160>], [<ast.Constant object at 0x7da1b0779000>, <ast.Call object at 0x7da1b0779900>, <ast.Call object at 0x7da1b077ae90>, <ast.Call object at 0x7da1b077a1d0>]]]
return[constant[None]] | keyword[def] identifier[get_free_voice] ( identifier[self] , identifier[item] ):
literal[string]
identifier[view] = keyword[True]
keyword[if] identifier[item] . identifier[get] ( literal[string] , keyword[None] ):
identifier[view] = identifier[self] . identifier[check_user_permission] ( identifier[item] . identifier[get] ( literal[string] ,[]))
keyword[elif] identifier[item] . identifier[get] ( literal[string] , keyword[None] ):
identifier[view] = identifier[self] . identifier[check_apps_permission] ( identifier[item] . identifier[get] ( literal[string] ,[]))
keyword[if] identifier[view] :
keyword[return] {
literal[string] : literal[string] ,
literal[string] : identifier[item] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[item] . identifier[get] ( literal[string] , keyword[None] ),
literal[string] : identifier[item] . identifier[get] ( literal[string] , keyword[None] )
}
keyword[return] keyword[None] | def get_free_voice(self, item):
""" Free voice
Returns the js menu compatible voice dict if the user
can see it, None otherwise
"""
view = True
if item.get('perms', None):
view = self.check_user_permission(item.get('perms', [])) # depends on [control=['if'], data=[]]
elif item.get('apps', None):
view = self.check_apps_permission(item.get('apps', [])) # depends on [control=['if'], data=[]]
if view:
return {'type': 'free', 'label': item.get('label', ''), 'icon': item.get('icon', None), 'url': item.get('url', None)} # depends on [control=['if'], data=[]]
return None |
def extract_log(rpath,extract=simple_attributes):
"""
Extracts Git commit data from a local repository.
:param rpath: The path to a local Git repo.
:param extract: A list of attribute name strings.
:return: A Pandas dataframe containing Git commit data.
"""
# Get repo
m_repo = git.Repo(rpath)
# Count commits
count = 0
m_commits = m_repo.iter_commits()
for commit in m_commits:
count += 1
# Initialize progress bar and index
with tqdm.tqdm(total=count) as pbar:
# Get commits again
m_commits = m_repo.iter_commits()
        # Set up data extraction
        update_interval = max(min(count // 100, 100), 5)
index = 0
buffer = []
        # Extract commit data
while True:
# Add the next commit to the buffer
try:
next_commit = next(m_commits)
                buffer.append(make_object_dict(next_commit, extract))
                index += 1
                if index % update_interval == 0:
pbar.update(update_interval)
            # If no more commits, stop iterating
except StopIteration:
break
# final_df = pd.concat(sub_df_list)
return pd.DataFrame(buffer) | def function[extract_log, parameter[rpath, extract]]:
constant[
    Extracts Git commit data from a local repository.
    :param rpath: The path to a local Git repo.
    :param extract: A list of attribute name strings.
    :return: A Pandas dataframe containing Git commit data.
]
variable[m_repo] assign[=] call[name[git].Repo, parameter[name[rpath]]]
variable[count] assign[=] constant[0]
variable[m_commits] assign[=] call[name[m_repo].iter_commits, parameter[]]
for taget[name[commit]] in starred[name[m_commits]] begin[:]
<ast.AugAssign object at 0x7da1b24fd870>
with call[name[tqdm].tqdm, parameter[]] begin[:]
variable[m_commits] assign[=] call[name[m_repo].iter_commits, parameter[]]
variable[update_interval] assign[=] call[name[max], parameter[call[name[min], parameter[binary_operation[name[count] <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]], constant[100]]], constant[5]]]
variable[index] assign[=] constant[0]
variable[buffer] assign[=] list[[]]
while constant[True] begin[:]
<ast.Try object at 0x7da1b24fdd20>
return[call[name[pd].DataFrame, parameter[name[buffer]]]] | keyword[def] identifier[extract_log] ( identifier[rpath] , identifier[extract] = identifier[simple_attributes] ):
literal[string]
identifier[m_repo] = identifier[git] . identifier[Repo] ( identifier[rpath] )
identifier[count] = literal[int]
identifier[m_commits] = identifier[m_repo] . identifier[iter_commits] ()
keyword[for] identifier[commit] keyword[in] identifier[m_commits] :
identifier[count] += literal[int]
keyword[with] identifier[tqdm] . identifier[tqdm] ( identifier[total] = identifier[count] ) keyword[as] identifier[pbar] :
identifier[m_commits] = identifier[m_repo] . identifier[iter_commits] ()
identifier[update_interval] = identifier[max] ( identifier[min] ( identifier[count] // literal[int] , literal[int] ), literal[int] )
identifier[index] = literal[int]
identifier[buffer] =[]
keyword[while] keyword[True] :
keyword[try] :
identifier[next_commit] = identifier[next] ( identifier[m_commits] )
identifier[buffer] . identifier[append] ( identifier[make_object_dict] ( identifier[next_commit] , identifier[extract] ))
identifier[index] += literal[int]
keyword[if] identifier[index] % identifier[update_interval] == literal[int] :
identifier[pbar] . identifier[update] ( identifier[update_interval] )
keyword[except] identifier[StopIteration] :
keyword[break]
keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[buffer] ) | def extract_log(rpath, extract=simple_attributes):
"""
    Extracts Git commit data from a local repository.
    :param rpath: The path to a local Git repo.
    :param extract: A list of attribute name strings.
    :return: A Pandas dataframe containing Git commit data.
"""
# Get repo
m_repo = git.Repo(rpath)
# Count commits
count = 0
m_commits = m_repo.iter_commits()
for commit in m_commits:
count += 1 # depends on [control=['for'], data=[]]
# Initialize progress bar and index
with tqdm.tqdm(total=count) as pbar:
# Get commits again
m_commits = m_repo.iter_commits()
        # Set up data extraction
update_interval = max(min(count // 100, 100), 5)
index = 0
buffer = []
        # Extract commit data
while True:
# Add the next commit to the buffer
try:
next_commit = next(m_commits)
buffer.append(make_object_dict(next_commit, extract))
index += 1
if index % update_interval == 0:
pbar.update(update_interval) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
            # If no more commits, stop iterating
except StopIteration:
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['pbar']]
# final_df = pd.concat(sub_df_list)
return pd.DataFrame(buffer) |
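Assuming the imports the extract_log row relies on (GitPython imported as git, tqdm, pandas as pd) plus its make_object_dict helper, usage would look like this sketch; the repository path is a placeholder:

import git       # GitPython
import tqdm
import pandas as pd

df = extract_log('/path/to/local/repo')    # default attribute set
print(df.shape)
print(df.head())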
def aggregate_hazard_summary(impact, aggregate_hazard):
"""Compute the summary from the source layer to the aggregate_hazard layer.
Source layer :
|exp_id|exp_class|haz_id|haz_class|aggr_id|aggr_name|affected|extra*|
Target layer :
| aggr_id | aggr_name | haz_id | haz_class | extra* |
Output layer :
    |aggr_id| aggr_name|haz_id|haz_class|affected|extra*|count per exposure*|
    :param impact: The impact vector layer to aggregate.
    :type impact: QgsVectorLayer
    :param aggregate_hazard: The aggregate_hazard vector layer to which the
        statistics are written.
:type aggregate_hazard: QgsVectorLayer
:return: The new aggregate_hazard layer with summary.
:rtype: QgsVectorLayer
.. versionadded:: 4.0
"""
source_fields = impact.keywords['inasafe_fields']
target_fields = aggregate_hazard.keywords['inasafe_fields']
target_compulsory_fields = [
aggregation_id_field,
aggregation_name_field,
hazard_id_field,
hazard_class_field
]
check_inputs(target_compulsory_fields, target_fields)
source_compulsory_fields = [
exposure_id_field,
exposure_class_field,
aggregation_id_field,
aggregation_name_field,
hazard_id_field,
hazard_class_field
]
check_inputs(source_compulsory_fields, source_fields)
aggregation_id = target_fields[aggregation_id_field['key']]
hazard_id = target_fields[hazard_id_field['key']]
hazard_class = target_fields[hazard_class_field['key']]
exposure_class = source_fields[exposure_class_field['key']]
exposure_class_index = impact.fields().lookupField(exposure_class)
unique_exposure = list(impact.uniqueValues(exposure_class_index))
fields = ['aggregation_id', 'hazard_id']
absolute_values = create_absolute_values_structure(impact, fields)
# We need to know what kind of exposure we are going to count.
# the size, or the number of features or population.
field_index = report_on_field(impact)
aggregate_hazard.startEditing()
shift = aggregate_hazard.fields().count()
dynamic_structure = [
[exposure_count_field, unique_exposure],
]
add_fields(
aggregate_hazard,
absolute_values,
[affected_field, total_field],
dynamic_structure,
)
flat_table = FlatTable('aggregation_id', 'hazard_id', 'exposure_class')
request = QgsFeatureRequest()
request.setFlags(QgsFeatureRequest.NoGeometry)
LOGGER.debug('Computing the aggregate hazard summary.')
for feature in impact.getFeatures(request):
# Field_index can be equal to 0.
if field_index is not None:
value = feature[field_index]
else:
value = 1
aggregation_value = feature[aggregation_id]
hazard_value = feature[hazard_id]
if (hazard_value is None
or hazard_value == ''
or (hasattr(hazard_value, 'isNull')
and hazard_value.isNull())):
hazard_value = not_exposed_class['key']
exposure_value = feature[exposure_class]
if (exposure_value is None
or exposure_value == ''
or (hasattr(exposure_value, 'isNull')
and exposure_value.isNull())):
exposure_value = 'NULL'
flat_table.add_value(
value,
aggregation_id=aggregation_value,
hazard_id=hazard_value,
exposure_class=exposure_value
)
        # We summarize every absolute value.
for field, field_definition in list(absolute_values.items()):
value = feature[field]
if (value == ''
or value is None
or (hasattr(value, 'isNull')
and value.isNull())):
value = 0
field_definition[0].add_value(
value,
aggregation_id=aggregation_value,
hazard_id=hazard_value
)
hazard_keywords = aggregate_hazard.keywords['hazard_keywords']
hazard = hazard_keywords['hazard']
classification = hazard_keywords['classification']
exposure_keywords = impact.keywords['exposure_keywords']
exposure = exposure_keywords['exposure']
for area in aggregate_hazard.getFeatures(request):
aggregation_value = area[aggregation_id]
feature_hazard_id = area[hazard_id]
if (feature_hazard_id == ''
or feature_hazard_id is None
or (hasattr(feature_hazard_id, 'isNull')
and feature_hazard_id.isNull())):
feature_hazard_id = not_exposed_class['key']
feature_hazard_value = area[hazard_class]
total = 0
for i, val in enumerate(unique_exposure):
sum = flat_table.get_value(
aggregation_id=aggregation_value,
hazard_id=feature_hazard_id,
exposure_class=val
)
total += sum
aggregate_hazard.changeAttributeValue(area.id(), shift + i, sum)
affected = post_processor_affected_function(
exposure=exposure,
hazard=hazard,
classification=classification,
hazard_class=feature_hazard_value)
affected = tr(str(affected))
aggregate_hazard.changeAttributeValue(
area.id(), shift + len(unique_exposure), affected)
aggregate_hazard.changeAttributeValue(
area.id(), shift + len(unique_exposure) + 1, total)
for i, field in enumerate(absolute_values.values()):
value = field[0].get_value(
aggregation_id=aggregation_value,
hazard_id=feature_hazard_id
)
aggregate_hazard.changeAttributeValue(
area.id(), shift + len(unique_exposure) + 2 + i, value)
aggregate_hazard.commitChanges()
aggregate_hazard.keywords['title'] = (
layer_purpose_aggregate_hazard_impacted['name'])
if qgis_version() >= 21800:
aggregate_hazard.setName(aggregate_hazard.keywords['title'])
else:
aggregate_hazard.setLayerName(aggregate_hazard.keywords['title'])
aggregate_hazard.keywords['layer_purpose'] = (
layer_purpose_aggregate_hazard_impacted['key'])
aggregate_hazard.keywords['exposure_keywords'] = impact.keywords.copy()
check_layer(aggregate_hazard)
return aggregate_hazard | def function[aggregate_hazard_summary, parameter[impact, aggregate_hazard]]:
constant[Compute the summary from the source layer to the aggregate_hazard layer.
Source layer :
|exp_id|exp_class|haz_id|haz_class|aggr_id|aggr_name|affected|extra*|
Target layer :
| aggr_id | aggr_name | haz_id | haz_class | extra* |
Output layer :
    |aggr_id| aggr_name|haz_id|haz_class|affected|extra*|count per exposure*|
    :param impact: The impact vector layer to aggregate.
    :type impact: QgsVectorLayer
    :param aggregate_hazard: The aggregate_hazard vector layer to which the
        statistics are written.
:type aggregate_hazard: QgsVectorLayer
:return: The new aggregate_hazard layer with summary.
:rtype: QgsVectorLayer
.. versionadded:: 4.0
]
variable[source_fields] assign[=] call[name[impact].keywords][constant[inasafe_fields]]
variable[target_fields] assign[=] call[name[aggregate_hazard].keywords][constant[inasafe_fields]]
variable[target_compulsory_fields] assign[=] list[[<ast.Name object at 0x7da2041d8b80>, <ast.Name object at 0x7da2041d8100>, <ast.Name object at 0x7da2041d9ba0>, <ast.Name object at 0x7da2041d82e0>]]
call[name[check_inputs], parameter[name[target_compulsory_fields], name[target_fields]]]
variable[source_compulsory_fields] assign[=] list[[<ast.Name object at 0x7da2041da020>, <ast.Name object at 0x7da2041db070>, <ast.Name object at 0x7da2041da080>, <ast.Name object at 0x7da2041d8850>, <ast.Name object at 0x7da2041da560>, <ast.Name object at 0x7da2041dad40>]]
call[name[check_inputs], parameter[name[source_compulsory_fields], name[source_fields]]]
variable[aggregation_id] assign[=] call[name[target_fields]][call[name[aggregation_id_field]][constant[key]]]
variable[hazard_id] assign[=] call[name[target_fields]][call[name[hazard_id_field]][constant[key]]]
variable[hazard_class] assign[=] call[name[target_fields]][call[name[hazard_class_field]][constant[key]]]
variable[exposure_class] assign[=] call[name[source_fields]][call[name[exposure_class_field]][constant[key]]]
variable[exposure_class_index] assign[=] call[call[name[impact].fields, parameter[]].lookupField, parameter[name[exposure_class]]]
variable[unique_exposure] assign[=] call[name[list], parameter[call[name[impact].uniqueValues, parameter[name[exposure_class_index]]]]]
variable[fields] assign[=] list[[<ast.Constant object at 0x7da2041d9930>, <ast.Constant object at 0x7da2041d8a00>]]
variable[absolute_values] assign[=] call[name[create_absolute_values_structure], parameter[name[impact], name[fields]]]
variable[field_index] assign[=] call[name[report_on_field], parameter[name[impact]]]
call[name[aggregate_hazard].startEditing, parameter[]]
variable[shift] assign[=] call[call[name[aggregate_hazard].fields, parameter[]].count, parameter[]]
variable[dynamic_structure] assign[=] list[[<ast.List object at 0x7da2041d8550>]]
call[name[add_fields], parameter[name[aggregate_hazard], name[absolute_values], list[[<ast.Name object at 0x7da2041da860>, <ast.Name object at 0x7da2041db0a0>]], name[dynamic_structure]]]
variable[flat_table] assign[=] call[name[FlatTable], parameter[constant[aggregation_id], constant[hazard_id], constant[exposure_class]]]
variable[request] assign[=] call[name[QgsFeatureRequest], parameter[]]
call[name[request].setFlags, parameter[name[QgsFeatureRequest].NoGeometry]]
call[name[LOGGER].debug, parameter[constant[Computing the aggregate hazard summary.]]]
for taget[name[feature]] in starred[call[name[impact].getFeatures, parameter[name[request]]]] begin[:]
if compare[name[field_index] is_not constant[None]] begin[:]
variable[value] assign[=] call[name[feature]][name[field_index]]
variable[aggregation_value] assign[=] call[name[feature]][name[aggregation_id]]
variable[hazard_value] assign[=] call[name[feature]][name[hazard_id]]
if <ast.BoolOp object at 0x7da2041dbca0> begin[:]
variable[hazard_value] assign[=] call[name[not_exposed_class]][constant[key]]
variable[exposure_value] assign[=] call[name[feature]][name[exposure_class]]
if <ast.BoolOp object at 0x7da2041dab00> begin[:]
variable[exposure_value] assign[=] constant[NULL]
call[name[flat_table].add_value, parameter[name[value]]]
for taget[tuple[[<ast.Name object at 0x7da2041dbdf0>, <ast.Name object at 0x7da2041da800>]]] in starred[call[name[list], parameter[call[name[absolute_values].items, parameter[]]]]] begin[:]
variable[value] assign[=] call[name[feature]][name[field]]
if <ast.BoolOp object at 0x7da2041daa70> begin[:]
variable[value] assign[=] constant[0]
call[call[name[field_definition]][constant[0]].add_value, parameter[name[value]]]
variable[hazard_keywords] assign[=] call[name[aggregate_hazard].keywords][constant[hazard_keywords]]
variable[hazard] assign[=] call[name[hazard_keywords]][constant[hazard]]
variable[classification] assign[=] call[name[hazard_keywords]][constant[classification]]
variable[exposure_keywords] assign[=] call[name[impact].keywords][constant[exposure_keywords]]
variable[exposure] assign[=] call[name[exposure_keywords]][constant[exposure]]
for taget[name[area]] in starred[call[name[aggregate_hazard].getFeatures, parameter[name[request]]]] begin[:]
variable[aggregation_value] assign[=] call[name[area]][name[aggregation_id]]
variable[feature_hazard_id] assign[=] call[name[area]][name[hazard_id]]
if <ast.BoolOp object at 0x7da18f722d40> begin[:]
variable[feature_hazard_id] assign[=] call[name[not_exposed_class]][constant[key]]
variable[feature_hazard_value] assign[=] call[name[area]][name[hazard_class]]
variable[total] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18f7210c0>, <ast.Name object at 0x7da18f722e60>]]] in starred[call[name[enumerate], parameter[name[unique_exposure]]]] begin[:]
variable[sum] assign[=] call[name[flat_table].get_value, parameter[]]
<ast.AugAssign object at 0x7da18f722050>
call[name[aggregate_hazard].changeAttributeValue, parameter[call[name[area].id, parameter[]], binary_operation[name[shift] + name[i]], name[sum]]]
variable[affected] assign[=] call[name[post_processor_affected_function], parameter[]]
variable[affected] assign[=] call[name[tr], parameter[call[name[str], parameter[name[affected]]]]]
call[name[aggregate_hazard].changeAttributeValue, parameter[call[name[area].id, parameter[]], binary_operation[name[shift] + call[name[len], parameter[name[unique_exposure]]]], name[affected]]]
call[name[aggregate_hazard].changeAttributeValue, parameter[call[name[area].id, parameter[]], binary_operation[binary_operation[name[shift] + call[name[len], parameter[name[unique_exposure]]]] + constant[1]], name[total]]]
for taget[tuple[[<ast.Name object at 0x7da18f7213f0>, <ast.Name object at 0x7da18f720eb0>]]] in starred[call[name[enumerate], parameter[call[name[absolute_values].values, parameter[]]]]] begin[:]
variable[value] assign[=] call[call[name[field]][constant[0]].get_value, parameter[]]
call[name[aggregate_hazard].changeAttributeValue, parameter[call[name[area].id, parameter[]], binary_operation[binary_operation[binary_operation[name[shift] + call[name[len], parameter[name[unique_exposure]]]] + constant[2]] + name[i]], name[value]]]
call[name[aggregate_hazard].commitChanges, parameter[]]
call[name[aggregate_hazard].keywords][constant[title]] assign[=] call[name[layer_purpose_aggregate_hazard_impacted]][constant[name]]
if compare[call[name[qgis_version], parameter[]] greater_or_equal[>=] constant[21800]] begin[:]
call[name[aggregate_hazard].setName, parameter[call[name[aggregate_hazard].keywords][constant[title]]]]
call[name[aggregate_hazard].keywords][constant[layer_purpose]] assign[=] call[name[layer_purpose_aggregate_hazard_impacted]][constant[key]]
call[name[aggregate_hazard].keywords][constant[exposure_keywords]] assign[=] call[name[impact].keywords.copy, parameter[]]
call[name[check_layer], parameter[name[aggregate_hazard]]]
return[name[aggregate_hazard]] | keyword[def] identifier[aggregate_hazard_summary] ( identifier[impact] , identifier[aggregate_hazard] ):
literal[string]
identifier[source_fields] = identifier[impact] . identifier[keywords] [ literal[string] ]
identifier[target_fields] = identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ]
identifier[target_compulsory_fields] =[
identifier[aggregation_id_field] ,
identifier[aggregation_name_field] ,
identifier[hazard_id_field] ,
identifier[hazard_class_field]
]
identifier[check_inputs] ( identifier[target_compulsory_fields] , identifier[target_fields] )
identifier[source_compulsory_fields] =[
identifier[exposure_id_field] ,
identifier[exposure_class_field] ,
identifier[aggregation_id_field] ,
identifier[aggregation_name_field] ,
identifier[hazard_id_field] ,
identifier[hazard_class_field]
]
identifier[check_inputs] ( identifier[source_compulsory_fields] , identifier[source_fields] )
identifier[aggregation_id] = identifier[target_fields] [ identifier[aggregation_id_field] [ literal[string] ]]
identifier[hazard_id] = identifier[target_fields] [ identifier[hazard_id_field] [ literal[string] ]]
identifier[hazard_class] = identifier[target_fields] [ identifier[hazard_class_field] [ literal[string] ]]
identifier[exposure_class] = identifier[source_fields] [ identifier[exposure_class_field] [ literal[string] ]]
identifier[exposure_class_index] = identifier[impact] . identifier[fields] (). identifier[lookupField] ( identifier[exposure_class] )
identifier[unique_exposure] = identifier[list] ( identifier[impact] . identifier[uniqueValues] ( identifier[exposure_class_index] ))
identifier[fields] =[ literal[string] , literal[string] ]
identifier[absolute_values] = identifier[create_absolute_values_structure] ( identifier[impact] , identifier[fields] )
identifier[field_index] = identifier[report_on_field] ( identifier[impact] )
identifier[aggregate_hazard] . identifier[startEditing] ()
identifier[shift] = identifier[aggregate_hazard] . identifier[fields] (). identifier[count] ()
identifier[dynamic_structure] =[
[ identifier[exposure_count_field] , identifier[unique_exposure] ],
]
identifier[add_fields] (
identifier[aggregate_hazard] ,
identifier[absolute_values] ,
[ identifier[affected_field] , identifier[total_field] ],
identifier[dynamic_structure] ,
)
identifier[flat_table] = identifier[FlatTable] ( literal[string] , literal[string] , literal[string] )
identifier[request] = identifier[QgsFeatureRequest] ()
identifier[request] . identifier[setFlags] ( identifier[QgsFeatureRequest] . identifier[NoGeometry] )
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[for] identifier[feature] keyword[in] identifier[impact] . identifier[getFeatures] ( identifier[request] ):
keyword[if] identifier[field_index] keyword[is] keyword[not] keyword[None] :
identifier[value] = identifier[feature] [ identifier[field_index] ]
keyword[else] :
identifier[value] = literal[int]
identifier[aggregation_value] = identifier[feature] [ identifier[aggregation_id] ]
identifier[hazard_value] = identifier[feature] [ identifier[hazard_id] ]
keyword[if] ( identifier[hazard_value] keyword[is] keyword[None]
keyword[or] identifier[hazard_value] == literal[string]
keyword[or] ( identifier[hasattr] ( identifier[hazard_value] , literal[string] )
keyword[and] identifier[hazard_value] . identifier[isNull] ())):
identifier[hazard_value] = identifier[not_exposed_class] [ literal[string] ]
identifier[exposure_value] = identifier[feature] [ identifier[exposure_class] ]
keyword[if] ( identifier[exposure_value] keyword[is] keyword[None]
keyword[or] identifier[exposure_value] == literal[string]
keyword[or] ( identifier[hasattr] ( identifier[exposure_value] , literal[string] )
keyword[and] identifier[exposure_value] . identifier[isNull] ())):
identifier[exposure_value] = literal[string]
identifier[flat_table] . identifier[add_value] (
identifier[value] ,
identifier[aggregation_id] = identifier[aggregation_value] ,
identifier[hazard_id] = identifier[hazard_value] ,
identifier[exposure_class] = identifier[exposure_value]
)
keyword[for] identifier[field] , identifier[field_definition] keyword[in] identifier[list] ( identifier[absolute_values] . identifier[items] ()):
identifier[value] = identifier[feature] [ identifier[field] ]
keyword[if] ( identifier[value] == literal[string]
keyword[or] identifier[value] keyword[is] keyword[None]
keyword[or] ( identifier[hasattr] ( identifier[value] , literal[string] )
keyword[and] identifier[value] . identifier[isNull] ())):
identifier[value] = literal[int]
identifier[field_definition] [ literal[int] ]. identifier[add_value] (
identifier[value] ,
identifier[aggregation_id] = identifier[aggregation_value] ,
identifier[hazard_id] = identifier[hazard_value]
)
identifier[hazard_keywords] = identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ]
identifier[hazard] = identifier[hazard_keywords] [ literal[string] ]
identifier[classification] = identifier[hazard_keywords] [ literal[string] ]
identifier[exposure_keywords] = identifier[impact] . identifier[keywords] [ literal[string] ]
identifier[exposure] = identifier[exposure_keywords] [ literal[string] ]
keyword[for] identifier[area] keyword[in] identifier[aggregate_hazard] . identifier[getFeatures] ( identifier[request] ):
identifier[aggregation_value] = identifier[area] [ identifier[aggregation_id] ]
identifier[feature_hazard_id] = identifier[area] [ identifier[hazard_id] ]
keyword[if] ( identifier[feature_hazard_id] == literal[string]
keyword[or] identifier[feature_hazard_id] keyword[is] keyword[None]
keyword[or] ( identifier[hasattr] ( identifier[feature_hazard_id] , literal[string] )
keyword[and] identifier[feature_hazard_id] . identifier[isNull] ())):
identifier[feature_hazard_id] = identifier[not_exposed_class] [ literal[string] ]
identifier[feature_hazard_value] = identifier[area] [ identifier[hazard_class] ]
identifier[total] = literal[int]
keyword[for] identifier[i] , identifier[val] keyword[in] identifier[enumerate] ( identifier[unique_exposure] ):
identifier[sum] = identifier[flat_table] . identifier[get_value] (
identifier[aggregation_id] = identifier[aggregation_value] ,
identifier[hazard_id] = identifier[feature_hazard_id] ,
identifier[exposure_class] = identifier[val]
)
identifier[total] += identifier[sum]
identifier[aggregate_hazard] . identifier[changeAttributeValue] ( identifier[area] . identifier[id] (), identifier[shift] + identifier[i] , identifier[sum] )
identifier[affected] = identifier[post_processor_affected_function] (
identifier[exposure] = identifier[exposure] ,
identifier[hazard] = identifier[hazard] ,
identifier[classification] = identifier[classification] ,
identifier[hazard_class] = identifier[feature_hazard_value] )
identifier[affected] = identifier[tr] ( identifier[str] ( identifier[affected] ))
identifier[aggregate_hazard] . identifier[changeAttributeValue] (
identifier[area] . identifier[id] (), identifier[shift] + identifier[len] ( identifier[unique_exposure] ), identifier[affected] )
identifier[aggregate_hazard] . identifier[changeAttributeValue] (
identifier[area] . identifier[id] (), identifier[shift] + identifier[len] ( identifier[unique_exposure] )+ literal[int] , identifier[total] )
keyword[for] identifier[i] , identifier[field] keyword[in] identifier[enumerate] ( identifier[absolute_values] . identifier[values] ()):
identifier[value] = identifier[field] [ literal[int] ]. identifier[get_value] (
identifier[aggregation_id] = identifier[aggregation_value] ,
identifier[hazard_id] = identifier[feature_hazard_id]
)
identifier[aggregate_hazard] . identifier[changeAttributeValue] (
identifier[area] . identifier[id] (), identifier[shift] + identifier[len] ( identifier[unique_exposure] )+ literal[int] + identifier[i] , identifier[value] )
identifier[aggregate_hazard] . identifier[commitChanges] ()
identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ]=(
identifier[layer_purpose_aggregate_hazard_impacted] [ literal[string] ])
keyword[if] identifier[qgis_version] ()>= literal[int] :
identifier[aggregate_hazard] . identifier[setName] ( identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ])
keyword[else] :
identifier[aggregate_hazard] . identifier[setLayerName] ( identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ])
identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ]=(
identifier[layer_purpose_aggregate_hazard_impacted] [ literal[string] ])
identifier[aggregate_hazard] . identifier[keywords] [ literal[string] ]= identifier[impact] . identifier[keywords] . identifier[copy] ()
identifier[check_layer] ( identifier[aggregate_hazard] )
keyword[return] identifier[aggregate_hazard] | def aggregate_hazard_summary(impact, aggregate_hazard):
"""Compute the summary from the source layer to the aggregate_hazard layer.
Source layer :
|exp_id|exp_class|haz_id|haz_class|aggr_id|aggr_name|affected|extra*|
Target layer :
| aggr_id | aggr_name | haz_id | haz_class | extra* |
Output layer :
    |aggr_id| aggr_name|haz_id|haz_class|affected|extra*|count per exposure*|
    :param impact: The impact vector layer to aggregate.
    :type impact: QgsVectorLayer
    :param aggregate_hazard: The aggregate_hazard vector layer to which the
        statistics are written.
:type aggregate_hazard: QgsVectorLayer
:return: The new aggregate_hazard layer with summary.
:rtype: QgsVectorLayer
.. versionadded:: 4.0
"""
source_fields = impact.keywords['inasafe_fields']
target_fields = aggregate_hazard.keywords['inasafe_fields']
target_compulsory_fields = [aggregation_id_field, aggregation_name_field, hazard_id_field, hazard_class_field]
check_inputs(target_compulsory_fields, target_fields)
source_compulsory_fields = [exposure_id_field, exposure_class_field, aggregation_id_field, aggregation_name_field, hazard_id_field, hazard_class_field]
check_inputs(source_compulsory_fields, source_fields)
aggregation_id = target_fields[aggregation_id_field['key']]
hazard_id = target_fields[hazard_id_field['key']]
hazard_class = target_fields[hazard_class_field['key']]
exposure_class = source_fields[exposure_class_field['key']]
exposure_class_index = impact.fields().lookupField(exposure_class)
unique_exposure = list(impact.uniqueValues(exposure_class_index))
fields = ['aggregation_id', 'hazard_id']
absolute_values = create_absolute_values_structure(impact, fields)
# We need to know what kind of exposure we are going to count.
# the size, or the number of features or population.
field_index = report_on_field(impact)
aggregate_hazard.startEditing()
shift = aggregate_hazard.fields().count()
dynamic_structure = [[exposure_count_field, unique_exposure]]
add_fields(aggregate_hazard, absolute_values, [affected_field, total_field], dynamic_structure)
flat_table = FlatTable('aggregation_id', 'hazard_id', 'exposure_class')
request = QgsFeatureRequest()
request.setFlags(QgsFeatureRequest.NoGeometry)
LOGGER.debug('Computing the aggregate hazard summary.')
for feature in impact.getFeatures(request):
# Field_index can be equal to 0.
if field_index is not None:
value = feature[field_index] # depends on [control=['if'], data=['field_index']]
else:
value = 1
aggregation_value = feature[aggregation_id]
hazard_value = feature[hazard_id]
if hazard_value is None or hazard_value == '' or (hasattr(hazard_value, 'isNull') and hazard_value.isNull()):
hazard_value = not_exposed_class['key'] # depends on [control=['if'], data=[]]
exposure_value = feature[exposure_class]
if exposure_value is None or exposure_value == '' or (hasattr(exposure_value, 'isNull') and exposure_value.isNull()):
exposure_value = 'NULL' # depends on [control=['if'], data=[]]
flat_table.add_value(value, aggregation_id=aggregation_value, hazard_id=hazard_value, exposure_class=exposure_value)
        # We summarize every absolute value.
for (field, field_definition) in list(absolute_values.items()):
value = feature[field]
if value == '' or value is None or (hasattr(value, 'isNull') and value.isNull()):
value = 0 # depends on [control=['if'], data=[]]
field_definition[0].add_value(value, aggregation_id=aggregation_value, hazard_id=hazard_value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['feature']]
hazard_keywords = aggregate_hazard.keywords['hazard_keywords']
hazard = hazard_keywords['hazard']
classification = hazard_keywords['classification']
exposure_keywords = impact.keywords['exposure_keywords']
exposure = exposure_keywords['exposure']
for area in aggregate_hazard.getFeatures(request):
aggregation_value = area[aggregation_id]
feature_hazard_id = area[hazard_id]
if feature_hazard_id == '' or feature_hazard_id is None or (hasattr(feature_hazard_id, 'isNull') and feature_hazard_id.isNull()):
feature_hazard_id = not_exposed_class['key'] # depends on [control=['if'], data=[]]
feature_hazard_value = area[hazard_class]
total = 0
for (i, val) in enumerate(unique_exposure):
sum = flat_table.get_value(aggregation_id=aggregation_value, hazard_id=feature_hazard_id, exposure_class=val)
total += sum
aggregate_hazard.changeAttributeValue(area.id(), shift + i, sum) # depends on [control=['for'], data=[]]
affected = post_processor_affected_function(exposure=exposure, hazard=hazard, classification=classification, hazard_class=feature_hazard_value)
affected = tr(str(affected))
aggregate_hazard.changeAttributeValue(area.id(), shift + len(unique_exposure), affected)
aggregate_hazard.changeAttributeValue(area.id(), shift + len(unique_exposure) + 1, total)
for (i, field) in enumerate(absolute_values.values()):
value = field[0].get_value(aggregation_id=aggregation_value, hazard_id=feature_hazard_id)
aggregate_hazard.changeAttributeValue(area.id(), shift + len(unique_exposure) + 2 + i, value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['area']]
aggregate_hazard.commitChanges()
aggregate_hazard.keywords['title'] = layer_purpose_aggregate_hazard_impacted['name']
if qgis_version() >= 21800:
aggregate_hazard.setName(aggregate_hazard.keywords['title']) # depends on [control=['if'], data=[]]
else:
aggregate_hazard.setLayerName(aggregate_hazard.keywords['title'])
aggregate_hazard.keywords['layer_purpose'] = layer_purpose_aggregate_hazard_impacted['key']
aggregate_hazard.keywords['exposure_keywords'] = impact.keywords.copy()
check_layer(aggregate_hazard)
return aggregate_hazard |
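The bookkeeping at the heart of the aggregate_hazard_summary row is a flat table keyed by (aggregation_id, hazard_id, exposure_class). A minimal dictionary-based sketch of that pattern, independent of QGIS and of InaSAFE's actual FlatTable class:

from collections import defaultdict

flat = defaultdict(float)   # (aggregation_id, hazard_id, exposure_class) -> running sum

def add_value(value, aggregation_id, hazard_id, exposure_class):
    flat[(aggregation_id, hazard_id, exposure_class)] += value

def get_value(aggregation_id, hazard_id, exposure_class):
    return flat[(aggregation_id, hazard_id, exposure_class)]

add_value(3.0, aggregation_id=1, hazard_id='high', exposure_class='roads')
add_value(2.5, aggregation_id=1, hazard_id='high', exposure_class='roads')
print(get_value(aggregation_id=1, hazard_id='high', exposure_class='roads'))   # 5.5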
def refresh_menu(self):
"""Refresh context menu"""
index = self.currentIndex()
condition = index.isValid()
    self.edit_action.setEnabled(condition)
    self.remove_action.setEnabled(condition)
self.refresh_plot_entries(index) | def function[refresh_menu, parameter[self]]:
constant[Refresh context menu]
variable[index] assign[=] call[name[self].currentIndex, parameter[]]
variable[condition] assign[=] call[name[index].isValid, parameter[]]
call[name[self].edit_action.setEnabled, parameter[name[condition]]]
call[name[self].remove_action.setEnabled, parameter[name[condition]]]
call[name[self].refresh_plot_entries, parameter[name[index]]] | keyword[def] identifier[refresh_menu] ( identifier[self] ):
literal[string]
identifier[index] = identifier[self] . identifier[currentIndex] ()
identifier[condition] = identifier[index] . identifier[isValid] ()
identifier[self] . identifier[edit_action] . identifier[setEnabled] ( identifier[condition] )
identifier[self] . identifier[remove_action] . identifier[setEnabled] ( identifier[condition] )
identifier[self] . identifier[refresh_plot_entries] ( identifier[index] ) | def refresh_menu(self):
"""Refresh context menu"""
index = self.currentIndex()
condition = index.isValid()
self.edit_action.setEnabled(condition)
self.remove_action.setEnabled(condition)
self.refresh_plot_entries(index) |
def wtFromUTCpy(pyUTC, leapSecs=14):
"""convenience function:
allows to use python UTC times and
returns only week and tow"""
ymdhms = ymdhmsFromPyUTC(pyUTC)
wSowDSoD = apply(gpsFromUTC, ymdhms + (leapSecs,))
return wSowDSoD[0:2] | def function[wtFromUTCpy, parameter[pyUTC, leapSecs]]:
    constant[convenience function:
    allows the use of Python UTC times and
    returns only the GPS week and time of week (TOW)]
variable[ymdhms] assign[=] call[name[ymdhmsFromPyUTC], parameter[name[pyUTC]]]
variable[wSowDSoD] assign[=] call[name[apply], parameter[name[gpsFromUTC], binary_operation[name[ymdhms] + tuple[[<ast.Name object at 0x7da18f722230>]]]]]
return[call[name[wSowDSoD]][<ast.Slice object at 0x7da18f723e20>]] | keyword[def] identifier[wtFromUTCpy] ( identifier[pyUTC] , identifier[leapSecs] = literal[int] ):
literal[string]
identifier[ymdhms] = identifier[ymdhmsFromPyUTC] ( identifier[pyUTC] )
identifier[wSowDSoD] = identifier[apply] ( identifier[gpsFromUTC] , identifier[ymdhms] +( identifier[leapSecs] ,))
keyword[return] identifier[wSowDSoD] [ literal[int] : literal[int] ] | def wtFromUTCpy(pyUTC, leapSecs=14):
"""convenience function:
allows to use python UTC times and
returns only week and tow"""
ymdhms = ymdhmsFromPyUTC(pyUTC)
wSowDSoD = apply(gpsFromUTC, ymdhms + (leapSecs,))
return wSowDSoD[0:2] |
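Note that apply() in the wtFromUTCpy row is a Python 2 builtin that was removed in Python 3; apply(f, args) is equivalent to f(*args). A Python 3 equivalent of the body, keeping the original names:

def wtFromUTCpy(pyUTC, leapSecs=14):
    """convenience function:
    allows the use of Python UTC times and
    returns only the GPS week and time of week (TOW)"""
    ymdhms = ymdhmsFromPyUTC(pyUTC)
    wSowDSoD = gpsFromUTC(*(ymdhms + (leapSecs,)))   # star-unpacking replaces apply()
    return wSowDSoD[0:2]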
def get_formatted_path(self, **kwargs):
"""
Format this endpoint's path with the supplied keyword arguments
:return:
The fully-formatted path
:rtype:
str
"""
self._validate_path_placeholders(self.path_placeholders, kwargs)
return self.path.format(**kwargs) | def function[get_formatted_path, parameter[self]]:
constant[
Format this endpoint's path with the supplied keyword arguments
:return:
The fully-formatted path
:rtype:
str
]
call[name[self]._validate_path_placeholders, parameter[name[self].path_placeholders, name[kwargs]]]
return[call[name[self].path.format, parameter[]]] | keyword[def] identifier[get_formatted_path] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_validate_path_placeholders] ( identifier[self] . identifier[path_placeholders] , identifier[kwargs] )
keyword[return] identifier[self] . identifier[path] . identifier[format] (** identifier[kwargs] ) | def get_formatted_path(self, **kwargs):
"""
Format this endpoint's path with the supplied keyword arguments
:return:
The fully-formatted path
:rtype:
str
"""
self._validate_path_placeholders(self.path_placeholders, kwargs)
return self.path.format(**kwargs) |
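A hedged usage sketch for the get_formatted_path row, assuming an endpoint whose path template is '/users/{user_id}/posts/{post_id}' and whose path_placeholders names those two fields (both assumptions for illustration):

url = endpoint.get_formatted_path(user_id=42, post_id=7)
# -> '/users/42/posts/7'; _validate_path_placeholders is expected to raise
#    first if either keyword were missing.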
def _examine_key(self, key_name, key_val, p, i, option_parsing):
""" Examine the current matching key
Extracts information, such as function to execute and command
options, from the current key (passed to function as 'key_name' and
'key_val').
"""
# if the element we reached has an executable registered, save it!
if 'exec' in key_val:
self.exe = key_val['exec']
# simple bool options, save value
if 'type' in key_val and key_val['type'] == 'bool':
self.exe_options[key_name] = True
        # Elements which take arguments need special attention
if 'argument' in key_val:
# is there an argument (the next element)?
if len(self.inp_cmd) > i+1:
self.key = { 'argument': key_val['argument'] }
# there is - save it
if key_val['type'] == 'option':
# if argument is of type multiple, store result in a list
if 'multiple' in key_val and key_val['multiple'] == True:
if key_name not in self.exe_options:
self.exe_options[key_name] = []
self.exe_options[key_name].append(self.inp_cmd[i+1])
else:
self.exe_options[key_name] = self.inp_cmd[i+1]
else:
self.arg = self.inp_cmd[i+1]
# Validate the argument if possible
if 'validator' in key_val['argument']:
self.key_complete = key_val['argument']['validator'](self.inp_cmd[i+1])
else:
self.key_complete = True
# if there are sub parameters, add them
if 'children' in key_val:
self.children = key_val['children']
# If we reached a command without parameters (which
# should be the end of the command), unset the children
# dict.
elif key_val['type'] == 'command':
self.children = None
# if the command is finished (there is an element after the argument) and
# there is an exec_immediately-function, execute it now
if 'exec_immediately' in key_val and len(self.inp_cmd) > i+2:
key_val['exec_immediately'](self.inp_cmd[i+1], self.exe_options)
# clear exe_options as these were options for exec_immediately
self.exe_options = {}
i += 1
else:
# if there is no next element, let key_complete be true
# and set children to the option argument
self.children = { 'argument': key_val['argument'] }
# remove option from further tab completion as it has been filled in,
# unless it has the 'multiple' key set, which means it can be filled
                # in multiple times and will return a list of all values
if option_parsing and p == key_name and key_name in self.children:
# if multiple, then pass
if 'multiple' in self.children[key_name] and self.children[key_name]['multiple'] == True:
pass
else:
del self.children[key_name]
# otherwise we are handling a command without arguments
else:
# Rest arguments?
if 'rest_argument' in key_val:
self._scoop_rest_arguments = True
self.arg = []
self.children = key_val.get('children')
if self.exe is not None:
option_parsing = True
return i, option_parsing | def function[_examine_key, parameter[self, key_name, key_val, p, i, option_parsing]]:
constant[ Examine the current matching key
Extracts information, such as function to execute and command
options, from the current key (passed to function as 'key_name' and
'key_val').
]
if compare[constant[exec] in name[key_val]] begin[:]
name[self].exe assign[=] call[name[key_val]][constant[exec]]
if <ast.BoolOp object at 0x7da20c76c940> begin[:]
call[name[self].exe_options][name[key_name]] assign[=] constant[True]
if compare[constant[argument] in name[key_val]] begin[:]
if compare[call[name[len], parameter[name[self].inp_cmd]] greater[>] binary_operation[name[i] + constant[1]]] begin[:]
name[self].key assign[=] dictionary[[<ast.Constant object at 0x7da2044c00a0>], [<ast.Subscript object at 0x7da2044c24d0>]]
if compare[call[name[key_val]][constant[type]] equal[==] constant[option]] begin[:]
if <ast.BoolOp object at 0x7da2044c1120> begin[:]
if compare[name[key_name] <ast.NotIn object at 0x7da2590d7190> name[self].exe_options] begin[:]
call[name[self].exe_options][name[key_name]] assign[=] list[[]]
call[call[name[self].exe_options][name[key_name]].append, parameter[call[name[self].inp_cmd][binary_operation[name[i] + constant[1]]]]]
if compare[constant[validator] in call[name[key_val]][constant[argument]]] begin[:]
name[self].key_complete assign[=] call[call[call[name[key_val]][constant[argument]]][constant[validator]], parameter[call[name[self].inp_cmd][binary_operation[name[i] + constant[1]]]]]
if compare[constant[children] in name[key_val]] begin[:]
name[self].children assign[=] call[name[key_val]][constant[children]]
if <ast.BoolOp object at 0x7da18dc06440> begin[:]
call[call[name[key_val]][constant[exec_immediately]], parameter[call[name[self].inp_cmd][binary_operation[name[i] + constant[1]]], name[self].exe_options]]
name[self].exe_options assign[=] dictionary[[], []]
<ast.AugAssign object at 0x7da20c7c8fa0>
if <ast.BoolOp object at 0x7da20c7c9db0> begin[:]
if <ast.BoolOp object at 0x7da20c7c86a0> begin[:]
pass
return[tuple[[<ast.Name object at 0x7da20c7cac80>, <ast.Name object at 0x7da20c7cb040>]]] | keyword[def] identifier[_examine_key] ( identifier[self] , identifier[key_name] , identifier[key_val] , identifier[p] , identifier[i] , identifier[option_parsing] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[key_val] :
identifier[self] . identifier[exe] = identifier[key_val] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[key_val] keyword[and] identifier[key_val] [ literal[string] ]== literal[string] :
identifier[self] . identifier[exe_options] [ identifier[key_name] ]= keyword[True]
keyword[if] literal[string] keyword[in] identifier[key_val] :
keyword[if] identifier[len] ( identifier[self] . identifier[inp_cmd] )> identifier[i] + literal[int] :
identifier[self] . identifier[key] ={ literal[string] : identifier[key_val] [ literal[string] ]}
keyword[if] identifier[key_val] [ literal[string] ]== literal[string] :
keyword[if] literal[string] keyword[in] identifier[key_val] keyword[and] identifier[key_val] [ literal[string] ]== keyword[True] :
keyword[if] identifier[key_name] keyword[not] keyword[in] identifier[self] . identifier[exe_options] :
identifier[self] . identifier[exe_options] [ identifier[key_name] ]=[]
identifier[self] . identifier[exe_options] [ identifier[key_name] ]. identifier[append] ( identifier[self] . identifier[inp_cmd] [ identifier[i] + literal[int] ])
keyword[else] :
identifier[self] . identifier[exe_options] [ identifier[key_name] ]= identifier[self] . identifier[inp_cmd] [ identifier[i] + literal[int] ]
keyword[else] :
identifier[self] . identifier[arg] = identifier[self] . identifier[inp_cmd] [ identifier[i] + literal[int] ]
keyword[if] literal[string] keyword[in] identifier[key_val] [ literal[string] ]:
identifier[self] . identifier[key_complete] = identifier[key_val] [ literal[string] ][ literal[string] ]( identifier[self] . identifier[inp_cmd] [ identifier[i] + literal[int] ])
keyword[else] :
identifier[self] . identifier[key_complete] = keyword[True]
keyword[if] literal[string] keyword[in] identifier[key_val] :
identifier[self] . identifier[children] = identifier[key_val] [ literal[string] ]
keyword[elif] identifier[key_val] [ literal[string] ]== literal[string] :
identifier[self] . identifier[children] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[key_val] keyword[and] identifier[len] ( identifier[self] . identifier[inp_cmd] )> identifier[i] + literal[int] :
identifier[key_val] [ literal[string] ]( identifier[self] . identifier[inp_cmd] [ identifier[i] + literal[int] ], identifier[self] . identifier[exe_options] )
identifier[self] . identifier[exe_options] ={}
identifier[i] += literal[int]
keyword[else] :
identifier[self] . identifier[children] ={ literal[string] : identifier[key_val] [ literal[string] ]}
keyword[if] identifier[option_parsing] keyword[and] identifier[p] == identifier[key_name] keyword[and] identifier[key_name] keyword[in] identifier[self] . identifier[children] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[children] [ identifier[key_name] ] keyword[and] identifier[self] . identifier[children] [ identifier[key_name] ][ literal[string] ]== keyword[True] :
keyword[pass]
keyword[else] :
keyword[del] identifier[self] . identifier[children] [ identifier[key_name] ]
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[key_val] :
identifier[self] . identifier[_scoop_rest_arguments] = keyword[True]
identifier[self] . identifier[arg] =[]
identifier[self] . identifier[children] = identifier[key_val] . identifier[get] ( literal[string] )
keyword[if] identifier[self] . identifier[exe] keyword[is] keyword[not] keyword[None] :
identifier[option_parsing] = keyword[True]
keyword[return] identifier[i] , identifier[option_parsing] | def _examine_key(self, key_name, key_val, p, i, option_parsing):
""" Examine the current matching key
Extracts information, such as function to execute and command
options, from the current key (passed to function as 'key_name' and
'key_val').
"""
# if the element we reached has an executable registered, save it!
if 'exec' in key_val:
self.exe = key_val['exec'] # depends on [control=['if'], data=['key_val']]
# simple bool options, save value
if 'type' in key_val and key_val['type'] == 'bool':
self.exe_options[key_name] = True # depends on [control=['if'], data=[]]
    # Elements which take arguments need special attention
if 'argument' in key_val:
# is there an argument (the next element)?
if len(self.inp_cmd) > i + 1:
self.key = {'argument': key_val['argument']}
# there is - save it
if key_val['type'] == 'option':
# if argument is of type multiple, store result in a list
if 'multiple' in key_val and key_val['multiple'] == True:
if key_name not in self.exe_options:
self.exe_options[key_name] = [] # depends on [control=['if'], data=['key_name']]
self.exe_options[key_name].append(self.inp_cmd[i + 1]) # depends on [control=['if'], data=[]]
else:
self.exe_options[key_name] = self.inp_cmd[i + 1] # depends on [control=['if'], data=[]]
else:
self.arg = self.inp_cmd[i + 1]
# Validate the argument if possible
if 'validator' in key_val['argument']:
self.key_complete = key_val['argument']['validator'](self.inp_cmd[i + 1]) # depends on [control=['if'], data=[]]
else:
self.key_complete = True
# if there are sub parameters, add them
if 'children' in key_val:
self.children = key_val['children'] # depends on [control=['if'], data=['key_val']]
# If we reached a command without parameters (which
# should be the end of the command), unset the children
# dict.
elif key_val['type'] == 'command':
self.children = None # depends on [control=['if'], data=[]]
# if the command is finished (there is an element after the argument) and
# there is an exec_immediately-function, execute it now
if 'exec_immediately' in key_val and len(self.inp_cmd) > i + 2:
key_val['exec_immediately'](self.inp_cmd[i + 1], self.exe_options)
# clear exe_options as these were options for exec_immediately
self.exe_options = {} # depends on [control=['if'], data=[]]
i += 1 # depends on [control=['if'], data=[]]
else:
# if there is no next element, let key_complete be true
# and set children to the option argument
self.children = {'argument': key_val['argument']}
# remove option from further tab completion as it has been filled in,
# unless it has the 'multiple' key set, which means it can be filled
            # in multiple times and will return a list of all values
if option_parsing and p == key_name and (key_name in self.children):
# if multiple, then pass
if 'multiple' in self.children[key_name] and self.children[key_name]['multiple'] == True:
pass # depends on [control=['if'], data=[]]
else:
del self.children[key_name] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['key_val']]
else:
# otherwise we are handling a command without arguments
# Rest arguments?
if 'rest_argument' in key_val:
self._scoop_rest_arguments = True
self.arg = [] # depends on [control=['if'], data=[]]
self.children = key_val.get('children')
if self.exe is not None:
option_parsing = True # depends on [control=['if'], data=[]]
return (i, option_parsing) |
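_examine_key above walks a nested command tree whose nodes carry 'type', 'exec', 'argument', 'children', and optionally 'multiple'/'exec_immediately'/'rest_argument' keys. An illustrative node shape for such a tree (the names 'add', 'vrf', 'force' and the validator are made up; only the structural keys come from the code):

command_tree = {
    'add': {
        'type': 'command',
        'exec': lambda arg, opts: print('adding', arg, opts),
        'argument': {
            'validator': lambda s: len(s) > 0,   # optional per-argument check
        },
        'children': {
            'vrf':   {'type': 'option', 'argument': {}},   # option taking one value
            'force': {'type': 'bool'},                     # simple boolean flag
        },
    },
}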
def get_field_by_name(self, name):
"""
the field member matching name, or None if no such field is found
"""
for f in self.fields:
if f.get_name() == name:
return f
return None | def function[get_field_by_name, parameter[self, name]]:
    constant[
    Return the field member matching name, or None if no such field is found.
    ]
for taget[name[f]] in starred[name[self].fields] begin[:]
if compare[call[name[f].get_name, parameter[]] equal[==] name[name]] begin[:]
return[name[f]]
return[constant[None]] | keyword[def] identifier[get_field_by_name] ( identifier[self] , identifier[name] ):
literal[string]
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[fields] :
keyword[if] identifier[f] . identifier[get_name] ()== identifier[name] :
keyword[return] identifier[f]
keyword[return] keyword[None] | def get_field_by_name(self, name):
"""
the field member matching name, or None if no such field is found
"""
for f in self.fields:
if f.get_name() == name:
return f # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
return None |
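get_field_by_name above is a linear scan; when the same lookup happens often, building a name index once is a common alternative. A self-contained sketch with a made-up Field type:

class Field:
    def __init__(self, name):
        self._name = name
    def get_name(self):
        return self._name

fields = [Field('id'), Field('payload')]
index = {f.get_name(): f for f in fields}   # build once
print(index.get('payload'))                 # O(1); .get returns None when absent,
                                            # matching the method's contract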
def get_ptr(data, offset=None, ptr_type=ctypes.c_void_p):
"""Returns a void pointer to the data"""
ptr = ctypes.cast(ctypes.pointer(data), ctypes.c_void_p)
if offset:
ptr = ctypes.c_void_p(ptr.value + offset)
if ptr_type != ctypes.c_void_p:
ptr = ctypes.cast(ptr, ptr_type)
return ptr | def function[get_ptr, parameter[data, offset, ptr_type]]:
constant[Returns a void pointer to the data]
variable[ptr] assign[=] call[name[ctypes].cast, parameter[call[name[ctypes].pointer, parameter[name[data]]], name[ctypes].c_void_p]]
if name[offset] begin[:]
variable[ptr] assign[=] call[name[ctypes].c_void_p, parameter[binary_operation[name[ptr].value + name[offset]]]]
if compare[name[ptr_type] not_equal[!=] name[ctypes].c_void_p] begin[:]
variable[ptr] assign[=] call[name[ctypes].cast, parameter[name[ptr], name[ptr_type]]]
return[name[ptr]] | keyword[def] identifier[get_ptr] ( identifier[data] , identifier[offset] = keyword[None] , identifier[ptr_type] = identifier[ctypes] . identifier[c_void_p] ):
literal[string]
identifier[ptr] = identifier[ctypes] . identifier[cast] ( identifier[ctypes] . identifier[pointer] ( identifier[data] ), identifier[ctypes] . identifier[c_void_p] )
keyword[if] identifier[offset] :
identifier[ptr] = identifier[ctypes] . identifier[c_void_p] ( identifier[ptr] . identifier[value] + identifier[offset] )
keyword[if] identifier[ptr_type] != identifier[ctypes] . identifier[c_void_p] :
identifier[ptr] = identifier[ctypes] . identifier[cast] ( identifier[ptr] , identifier[ptr_type] )
keyword[return] identifier[ptr] | def get_ptr(data, offset=None, ptr_type=ctypes.c_void_p):
"""Returns a void pointer to the data"""
ptr = ctypes.cast(ctypes.pointer(data), ctypes.c_void_p)
if offset:
ptr = ctypes.c_void_p(ptr.value + offset) # depends on [control=['if'], data=[]]
if ptr_type != ctypes.c_void_p:
ptr = ctypes.cast(ptr, ptr_type) # depends on [control=['if'], data=['ptr_type']]
return ptr |
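A runnable sketch for the get_ptr row, assuming the function is in scope; the array below is only an example ctypes object:

import ctypes

buf = (ctypes.c_ubyte * 4)(1, 2, 3, 4)
p = get_ptr(buf)                          # c_void_p to the start of the array
p2 = get_ptr(buf, offset=2,
             ptr_type=ctypes.POINTER(ctypes.c_ubyte))
print(p2.contents.value)                  # 3: the byte two positions in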
def is_full(cls, pid):
"""Return a bool indicating if the specified pool is full
:param str pid: The pool id
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].is_full | def function[is_full, parameter[cls, pid]]:
constant[Return a bool indicating if the specified pool is full
:param str pid: The pool id
:rtype: bool
]
with name[cls]._lock begin[:]
call[name[cls]._ensure_pool_exists, parameter[name[pid]]]
return[call[name[cls]._pools][name[pid]].is_full] | keyword[def] identifier[is_full] ( identifier[cls] , identifier[pid] ):
literal[string]
keyword[with] identifier[cls] . identifier[_lock] :
identifier[cls] . identifier[_ensure_pool_exists] ( identifier[pid] )
keyword[return] identifier[cls] . identifier[_pools] [ identifier[pid] ]. identifier[is_full] | def is_full(cls, pid):
"""Return a bool indicating if the specified pool is full
:param str pid: The pool id
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].is_full # depends on [control=['with'], data=[]] |
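is_full above reads class-level state (cls._lock, cls._pools). A minimal sketch of that registry pattern, with a made-up Pool type and manager class:

import threading

class Pool:
    def __init__(self, max_size):
        self.max_size = max_size
        self.members = set()
    @property
    def is_full(self):
        return len(self.members) >= self.max_size

class PoolManager:
    _lock = threading.Lock()
    _pools = {}
    @classmethod
    def _ensure_pool_exists(cls, pid):
        if pid not in cls._pools:
            raise KeyError('no pool with id %r' % pid)
    @classmethod
    def is_full(cls, pid):
        with cls._lock:
            cls._ensure_pool_exists(pid)
            return cls._pools[pid].is_full

PoolManager._pools['p1'] = Pool(max_size=0)
print(PoolManager.is_full('p1'))   # True: an empty pool of capacity 0 is full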
def select_extended_word(self, continuation_chars=('.',)):
"""
    Performs extended word selection. Extended selection consists of
    selecting the word under the cursor and any neighbouring words that are
    linked by one of the ``continuation_chars``.
:param continuation_chars: the list of characters that may extend a
word.
"""
cursor = self._editor.textCursor()
original_pos = cursor.position()
start_pos = None
end_pos = None
# go left
stop = False
seps = self._editor.word_separators + [' ']
while not stop:
cursor.clearSelection()
cursor.movePosition(cursor.Left, cursor.KeepAnchor)
char = cursor.selectedText()
if cursor.atBlockStart():
stop = True
start_pos = cursor.position()
elif char in seps and char not in continuation_chars:
stop = True
start_pos = cursor.position() + 1
# go right
cursor.setPosition(original_pos)
stop = False
while not stop:
cursor.clearSelection()
cursor.movePosition(cursor.Right, cursor.KeepAnchor)
char = cursor.selectedText()
if cursor.atBlockEnd():
stop = True
end_pos = cursor.position()
if char in seps:
end_pos -= 1
elif char in seps and char not in continuation_chars:
stop = True
end_pos = cursor.position() - 1
    if start_pos is not None and end_pos is not None:
cursor.setPosition(start_pos)
cursor.movePosition(cursor.Right, cursor.KeepAnchor,
end_pos - start_pos)
self._editor.setTextCursor(cursor) | def function[select_extended_word, parameter[self, continuation_chars]]:
constant[
    Performs extended word selection. Extended selection consists of
    selecting the word under the cursor and any neighbouring words that are
    linked by one of the ``continuation_chars``.
:param continuation_chars: the list of characters that may extend a
word.
]
variable[cursor] assign[=] call[name[self]._editor.textCursor, parameter[]]
variable[original_pos] assign[=] call[name[cursor].position, parameter[]]
variable[start_pos] assign[=] constant[None]
variable[end_pos] assign[=] constant[None]
variable[stop] assign[=] constant[False]
variable[seps] assign[=] binary_operation[name[self]._editor.word_separators + list[[<ast.Constant object at 0x7da2054a4670>]]]
while <ast.UnaryOp object at 0x7da2054a4a60> begin[:]
call[name[cursor].clearSelection, parameter[]]
call[name[cursor].movePosition, parameter[name[cursor].Left, name[cursor].KeepAnchor]]
variable[char] assign[=] call[name[cursor].selectedText, parameter[]]
if call[name[cursor].atBlockStart, parameter[]] begin[:]
variable[stop] assign[=] constant[True]
variable[start_pos] assign[=] call[name[cursor].position, parameter[]]
call[name[cursor].setPosition, parameter[name[original_pos]]]
variable[stop] assign[=] constant[False]
while <ast.UnaryOp object at 0x7da2054a7fa0> begin[:]
call[name[cursor].clearSelection, parameter[]]
call[name[cursor].movePosition, parameter[name[cursor].Right, name[cursor].KeepAnchor]]
variable[char] assign[=] call[name[cursor].selectedText, parameter[]]
if call[name[cursor].atBlockEnd, parameter[]] begin[:]
variable[stop] assign[=] constant[True]
variable[end_pos] assign[=] call[name[cursor].position, parameter[]]
if compare[name[char] in name[seps]] begin[:]
<ast.AugAssign object at 0x7da2054a6050>
if <ast.BoolOp object at 0x7da1b2041ab0> begin[:]
call[name[cursor].setPosition, parameter[name[start_pos]]]
call[name[cursor].movePosition, parameter[name[cursor].Right, name[cursor].KeepAnchor, binary_operation[name[end_pos] - name[start_pos]]]]
call[name[self]._editor.setTextCursor, parameter[name[cursor]]] | keyword[def] identifier[select_extended_word] ( identifier[self] , identifier[continuation_chars] =( literal[string] ,)):
literal[string]
identifier[cursor] = identifier[self] . identifier[_editor] . identifier[textCursor] ()
identifier[original_pos] = identifier[cursor] . identifier[position] ()
identifier[start_pos] = keyword[None]
identifier[end_pos] = keyword[None]
identifier[stop] = keyword[False]
identifier[seps] = identifier[self] . identifier[_editor] . identifier[word_separators] +[ literal[string] ]
keyword[while] keyword[not] identifier[stop] :
identifier[cursor] . identifier[clearSelection] ()
identifier[cursor] . identifier[movePosition] ( identifier[cursor] . identifier[Left] , identifier[cursor] . identifier[KeepAnchor] )
identifier[char] = identifier[cursor] . identifier[selectedText] ()
keyword[if] identifier[cursor] . identifier[atBlockStart] ():
identifier[stop] = keyword[True]
identifier[start_pos] = identifier[cursor] . identifier[position] ()
keyword[elif] identifier[char] keyword[in] identifier[seps] keyword[and] identifier[char] keyword[not] keyword[in] identifier[continuation_chars] :
identifier[stop] = keyword[True]
identifier[start_pos] = identifier[cursor] . identifier[position] ()+ literal[int]
identifier[cursor] . identifier[setPosition] ( identifier[original_pos] )
identifier[stop] = keyword[False]
keyword[while] keyword[not] identifier[stop] :
identifier[cursor] . identifier[clearSelection] ()
identifier[cursor] . identifier[movePosition] ( identifier[cursor] . identifier[Right] , identifier[cursor] . identifier[KeepAnchor] )
identifier[char] = identifier[cursor] . identifier[selectedText] ()
keyword[if] identifier[cursor] . identifier[atBlockEnd] ():
identifier[stop] = keyword[True]
identifier[end_pos] = identifier[cursor] . identifier[position] ()
keyword[if] identifier[char] keyword[in] identifier[seps] :
identifier[end_pos] -= literal[int]
keyword[elif] identifier[char] keyword[in] identifier[seps] keyword[and] identifier[char] keyword[not] keyword[in] identifier[continuation_chars] :
identifier[stop] = keyword[True]
identifier[end_pos] = identifier[cursor] . identifier[position] ()- literal[int]
keyword[if] identifier[start_pos] keyword[and] identifier[end_pos] :
identifier[cursor] . identifier[setPosition] ( identifier[start_pos] )
identifier[cursor] . identifier[movePosition] ( identifier[cursor] . identifier[Right] , identifier[cursor] . identifier[KeepAnchor] ,
identifier[end_pos] - identifier[start_pos] )
identifier[self] . identifier[_editor] . identifier[setTextCursor] ( identifier[cursor] ) | def select_extended_word(self, continuation_chars=('.',)):
"""
Performs extended word selection. Extended selection consists in
selecting the word under cursor and any other words that are linked
by a ``continuation_chars``.
:param continuation_chars: the list of characters that may extend a
word.
"""
cursor = self._editor.textCursor()
original_pos = cursor.position()
start_pos = None
end_pos = None
# go left
stop = False
seps = self._editor.word_separators + [' ']
while not stop:
cursor.clearSelection()
cursor.movePosition(cursor.Left, cursor.KeepAnchor)
char = cursor.selectedText()
if cursor.atBlockStart():
stop = True
start_pos = cursor.position() # depends on [control=['if'], data=[]]
elif char in seps and char not in continuation_chars:
stop = True
start_pos = cursor.position() + 1 # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# go right
cursor.setPosition(original_pos)
stop = False
while not stop:
cursor.clearSelection()
cursor.movePosition(cursor.Right, cursor.KeepAnchor)
char = cursor.selectedText()
if cursor.atBlockEnd():
stop = True
end_pos = cursor.position()
if char in seps:
end_pos -= 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif char in seps and char not in continuation_chars:
stop = True
end_pos = cursor.position() - 1 # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
if start_pos and end_pos:
cursor.setPosition(start_pos)
cursor.movePosition(cursor.Right, cursor.KeepAnchor, end_pos - start_pos)
self._editor.setTextCursor(cursor) # depends on [control=['if'], data=[]] |
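A Qt-free sketch of the same left/right scan makes the selection rule easy to test: walk outward from the cursor offset until hitting a separator that is not a continuation character. The separator set and sample text below are illustrative assumptions, not taken from the editor class above.

def extended_word_at(text, pos, continuation_chars=('.',), seps=' \t()[]{},:.'):
    # Walk left while the previous char is a word char or a continuation char.
    start = pos
    while start > 0 and (text[start - 1] not in seps
                         or text[start - 1] in continuation_chars):
        start -= 1
    # Walk right under the same rule.
    end = pos
    while end < len(text) and (text[end] not in seps
                               or text[end] in continuation_chars):
        end += 1
    return text[start:end]

print(extended_word_at("editor.textCursor().position()", 10))  # -> 'editor.textCursor'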
def post(self, request, *args, **kwargs):
""" Validates subscription data before creating Outbound message
"""
# Look up subscriber
subscription_id = kwargs["subscription_id"]
if Subscription.objects.filter(id=subscription_id).exists():
status = 202
accepted = {"accepted": True}
store_resend_request.apply_async(args=[subscription_id])
else:
status = 400
accepted = {
"accepted": False,
"reason": "Cannot find subscription with ID {}".format(subscription_id),
}
return Response(accepted, status=status) | def function[post, parameter[self, request]]:
    constant[ Validates subscription data before creating an Outbound message
]
variable[subscription_id] assign[=] call[name[kwargs]][constant[subscription_id]]
if call[call[name[Subscription].objects.filter, parameter[]].exists, parameter[]] begin[:]
variable[status] assign[=] constant[202]
variable[accepted] assign[=] dictionary[[<ast.Constant object at 0x7da18dc983d0>], [<ast.Constant object at 0x7da18dc9af20>]]
call[name[store_resend_request].apply_async, parameter[]]
return[call[name[Response], parameter[name[accepted]]]] | keyword[def] identifier[post] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[subscription_id] = identifier[kwargs] [ literal[string] ]
keyword[if] identifier[Subscription] . identifier[objects] . identifier[filter] ( identifier[id] = identifier[subscription_id] ). identifier[exists] ():
identifier[status] = literal[int]
identifier[accepted] ={ literal[string] : keyword[True] }
identifier[store_resend_request] . identifier[apply_async] ( identifier[args] =[ identifier[subscription_id] ])
keyword[else] :
identifier[status] = literal[int]
identifier[accepted] ={
literal[string] : keyword[False] ,
literal[string] : literal[string] . identifier[format] ( identifier[subscription_id] ),
}
keyword[return] identifier[Response] ( identifier[accepted] , identifier[status] = identifier[status] ) | def post(self, request, *args, **kwargs):
""" Validates subscription data before creating Outbound message
"""
# Look up subscriber
subscription_id = kwargs['subscription_id']
if Subscription.objects.filter(id=subscription_id).exists():
status = 202
accepted = {'accepted': True}
store_resend_request.apply_async(args=[subscription_id]) # depends on [control=['if'], data=[]]
else:
status = 400
accepted = {'accepted': False, 'reason': 'Cannot find subscription with ID {}'.format(subscription_id)}
return Response(accepted, status=status) |
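The accept/reject flow above can be exercised without Django REST Framework by swapping the ORM lookup and the Celery task for plain in-memory stand-ins (every name in this sketch is invented):

KNOWN_SUBSCRIPTIONS = {"sub-1", "sub-2"}   # stand-in for Subscription.objects
RESEND_QUEUE = []                          # stand-in for the async task queue

def resend(subscription_id):
    if subscription_id in KNOWN_SUBSCRIPTIONS:
        RESEND_QUEUE.append(subscription_id)   # store_resend_request.apply_async(...)
        return 202, {"accepted": True}
    return 400, {"accepted": False,
                 "reason": "Cannot find subscription with ID {}".format(subscription_id)}

print(resend("sub-1"))   # (202, {'accepted': True})
print(resend("sub-9"))   # (400, {'accepted': False, 'reason': '...'})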
def remove_duplicates(apps, schema_editor):
"""
Remove any duplicates from the entity relationship table
:param apps:
:param schema_editor:
:return:
"""
# Get the model
EntityRelationship = apps.get_model('entity', 'EntityRelationship')
# Find the duplicates
duplicates = EntityRelationship.objects.all().order_by(
'sub_entity_id',
'super_entity_id'
).values(
'sub_entity_id',
'super_entity_id'
).annotate(
Count('sub_entity_id'),
Count('super_entity_id'),
max_id=Max('id')
).filter(
super_entity_id__count__gt=1
)
# Loop over the duplicates and delete
for duplicate in duplicates:
EntityRelationship.objects.filter(
sub_entity_id=duplicate['sub_entity_id'],
super_entity_id=duplicate['super_entity_id']
).exclude(
id=duplicate['max_id']
).delete() | def function[remove_duplicates, parameter[apps, schema_editor]]:
constant[
Remove any duplicates from the entity relationship table
:param apps:
:param schema_editor:
:return:
]
variable[EntityRelationship] assign[=] call[name[apps].get_model, parameter[constant[entity], constant[EntityRelationship]]]
variable[duplicates] assign[=] call[call[call[call[call[name[EntityRelationship].objects.all, parameter[]].order_by, parameter[constant[sub_entity_id], constant[super_entity_id]]].values, parameter[constant[sub_entity_id], constant[super_entity_id]]].annotate, parameter[call[name[Count], parameter[constant[sub_entity_id]]], call[name[Count], parameter[constant[super_entity_id]]]]].filter, parameter[]]
for taget[name[duplicate]] in starred[name[duplicates]] begin[:]
call[call[call[name[EntityRelationship].objects.filter, parameter[]].exclude, parameter[]].delete, parameter[]] | keyword[def] identifier[remove_duplicates] ( identifier[apps] , identifier[schema_editor] ):
literal[string]
identifier[EntityRelationship] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
identifier[duplicates] = identifier[EntityRelationship] . identifier[objects] . identifier[all] (). identifier[order_by] (
literal[string] ,
literal[string]
). identifier[values] (
literal[string] ,
literal[string]
). identifier[annotate] (
identifier[Count] ( literal[string] ),
identifier[Count] ( literal[string] ),
identifier[max_id] = identifier[Max] ( literal[string] )
). identifier[filter] (
identifier[super_entity_id__count__gt] = literal[int]
)
keyword[for] identifier[duplicate] keyword[in] identifier[duplicates] :
identifier[EntityRelationship] . identifier[objects] . identifier[filter] (
identifier[sub_entity_id] = identifier[duplicate] [ literal[string] ],
identifier[super_entity_id] = identifier[duplicate] [ literal[string] ]
). identifier[exclude] (
identifier[id] = identifier[duplicate] [ literal[string] ]
). identifier[delete] () | def remove_duplicates(apps, schema_editor):
"""
Remove any duplicates from the entity relationship table
:param apps:
:param schema_editor:
:return:
"""
# Get the model
EntityRelationship = apps.get_model('entity', 'EntityRelationship')
# Find the duplicates
duplicates = EntityRelationship.objects.all().order_by('sub_entity_id', 'super_entity_id').values('sub_entity_id', 'super_entity_id').annotate(Count('sub_entity_id'), Count('super_entity_id'), max_id=Max('id')).filter(super_entity_id__count__gt=1)
# Loop over the duplicates and delete
for duplicate in duplicates:
EntityRelationship.objects.filter(sub_entity_id=duplicate['sub_entity_id'], super_entity_id=duplicate['super_entity_id']).exclude(id=duplicate['max_id']).delete() # depends on [control=['for'], data=['duplicate']] |
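The ORM query boils down to: group rows by the (sub, super) pair, then keep only the row with the highest id in each group. A plain-Python version of the same rule, with made-up rows:

from collections import defaultdict

rows = [
    {"id": 1, "sub_entity_id": 10, "super_entity_id": 20},
    {"id": 2, "sub_entity_id": 10, "super_entity_id": 20},  # duplicate pair
    {"id": 3, "sub_entity_id": 11, "super_entity_id": 21},
]

# Group by the (sub, super) pair, like the .values(...).annotate(...) query.
groups = defaultdict(list)
for row in rows:
    groups[(row["sub_entity_id"], row["super_entity_id"])].append(row)

# In every group, keep only the row with the max id; the rest would be deleted.
survivors = [max(group, key=lambda r: r["id"]) for group in groups.values()]
print(sorted(r["id"] for r in survivors))  # [2, 3]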
def register_device(ctx, device, model, nickname, client_type):
"""Registers a device instance under an existing device model.
Device instance fields must start with a letter or number. The device ID
can only contain letters, numbers, and the following symbols: period (.),
hyphen (-), underscore (_), and plus (+). The device nickname can only
contain numbers, letters, and the space ( ) symbol.
"""
session, api_url, project_id = build_client_from_context(ctx)
device_base_url = '/'.join([api_url, 'devices'])
device_url = '/'.join([device_base_url, device])
payload = {
'id': device,
'model_id': model,
}
if client_type:
payload['client_type'] = 'SDK_' + client_type
if nickname:
payload['nickname'] = nickname
logging.debug(json.dumps(payload))
r = session.get(device_url)
if r.status_code == 200:
click.echo('Updating existing device: %s' % device)
session.delete(device_url)
r = session.post(device_base_url, data=json.dumps(payload))
elif r.status_code in (400, 403, 404):
click.echo('Creating new device')
r = session.post(device_base_url, data=json.dumps(payload))
else:
raise failed_request_exception('Failed to check existing device', r)
if r.status_code != 200:
raise failed_request_exception('Failed to register device', r)
click.echo('Device instance %s successfully registered' % device)
logging.debug(r.text) | def function[register_device, parameter[ctx, device, model, nickname, client_type]]:
constant[Registers a device instance under an existing device model.
Device instance fields must start with a letter or number. The device ID
can only contain letters, numbers, and the following symbols: period (.),
hyphen (-), underscore (_), and plus (+). The device nickname can only
contain numbers, letters, and the space ( ) symbol.
]
<ast.Tuple object at 0x7da2041d85e0> assign[=] call[name[build_client_from_context], parameter[name[ctx]]]
variable[device_base_url] assign[=] call[constant[/].join, parameter[list[[<ast.Name object at 0x7da18bc73be0>, <ast.Constant object at 0x7da18bc71660>]]]]
variable[device_url] assign[=] call[constant[/].join, parameter[list[[<ast.Name object at 0x7da18bc70be0>, <ast.Name object at 0x7da18bc72e60>]]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da18bc73d30>, <ast.Constant object at 0x7da18bc73e50>], [<ast.Name object at 0x7da18bc72230>, <ast.Name object at 0x7da18bc70730>]]
if name[client_type] begin[:]
call[name[payload]][constant[client_type]] assign[=] binary_operation[constant[SDK_] + name[client_type]]
if name[nickname] begin[:]
call[name[payload]][constant[nickname]] assign[=] name[nickname]
call[name[logging].debug, parameter[call[name[json].dumps, parameter[name[payload]]]]]
variable[r] assign[=] call[name[session].get, parameter[name[device_url]]]
if compare[name[r].status_code equal[==] constant[200]] begin[:]
call[name[click].echo, parameter[binary_operation[constant[Updating existing device: %s] <ast.Mod object at 0x7da2590d6920> name[device]]]]
call[name[session].delete, parameter[name[device_url]]]
variable[r] assign[=] call[name[session].post, parameter[name[device_base_url]]]
if compare[name[r].status_code not_equal[!=] constant[200]] begin[:]
<ast.Raise object at 0x7da18bc70640>
call[name[click].echo, parameter[binary_operation[constant[Device instance %s successfully registered] <ast.Mod object at 0x7da2590d6920> name[device]]]]
call[name[logging].debug, parameter[name[r].text]] | keyword[def] identifier[register_device] ( identifier[ctx] , identifier[device] , identifier[model] , identifier[nickname] , identifier[client_type] ):
literal[string]
identifier[session] , identifier[api_url] , identifier[project_id] = identifier[build_client_from_context] ( identifier[ctx] )
identifier[device_base_url] = literal[string] . identifier[join] ([ identifier[api_url] , literal[string] ])
identifier[device_url] = literal[string] . identifier[join] ([ identifier[device_base_url] , identifier[device] ])
identifier[payload] ={
literal[string] : identifier[device] ,
literal[string] : identifier[model] ,
}
keyword[if] identifier[client_type] :
identifier[payload] [ literal[string] ]= literal[string] + identifier[client_type]
keyword[if] identifier[nickname] :
identifier[payload] [ literal[string] ]= identifier[nickname]
identifier[logging] . identifier[debug] ( identifier[json] . identifier[dumps] ( identifier[payload] ))
identifier[r] = identifier[session] . identifier[get] ( identifier[device_url] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
identifier[click] . identifier[echo] ( literal[string] % identifier[device] )
identifier[session] . identifier[delete] ( identifier[device_url] )
identifier[r] = identifier[session] . identifier[post] ( identifier[device_base_url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[payload] ))
keyword[elif] identifier[r] . identifier[status_code] keyword[in] ( literal[int] , literal[int] , literal[int] ):
identifier[click] . identifier[echo] ( literal[string] )
identifier[r] = identifier[session] . identifier[post] ( identifier[device_base_url] , identifier[data] = identifier[json] . identifier[dumps] ( identifier[payload] ))
keyword[else] :
keyword[raise] identifier[failed_request_exception] ( literal[string] , identifier[r] )
keyword[if] identifier[r] . identifier[status_code] != literal[int] :
keyword[raise] identifier[failed_request_exception] ( literal[string] , identifier[r] )
identifier[click] . identifier[echo] ( literal[string] % identifier[device] )
identifier[logging] . identifier[debug] ( identifier[r] . identifier[text] ) | def register_device(ctx, device, model, nickname, client_type):
"""Registers a device instance under an existing device model.
Device instance fields must start with a letter or number. The device ID
can only contain letters, numbers, and the following symbols: period (.),
hyphen (-), underscore (_), and plus (+). The device nickname can only
contain numbers, letters, and the space ( ) symbol.
"""
(session, api_url, project_id) = build_client_from_context(ctx)
device_base_url = '/'.join([api_url, 'devices'])
device_url = '/'.join([device_base_url, device])
payload = {'id': device, 'model_id': model}
if client_type:
payload['client_type'] = 'SDK_' + client_type # depends on [control=['if'], data=[]]
if nickname:
payload['nickname'] = nickname # depends on [control=['if'], data=[]]
logging.debug(json.dumps(payload))
r = session.get(device_url)
if r.status_code == 200:
click.echo('Updating existing device: %s' % device)
session.delete(device_url)
r = session.post(device_base_url, data=json.dumps(payload)) # depends on [control=['if'], data=[]]
elif r.status_code in (400, 403, 404):
click.echo('Creating new device')
r = session.post(device_base_url, data=json.dumps(payload)) # depends on [control=['if'], data=[]]
else:
raise failed_request_exception('Failed to check existing device', r)
if r.status_code != 200:
raise failed_request_exception('Failed to register device', r) # depends on [control=['if'], data=[]]
click.echo('Device instance %s successfully registered' % device)
logging.debug(r.text) |
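The payload-building half of register_device is pure data manipulation and can be verified in isolation; the field names mirror the function above, while the sample values are invented:

import json

def build_payload(device, model, nickname=None, client_type=None):
    payload = {"id": device, "model_id": model}
    if client_type:
        payload["client_type"] = "SDK_" + client_type   # same prefixing rule as above
    if nickname:
        payload["nickname"] = nickname
    return payload

print(json.dumps(build_payload("my-dev.1", "my-model",
                               nickname="kitchen light", client_type="SERVICE")))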
def _find_devices_mac(self):
"""Find devices on Mac."""
self.keyboards.append(Keyboard(self))
self.mice.append(MightyMouse(self))
self.mice.append(Mouse(self)) | def function[_find_devices_mac, parameter[self]]:
constant[Find devices on Mac.]
call[name[self].keyboards.append, parameter[call[name[Keyboard], parameter[name[self]]]]]
call[name[self].mice.append, parameter[call[name[MightyMouse], parameter[name[self]]]]]
call[name[self].mice.append, parameter[call[name[Mouse], parameter[name[self]]]]] | keyword[def] identifier[_find_devices_mac] ( identifier[self] ):
literal[string]
identifier[self] . identifier[keyboards] . identifier[append] ( identifier[Keyboard] ( identifier[self] ))
identifier[self] . identifier[mice] . identifier[append] ( identifier[MightyMouse] ( identifier[self] ))
identifier[self] . identifier[mice] . identifier[append] ( identifier[Mouse] ( identifier[self] )) | def _find_devices_mac(self):
"""Find devices on Mac."""
self.keyboards.append(Keyboard(self))
self.mice.append(MightyMouse(self))
self.mice.append(Mouse(self)) |
def loglevel(level):
"""
Convert any representation of `level` to an int appropriately.
:type level: int or str
:rtype: int
>>> loglevel('DEBUG') == logging.DEBUG
True
>>> loglevel(10)
10
>>> loglevel(None)
Traceback (most recent call last):
...
ValueError: None is not a proper log level.
"""
if isinstance(level, str):
level = getattr(logging, level.upper())
elif isinstance(level, int):
pass
else:
raise ValueError('{0!r} is not a proper log level.'.format(level))
return level | def function[loglevel, parameter[level]]:
constant[
Convert any representation of `level` to an int appropriately.
:type level: int or str
:rtype: int
>>> loglevel('DEBUG') == logging.DEBUG
True
>>> loglevel(10)
10
>>> loglevel(None)
Traceback (most recent call last):
...
ValueError: None is not a proper log level.
]
if call[name[isinstance], parameter[name[level], name[str]]] begin[:]
variable[level] assign[=] call[name[getattr], parameter[name[logging], call[name[level].upper, parameter[]]]]
return[name[level]] | keyword[def] identifier[loglevel] ( identifier[level] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[level] , identifier[str] ):
identifier[level] = identifier[getattr] ( identifier[logging] , identifier[level] . identifier[upper] ())
keyword[elif] identifier[isinstance] ( identifier[level] , identifier[int] ):
keyword[pass]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[level] ))
keyword[return] identifier[level] | def loglevel(level):
"""
Convert any representation of `level` to an int appropriately.
:type level: int or str
:rtype: int
>>> loglevel('DEBUG') == logging.DEBUG
True
>>> loglevel(10)
10
>>> loglevel(None)
Traceback (most recent call last):
...
ValueError: None is not a proper log level.
"""
if isinstance(level, str):
level = getattr(logging, level.upper()) # depends on [control=['if'], data=[]]
elif isinstance(level, int):
pass # depends on [control=['if'], data=[]]
else:
raise ValueError('{0!r} is not a proper log level.'.format(level))
return level |
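A standalone copy shows how the converter slots into logging configuration (the function body is repeated here so the snippet runs on its own):

import logging

def loglevel(level):
    if isinstance(level, str):
        return getattr(logging, level.upper())
    if isinstance(level, int):
        return level
    raise ValueError('{0!r} is not a proper log level.'.format(level))

logging.basicConfig(level=loglevel('debug'))     # equivalent to level=10
assert loglevel('WARNING') == logging.WARNING == 30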
async def verify_worker_impls(chain):
"""Verify the task type (e.g. decision, build) of each link in the chain.
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on failure
"""
valid_worker_impls = get_valid_worker_impls()
for obj in chain.get_all_links_in_chain():
worker_impl = obj.worker_impl
log.info("Verifying {} {} as a {} task...".format(obj.name, obj.task_id, worker_impl))
# Run tests synchronously for now. We can parallelize if efficiency
# is more important than a single simple logfile.
await valid_worker_impls[worker_impl](chain, obj) | <ast.AsyncFunctionDef object at 0x7da204565ff0> | keyword[async] keyword[def] identifier[verify_worker_impls] ( identifier[chain] ):
literal[string]
identifier[valid_worker_impls] = identifier[get_valid_worker_impls] ()
keyword[for] identifier[obj] keyword[in] identifier[chain] . identifier[get_all_links_in_chain] ():
identifier[worker_impl] = identifier[obj] . identifier[worker_impl]
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[obj] . identifier[name] , identifier[obj] . identifier[task_id] , identifier[worker_impl] ))
keyword[await] identifier[valid_worker_impls] [ identifier[worker_impl] ]( identifier[chain] , identifier[obj] ) | async def verify_worker_impls(chain):
"""Verify the task type (e.g. decision, build) of each link in the chain.
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on failure
"""
valid_worker_impls = get_valid_worker_impls()
for obj in chain.get_all_links_in_chain():
worker_impl = obj.worker_impl
log.info('Verifying {} {} as a {} task...'.format(obj.name, obj.task_id, worker_impl))
# Run tests synchronously for now. We can parallelize if efficiency
# is more important than a single simple logfile.
await valid_worker_impls[worker_impl](chain, obj) # depends on [control=['for'], data=['obj']] |
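The verification loop is a dispatch table of async callables keyed by worker implementation; a self-contained sketch with fabricated verifiers and link objects:

import asyncio

async def verify_docker_worker(link):
    print("docker-worker checks for", link["task_id"])

async def verify_generic_worker(link):
    print("generic-worker checks for", link["task_id"])

VALID_WORKER_IMPLS = {
    "docker-worker": verify_docker_worker,
    "generic-worker": verify_generic_worker,
}

async def verify_all(links):
    for link in links:            # sequential on purpose, as in the original
        await VALID_WORKER_IMPLS[link["worker_impl"]](link)

asyncio.run(verify_all([
    {"task_id": "abc123", "worker_impl": "docker-worker"},
    {"task_id": "def456", "worker_impl": "generic-worker"},
]))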
def get_child_directories(path):
"""Return names of immediate child directories"""
if not _is_valid_directory(path):
raise exceptions.InvalidDirectory
entries = os.listdir(path)
directory_names = []
for entry in entries:
abs_entry_path = os.path.join(path, entry)
if _is_valid_directory(abs_entry_path):
directory_names.append(entry)
return directory_names | def function[get_child_directories, parameter[path]]:
constant[Return names of immediate child directories]
if <ast.UnaryOp object at 0x7da1b1643970> begin[:]
<ast.Raise object at 0x7da1b1641510>
variable[entries] assign[=] call[name[os].listdir, parameter[name[path]]]
variable[directory_names] assign[=] list[[]]
for taget[name[entry]] in starred[name[entries]] begin[:]
variable[abs_entry_path] assign[=] call[name[os].path.join, parameter[name[path], name[entry]]]
if call[name[_is_valid_directory], parameter[name[abs_entry_path]]] begin[:]
call[name[directory_names].append, parameter[name[entry]]]
return[name[directory_names]] | keyword[def] identifier[get_child_directories] ( identifier[path] ):
literal[string]
keyword[if] keyword[not] identifier[_is_valid_directory] ( identifier[path] ):
keyword[raise] identifier[exceptions] . identifier[InvalidDirectory]
identifier[entries] = identifier[os] . identifier[listdir] ( identifier[path] )
identifier[directory_names] =[]
keyword[for] identifier[entry] keyword[in] identifier[entries] :
identifier[abs_entry_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[entry] )
keyword[if] identifier[_is_valid_directory] ( identifier[abs_entry_path] ):
identifier[directory_names] . identifier[append] ( identifier[entry] )
keyword[return] identifier[directory_names] | def get_child_directories(path):
"""Return names of immediate child directories"""
if not _is_valid_directory(path):
raise exceptions.InvalidDirectory # depends on [control=['if'], data=[]]
entries = os.listdir(path)
directory_names = []
for entry in entries:
abs_entry_path = os.path.join(path, entry)
if _is_valid_directory(abs_entry_path):
directory_names.append(entry) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']]
return directory_names |
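The listdir-plus-validity filter is easy to see against a throwaway tree; here os.path.isdir stands in for the module's private _is_valid_directory helper:

import os
import tempfile

root = tempfile.mkdtemp()
os.mkdir(os.path.join(root, "child_a"))
os.mkdir(os.path.join(root, "child_b"))
open(os.path.join(root, "not_a_dir.txt"), "w").close()

children = [entry for entry in os.listdir(root)
            if os.path.isdir(os.path.join(root, entry))]
print(sorted(children))   # ['child_a', 'child_b']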
async def close(self, *args, _conn=None, **kwargs):
"""
Perform any resource clean up necessary to exit the program safely.
After closing, cmd execution is still possible but you will have to
close again before exiting.
:raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
"""
start = time.monotonic()
ret = await self._close(*args, _conn=_conn, **kwargs)
logger.debug("CLOSE (%.4f)s", time.monotonic() - start)
return ret | <ast.AsyncFunctionDef object at 0x7da18f58c760> | keyword[async] keyword[def] identifier[close] ( identifier[self] ,* identifier[args] , identifier[_conn] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[start] = identifier[time] . identifier[monotonic] ()
identifier[ret] = keyword[await] identifier[self] . identifier[_close] (* identifier[args] , identifier[_conn] = identifier[_conn] ,** identifier[kwargs] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[time] . identifier[monotonic] ()- identifier[start] )
keyword[return] identifier[ret] | async def close(self, *args, _conn=None, **kwargs):
"""
Perform any resource clean up necessary to exit the program safely.
After closing, cmd execution is still possible but you will have to
close again before exiting.
:raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout
"""
start = time.monotonic()
ret = await self._close(*args, _conn=_conn, **kwargs)
logger.debug('CLOSE (%.4f)s', time.monotonic() - start)
return ret |
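Stripped of the cache specifics, close() is a timing wrapper around an internal coroutine; the same shape with an invented _close:

import asyncio
import logging
import time

logging.basicConfig(level=logging.DEBUG)

async def _close():
    await asyncio.sleep(0.05)      # pretend to release connections
    return True

async def close():
    start = time.monotonic()
    ret = await _close()
    logging.debug("CLOSE (%.4f)s", time.monotonic() - start)
    return ret

print(asyncio.run(close()))        # True, after a DEBUG timing line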
def process_npdu(self, npdu):
"""encode NPDUs from the service access point and send them downstream."""
if _debug: DeviceToDeviceClientService._debug("process_npdu %r", npdu)
# broadcast messages go to everyone
if npdu.pduDestination.addrType == Address.localBroadcastAddr:
destList = self.connections.keys()
else:
conn = self.connections.get(npdu.pduDestination, None)
if not conn:
if _debug: DeviceToDeviceClientService._debug(" - not a connected client")
# start a connection attempt
conn = self.connect(npdu.pduDestination)
if not conn.connected:
# keep a reference to this npdu to send after the ack comes back
conn.pendingNPDU.append(npdu)
return
destList = [npdu.pduDestination]
if _debug: DeviceToDeviceClientService._debug(" - destList: %r", destList)
for dest in destList:
# make device-to-device APDU
xpdu = DeviceToDeviceAPDU(npdu)
xpdu.pduDestination = dest
# send it down to the multiplexer
self.service_request(xpdu) | def function[process_npdu, parameter[self, npdu]]:
constant[encode NPDUs from the service access point and send them downstream.]
if name[_debug] begin[:]
call[name[DeviceToDeviceClientService]._debug, parameter[constant[process_npdu %r], name[npdu]]]
if compare[name[npdu].pduDestination.addrType equal[==] name[Address].localBroadcastAddr] begin[:]
variable[destList] assign[=] call[name[self].connections.keys, parameter[]]
if name[_debug] begin[:]
call[name[DeviceToDeviceClientService]._debug, parameter[constant[ - destList: %r], name[destList]]]
for taget[name[dest]] in starred[name[destList]] begin[:]
variable[xpdu] assign[=] call[name[DeviceToDeviceAPDU], parameter[name[npdu]]]
name[xpdu].pduDestination assign[=] name[dest]
call[name[self].service_request, parameter[name[xpdu]]] | keyword[def] identifier[process_npdu] ( identifier[self] , identifier[npdu] ):
literal[string]
keyword[if] identifier[_debug] : identifier[DeviceToDeviceClientService] . identifier[_debug] ( literal[string] , identifier[npdu] )
keyword[if] identifier[npdu] . identifier[pduDestination] . identifier[addrType] == identifier[Address] . identifier[localBroadcastAddr] :
identifier[destList] = identifier[self] . identifier[connections] . identifier[keys] ()
keyword[else] :
identifier[conn] = identifier[self] . identifier[connections] . identifier[get] ( identifier[npdu] . identifier[pduDestination] , keyword[None] )
keyword[if] keyword[not] identifier[conn] :
keyword[if] identifier[_debug] : identifier[DeviceToDeviceClientService] . identifier[_debug] ( literal[string] )
identifier[conn] = identifier[self] . identifier[connect] ( identifier[npdu] . identifier[pduDestination] )
keyword[if] keyword[not] identifier[conn] . identifier[connected] :
identifier[conn] . identifier[pendingNPDU] . identifier[append] ( identifier[npdu] )
keyword[return]
identifier[destList] =[ identifier[npdu] . identifier[pduDestination] ]
keyword[if] identifier[_debug] : identifier[DeviceToDeviceClientService] . identifier[_debug] ( literal[string] , identifier[destList] )
keyword[for] identifier[dest] keyword[in] identifier[destList] :
identifier[xpdu] = identifier[DeviceToDeviceAPDU] ( identifier[npdu] )
identifier[xpdu] . identifier[pduDestination] = identifier[dest]
identifier[self] . identifier[service_request] ( identifier[xpdu] ) | def process_npdu(self, npdu):
"""encode NPDUs from the service access point and send them downstream."""
if _debug:
DeviceToDeviceClientService._debug('process_npdu %r', npdu) # depends on [control=['if'], data=[]]
# broadcast messages go to everyone
if npdu.pduDestination.addrType == Address.localBroadcastAddr:
destList = self.connections.keys() # depends on [control=['if'], data=[]]
else:
conn = self.connections.get(npdu.pduDestination, None)
if not conn:
if _debug:
DeviceToDeviceClientService._debug(' - not a connected client') # depends on [control=['if'], data=[]]
# start a connection attempt
conn = self.connect(npdu.pduDestination) # depends on [control=['if'], data=[]]
if not conn.connected:
# keep a reference to this npdu to send after the ack comes back
conn.pendingNPDU.append(npdu)
return # depends on [control=['if'], data=[]]
destList = [npdu.pduDestination]
if _debug:
DeviceToDeviceClientService._debug(' - destList: %r', destList) # depends on [control=['if'], data=[]]
for dest in destList:
# make device-to-device APDU
xpdu = DeviceToDeviceAPDU(npdu)
xpdu.pduDestination = dest
# send it down to the multiplexer
self.service_request(xpdu) # depends on [control=['for'], data=['dest']] |
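Ignoring the BACnet types, the routing decision reduces to: a broadcast fans out to every known connection, while a unicast lazily opens a connection if none exists yet. A dict-based sketch of just that decision (addresses and connection objects are invented):

connections = {"10.0.0.2": "conn-a", "10.0.0.3": "conn-b"}

def destinations_for(pdu_destination, broadcast=False):
    if broadcast:
        return list(connections)                  # everyone we are connected to
    if pdu_destination not in connections:        # lazily connect, as above
        connections[pdu_destination] = "conn-new"
    return [pdu_destination]

print(destinations_for(None, broadcast=True))     # ['10.0.0.2', '10.0.0.3']
print(destinations_for("10.0.0.9"))               # ['10.0.0.9']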
def with_global_options(method):
"""Apply the global options that we desire on every method within
tower-cli to the given click command.
"""
# Create global options for the Tower host, username, and password.
#
# These are runtime options that will override the configuration file
# settings.
method = click.option(
'-h', '--tower-host',
help='The location of the Ansible Tower host. '
'HTTPS is assumed as the protocol unless "http://" is explicitly '
'provided. This will take precedence over a host provided to '
'`tower config`, if any.',
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
method = click.option(
'-t', '--tower-oauth-token',
help='OAuth2 token to use to authenticate to Ansible Tower. '
'This will take precedence over a token provided to '
'`tower config`, if any.',
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
method = click.option(
'-u', '--tower-username',
help='Username to use to authenticate to Ansible Tower. '
'This will take precedence over a username provided to '
'`tower config`, if any.',
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
method = click.option(
'-p', '--tower-password',
help='Password to use to authenticate to Ansible Tower. '
'This will take precedence over a password provided to '
'`tower config`, if any. If value is ASK you will be '
'prompted for the password',
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
# Create a global verbose/debug option.
method = click.option(
'-f', '--format',
help='Output format. The "human" format is intended for humans '
'reading output on the CLI; the "json" and "yaml" formats '
'provide more data, and "id" echos the object id only.',
type=click.Choice(['human', 'json', 'yaml', 'id']),
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
method = click.option(
'-v', '--verbose',
default=None,
help='Show information about requests being made.',
is_flag=True,
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
method = click.option(
'--description-on',
default=None,
help='Show description in human-formatted output.',
is_flag=True,
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
# Create a global SSL warning option.
method = click.option(
'--insecure',
default=None,
help='Turn off insecure connection warnings. Set config verify_ssl '
'to make this permanent.',
is_flag=True,
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
# Create a custom certificate specification option.
method = click.option(
'--certificate',
default=None,
help='Path to a custom certificate file that will be used throughout'
' the command. Overwritten by --insecure flag if set.',
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
method = click.option(
'--use-token',
default=None,
help='Turn on Tower\'s token-based authentication. No longer supported '
'in Tower 3.3 and above.',
is_flag=True,
required=False, callback=_apply_runtime_setting,
is_eager=True,
expose_value=False
)(method)
# Manage the runtime settings context
method = runtime_context_manager(method)
# Okay, we're done adding options; return the method.
return method | def function[with_global_options, parameter[method]]:
constant[Apply the global options that we desire on every method within
tower-cli to the given click command.
]
variable[method] assign[=] call[call[name[click].option, parameter[constant[-h], constant[--tower-host]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[-t], constant[--tower-oauth-token]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[-u], constant[--tower-username]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[-p], constant[--tower-password]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[-f], constant[--format]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[-v], constant[--verbose]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[--description-on]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[--insecure]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[--certificate]]], parameter[name[method]]]
variable[method] assign[=] call[call[name[click].option, parameter[constant[--use-token]]], parameter[name[method]]]
variable[method] assign[=] call[name[runtime_context_manager], parameter[name[method]]]
return[name[method]] | keyword[def] identifier[with_global_options] ( identifier[method] ):
literal[string]
identifier[method] = identifier[click] . identifier[option] (
literal[string] , literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string]
literal[string] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] , literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] , literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] , literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string]
literal[string] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] , literal[string] ,
identifier[help] = literal[string]
literal[string]
literal[string] ,
identifier[type] = identifier[click] . identifier[Choice] ([ literal[string] , literal[string] , literal[string] , literal[string] ]),
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] , literal[string] ,
identifier[default] = keyword[None] ,
identifier[help] = literal[string] ,
identifier[is_flag] = keyword[True] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] ,
identifier[default] = keyword[None] ,
identifier[help] = literal[string] ,
identifier[is_flag] = keyword[True] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] ,
identifier[default] = keyword[None] ,
identifier[help] = literal[string]
literal[string] ,
identifier[is_flag] = keyword[True] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] ,
identifier[default] = keyword[None] ,
identifier[help] = literal[string]
literal[string] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[click] . identifier[option] (
literal[string] ,
identifier[default] = keyword[None] ,
identifier[help] = literal[string]
literal[string] ,
identifier[is_flag] = keyword[True] ,
identifier[required] = keyword[False] , identifier[callback] = identifier[_apply_runtime_setting] ,
identifier[is_eager] = keyword[True] ,
identifier[expose_value] = keyword[False]
)( identifier[method] )
identifier[method] = identifier[runtime_context_manager] ( identifier[method] )
keyword[return] identifier[method] | def with_global_options(method):
"""Apply the global options that we desire on every method within
tower-cli to the given click command.
"""
# Create global options for the Tower host, username, and password.
#
# These are runtime options that will override the configuration file
# settings.
method = click.option('-h', '--tower-host', help='The location of the Ansible Tower host. HTTPS is assumed as the protocol unless "http://" is explicitly provided. This will take precedence over a host provided to `tower config`, if any.', required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
method = click.option('-t', '--tower-oauth-token', help='OAuth2 token to use to authenticate to Ansible Tower. This will take precedence over a token provided to `tower config`, if any.', required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
method = click.option('-u', '--tower-username', help='Username to use to authenticate to Ansible Tower. This will take precedence over a username provided to `tower config`, if any.', required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
method = click.option('-p', '--tower-password', help='Password to use to authenticate to Ansible Tower. This will take precedence over a password provided to `tower config`, if any. If value is ASK you will be prompted for the password', required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
# Create a global verbose/debug option.
method = click.option('-f', '--format', help='Output format. The "human" format is intended for humans reading output on the CLI; the "json" and "yaml" formats provide more data, and "id" echos the object id only.', type=click.Choice(['human', 'json', 'yaml', 'id']), required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
method = click.option('-v', '--verbose', default=None, help='Show information about requests being made.', is_flag=True, required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
method = click.option('--description-on', default=None, help='Show description in human-formatted output.', is_flag=True, required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
# Create a global SSL warning option.
method = click.option('--insecure', default=None, help='Turn off insecure connection warnings. Set config verify_ssl to make this permanent.', is_flag=True, required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
# Create a custom certificate specification option.
method = click.option('--certificate', default=None, help='Path to a custom certificate file that will be used throughout the command. Overwritten by --insecure flag if set.', required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
method = click.option('--use-token', default=None, help="Turn on Tower's token-based authentication. No longer supported in Tower 3.3 and above.", is_flag=True, required=False, callback=_apply_runtime_setting, is_eager=True, expose_value=False)(method)
# Manage the runtime settings context
method = runtime_context_manager(method)
# Okay, we're done adding options; return the method.
return method |
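The whole function is repeated application of click.option, each call returning a decorator that is immediately applied. The same composition on a toy command, minus expose_value=False so the options land in the function signature (assumes click is installed):

import click

def with_toy_options(method):
    method = click.option('-v', '--verbose', is_flag=True,
                          help='Chatty output.')(method)
    method = click.option('-f', '--format', 'fmt', default='human',
                          type=click.Choice(['human', 'json']))(method)
    return method

@click.command()
@with_toy_options
def hello(verbose, fmt):
    click.echo('format=%s verbose=%s' % (fmt, verbose))

if __name__ == '__main__':
    hello()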
def safe_exit(output):
"""exit without breaking pipes."""
try:
sys.stdout.write(output)
sys.stdout.flush()
except IOError:
pass | def function[safe_exit, parameter[output]]:
constant[exit without breaking pipes.]
<ast.Try object at 0x7da1b27e33d0> | keyword[def] identifier[safe_exit] ( identifier[output] ):
literal[string]
keyword[try] :
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[output] )
identifier[sys] . identifier[stdout] . identifier[flush] ()
keyword[except] identifier[IOError] :
keyword[pass] | def safe_exit(output):
"""exit without breaking pipes."""
try:
sys.stdout.write(output)
sys.stdout.flush() # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]] |
async def create_ffmpeg_player(self, filepath):
"""Creates a streamer that plays from a file"""
self.current_download_elapsed = 0
self.streamer = self.vclient.create_ffmpeg_player(filepath, after=self.vafter_ts)
self.state = "ready"
await self.setup_streamer()
try:
# Read from the info json
info_filename = "{}.info.json".format(filepath)
with open(info_filename, 'r') as file:
info = json.load(file)
self.nowplayinglog.debug(info["title"])
self.is_live = False
if "duration" in info and info["duration"] is not None:
self.current_duration = info["duration"]
else:
self.current_duration = 0
if "uploader" in info:
self.nowplayingauthorlog.info(info["uploader"])
else:
self.nowplayingauthorlog.info("Unknown")
self.nowplayingsourcelog.info(api_music.parse_source(info))
play_state = "Streaming" if self.is_live else "Playing"
await self.set_topic("{} {}".format(play_state, info["title"]))
self.statuslog.debug(play_state)
except Exception as e:
logger.exception(e) | <ast.AsyncFunctionDef object at 0x7da1b1914e50> | keyword[async] keyword[def] identifier[create_ffmpeg_player] ( identifier[self] , identifier[filepath] ):
literal[string]
identifier[self] . identifier[current_download_elapsed] = literal[int]
identifier[self] . identifier[streamer] = identifier[self] . identifier[vclient] . identifier[create_ffmpeg_player] ( identifier[filepath] , identifier[after] = identifier[self] . identifier[vafter_ts] )
identifier[self] . identifier[state] = literal[string]
keyword[await] identifier[self] . identifier[setup_streamer] ()
keyword[try] :
identifier[info_filename] = literal[string] . identifier[format] ( identifier[filepath] )
keyword[with] identifier[open] ( identifier[info_filename] , literal[string] ) keyword[as] identifier[file] :
identifier[info] = identifier[json] . identifier[load] ( identifier[file] )
identifier[self] . identifier[nowplayinglog] . identifier[debug] ( identifier[info] [ literal[string] ])
identifier[self] . identifier[is_live] = keyword[False]
keyword[if] literal[string] keyword[in] identifier[info] keyword[and] identifier[info] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[current_duration] = identifier[info] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[current_duration] = literal[int]
keyword[if] literal[string] keyword[in] identifier[info] :
identifier[self] . identifier[nowplayingauthorlog] . identifier[info] ( identifier[info] [ literal[string] ])
keyword[else] :
identifier[self] . identifier[nowplayingauthorlog] . identifier[info] ( literal[string] )
identifier[self] . identifier[nowplayingsourcelog] . identifier[info] ( identifier[api_music] . identifier[parse_source] ( identifier[info] ))
identifier[play_state] = literal[string] keyword[if] identifier[self] . identifier[is_live] keyword[else] literal[string]
keyword[await] identifier[self] . identifier[set_topic] ( literal[string] . identifier[format] ( identifier[play_state] , identifier[info] [ literal[string] ]))
identifier[self] . identifier[statuslog] . identifier[debug] ( identifier[play_state] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[exception] ( identifier[e] ) | async def create_ffmpeg_player(self, filepath):
"""Creates a streamer that plays from a file"""
self.current_download_elapsed = 0
self.streamer = self.vclient.create_ffmpeg_player(filepath, after=self.vafter_ts)
self.state = 'ready'
await self.setup_streamer()
try:
# Read from the info json
info_filename = '{}.info.json'.format(filepath)
with open(info_filename, 'r') as file:
info = json.load(file)
self.nowplayinglog.debug(info['title'])
self.is_live = False
if 'duration' in info and info['duration'] is not None:
self.current_duration = info['duration'] # depends on [control=['if'], data=[]]
else:
self.current_duration = 0
if 'uploader' in info:
self.nowplayingauthorlog.info(info['uploader']) # depends on [control=['if'], data=['info']]
else:
self.nowplayingauthorlog.info('Unknown')
self.nowplayingsourcelog.info(api_music.parse_source(info))
play_state = 'Streaming' if self.is_live else 'Playing'
await self.set_topic('{} {}'.format(play_state, info['title']))
self.statuslog.debug(play_state) # depends on [control=['with'], data=['file']] # depends on [control=['try'], data=[]]
except Exception as e:
logger.exception(e) # depends on [control=['except'], data=['e']] |
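The metadata half just reads a youtube-dl style <file>.info.json sidecar, which runs fine outside the bot (sample data fabricated):

import json
import os
import tempfile

media_path = os.path.join(tempfile.mkdtemp(), "song.opus")
with open("{}.info.json".format(media_path), "w") as f:
    json.dump({"title": "Demo track", "duration": 183, "uploader": "someone"}, f)

with open("{}.info.json".format(media_path)) as f:
    info = json.load(f)

duration = info["duration"] if info.get("duration") is not None else 0
uploader = info.get("uploader", "Unknown")
print(info["title"], duration, uploader)   # Demo track 183 someone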
def set_mode_broodlord_params(
self, zerg_count=None,
vassal_overload_sos_interval=None, vassal_queue_items_sos=None):
"""This mode is a way for a vassal to ask for reinforcements to the Emperor.
Reinforcements are new vassals spawned on demand generally bound on the same socket.
.. warning:: If you are looking for a way to dynamically adapt the number
of workers of an instance, check the Cheaper subsystem - adaptive process spawning mode.
*Broodlord mode is for spawning totally new instances.*
:param int zerg_count: Maximum number of zergs to spawn.
:param int vassal_overload_sos_interval: Ask emperor for reinforcement when overloaded.
            Accepts the number of seconds to wait between asking for new reinforcements.
:param int vassal_queue_items_sos: Ask emperor for sos if listen queue (backlog) has more
items than the value specified
"""
self._set('emperor-broodlord', zerg_count)
self._set('vassal-sos', vassal_overload_sos_interval)
self._set('vassal-sos-backlog', vassal_queue_items_sos)
return self._section | def function[set_mode_broodlord_params, parameter[self, zerg_count, vassal_overload_sos_interval, vassal_queue_items_sos]]:
    constant[This mode is a way for a vassal to ask the Emperor for reinforcements.
Reinforcements are new vassals spawned on demand generally bound on the same socket.
.. warning:: If you are looking for a way to dynamically adapt the number
of workers of an instance, check the Cheaper subsystem - adaptive process spawning mode.
*Broodlord mode is for spawning totally new instances.*
:param int zerg_count: Maximum number of zergs to spawn.
:param int vassal_overload_sos_interval: Ask emperor for reinforcement when overloaded.
    Accepts the number of seconds to wait between asking for new reinforcements.
:param int vassal_queue_items_sos: Ask emperor for sos if listen queue (backlog) has more
items than the value specified
]
call[name[self]._set, parameter[constant[emperor-broodlord], name[zerg_count]]]
call[name[self]._set, parameter[constant[vassal-sos], name[vassal_overload_sos_interval]]]
call[name[self]._set, parameter[constant[vassal-sos-backlog], name[vassal_queue_items_sos]]]
return[name[self]._section] | keyword[def] identifier[set_mode_broodlord_params] (
identifier[self] , identifier[zerg_count] = keyword[None] ,
identifier[vassal_overload_sos_interval] = keyword[None] , identifier[vassal_queue_items_sos] = keyword[None] ):
literal[string]
identifier[self] . identifier[_set] ( literal[string] , identifier[zerg_count] )
identifier[self] . identifier[_set] ( literal[string] , identifier[vassal_overload_sos_interval] )
identifier[self] . identifier[_set] ( literal[string] , identifier[vassal_queue_items_sos] )
keyword[return] identifier[self] . identifier[_section] | def set_mode_broodlord_params(self, zerg_count=None, vassal_overload_sos_interval=None, vassal_queue_items_sos=None):
"""This mode is a way for a vassal to ask for reinforcements to the Emperor.
Reinforcements are new vassals spawned on demand generally bound on the same socket.
.. warning:: If you are looking for a way to dynamically adapt the number
of workers of an instance, check the Cheaper subsystem - adaptive process spawning mode.
*Broodlord mode is for spawning totally new instances.*
:param int zerg_count: Maximum number of zergs to spawn.
:param int vassal_overload_sos_interval: Ask emperor for reinforcement when overloaded.
            Accepts the number of seconds to wait between asking for new reinforcements.
:param int vassal_queue_items_sos: Ask emperor for sos if listen queue (backlog) has more
items than the value specified
"""
self._set('emperor-broodlord', zerg_count)
self._set('vassal-sos', vassal_overload_sos_interval)
self._set('vassal-sos-backlog', vassal_queue_items_sos)
return self._section |
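Each parameter funnels into _set(option_name, value); assuming, as is common in such config builders, that _set ignores None so unset keyword arguments emit nothing, the accumulation looks like this toy version:

class Section:
    def __init__(self):
        self.opts = {}

    def _set(self, key, value):
        if value is not None:       # assumed: None means "leave unset"
            self.opts[key] = value

section = Section()
section._set('emperor-broodlord', 40)
section._set('vassal-sos', None)    # dropped
print(section.opts)                 # {'emperor-broodlord': 40}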
def pool(n=None, dummy=False):
"""
create a multiprocessing pool that responds to interrupts.
"""
if dummy:
from multiprocessing.dummy import Pool
else:
from multiprocessing import Pool
if n is None:
import multiprocessing
n = multiprocessing.cpu_count() - 1
return Pool(n) | def function[pool, parameter[n, dummy]]:
constant[
create a multiprocessing pool that responds to interrupts.
]
if name[dummy] begin[:]
from relative_module[multiprocessing.dummy] import module[Pool]
if compare[name[n] is constant[None]] begin[:]
import module[multiprocessing]
variable[n] assign[=] binary_operation[call[name[multiprocessing].cpu_count, parameter[]] - constant[1]]
return[call[name[Pool], parameter[name[n]]]] | keyword[def] identifier[pool] ( identifier[n] = keyword[None] , identifier[dummy] = keyword[False] ):
literal[string]
keyword[if] identifier[dummy] :
keyword[from] identifier[multiprocessing] . identifier[dummy] keyword[import] identifier[Pool]
keyword[else] :
keyword[from] identifier[multiprocessing] keyword[import] identifier[Pool]
keyword[if] identifier[n] keyword[is] keyword[None] :
keyword[import] identifier[multiprocessing]
identifier[n] = identifier[multiprocessing] . identifier[cpu_count] ()- literal[int]
keyword[return] identifier[Pool] ( identifier[n] ) | def pool(n=None, dummy=False):
"""
create a multiprocessing pool that responds to interrupts.
"""
if dummy:
from multiprocessing.dummy import Pool # depends on [control=['if'], data=[]]
else:
from multiprocessing import Pool
if n is None:
import multiprocessing
n = multiprocessing.cpu_count() - 1 # depends on [control=['if'], data=['n']]
return Pool(n) |
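Usage is identical whether the pool is thread-backed (dummy=True) or process-backed; the __main__ guard matters for the process case on spawn-based platforms:

from multiprocessing.dummy import Pool   # thread-backed, like pool(dummy=True)

def square(x):
    return x * x

if __name__ == "__main__":
    with Pool(3) as p:
        print(p.map(square, range(6)))    # [0, 1, 4, 9, 16, 25]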
def keysym_to_keycodes(self, keysym):
"""Look up all the keycodes that is bound to keysym. A list of
tuples (keycode, index) is returned, sorted primarily on the
lowest index and secondarily on the lowest keycode."""
try:
# Copy the map list, reversing the arguments
return map(lambda x: (x[1], x[0]), self._keymap_syms[keysym])
except KeyError:
return [] | def function[keysym_to_keycodes, parameter[self, keysym]]:
    constant[Look up all the keycodes that are bound to keysym. A list of
tuples (keycode, index) is returned, sorted primarily on the
lowest index and secondarily on the lowest keycode.]
<ast.Try object at 0x7da1b26ae320> | keyword[def] identifier[keysym_to_keycodes] ( identifier[self] , identifier[keysym] ):
literal[string]
keyword[try] :
keyword[return] identifier[map] ( keyword[lambda] identifier[x] :( identifier[x] [ literal[int] ], identifier[x] [ literal[int] ]), identifier[self] . identifier[_keymap_syms] [ identifier[keysym] ])
keyword[except] identifier[KeyError] :
keyword[return] [] | def keysym_to_keycodes(self, keysym):
"""Look up all the keycodes that is bound to keysym. A list of
tuples (keycode, index) is returned, sorted primarily on the
lowest index and secondarily on the lowest keycode."""
try:
# Copy the map list, reversing the arguments
return map(lambda x: (x[1], x[0]), self._keymap_syms[keysym]) # depends on [control=['try'], data=[]]
except KeyError:
return [] # depends on [control=['except'], data=[]] |
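The underlying table maps keysym -> [(index, keycode)], and the method flips each pair to (keycode, index). A plain-dict illustration with invented table contents:

_keymap_syms = {
    0x61: [(0, 38)],    # keysym for 'a': keycode 38 at index 0
    0x41: [(1, 38)],    # keysym for 'A': same keycode, shifted index 1
}

def keysym_to_keycodes(keysym):
    try:
        return [(code, index) for index, code in _keymap_syms[keysym]]
    except KeyError:
        return []

print(keysym_to_keycodes(0x61))   # [(38, 0)]
print(keysym_to_keycodes(0x20))   # []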
def nonoverlap(item_a, time_a, item_b, time_b, max_value):
"""
Percentage of pixels in each object that do not overlap with the other object
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
return np.minimum(1 - item_a.count_overlap(time_a, item_b, time_b), max_value) / float(max_value) | def function[nonoverlap, parameter[item_a, time_a, item_b, time_b, max_value]]:
constant[
Percentage of pixels in each object that do not overlap with the other object
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
]
return[binary_operation[call[name[np].minimum, parameter[binary_operation[constant[1] - call[name[item_a].count_overlap, parameter[name[time_a], name[item_b], name[time_b]]]], name[max_value]]] / call[name[float], parameter[name[max_value]]]]] | keyword[def] identifier[nonoverlap] ( identifier[item_a] , identifier[time_a] , identifier[item_b] , identifier[time_b] , identifier[max_value] ):
literal[string]
keyword[return] identifier[np] . identifier[minimum] ( literal[int] - identifier[item_a] . identifier[count_overlap] ( identifier[time_a] , identifier[item_b] , identifier[time_b] ), identifier[max_value] )/ identifier[float] ( identifier[max_value] ) | def nonoverlap(item_a, time_a, item_b, time_b, max_value):
"""
Percentage of pixels in each object that do not overlap with the other object
Args:
item_a: STObject from the first set in ObjectMatcher
time_a: Time integer being evaluated
item_b: STObject from the second set in ObjectMatcher
time_b: Time integer being evaluated
max_value: Maximum distance value used as scaling value and upper constraint.
Returns:
Distance value between 0 and 1.
"""
return np.minimum(1 - item_a.count_overlap(time_a, item_b, time_b), max_value) / float(max_value) |
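Plugging numbers in shows the scaling: assuming count_overlap returns the overlapping fraction in [0, 1], the score is the non-overlapping fraction, capped at max_value and normalized by it:

import numpy as np

def nonoverlap_score(overlap_fraction, max_value):
    return np.minimum(1 - overlap_fraction, max_value) / float(max_value)

print(nonoverlap_score(1.0, 0.5))   # 0.0 -> objects overlap completely
print(nonoverlap_score(0.8, 0.5))   # 0.4 -> 20% non-overlap, scaled by 0.5
print(nonoverlap_score(0.0, 0.5))   # 1.0 -> capped at max_value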
def index_by(self, column_or_label):
"""Return a dict keyed by values in a column that contains lists of
rows corresponding to each value.
"""
column = self._get_column(column_or_label)
index = {}
for key, row in zip(column, self.rows):
index.setdefault(key, []).append(row)
return index | def function[index_by, parameter[self, column_or_label]]:
constant[Return a dict keyed by values in a column that contains lists of
rows corresponding to each value.
]
variable[column] assign[=] call[name[self]._get_column, parameter[name[column_or_label]]]
variable[index] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0882ef0>, <ast.Name object at 0x7da1b08832b0>]]] in starred[call[name[zip], parameter[name[column], name[self].rows]]] begin[:]
call[call[name[index].setdefault, parameter[name[key], list[[]]]].append, parameter[name[row]]]
return[name[index]] | keyword[def] identifier[index_by] ( identifier[self] , identifier[column_or_label] ):
literal[string]
identifier[column] = identifier[self] . identifier[_get_column] ( identifier[column_or_label] )
identifier[index] ={}
keyword[for] identifier[key] , identifier[row] keyword[in] identifier[zip] ( identifier[column] , identifier[self] . identifier[rows] ):
identifier[index] . identifier[setdefault] ( identifier[key] ,[]). identifier[append] ( identifier[row] )
keyword[return] identifier[index] | def index_by(self, column_or_label):
"""Return a dict keyed by values in a column that contains lists of
rows corresponding to each value.
"""
column = self._get_column(column_or_label)
index = {}
for (key, row) in zip(column, self.rows):
index.setdefault(key, []).append(row) # depends on [control=['for'], data=[]]
return index |
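A standalone sketch of the grouping idea behind index_by, with plain lists standing in for the table's _get_column result and self.rows; the sample data is invented.

column = ['a', 'b', 'a']
rows = [(1, 'x'), (2, 'y'), (3, 'z')]
index = {}
for key, row in zip(column, rows):
    index.setdefault(key, []).append(row)  # same accumulation as the method above
print(index)  # {'a': [(1, 'x'), (3, 'z')], 'b': [(2, 'y')]}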
def set_positions(self, positions=None, position_mapper='name', ids=None):
'''
checks for position validity & collisions,
but not that all measurements are assigned.
Parameters
-----------
positions : dict-like of measurement_key: (row, col)
position_mapper :
    callable - gets a key and returns a position
    mapping - key: pos
    'name' - parses well names like 'A1', 'G12'
    'number' - converts a number to a position, going over rows first.
ids :
    the mapper will be applied to the specified ids only.
    If None is given, it will be applied to all measurements.
TODO: output a more informative message for position collisions
'''
if positions is None:
if ids is None:
ids = self.keys()
else:
ids = to_list(ids)
mapper = self._get_ID2position_mapper(position_mapper)
positions = dict((ID, mapper(ID)) for ID in ids)
else:
pass
# check that resulting assignment is unique (one measurement per position)
temp = self._positions.copy()
temp.update(positions)
if not len(temp.values()) == len(set(temp.values())):
msg = 'A position can only be occupied by a single measurement'
raise Exception(msg)
for k, pos in positions.items():
if not self._is_valid_position(pos):
msg = 'Position {} is not supported for this collection'.format(pos)
raise ValueError(msg)
self._positions[k] = pos
self[k]._set_position(self.ID, pos) | def function[set_positions, parameter[self, positions, position_mapper, ids]]:
constant[
checks for position validity & collisions,
but not that all measurements are assigned.
Parameters
-----------
positions : dict-like of measurement_key: (row, col)
position_mapper :
    callable - gets a key and returns a position
    mapping - key: pos
    'name' - parses well names like 'A1', 'G12'
    'number' - converts a number to a position, going over rows first.
ids :
    the mapper will be applied to the specified ids only.
    If None is given, it will be applied to all measurements.
TODO: output a more informative message for position collisions
]
if compare[name[positions] is constant[None]] begin[:]
if compare[name[ids] is constant[None]] begin[:]
variable[ids] assign[=] call[name[self].keys, parameter[]]
variable[mapper] assign[=] call[name[self]._get_ID2position_mapper, parameter[name[position_mapper]]]
variable[positions] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da204345f00>]]
variable[temp] assign[=] call[name[self]._positions.copy, parameter[]]
call[name[temp].update, parameter[name[positions]]]
if <ast.UnaryOp object at 0x7da204344970> begin[:]
variable[msg] assign[=] constant[A position can only be occupied by a single measurement]
<ast.Raise object at 0x7da204346f80>
for taget[tuple[[<ast.Name object at 0x7da204344100>, <ast.Name object at 0x7da204346c20>]]] in starred[call[name[positions].items, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da204347850> begin[:]
variable[msg] assign[=] call[constant[Position {} is not supported for this collection].format, parameter[name[pos]]]
<ast.Raise object at 0x7da204346b90>
call[name[self]._positions][name[k]] assign[=] name[pos]
call[call[name[self]][name[k]]._set_position, parameter[name[self].ID, name[pos]]] | keyword[def] identifier[set_positions] ( identifier[self] , identifier[positions] = keyword[None] , identifier[position_mapper] = literal[string] , identifier[ids] = keyword[None] ):
literal[string]
keyword[if] identifier[positions] keyword[is] keyword[None] :
keyword[if] identifier[ids] keyword[is] keyword[None] :
identifier[ids] = identifier[self] . identifier[keys] ()
keyword[else] :
identifier[ids] = identifier[to_list] ( identifier[ids] )
identifier[mapper] = identifier[self] . identifier[_get_ID2position_mapper] ( identifier[position_mapper] )
identifier[positions] = identifier[dict] (( identifier[ID] , identifier[mapper] ( identifier[ID] )) keyword[for] identifier[ID] keyword[in] identifier[ids] )
keyword[else] :
keyword[pass]
identifier[temp] = identifier[self] . identifier[_positions] . identifier[copy] ()
identifier[temp] . identifier[update] ( identifier[positions] )
keyword[if] keyword[not] identifier[len] ( identifier[temp] . identifier[values] ())== identifier[len] ( identifier[set] ( identifier[temp] . identifier[values] ())):
identifier[msg] = literal[string]
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[for] identifier[k] , identifier[pos] keyword[in] identifier[positions] . identifier[items] ():
keyword[if] keyword[not] identifier[self] . identifier[_is_valid_position] ( identifier[pos] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[pos] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
identifier[self] . identifier[_positions] [ identifier[k] ]= identifier[pos]
identifier[self] [ identifier[k] ]. identifier[_set_position] ( identifier[self] . identifier[ID] , identifier[pos] ) | def set_positions(self, positions=None, position_mapper='name', ids=None):
"""
checks for position validity & collisions,
but not that all measurements are assigned.
Parameters
-----------
positions : dict-like of measurement_key: (row, col)
position_mapper :
    callable - gets a key and returns a position
    mapping - key: pos
    'name' - parses well names like 'A1', 'G12'
    'number' - converts a number to a position, going over rows first.
ids :
    the mapper will be applied to the specified ids only.
    If None is given, it will be applied to all measurements.
TODO: output a more informative message for position collisions
"""
if positions is None:
if ids is None:
ids = self.keys() # depends on [control=['if'], data=['ids']]
else:
ids = to_list(ids)
mapper = self._get_ID2position_mapper(position_mapper)
positions = dict(((ID, mapper(ID)) for ID in ids)) # depends on [control=['if'], data=['positions']]
else:
pass
# check that resulting assignment is unique (one measurement per position)
temp = self._positions.copy()
temp.update(positions)
if not len(temp.values()) == len(set(temp.values())):
msg = 'A position can only be occupied by a single measurement'
raise Exception(msg) # depends on [control=['if'], data=[]]
for (k, pos) in positions.items():
if not self._is_valid_position(pos):
msg = 'Position {} is not supported for this collection'.format(pos)
raise ValueError(msg) # depends on [control=['if'], data=[]]
self._positions[k] = pos
self[k]._set_position(self.ID, pos) # depends on [control=['for'], data=[]] |
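A hedged sketch of what the 'name' position mapper in set_positions plausibly does, with letters indexing rows and digits indexing columns; name_to_position is an illustrative helper, not the library's _get_ID2position_mapper.

def name_to_position(well_name):
    # 'A1' -> (0, 0), 'G12' -> (6, 11): the letter selects the row, the number the column
    row = ord(well_name[0].upper()) - ord('A')
    col = int(well_name[1:]) - 1
    return (row, col)

print(name_to_position('A1'), name_to_position('G12'))  # (0, 0) (6, 11)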
def assign_objective_requisite(self, objective_id, requisite_objective_id):
"""Creates a requirement dependency between two ``Objectives``.
arg: objective_id (osid.id.Id): the ``Id`` of the dependent
``Objective``
arg: requisite_objective_id (osid.id.Id): the ``Id`` of the
required ``Objective``
raise: AlreadyExists - ``objective_id`` already mapped to
``requisite_objective_id``
raise: NotFound - ``objective_id`` or
``requisite_objective_id`` not found
raise: NullArgument - ``objective_id`` or
``requisite_objective_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
requisite_type = Type(**Relationship().get_type_data('OBJECTIVE.REQUISITE'))
ras = self._get_provider_manager(
'RELATIONSHIP').get_relationship_admin_session_for_family(
self.get_objective_bank_id(), proxy=self._proxy)
rfc = ras.get_relationship_form_for_create(objective_id, requisite_objective_id, [])
rfc.set_display_name('Objective Requisite')
rfc.set_description('An Objective Requisite created by the ObjectiveRequisiteAssignmentSession')
rfc.set_genus_type(requisite_type)
ras.create_relationship(rfc) | def function[assign_objective_requisite, parameter[self, objective_id, requisite_objective_id]]:
constant[Creates a requirement dependency between two ``Objectives``.
arg: objective_id (osid.id.Id): the ``Id`` of the dependent
``Objective``
arg: requisite_objective_id (osid.id.Id): the ``Id`` of the
required ``Objective``
raise: AlreadyExists - ``objective_id`` already mapped to
``requisite_objective_id``
raise: NotFound - ``objective_id`` or
``requisite_objective_id`` not found
raise: NullArgument - ``objective_id`` or
``requisite_objective_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[requisite_type] assign[=] call[name[Type], parameter[]]
variable[ras] assign[=] call[call[name[self]._get_provider_manager, parameter[constant[RELATIONSHIP]]].get_relationship_admin_session_for_family, parameter[call[name[self].get_objective_bank_id, parameter[]]]]
variable[rfc] assign[=] call[name[ras].get_relationship_form_for_create, parameter[name[objective_id], name[requisite_objective_id], list[[]]]]
call[name[rfc].set_display_name, parameter[constant[Objective Requisite]]]
call[name[rfc].set_description, parameter[constant[An Objective Requisite created by the ObjectiveRequisiteAssignmentSession]]]
call[name[rfc].set_genus_type, parameter[name[requisite_type]]]
call[name[ras].create_relationship, parameter[name[rfc]]] | keyword[def] identifier[assign_objective_requisite] ( identifier[self] , identifier[objective_id] , identifier[requisite_objective_id] ):
literal[string]
identifier[requisite_type] = identifier[Type] (** identifier[Relationship] (). identifier[get_type_data] ( literal[string] ))
identifier[ras] = identifier[self] . identifier[_get_provider_manager] (
literal[string] ). identifier[get_relationship_admin_session_for_family] (
identifier[self] . identifier[get_objective_bank_id] (), identifier[proxy] = identifier[self] . identifier[_proxy] )
identifier[rfc] = identifier[ras] . identifier[get_relationship_form_for_create] ( identifier[objective_id] , identifier[requisite_objective_id] ,[])
identifier[rfc] . identifier[set_display_name] ( literal[string] )
identifier[rfc] . identifier[set_description] ( literal[string] )
identifier[rfc] . identifier[set_genus_type] ( identifier[requisite_type] )
identifier[ras] . identifier[create_relationship] ( identifier[rfc] ) | def assign_objective_requisite(self, objective_id, requisite_objective_id):
"""Creates a requirement dependency between two ``Objectives``.
arg: objective_id (osid.id.Id): the ``Id`` of the dependent
``Objective``
arg: requisite_objective_id (osid.id.Id): the ``Id`` of the
required ``Objective``
raise: AlreadyExists - ``objective_id`` already mapped to
``requisite_objective_id``
raise: NotFound - ``objective_id`` or
``requisite_objective_id`` not found
raise: NullArgument - ``objective_id`` or
``requisite_objective_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
requisite_type = Type(**Relationship().get_type_data('OBJECTIVE.REQUISITE'))
ras = self._get_provider_manager('RELATIONSHIP').get_relationship_admin_session_for_family(self.get_objective_bank_id(), proxy=self._proxy)
rfc = ras.get_relationship_form_for_create(objective_id, requisite_objective_id, [])
rfc.set_display_name('Objective Requisite')
rfc.set_description('An Objective Requisite created by the ObjectiveRequisiteAssignmentSession')
rfc.set_genus_type(requisite_type)
ras.create_relationship(rfc) |
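A short usage sketch for assign_objective_requisite; the session object and both Id values are assumed to come from an OSID/DLKit runtime and are hypothetical here.

# Hypothetical ids obtained elsewhere from the runtime's Id primitives.
session.assign_objective_requisite(dependent_objective_id, prerequisite_objective_id)
# Internally this records a Relationship of genus type OBJECTIVE.REQUISITE
# between the two objectives in the session's objective bank.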
async def trigger_callback(self, sid, namespace, id, data):
"""Invoke an application callback.
Note: this method is a coroutine.
"""
callback = None
try:
callback = self.callbacks[sid][namespace][id]
except KeyError:
# if we get an unknown callback we just ignore it
self._get_logger().warning('Unknown callback received, ignoring.')
else:
del self.callbacks[sid][namespace][id]
if callback is not None:
ret = callback(*data)
if asyncio.iscoroutine(ret):
try:
await ret
except asyncio.CancelledError: # pragma: no cover
pass | <ast.AsyncFunctionDef object at 0x7da1b1cb09d0> | keyword[async] keyword[def] identifier[trigger_callback] ( identifier[self] , identifier[sid] , identifier[namespace] , identifier[id] , identifier[data] ):
literal[string]
identifier[callback] = keyword[None]
keyword[try] :
identifier[callback] = identifier[self] . identifier[callbacks] [ identifier[sid] ][ identifier[namespace] ][ identifier[id] ]
keyword[except] identifier[KeyError] :
identifier[self] . identifier[_get_logger] (). identifier[warning] ( literal[string] )
keyword[else] :
keyword[del] identifier[self] . identifier[callbacks] [ identifier[sid] ][ identifier[namespace] ][ identifier[id] ]
keyword[if] identifier[callback] keyword[is] keyword[not] keyword[None] :
identifier[ret] = identifier[callback] (* identifier[data] )
keyword[if] identifier[asyncio] . identifier[iscoroutine] ( identifier[ret] ):
keyword[try] :
keyword[await] identifier[ret]
keyword[except] identifier[asyncio] . identifier[CancelledError] :
keyword[pass] | async def trigger_callback(self, sid, namespace, id, data):
"""Invoke an application callback.
Note: this method is a coroutine.
"""
callback = None
try:
callback = self.callbacks[sid][namespace][id] # depends on [control=['try'], data=[]]
except KeyError:
# if we get an unknown callback we just ignore it
self._get_logger().warning('Unknown callback received, ignoring.') # depends on [control=['except'], data=[]]
else:
del self.callbacks[sid][namespace][id]
if callback is not None:
ret = callback(*data)
if asyncio.iscoroutine(ret):
try:
await ret # depends on [control=['try'], data=[]]
except asyncio.CancelledError: # pragma: no cover
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['callback']] |
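A minimal sketch wiring trigger_callback to a stub server whose callbacks dict follows the sid -> namespace -> id layout the lookup above expects; all names here are hypothetical.

import asyncio
import logging

class _StubServer:
    def __init__(self):
        # One registered callback for sid 'sid1', namespace '/', id 1.
        self.callbacks = {'sid1': {'/': {1: lambda msg: print('callback got', msg)}}}

    def _get_logger(self):
        return logging.getLogger(__name__)

    # Reuse the coroutine defined above as a method of the stub.
    trigger_callback = trigger_callback

asyncio.run(_StubServer().trigger_callback('sid1', '/', 1, ['hello']))
# prints: callback got hello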
def port_bindings(val, **kwargs):
'''
On the CLI, these are passed as multiple instances of a given CLI option.
In Salt, we accept these as a comma-delimited list but the API expects a
Python dictionary mapping ports to their bindings. The format the API
expects is complicated depending on whether or not the external port maps
to a different internal port, or if the port binding is for UDP instead of
TCP (the default). For reference, see the "Port bindings" section in the
docker-py documentation at the following URL:
http://docker-py.readthedocs.io/en/stable/api.html
'''
validate_ip_addrs = kwargs.get('validate_ip_addrs', True)
if not isinstance(val, dict):
if not isinstance(val, list):
try:
val = helpers.split(val)
except AttributeError:
val = helpers.split(six.text_type(val))
for idx in range(len(val)):
if not isinstance(val[idx], six.string_types):
val[idx] = six.text_type(val[idx])
def _format_port(port_num, proto):
return six.text_type(port_num) + '/udp' if proto.lower() == 'udp' else port_num
bindings = {}
for binding in val:
bind_parts = helpers.split(binding, ':')
num_bind_parts = len(bind_parts)
if num_bind_parts == 1:
# Single port or port range being passed through (no
# special mapping)
container_port = six.text_type(bind_parts[0])
if container_port == '':
raise SaltInvocationError(
'Empty port binding definition found'
)
container_port, _, proto = container_port.partition('/')
try:
start, end = helpers.get_port_range(container_port)
except ValueError as exc:
# Using __str__() to avoid deprecation warning for using
# the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__())
bind_vals = [
(_format_port(port_num, proto), None)
for port_num in range(start, end + 1)
]
elif num_bind_parts == 2:
if bind_parts[0] == '':
raise SaltInvocationError(
'Empty host port in port binding definition '
'\'{0}\''.format(binding)
)
if bind_parts[1] == '':
raise SaltInvocationError(
'Empty container port in port binding definition '
'\'{0}\''.format(binding)
)
container_port, _, proto = bind_parts[1].partition('/')
try:
cport_start, cport_end = \
helpers.get_port_range(container_port)
hport_start, hport_end = \
helpers.get_port_range(bind_parts[0])
except ValueError as exc:
# Using __str__() to avoid deprecation warning for
# using the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__())
if (hport_end - hport_start) != (cport_end - cport_start):
# Port range is mismatched
raise SaltInvocationError(
'Host port range ({0}) does not have the same '
'number of ports as the container port range '
'({1})'.format(bind_parts[0], container_port)
)
cport_list = list(range(cport_start, cport_end + 1))
hport_list = list(range(hport_start, hport_end + 1))
bind_vals = [
(_format_port(cport_list[x], proto), hport_list[x])
for x in range(len(cport_list))
]
elif num_bind_parts == 3:
host_ip, host_port = bind_parts[0:2]
if validate_ip_addrs:
helpers.validate_ip(host_ip)
container_port, _, proto = bind_parts[2].partition('/')
try:
cport_start, cport_end = \
helpers.get_port_range(container_port)
except ValueError as exc:
# Using __str__() to avoid deprecation warning for
# using the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__())
cport_list = list(range(cport_start, cport_end + 1))
if host_port == '':
hport_list = [None] * len(cport_list)
else:
try:
hport_start, hport_end = \
helpers.get_port_range(host_port)
except ValueError as exc:
# Using __str__() to avoid deprecation warning for
# using the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__())
hport_list = list(range(hport_start, hport_end + 1))
if (hport_end - hport_start) != (cport_end - cport_start):
# Port range is mismatched
raise SaltInvocationError(
'Host port range ({0}) does not have the same '
'number of ports as the container port range '
'({1})'.format(host_port, container_port)
)
bind_vals = [(
_format_port(val, proto),
(host_ip,) if hport_list[idx] is None
else (host_ip, hport_list[idx])
) for idx, val in enumerate(cport_list)]
else:
raise SaltInvocationError(
'\'{0}\' is an invalid port binding definition (at most '
'3 components are allowed, found {1})'.format(
binding, num_bind_parts
)
)
for cport, bind_def in bind_vals:
if cport not in bindings:
bindings[cport] = bind_def
else:
if isinstance(bindings[cport], list):
# Append to existing list of bindings for this
# container port.
bindings[cport].append(bind_def)
else:
bindings[cport] = [bindings[cport], bind_def]
for idx in range(len(bindings[cport])):
if bindings[cport][idx] is None:
# Now that we are adding multiple
# bindings
try:
# Convert 1234/udp to 1234
bindings[cport][idx] = int(cport.split('/')[0])
except AttributeError:
# Port was tcp, the AttributeError
# signifies that the split failed
# because the port number was
# already defined as an integer.
# Just use the cport.
bindings[cport][idx] = cport
val = bindings
return val | def function[port_bindings, parameter[val]]:
constant[
On the CLI, these are passed as multiple instances of a given CLI option.
In Salt, we accept these as a comma-delimited list but the API expects a
Python dictionary mapping ports to their bindings. The format the API
expects is complicated depending on whether or not the external port maps
to a different internal port, or if the port binding is for UDP instead of
TCP (the default). For reference, see the "Port bindings" section in the
docker-py documentation at the following URL:
http://docker-py.readthedocs.io/en/stable/api.html
]
variable[validate_ip_addrs] assign[=] call[name[kwargs].get, parameter[constant[validate_ip_addrs], constant[True]]]
if <ast.UnaryOp object at 0x7da20e74b2e0> begin[:]
if <ast.UnaryOp object at 0x7da20e74a830> begin[:]
<ast.Try object at 0x7da20e74a8f0>
for taget[name[idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[val]]]]]] begin[:]
if <ast.UnaryOp object at 0x7da207f01690> begin[:]
call[name[val]][name[idx]] assign[=] call[name[six].text_type, parameter[call[name[val]][name[idx]]]]
def function[_format_port, parameter[port_num, proto]]:
return[<ast.IfExp object at 0x7da207f02c50>]
variable[bindings] assign[=] dictionary[[], []]
for taget[name[binding]] in starred[name[val]] begin[:]
variable[bind_parts] assign[=] call[name[helpers].split, parameter[name[binding], constant[:]]]
variable[num_bind_parts] assign[=] call[name[len], parameter[name[bind_parts]]]
if compare[name[num_bind_parts] equal[==] constant[1]] begin[:]
variable[container_port] assign[=] call[name[six].text_type, parameter[call[name[bind_parts]][constant[0]]]]
if compare[name[container_port] equal[==] constant[]] begin[:]
<ast.Raise object at 0x7da207f009d0>
<ast.Tuple object at 0x7da207f01ab0> assign[=] call[name[container_port].partition, parameter[constant[/]]]
<ast.Try object at 0x7da207f035b0>
variable[bind_vals] assign[=] <ast.ListComp object at 0x7da207f02ec0>
for taget[tuple[[<ast.Name object at 0x7da1b2344af0>, <ast.Name object at 0x7da1b2346ef0>]]] in starred[name[bind_vals]] begin[:]
if compare[name[cport] <ast.NotIn object at 0x7da2590d7190> name[bindings]] begin[:]
call[name[bindings]][name[cport]] assign[=] name[bind_def]
variable[val] assign[=] name[bindings]
return[name[val]] | keyword[def] identifier[port_bindings] ( identifier[val] ,** identifier[kwargs] ):
literal[string]
identifier[validate_ip_addrs] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[dict] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[list] ):
keyword[try] :
identifier[val] = identifier[helpers] . identifier[split] ( identifier[val] )
keyword[except] identifier[AttributeError] :
identifier[val] = identifier[helpers] . identifier[split] ( identifier[six] . identifier[text_type] ( identifier[val] ))
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[val] )):
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] [ identifier[idx] ], identifier[six] . identifier[string_types] ):
identifier[val] [ identifier[idx] ]= identifier[six] . identifier[text_type] ( identifier[val] [ identifier[idx] ])
keyword[def] identifier[_format_port] ( identifier[port_num] , identifier[proto] ):
keyword[return] identifier[six] . identifier[text_type] ( identifier[port_num] )+ literal[string] keyword[if] identifier[proto] . identifier[lower] ()== literal[string] keyword[else] identifier[port_num]
identifier[bindings] ={}
keyword[for] identifier[binding] keyword[in] identifier[val] :
identifier[bind_parts] = identifier[helpers] . identifier[split] ( identifier[binding] , literal[string] )
identifier[num_bind_parts] = identifier[len] ( identifier[bind_parts] )
keyword[if] identifier[num_bind_parts] == literal[int] :
identifier[container_port] = identifier[six] . identifier[text_type] ( identifier[bind_parts] [ literal[int] ])
keyword[if] identifier[container_port] == literal[string] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
)
identifier[container_port] , identifier[_] , identifier[proto] = identifier[container_port] . identifier[partition] ( literal[string] )
keyword[try] :
identifier[start] , identifier[end] = identifier[helpers] . identifier[get_port_range] ( identifier[container_port] )
keyword[except] identifier[ValueError] keyword[as] identifier[exc] :
keyword[raise] identifier[SaltInvocationError] ( identifier[exc] . identifier[__str__] ())
identifier[bind_vals] =[
( identifier[_format_port] ( identifier[port_num] , identifier[proto] ), keyword[None] )
keyword[for] identifier[port_num] keyword[in] identifier[range] ( identifier[start] , identifier[end] + literal[int] )
]
keyword[elif] identifier[num_bind_parts] == literal[int] :
keyword[if] identifier[bind_parts] [ literal[int] ]== literal[string] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string] . identifier[format] ( identifier[binding] )
)
keyword[if] identifier[bind_parts] [ literal[int] ]== literal[string] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string] . identifier[format] ( identifier[binding] )
)
identifier[container_port] , identifier[_] , identifier[proto] = identifier[bind_parts] [ literal[int] ]. identifier[partition] ( literal[string] )
keyword[try] :
identifier[cport_start] , identifier[cport_end] = identifier[helpers] . identifier[get_port_range] ( identifier[container_port] )
identifier[hport_start] , identifier[hport_end] = identifier[helpers] . identifier[get_port_range] ( identifier[bind_parts] [ literal[int] ])
keyword[except] identifier[ValueError] keyword[as] identifier[exc] :
keyword[raise] identifier[SaltInvocationError] ( identifier[exc] . identifier[__str__] ())
keyword[if] ( identifier[hport_end] - identifier[hport_start] )!=( identifier[cport_end] - identifier[cport_start] ):
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[bind_parts] [ literal[int] ], identifier[container_port] )
)
identifier[cport_list] = identifier[list] ( identifier[range] ( identifier[cport_start] , identifier[cport_end] + literal[int] ))
identifier[hport_list] = identifier[list] ( identifier[range] ( identifier[hport_start] , identifier[hport_end] + literal[int] ))
identifier[bind_vals] =[
( identifier[_format_port] ( identifier[cport_list] [ identifier[x] ], identifier[proto] ), identifier[hport_list] [ identifier[x] ])
keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[len] ( identifier[cport_list] ))
]
keyword[elif] identifier[num_bind_parts] == literal[int] :
identifier[host_ip] , identifier[host_port] = identifier[bind_parts] [ literal[int] : literal[int] ]
keyword[if] identifier[validate_ip_addrs] :
identifier[helpers] . identifier[validate_ip] ( identifier[host_ip] )
identifier[container_port] , identifier[_] , identifier[proto] = identifier[bind_parts] [ literal[int] ]. identifier[partition] ( literal[string] )
keyword[try] :
identifier[cport_start] , identifier[cport_end] = identifier[helpers] . identifier[get_port_range] ( identifier[container_port] )
keyword[except] identifier[ValueError] keyword[as] identifier[exc] :
keyword[raise] identifier[SaltInvocationError] ( identifier[exc] . identifier[__str__] ())
identifier[cport_list] = identifier[list] ( identifier[range] ( identifier[cport_start] , identifier[cport_end] + literal[int] ))
keyword[if] identifier[host_port] == literal[string] :
identifier[hport_list] =[ keyword[None] ]* identifier[len] ( identifier[cport_list] )
keyword[else] :
keyword[try] :
identifier[hport_start] , identifier[hport_end] = identifier[helpers] . identifier[get_port_range] ( identifier[host_port] )
keyword[except] identifier[ValueError] keyword[as] identifier[exc] :
keyword[raise] identifier[SaltInvocationError] ( identifier[exc] . identifier[__str__] ())
identifier[hport_list] = identifier[list] ( identifier[range] ( identifier[hport_start] , identifier[hport_end] + literal[int] ))
keyword[if] ( identifier[hport_end] - identifier[hport_start] )!=( identifier[cport_end] - identifier[cport_start] ):
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[host_port] , identifier[container_port] )
)
identifier[bind_vals] =[(
identifier[_format_port] ( identifier[val] , identifier[proto] ),
( identifier[host_ip] ,) keyword[if] identifier[hport_list] [ identifier[idx] ] keyword[is] keyword[None]
keyword[else] ( identifier[host_ip] , identifier[hport_list] [ identifier[idx] ])
) keyword[for] identifier[idx] , identifier[val] keyword[in] identifier[enumerate] ( identifier[cport_list] )]
keyword[else] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
literal[string] . identifier[format] (
identifier[binding] , identifier[num_bind_parts]
)
)
keyword[for] identifier[cport] , identifier[bind_def] keyword[in] identifier[bind_vals] :
keyword[if] identifier[cport] keyword[not] keyword[in] identifier[bindings] :
identifier[bindings] [ identifier[cport] ]= identifier[bind_def]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[bindings] [ identifier[cport] ], identifier[list] ):
identifier[bindings] [ identifier[cport] ]. identifier[append] ( identifier[bind_def] )
keyword[else] :
identifier[bindings] [ identifier[cport] ]=[ identifier[bindings] [ identifier[cport] ], identifier[bind_def] ]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[bindings] [ identifier[cport] ])):
keyword[if] identifier[bindings] [ identifier[cport] ][ identifier[idx] ] keyword[is] keyword[None] :
keyword[try] :
identifier[bindings] [ identifier[cport] ][ identifier[idx] ]= identifier[int] ( identifier[cport] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[except] identifier[AttributeError] :
identifier[bindings] [ identifier[cport] ][ identifier[idx] ]= identifier[cport]
identifier[val] = identifier[bindings]
keyword[return] identifier[val] | def port_bindings(val, **kwargs):
"""
On the CLI, these are passed as multiple instances of a given CLI option.
In Salt, we accept these as a comma-delimited list but the API expects a
Python dictionary mapping ports to their bindings. The format the API
expects is complicated depending on whether or not the external port maps
to a different internal port, or if the port binding is for UDP instead of
TCP (the default). For reference, see the "Port bindings" section in the
docker-py documentation at the following URL:
http://docker-py.readthedocs.io/en/stable/api.html
"""
validate_ip_addrs = kwargs.get('validate_ip_addrs', True)
if not isinstance(val, dict):
if not isinstance(val, list):
try:
val = helpers.split(val) # depends on [control=['try'], data=[]]
except AttributeError:
val = helpers.split(six.text_type(val)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
for idx in range(len(val)):
if not isinstance(val[idx], six.string_types):
val[idx] = six.text_type(val[idx]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
def _format_port(port_num, proto):
return six.text_type(port_num) + '/udp' if proto.lower() == 'udp' else port_num
bindings = {}
for binding in val:
bind_parts = helpers.split(binding, ':')
num_bind_parts = len(bind_parts)
if num_bind_parts == 1:
# Single port or port range being passed through (no
# special mapping)
container_port = six.text_type(bind_parts[0])
if container_port == '':
raise SaltInvocationError('Empty port binding definition found') # depends on [control=['if'], data=[]]
(container_port, _, proto) = container_port.partition('/')
try:
(start, end) = helpers.get_port_range(container_port) # depends on [control=['try'], data=[]]
except ValueError as exc:
# Using __str__() to avoid deprecation warning for using
# the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__()) # depends on [control=['except'], data=['exc']]
bind_vals = [(_format_port(port_num, proto), None) for port_num in range(start, end + 1)] # depends on [control=['if'], data=[]]
elif num_bind_parts == 2:
if bind_parts[0] == '':
raise SaltInvocationError("Empty host port in port binding definition '{0}'".format(binding)) # depends on [control=['if'], data=[]]
if bind_parts[1] == '':
raise SaltInvocationError("Empty container port in port binding definition '{0}'".format(binding)) # depends on [control=['if'], data=[]]
(container_port, _, proto) = bind_parts[1].partition('/')
try:
(cport_start, cport_end) = helpers.get_port_range(container_port)
(hport_start, hport_end) = helpers.get_port_range(bind_parts[0]) # depends on [control=['try'], data=[]]
except ValueError as exc:
# Using __str__() to avoid deprecation warning for
# using the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__()) # depends on [control=['except'], data=['exc']]
if hport_end - hport_start != cport_end - cport_start:
# Port range is mismatched
raise SaltInvocationError('Host port range ({0}) does not have the same number of ports as the container port range ({1})'.format(bind_parts[0], container_port)) # depends on [control=['if'], data=[]]
cport_list = list(range(cport_start, cport_end + 1))
hport_list = list(range(hport_start, hport_end + 1))
bind_vals = [(_format_port(cport_list[x], proto), hport_list[x]) for x in range(len(cport_list))] # depends on [control=['if'], data=[]]
elif num_bind_parts == 3:
(host_ip, host_port) = bind_parts[0:2]
if validate_ip_addrs:
helpers.validate_ip(host_ip) # depends on [control=['if'], data=[]]
(container_port, _, proto) = bind_parts[2].partition('/')
try:
(cport_start, cport_end) = helpers.get_port_range(container_port) # depends on [control=['try'], data=[]]
except ValueError as exc:
# Using __str__() to avoid deprecation warning for
# using the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__()) # depends on [control=['except'], data=['exc']]
cport_list = list(range(cport_start, cport_end + 1))
if host_port == '':
hport_list = [None] * len(cport_list) # depends on [control=['if'], data=[]]
else:
try:
(hport_start, hport_end) = helpers.get_port_range(host_port) # depends on [control=['try'], data=[]]
except ValueError as exc:
# Using __str__() to avoid deprecation warning for
# using the message attribute of the ValueError.
raise SaltInvocationError(exc.__str__()) # depends on [control=['except'], data=['exc']]
hport_list = list(range(hport_start, hport_end + 1))
if hport_end - hport_start != cport_end - cport_start:
# Port range is mismatched
raise SaltInvocationError('Host port range ({0}) does not have the same number of ports as the container port range ({1})'.format(host_port, container_port)) # depends on [control=['if'], data=[]]
bind_vals = [(_format_port(val, proto), (host_ip,) if hport_list[idx] is None else (host_ip, hport_list[idx])) for (idx, val) in enumerate(cport_list)] # depends on [control=['if'], data=[]]
else:
raise SaltInvocationError("'{0}' is an invalid port binding definition (at most 3 components are allowed, found {1})".format(binding, num_bind_parts))
for (cport, bind_def) in bind_vals:
if cport not in bindings:
bindings[cport] = bind_def # depends on [control=['if'], data=['cport', 'bindings']]
else:
if isinstance(bindings[cport], list):
# Append to existing list of bindings for this
# container port.
bindings[cport].append(bind_def) # depends on [control=['if'], data=[]]
else:
bindings[cport] = [bindings[cport], bind_def]
for idx in range(len(bindings[cport])):
if bindings[cport][idx] is None:
# Now that we are adding multiple
# bindings
try:
# Convert 1234/udp to 1234
bindings[cport][idx] = int(cport.split('/')[0]) # depends on [control=['try'], data=[]]
except AttributeError:
# Port was tcp, the AttributeError
# signifies that the split failed
# because the port number was
# already defined as an integer.
# Just use the cport.
bindings[cport][idx] = cport # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['binding']]
val = bindings # depends on [control=['if'], data=[]]
return val |
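A usage sketch for port_bindings with a representative comma-delimited input; helpers.split and helpers.get_port_range are Salt internals assumed to behave as the docstring describes, so the printed shape below is an expectation rather than a captured run.

print(port_bindings('8080:80,127.0.0.1:5000-5001:4000-4001/udp'))
# expected (assumption):
# {80: 8080,
#  '4000/udp': ('127.0.0.1', 5000),
#  '4001/udp': ('127.0.0.1', 5001)}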