def at_line(self, line: FileLine) -> Iterator[InsertionPoint]:
    """
    Returns an iterator over all of the insertion points located at a
    given line.
    """
    logger.debug("finding insertion points at line: %s", str(line))
    filename = line.filename  # type: str
    line_num = line.num  # type: int
    for ins in self.in_file(filename):
        if line_num == ins.location.line:
            logger.debug("found insertion point at line [%s]: %s",
                         str(line), ins)
            yield ins
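
# A minimal usage sketch for at_line. The snippet-database object `db`, the
# file name, and the line number are hypothetical stand-ins for whatever
# instance of the enclosing class and FileLine value the caller already has.
line = FileLine("foo.c", 42)
points = list(db.at_line(line))
print("%d insertion point(s) at %s" % (len(points), line))
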

# `np` and `lambdify` are used below; in the original module they are assumed
# to be imported at module level (numpy and sympy's lambdify, respectively).
import numpy as np
from sympy import lambdify


def theano_compiler(model):
    """Take a triflow model and return optimized theano routines.

    Parameters
    ----------
    model: triflow.Model:
        Model to compile

    Returns
    -------
    (theano_function, theano_function):
        Optimized routines that compute the evolution equations and their
        Jacobian matrix.
    """
    from theano import tensor as T
    from theano.ifelse import ifelse
    import theano.sparse as ths
    from theano import function

    def th_Min(a, b):
        if isinstance(a, T.TensorVariable) or isinstance(b, T.TensorVariable):
            return T.where(a < b, a, b)
        return min(a, b)

    def th_Max(a, b):
        if isinstance(a, T.TensorVariable) or isinstance(b, T.TensorVariable):
            return T.where(a < b, b, a)
        return max(a, b)

    def th_Heaviside(a):
        if isinstance(a, T.TensorVariable):
            # 0 for negative arguments, 1 otherwise, consistent with the
            # scalar branch below.
            return T.where(a < 0, 0, 1)
        return 0 if a < 0 else 1

    mapargs = {arg: T.vector(arg)
               for arg, sarg
               in zip(model._args, model._symbolic_args)}
    to_feed = mapargs.copy()

    x_th = mapargs['x']
    N = x_th.size
    L = x_th[-1] - x_th[0]
    dx = L / (N - 1)
    to_feed['dx'] = dx

    periodic = T.scalar("periodic", dtype="int32")
    middle_point = int((model._window_range - 1) / 2)

    th_args = [mapargs[key]
               for key
               in [*model._indep_vars,
                   *model._dep_vars,
                   *model._help_funcs,
                   *model._pars]] + [periodic]

    map_extended = {}
    for (varname, discretisation_tree) in \
            model._symb_vars_with_spatial_diff_order.items():
        pad_left, pad_right = model._bounds
        th_arg = mapargs[varname]
        # Build both periodic and edge-padded extensions of the variable,
        # then select one at run time according to the `periodic` flag.
        per_extended_var = T.concatenate([th_arg[pad_left:],
                                          th_arg,
                                          th_arg[:pad_right]])
        edge_extended_var = T.concatenate([[th_arg[0]] * middle_point,
                                           th_arg,
                                           [th_arg[-1]] * middle_point])
        extended_var = ifelse(periodic,
                              per_extended_var,
                              edge_extended_var)
        map_extended[varname] = extended_var
        for order in range(pad_left, pad_right + 1):
            if order != 0:
                var = ("{}_{}{}").format(varname,
                                         'm' if order < 0 else 'p',
                                         np.abs(order))
            else:
                var = varname
            new_var = extended_var[order - pad_left:
                                   extended_var.size +
                                   order - pad_right]
            to_feed[var] = new_var

    F = lambdify((model._symbolic_args),
                 expr=model.F_array.tolist(),
                 modules=[T, {"Max": th_Max,
                              "Min": th_Min,
                              "Heaviside": th_Heaviside}])(
        *[to_feed[key]
          for key
          in model._args]
    )
    F = T.concatenate(F, axis=0).reshape((model._nvar, N)).T
    F = T.stack(F).flatten()

    J = lambdify((model._symbolic_args),
                 expr=model.J_array.tolist(),
                 modules=[T, {"Max": th_Max,
                              "Min": th_Min,
                              "Heaviside": th_Heaviside}])(
        *[to_feed[key]
          for key
          in model._args]
    )
    # Normalize the Jacobian entries to theano tensors of length N each.
    J = [j if j != 0 else T.constant(0.)
         for j in J]
    J = [j if not isinstance(j, (int, float)) else T.constant(j)
         for j in J]
    J = T.stack([T.repeat(j, N) if j.ndim == 0 else j
                 for j in J])
    J = J[model._sparse_indices[0]].T.squeeze()

    # Assemble the sparse (CSC) Jacobian: compute row/column indices for
    # every non-zero entry, sort them by column, then build indptr.
    i = T.arange(N).dimshuffle([0, 'x'])
    idx = T.arange(N * model._nvar).reshape((N, model._nvar)).T
    edge_extended_idx = T.concatenate([T.repeat(idx[:, :1],
                                                middle_point,
                                                axis=1),
                                       idx,
                                       T.repeat(idx[:, -1:],
                                                middle_point,
                                                axis=1)],
                                      axis=1).T.flatten()
    per_extended_idx = T.concatenate([idx[:, -middle_point:],
                                      idx,
                                      idx[:, :middle_point]],
                                     axis=1).T.flatten()
    extended_idx = ifelse(periodic,
                          per_extended_idx,
                          edge_extended_idx)

    rows = T.tile(T.arange(model._nvar),
                  model._window_range * model._nvar) + i * model._nvar
    cols = T.repeat(T.arange(model._window_range * model._nvar),
                    model._nvar) + i * model._nvar
    rows = rows[:, model._sparse_indices].reshape(J.shape).flatten()
    cols = extended_idx[cols][:, model._sparse_indices] \
        .reshape(J.shape).flatten()

    permutation = T.argsort(cols)
    J = J.flatten()[permutation]
    rows = rows[permutation]
    cols = cols[permutation]
    count = T.zeros((N * model._nvar + 1,), dtype=int)
    uq, cnt = T.extra_ops.Unique(False, False, True)(cols)
    count = T.set_subtensor(count[uq + 1], cnt)
    indptr = T.cumsum(count)
    shape = T.stack([N * model._nvar, N * model._nvar])
    sparse_J = ths.CSC(J, rows, indptr, shape)

    F_theano_function = function(inputs=th_args,
                                 outputs=F,
                                 on_unused_input='ignore',
                                 allow_input_downcast=True)
    J_theano_function = function(inputs=th_args,
                                 outputs=sparse_J,
                                 on_unused_input='ignore',
                                 allow_input_downcast=True)
    return F_theano_function, J_theano_function
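
# A hypothetical driver for the compiled routines. `model` is assumed to be a
# triflow.Model with one dependent variable U on the grid x; the positional
# argument order follows model._args, and the trailing integer flag toggles
# periodic boundary conditions. Names and shapes here are illustrative only.
import numpy as np

F_fun, J_fun = theano_compiler(model)
x = np.linspace(0, 10, 256)
U = np.zeros_like(x)
F = F_fun(x, U, 1)   # flattened right-hand side, length N * nvar
J = J_fun(x, U, 1)   # sparse CSC Jacobian, (N * nvar) x (N * nvar)
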

def _appendReportKeys(keys, prefix, results):
  """
  Generate a set of possible report keys for an experiment's results.

  A report key is a string of key names separated by colons, each key being
  one level deeper into the experiment results dict. For example, 'key1:key2'.

  This routine is called recursively to build keys that are multiple levels
  deep from the results dict.

  Parameters:
  -----------------------------------------------------------
  keys:     Set of report keys accumulated so far
  prefix:   Prefix formed so far; this is the colon-separated list of key
            names that led up to the dict passed in results
  results:  Dictionary of results at this level
  """
  # sorted() works on both Python 2 and 3 (on Python 3, dict.keys() is a
  # view object without a .sort() method).
  allKeys = sorted(results.keys())

  for key in allKeys:
    if hasattr(results[key], 'keys'):
      _appendReportKeys(keys, "%s%s:" % (prefix, key), results[key])
    else:
      keys.add("%s%s" % (prefix, key))
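
# A small, self-contained example of how the recursion flattens a nested
# results dict (the dict contents are illustrative):
results = {'modelParams': {'sensorParams': {'verbosity': 0}},
           'metrics': {'rmse': 0.25}}
keys = set()
_appendReportKeys(keys, '', results)
print(sorted(keys))
# ['metrics:rmse', 'modelParams:sensorParams:verbosity']
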

def _refresh_nvr(self):
    """ Refresh our name-version-release attributes. """
    rpm_info = juicer.utils.rpm_info(self.path)
    self.name = rpm_info['name']
    self.version = rpm_info['version']
    self.release = rpm_info['release']
constant[ Refresh our name-version-release attributes. ]
variable[rpm_info] assign[=] call[name[juicer].utils.rpm_info, parameter[name[self].path]]
name[self].name assign[=] call[name[rpm_info]][constant[name]]
name[self].version assign[=] call[name[rpm_info]][constant[version]]
name[self].release assign[=] call[name[rpm_info]][constant[release]] | keyword[def] identifier[_refresh_nvr] ( identifier[self] ):
literal[string]
identifier[rpm_info] = identifier[juicer] . identifier[utils] . identifier[rpm_info] ( identifier[self] . identifier[path] )
identifier[self] . identifier[name] = identifier[rpm_info] [ literal[string] ]
identifier[self] . identifier[version] = identifier[rpm_info] [ literal[string] ]
identifier[self] . identifier[release] = identifier[rpm_info] [ literal[string] ] | def _refresh_nvr(self):
""" Refresh our name-version-release attributes. """
rpm_info = juicer.utils.rpm_info(self.path)
self.name = rpm_info['name']
self.version = rpm_info['version']
self.release = rpm_info['release'] |

def check_packet(self):
    '''is there a valid packet (from another thread) for this app/instance?'''
    if not os.path.exists(self.packet_file()):
        # No packet file, we're good
        return True
    else:
        # There's already a file, but is it still running?
        try:
            with open(self.packet_file()) as f:
                packet = json.loads(f.read())
            if time.time() - packet['last_time'] > 3.0 * packet['poll_time']:
                # We haven't heard a ping in too long. It's probably dead
                return True
            else:
                # Still getting pings.. probably still a live process
                return False
        except Exception:
            # Failed to read the file (or it was malformed)... back off for
            # up to two seconds, then retry.
            time.sleep(random.random() * 2)
            return self.check_packet()
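
# The packet file acts as a heartbeat-based single-instance lock: check_packet
# returns True when no live writer is still pinging it. A minimal sketch of
# the writer side this pairs with; the field names mirror what check_packet
# reads, but the scheduling around it is assumed:
import json
import time

def write_packet(path, poll_time=1.0):
    # Called roughly every poll_time seconds by the owning instance.
    with open(path, 'w') as f:
        json.dump({'last_time': time.time(), 'poll_time': poll_time}, f)
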

def decrypt_PBEWithSHAAndTwofishCBC(encrypted_data, password, salt, iteration_count):
    """
    Decrypts PBEWithSHAAndTwofishCBC, assuming PKCS#12-generated PBE parameters.
    (Not explicitly defined as an algorithm in RFC 7292, but defined here
    nevertheless because of the assumption of PKCS#12 parameters).
    """
    iv = derive_key(hashlib.sha1, PURPOSE_IV_MATERIAL, password, salt, iteration_count, 16)
    key = derive_key(hashlib.sha1, PURPOSE_KEY_MATERIAL, password, salt, iteration_count, 256//8)

    encrypted_data = bytearray(encrypted_data)
    encrypted_data_len = len(encrypted_data)
    if encrypted_data_len % 16 != 0:
        raise BadDataLengthException("encrypted data length is not a multiple of 16 bytes")

    plaintext = bytearray()

    # slow and dirty CBC decrypt
    from twofish import Twofish
    cipher = Twofish(key)
    last_cipher_block = bytearray(iv)
    for block_offset in range(0, encrypted_data_len, 16):
        cipher_block = encrypted_data[block_offset:block_offset+16]
        plaintext_block = xor_bytearrays(bytearray(cipher.decrypt(bytes(cipher_block))), last_cipher_block)
        plaintext.extend(plaintext_block)
        last_cipher_block = cipher_block

    plaintext = strip_pkcs7_padding(plaintext, 16)
    return bytes(plaintext)
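
# The loop above is textbook CBC: each plaintext block is
# P_i = D_K(C_i) XOR C_{i-1}, with the IV standing in for C_0. The helper
# xor_bytearrays is not shown in this excerpt; a minimal implementation
# consistent with how it is used here would be:
def xor_bytearrays(a, b):
    # Byte-wise XOR of two equal-length byte sequences.
    return bytearray(x ^ y for x, y in zip(a, b))
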

def retry_with_delay(f, delay=60):
    """
    Retry the wrapped requests.request function in case of ConnectionError
    or Timeout. The retry count comes from get_retries(); `delay` sets the
    pause between retries.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        kwargs['timeout'] = 5
        remaining = get_retries() + 1
        while remaining:
            remaining -= 1
            try:
                return f(*args, **kwargs)
            except (requests.ConnectionError, requests.Timeout):
                if not remaining:
                    raise
                gevent.sleep(delay)
    return inner
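
# Example use: retry_with_delay wraps a callable directly (it also works as a
# bare @retry_with_delay decorator). Here requests.get is wrapped so transient
# network errors are retried with a 30-second pause between attempts.
safe_get = retry_with_delay(requests.get, delay=30)
response = safe_get('https://example.com/api/status')
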

def _get_rest_key_parts(cls, attr_key):
    """Get the RestAPI key of this attr, split it and decode each part.

    :param str attr_key: Attribute key. Must be in attribute_map.
    :returns: A list of RestAPI parts
    :rtype: list
    """
    rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]['key'])
    return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
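
# _FLATTEN and _decode_attribute_map_key are module-level helpers not shown in
# this excerpt. In msrest-style serializers they split a flattened key such as
# 'properties.provisioningState' on unescaped dots; a sketch consistent with
# that assumption:
import re

_FLATTEN = re.compile(r"(?<!\\)\.")   # split on dots not escaped with '\'

def _decode_attribute_map_key(key):
    return key.replace('\\.', '.')    # unescape the remaining dots

print(_FLATTEN.split(r"properties.platform\.name"))
# ['properties', 'platform\\.name']
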

async def preprocess_request(
    self, request_context: Optional[RequestContext]=None,
) -> Optional[ResponseReturnValue]:
    """Preprocess the request i.e. call before_request functions.

    Arguments:
        request_context: The request context, optional as Flask
            omits this argument.
    """
    request_ = (request_context or _request_ctx_stack.top).request
    blueprint = request_.blueprint
    processors = self.url_value_preprocessors[None]
    if blueprint is not None:
        processors = chain(processors, self.url_value_preprocessors[blueprint])  # type: ignore
    for processor in processors:
        processor(request.endpoint, request.view_args)
    functions = self.before_request_funcs[None]
    if blueprint is not None:
        functions = chain(functions, self.before_request_funcs[blueprint])  # type: ignore
    for function in functions:
        result = await function()
        if result is not None:
            return result
    return None
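
# The short-circuit contract mirrors Flask: the first before_request handler
# that returns a non-None value becomes the response and the view is skipped.
# A hypothetical handler relying on that behavior:
@app.before_request
async def require_token():
    if request.headers.get('Authorization') is None:
        return 'unauthorized', 401
    return None  # fall through to the next handler and, eventually, the view
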

def terminating_sip_domains(self):
    """
    Access the terminating_sip_domains

    :returns: twilio.rest.trunking.v1.trunk.terminating_sip_domain.TerminatingSipDomainList
    :rtype: twilio.rest.trunking.v1.trunk.terminating_sip_domain.TerminatingSipDomainList
    """
    if self._terminating_sip_domains is None:
        self._terminating_sip_domains = TerminatingSipDomainList(
            self._version,
            trunk_sid=self._solution['sid'],
        )
    return self._terminating_sip_domains

def add_image_info_cb(self, gshell, channel, iminfo):
    """Add entries related to an added image."""
    timestamp = iminfo.time_modified
    if timestamp is None:
        # Not an image we are interested in tracking
        return
    self.add_entry(channel.name, iminfo)

def any_char_field(field, **kwargs):
    """
    Return random value for CharField

    >>> result = any_field(models.CharField(max_length=10))
    >>> type(result)
    <type 'str'>
    """
    min_length = kwargs.get('min_length', 1)
    max_length = kwargs.get('max_length', field.max_length)
    return xunit.any_string(min_length=min_length, max_length=max_length)
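
# Direct call example, bypassing the any_field dispatcher referenced in the
# doctest: generate a value between 5 and 10 characters long.
from django.db import models

field = models.CharField(max_length=10)
value = any_char_field(field, min_length=5)
assert 5 <= len(value) <= 10
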

def make_yaml_patterns():
    """Strongly inspired by the Sublime highlighter"""
    kw = any("keyword", [r":|>|-|\||\[|\]|[A-Za-z][\w\s\-\_ ]+(?=:)"])
    links = any("normal", [r"#:[^\n]*"])
    comment = any("comment", [r"#[^\n]*"])
    number = any("number",
                 [r"\b[+-]?[0-9]+[lL]?\b",
                  r"\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\b",
                  r"\b[+-]?[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\b"])
    sqstring = r"(\b[rRuU])?'[^'\\\n]*(\\.[^'\\\n]*)*'?"
    dqstring = r'(\b[rRuU])?"[^"\\\n]*(\\.[^"\\\n]*)*"?'
    string = any("string", [sqstring, dqstring])
    return "|".join([kw, string, number, links, comment,
                     any("SYNC", [r"\n"])])
"""Strongly inspired from sublime highlighter """
kw = any('keyword', [':|>|-|\\||\\[|\\]|[A-Za-z][\\w\\s\\-\\_ ]+(?=:)'])
links = any('normal', ['#:[^\\n]*'])
comment = any('comment', ['#[^\\n]*'])
number = any('number', ['\\b[+-]?[0-9]+[lL]?\\b', '\\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\\b', '\\b[+-]?[0-9]+(?:\\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\\b'])
sqstring = "(\\b[rRuU])?'[^'\\\\\\n]*(\\\\.[^'\\\\\\n]*)*'?"
dqstring = '(\\b[rRuU])?"[^"\\\\\\n]*(\\\\.[^"\\\\\\n]*)*"?'
string = any('string', [sqstring, dqstring])
return '|'.join([kw, string, number, links, comment, any('SYNC', ['\\n'])]) |
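A sketch of consuming the combined pattern (assumes the module-level any() helper wraps each alternative in a named group, as Spyder-style highlighters do):

import re

yaml_regex = re.compile(make_yaml_patterns())
m = yaml_regex.search("name: value  # trailing comment")
if m:
    print(m.lastgroup)  # e.g. 'keyword' for the leading "name:" token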
def convert_utc_time(datetime_str):
"""
Handles datetime argument conversion to the GNIP API format, which is
    `YYYYMMDDHHMM`. Flexible passing of date formats in the following types::
- YYYYmmDDHHMM
- YYYY-mm-DD
- YYYY-mm-DD HH:MM
- YYYY-mm-DDTHH:MM
Args:
datetime_str (str): valid formats are listed above.
Returns:
string of GNIP API formatted date.
Example:
>>> from searchtweets.utils import convert_utc_time
>>> convert_utc_time("201708020000")
'201708020000'
>>> convert_utc_time("2017-08-02")
'201708020000'
>>> convert_utc_time("2017-08-02 00:00")
'201708020000'
>>> convert_utc_time("2017-08-02T00:00")
'201708020000'
"""
if not datetime_str:
return None
if not set(['-', ':']) & set(datetime_str):
_date = datetime.datetime.strptime(datetime_str, "%Y%m%d%H%M")
else:
try:
if "T" in datetime_str:
# command line with 'T'
datetime_str = datetime_str.replace('T', ' ')
_date = datetime.datetime.strptime(datetime_str, "%Y-%m-%d %H:%M")
except ValueError:
_date = datetime.datetime.strptime(datetime_str, "%Y-%m-%d")
return _date.strftime("%Y%m%d%H%M") | def function[convert_utc_time, parameter[datetime_str]]:
constant[
Handles datetime argument conversion to the GNIP API format, which is
    `YYYYMMDDHHMM`. Flexible passing of date formats in the following types::
- YYYYmmDDHHMM
- YYYY-mm-DD
- YYYY-mm-DD HH:MM
- YYYY-mm-DDTHH:MM
Args:
datetime_str (str): valid formats are listed above.
Returns:
string of GNIP API formatted date.
Example:
>>> from searchtweets.utils import convert_utc_time
>>> convert_utc_time("201708020000")
'201708020000'
>>> convert_utc_time("2017-08-02")
'201708020000'
>>> convert_utc_time("2017-08-02 00:00")
'201708020000'
>>> convert_utc_time("2017-08-02T00:00")
'201708020000'
]
if <ast.UnaryOp object at 0x7da1b1239870> begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da1b1239c90> begin[:]
variable[_date] assign[=] call[name[datetime].datetime.strptime, parameter[name[datetime_str], constant[%Y%m%d%H%M]]]
return[call[name[_date].strftime, parameter[constant[%Y%m%d%H%M]]]] | keyword[def] identifier[convert_utc_time] ( identifier[datetime_str] ):
literal[string]
keyword[if] keyword[not] identifier[datetime_str] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[set] ([ literal[string] , literal[string] ])& identifier[set] ( identifier[datetime_str] ):
identifier[_date] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[datetime_str] , literal[string] )
keyword[else] :
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[datetime_str] :
identifier[datetime_str] = identifier[datetime_str] . identifier[replace] ( literal[string] , literal[string] )
identifier[_date] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[datetime_str] , literal[string] )
keyword[except] identifier[ValueError] :
identifier[_date] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[datetime_str] , literal[string] )
keyword[return] identifier[_date] . identifier[strftime] ( literal[string] ) | def convert_utc_time(datetime_str):
"""
Handles datetime argument conversion to the GNIP API format, which is
    `YYYYMMDDHHMM`. Flexible passing of date formats in the following types::
- YYYYmmDDHHMM
- YYYY-mm-DD
- YYYY-mm-DD HH:MM
- YYYY-mm-DDTHH:MM
Args:
datetime_str (str): valid formats are listed above.
Returns:
string of GNIP API formatted date.
Example:
>>> from searchtweets.utils import convert_utc_time
>>> convert_utc_time("201708020000")
'201708020000'
>>> convert_utc_time("2017-08-02")
'201708020000'
>>> convert_utc_time("2017-08-02 00:00")
'201708020000'
>>> convert_utc_time("2017-08-02T00:00")
'201708020000'
"""
if not datetime_str:
return None # depends on [control=['if'], data=[]]
if not set(['-', ':']) & set(datetime_str):
_date = datetime.datetime.strptime(datetime_str, '%Y%m%d%H%M') # depends on [control=['if'], data=[]]
else:
try:
if 'T' in datetime_str:
# command line with 'T'
datetime_str = datetime_str.replace('T', ' ') # depends on [control=['if'], data=['datetime_str']]
_date = datetime.datetime.strptime(datetime_str, '%Y-%m-%d %H:%M') # depends on [control=['try'], data=[]]
except ValueError:
_date = datetime.datetime.strptime(datetime_str, '%Y-%m-%d') # depends on [control=['except'], data=[]]
return _date.strftime('%Y%m%d%H%M') |
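Inputs outside the four accepted layouts fall through to strptime and raise ValueError; a defensive-use sketch:

try:
    stamp = convert_utc_time("2017/08/02")  # slashes are not an accepted layout
except ValueError:
    stamp = None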
def find_team(self, color: str = None):
"""Find the :class:`~.Team` with the given properties
Returns the team whose attributes match the given properties, or
``None`` if no match is found.
:param color: The :class:`~.Team.Color` of the Team
"""
if color != None:
if color is Team.Color.BLUE:
return self.blue_team
else:
return self.orange_team
else:
return None | def function[find_team, parameter[self, color]]:
    constant[Find the :class:`~.Team` with the given color.
    Returns the blue team for ``Team.Color.BLUE`` and the orange team
    for any other color; returns ``None`` when no color is given.
:param color: The :class:`~.Team.Color` of the Team
]
if compare[name[color] not_equal[!=] constant[None]] begin[:]
if compare[name[color] is name[Team].Color.BLUE] begin[:]
return[name[self].blue_team] | keyword[def] identifier[find_team] ( identifier[self] , identifier[color] : identifier[str] = keyword[None] ):
literal[string]
keyword[if] identifier[color] != keyword[None] :
keyword[if] identifier[color] keyword[is] identifier[Team] . identifier[Color] . identifier[BLUE] :
keyword[return] identifier[self] . identifier[blue_team]
keyword[else] :
keyword[return] identifier[self] . identifier[orange_team]
keyword[else] :
keyword[return] keyword[None] | def find_team(self, color: str=None):
"""Find the :class:`~.Team` with the given properties
Returns the team whose attributes match the given properties, or
``None`` if no match is found.
:param color: The :class:`~.Team.Color` of the Team
"""
if color != None:
if color is Team.Color.BLUE:
return self.blue_team # depends on [control=['if'], data=[]]
else:
return self.orange_team # depends on [control=['if'], data=['color']]
else:
return None |
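Given the branching above, any color other than Team.Color.BLUE yields the orange team; only a missing color returns None. A sketch against a hypothetical game instance:

blue = game.find_team(color=Team.Color.BLUE)
orange = game.find_team(color=Team.Color.ORANGE)  # every non-BLUE color lands here
assert game.find_team() is None                   # no color supplied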
def nr_cases(self, institute_id=None):
"""Return the number of cases
This function will change when we migrate to 3.7.1
Args:
        institute_id(str): Institute id
Returns:
nr_cases(int)
"""
query = {}
if institute_id:
query['collaborators'] = institute_id
LOG.debug("Fetch all cases with query {0}".format(query))
nr_cases = self.case_collection.find(query).count()
return nr_cases | def function[nr_cases, parameter[self, institute_id]]:
constant[Return the number of cases
This function will change when we migrate to 3.7.1
Args:
        institute_id(str): Institute id
Returns:
nr_cases(int)
]
variable[query] assign[=] dictionary[[], []]
if name[institute_id] begin[:]
call[name[query]][constant[collaborators]] assign[=] name[institute_id]
call[name[LOG].debug, parameter[call[constant[Fetch all cases with query {0}].format, parameter[name[query]]]]]
variable[nr_cases] assign[=] call[call[name[self].case_collection.find, parameter[name[query]]].count, parameter[]]
return[name[nr_cases]] | keyword[def] identifier[nr_cases] ( identifier[self] , identifier[institute_id] = keyword[None] ):
literal[string]
identifier[query] ={}
keyword[if] identifier[institute_id] :
identifier[query] [ literal[string] ]= identifier[institute_id]
identifier[LOG] . identifier[debug] ( literal[string] . identifier[format] ( identifier[query] ))
identifier[nr_cases] = identifier[self] . identifier[case_collection] . identifier[find] ( identifier[query] ). identifier[count] ()
keyword[return] identifier[nr_cases] | def nr_cases(self, institute_id=None):
"""Return the number of cases
This function will change when we migrate to 3.7.1
Args:
        institute_id(str): Institute id
Returns:
nr_cases(int)
"""
query = {}
if institute_id:
query['collaborators'] = institute_id # depends on [control=['if'], data=[]]
LOG.debug('Fetch all cases with query {0}'.format(query))
nr_cases = self.case_collection.find(query).count()
return nr_cases |
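A usage sketch (the adapter construction and institute id are illustrative):

adapter = MongoAdapter()                           # hypothetical; depends on the deployment
total = adapter.nr_cases()                         # all cases
scoped = adapter.nr_cases(institute_id="cust000")  # cases shared with one institute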
def run(cmds, **kwargs):
"""
    Wrapper around subprocess.Popen that streams and unicode-decodes output.
    Extra kwargs are accepted for signature compatibility but currently unused.
"""
proc = sp.Popen(cmds, bufsize=-1, stdout=sp.PIPE, stderr=sp.STDOUT,
close_fds=sys.platform != 'win32')
for line in proc.stdout:
print(line[:-1].decode())
retcode = proc.wait()
if retcode:
raise sp.CalledProcessError(retcode, cmds) | def function[run, parameter[cmds]]:
constant[
    Wrapper around subprocess.Popen that streams and unicode-decodes output.
    Extra kwargs are accepted for signature compatibility but currently unused.
]
variable[proc] assign[=] call[name[sp].Popen, parameter[name[cmds]]]
for taget[name[line]] in starred[name[proc].stdout] begin[:]
call[name[print], parameter[call[call[name[line]][<ast.Slice object at 0x7da18f721960>].decode, parameter[]]]]
variable[retcode] assign[=] call[name[proc].wait, parameter[]]
if name[retcode] begin[:]
<ast.Raise object at 0x7da18f722bf0> | keyword[def] identifier[run] ( identifier[cmds] ,** identifier[kwargs] ):
literal[string]
identifier[proc] = identifier[sp] . identifier[Popen] ( identifier[cmds] , identifier[bufsize] =- literal[int] , identifier[stdout] = identifier[sp] . identifier[PIPE] , identifier[stderr] = identifier[sp] . identifier[STDOUT] ,
identifier[close_fds] = identifier[sys] . identifier[platform] != literal[string] )
keyword[for] identifier[line] keyword[in] identifier[proc] . identifier[stdout] :
identifier[print] ( identifier[line] [:- literal[int] ]. identifier[decode] ())
identifier[retcode] = identifier[proc] . identifier[wait] ()
keyword[if] identifier[retcode] :
keyword[raise] identifier[sp] . identifier[CalledProcessError] ( identifier[retcode] , identifier[cmds] ) | def run(cmds, **kwargs):
"""
    Wrapper around subprocess.Popen that streams and unicode-decodes output.
    Extra kwargs are accepted for signature compatibility but currently unused.
"""
proc = sp.Popen(cmds, bufsize=-1, stdout=sp.PIPE, stderr=sp.STDOUT, close_fds=sys.platform != 'win32')
for line in proc.stdout:
print(line[:-1].decode()) # depends on [control=['for'], data=['line']]
retcode = proc.wait()
if retcode:
raise sp.CalledProcessError(retcode, cmds) # depends on [control=['if'], data=[]] |
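A sketch of calling the helper and catching a failing command (assumes the enclosing module imports subprocess as sp and sys, as the body implies):

import subprocess as sp

run(["echo", "hello"])                 # streams "hello" line by line
try:
    run(["false"])                     # exits non-zero
except sp.CalledProcessError as err:
    print("failed with code", err.returncode)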
def get_template_names(self):
'''
Build the list of templates related to this user
'''
# Get user template
template_model = getattr(self, 'template_model', "{0}/{1}_{2}".format(self._appname.lower(), self._modelname.lower(), self.get_template_names_key))
template_model_ext = getattr(self, 'template_model_ext', 'html')
templates = get_template(template_model, self.user, self.language, template_model_ext, raise_error=False)
if type(templates) == list:
templates.append("codenerix/{0}.html".format(self.get_template_names_key))
        # Return the list of templates
return templates | def function[get_template_names, parameter[self]]:
constant[
Build the list of templates related to this user
]
variable[template_model] assign[=] call[name[getattr], parameter[name[self], constant[template_model], call[constant[{0}/{1}_{2}].format, parameter[call[name[self]._appname.lower, parameter[]], call[name[self]._modelname.lower, parameter[]], name[self].get_template_names_key]]]]
variable[template_model_ext] assign[=] call[name[getattr], parameter[name[self], constant[template_model_ext], constant[html]]]
variable[templates] assign[=] call[name[get_template], parameter[name[template_model], name[self].user, name[self].language, name[template_model_ext]]]
if compare[call[name[type], parameter[name[templates]]] equal[==] name[list]] begin[:]
call[name[templates].append, parameter[call[constant[codenerix/{0}.html].format, parameter[name[self].get_template_names_key]]]]
return[name[templates]] | keyword[def] identifier[get_template_names] ( identifier[self] ):
literal[string]
identifier[template_model] = identifier[getattr] ( identifier[self] , literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[_appname] . identifier[lower] (), identifier[self] . identifier[_modelname] . identifier[lower] (), identifier[self] . identifier[get_template_names_key] ))
identifier[template_model_ext] = identifier[getattr] ( identifier[self] , literal[string] , literal[string] )
identifier[templates] = identifier[get_template] ( identifier[template_model] , identifier[self] . identifier[user] , identifier[self] . identifier[language] , identifier[template_model_ext] , identifier[raise_error] = keyword[False] )
keyword[if] identifier[type] ( identifier[templates] )== identifier[list] :
identifier[templates] . identifier[append] ( literal[string] . identifier[format] ( identifier[self] . identifier[get_template_names_key] ))
keyword[return] identifier[templates] | def get_template_names(self):
"""
Build the list of templates related to this user
"""
# Get user template
template_model = getattr(self, 'template_model', '{0}/{1}_{2}'.format(self._appname.lower(), self._modelname.lower(), self.get_template_names_key))
template_model_ext = getattr(self, 'template_model_ext', 'html')
templates = get_template(template_model, self.user, self.language, template_model_ext, raise_error=False)
if type(templates) == list:
templates.append('codenerix/{0}.html'.format(self.get_template_names_key)) # depends on [control=['if'], data=[]]
        # Return the list of templates
return templates |
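A sketch of the lookup order this produces for a hypothetical view with _appname="invoices", _modelname="Invoice" and get_template_names_key="list":

names = view.get_template_names()  # hypothetical view instance
# user template tried first: "invoices/invoice_list" (extension "html"),
# then the generic fallback appended above: "codenerix/list.html"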
def get_enumeration(rq, v, endpoint, metadata={}, auth=None):
"""
Returns a list of enumerated values for variable 'v' in query 'rq'
"""
# glogger.debug("Metadata before processing enums: {}".format(metadata))
# We only fire the enum filling queries if indicated by the query metadata
if 'enumerate' not in metadata:
return None
enumDict = _getDictWithKey(v, metadata['enumerate'])
if enumDict:
return enumDict[v]
if v in metadata['enumerate']:
return get_enumeration_sparql(rq, v, endpoint, auth)
return None | def function[get_enumeration, parameter[rq, v, endpoint, metadata, auth]]:
constant[
Returns a list of enumerated values for variable 'v' in query 'rq'
]
if compare[constant[enumerate] <ast.NotIn object at 0x7da2590d7190> name[metadata]] begin[:]
return[constant[None]]
variable[enumDict] assign[=] call[name[_getDictWithKey], parameter[name[v], call[name[metadata]][constant[enumerate]]]]
if name[enumDict] begin[:]
return[call[name[enumDict]][name[v]]]
if compare[name[v] in call[name[metadata]][constant[enumerate]]] begin[:]
return[call[name[get_enumeration_sparql], parameter[name[rq], name[v], name[endpoint], name[auth]]]]
return[constant[None]] | keyword[def] identifier[get_enumeration] ( identifier[rq] , identifier[v] , identifier[endpoint] , identifier[metadata] ={}, identifier[auth] = keyword[None] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[metadata] :
keyword[return] keyword[None]
identifier[enumDict] = identifier[_getDictWithKey] ( identifier[v] , identifier[metadata] [ literal[string] ])
keyword[if] identifier[enumDict] :
keyword[return] identifier[enumDict] [ identifier[v] ]
keyword[if] identifier[v] keyword[in] identifier[metadata] [ literal[string] ]:
keyword[return] identifier[get_enumeration_sparql] ( identifier[rq] , identifier[v] , identifier[endpoint] , identifier[auth] )
keyword[return] keyword[None] | def get_enumeration(rq, v, endpoint, metadata={}, auth=None):
"""
Returns a list of enumerated values for variable 'v' in query 'rq'
"""
# glogger.debug("Metadata before processing enums: {}".format(metadata))
# We only fire the enum filling queries if indicated by the query metadata
if 'enumerate' not in metadata:
return None # depends on [control=['if'], data=[]]
enumDict = _getDictWithKey(v, metadata['enumerate'])
if enumDict:
return enumDict[v] # depends on [control=['if'], data=[]]
if v in metadata['enumerate']:
return get_enumeration_sparql(rq, v, endpoint, auth) # depends on [control=['if'], data=['v']]
return None |
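A sketch of the metadata contract (endpoint and variable names are illustrative; _getDictWithKey is assumed to return the first dict in the list that carries the key):

rq = "SELECT ?genre WHERE { ?work ?p ?genre }"
metadata = {"enumerate": [{"genre": ["rock", "jazz"]}]}
values = get_enumeration(rq, "genre", "http://example.org/sparql", metadata)
# -> ["rock", "jazz"]; with metadata = {"enumerate": ["genre"]} the endpoint
# would instead be queried through get_enumeration_sparql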
def republish_collection(cursor, ident_hash, version):
"""Republish the collection identified as ``ident_hash`` with
the given ``version``.
"""
if not isinstance(version, (list, tuple,)):
split_version = version.split('.')
if len(split_version) == 1:
split_version.append(None)
version = tuple(split_version)
major_version, minor_version = version
cursor.execute("""\
WITH previous AS (
SELECT module_ident
FROM modules
WHERE ident_hash(uuid, major_version, minor_version) = %s),
inserted AS (
INSERT INTO modules
(uuid, major_version, minor_version, revised,
portal_type, moduleid,
name, created, language,
submitter, submitlog,
abstractid, licenseid, parent, parentauthors,
authors, maintainers, licensors,
google_analytics, buylink,
stateid, doctype)
SELECT
uuid, %s, %s, CURRENT_TIMESTAMP,
portal_type, moduleid,
name, created, language,
submitter, submitlog,
abstractid, licenseid, parent, parentauthors,
authors, maintainers, licensors,
google_analytics, buylink,
stateid, doctype
FROM modules AS m JOIN previous AS p ON (m.module_ident = p.module_ident)
RETURNING
ident_hash(uuid, major_version, minor_version) AS ident_hash,
module_ident),
keywords AS (
INSERT INTO modulekeywords (module_ident, keywordid)
SELECT i.module_ident, keywordid
FROM modulekeywords AS mk, inserted AS i, previous AS p
WHERE mk.module_ident = p.module_ident),
tags AS (
INSERT INTO moduletags (module_ident, tagid)
SELECT i.module_ident, tagid
FROM moduletags AS mt, inserted AS i, previous AS p
WHERE mt.module_ident = p.module_ident)
SELECT ident_hash FROM inserted""",
(ident_hash, major_version, minor_version,))
repub_ident_hash = cursor.fetchone()[0]
return repub_ident_hash | def function[republish_collection, parameter[cursor, ident_hash, version]]:
constant[Republish the collection identified as ``ident_hash`` with
the given ``version``.
]
if <ast.UnaryOp object at 0x7da1aff8d450> begin[:]
variable[split_version] assign[=] call[name[version].split, parameter[constant[.]]]
if compare[call[name[len], parameter[name[split_version]]] equal[==] constant[1]] begin[:]
call[name[split_version].append, parameter[constant[None]]]
variable[version] assign[=] call[name[tuple], parameter[name[split_version]]]
<ast.Tuple object at 0x7da1aff8d090> assign[=] name[version]
call[name[cursor].execute, parameter[constant[WITH previous AS (
SELECT module_ident
FROM modules
WHERE ident_hash(uuid, major_version, minor_version) = %s),
inserted AS (
INSERT INTO modules
(uuid, major_version, minor_version, revised,
portal_type, moduleid,
name, created, language,
submitter, submitlog,
abstractid, licenseid, parent, parentauthors,
authors, maintainers, licensors,
google_analytics, buylink,
stateid, doctype)
SELECT
uuid, %s, %s, CURRENT_TIMESTAMP,
portal_type, moduleid,
name, created, language,
submitter, submitlog,
abstractid, licenseid, parent, parentauthors,
authors, maintainers, licensors,
google_analytics, buylink,
stateid, doctype
FROM modules AS m JOIN previous AS p ON (m.module_ident = p.module_ident)
RETURNING
ident_hash(uuid, major_version, minor_version) AS ident_hash,
module_ident),
keywords AS (
INSERT INTO modulekeywords (module_ident, keywordid)
SELECT i.module_ident, keywordid
FROM modulekeywords AS mk, inserted AS i, previous AS p
WHERE mk.module_ident = p.module_ident),
tags AS (
INSERT INTO moduletags (module_ident, tagid)
SELECT i.module_ident, tagid
FROM moduletags AS mt, inserted AS i, previous AS p
WHERE mt.module_ident = p.module_ident)
SELECT ident_hash FROM inserted], tuple[[<ast.Name object at 0x7da20c6e5cc0>, <ast.Name object at 0x7da20c6e7910>, <ast.Name object at 0x7da20c6e4340>]]]]
variable[repub_ident_hash] assign[=] call[call[name[cursor].fetchone, parameter[]]][constant[0]]
return[name[repub_ident_hash]] | keyword[def] identifier[republish_collection] ( identifier[cursor] , identifier[ident_hash] , identifier[version] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[version] ,( identifier[list] , identifier[tuple] ,)):
identifier[split_version] = identifier[version] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[split_version] )== literal[int] :
identifier[split_version] . identifier[append] ( keyword[None] )
identifier[version] = identifier[tuple] ( identifier[split_version] )
identifier[major_version] , identifier[minor_version] = identifier[version]
identifier[cursor] . identifier[execute] ( literal[string] ,
( identifier[ident_hash] , identifier[major_version] , identifier[minor_version] ,))
identifier[repub_ident_hash] = identifier[cursor] . identifier[fetchone] ()[ literal[int] ]
keyword[return] identifier[repub_ident_hash] | def republish_collection(cursor, ident_hash, version):
"""Republish the collection identified as ``ident_hash`` with
the given ``version``.
"""
if not isinstance(version, (list, tuple)):
split_version = version.split('.')
if len(split_version) == 1:
split_version.append(None) # depends on [control=['if'], data=[]]
version = tuple(split_version) # depends on [control=['if'], data=[]]
(major_version, minor_version) = version
cursor.execute('WITH previous AS (\n SELECT module_ident\n FROM modules\n WHERE ident_hash(uuid, major_version, minor_version) = %s),\ninserted AS (\n INSERT INTO modules\n (uuid, major_version, minor_version, revised,\n portal_type, moduleid,\n name, created, language,\n submitter, submitlog,\n abstractid, licenseid, parent, parentauthors,\n authors, maintainers, licensors,\n google_analytics, buylink,\n stateid, doctype)\n SELECT\n uuid, %s, %s, CURRENT_TIMESTAMP,\n portal_type, moduleid,\n name, created, language,\n submitter, submitlog,\n abstractid, licenseid, parent, parentauthors,\n authors, maintainers, licensors,\n google_analytics, buylink,\n stateid, doctype\n FROM modules AS m JOIN previous AS p ON (m.module_ident = p.module_ident)\n RETURNING\n ident_hash(uuid, major_version, minor_version) AS ident_hash,\n module_ident),\nkeywords AS (\n INSERT INTO modulekeywords (module_ident, keywordid)\n SELECT i.module_ident, keywordid\n FROM modulekeywords AS mk, inserted AS i, previous AS p\n WHERE mk.module_ident = p.module_ident),\ntags AS (\n INSERT INTO moduletags (module_ident, tagid)\n SELECT i.module_ident, tagid\n FROM moduletags AS mt, inserted AS i, previous AS p\n WHERE mt.module_ident = p.module_ident)\nSELECT ident_hash FROM inserted', (ident_hash, major_version, minor_version))
repub_ident_hash = cursor.fetchone()[0]
return repub_ident_hash |
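A usage sketch (hypothetical psycopg2 connection; the ident hash is illustrative, and the version may be a "major.minor" string or a (major, minor) pair):

with conn.cursor() as cursor:  # hypothetical open connection
    new_hash = republish_collection(
        cursor, "e79ffde3-7fb4-4af3-9ec8-df648b391597@6.1", version=(7, 1))
    print(new_hash)  # ident hash of the newly inserted row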
def LFL(Hc=None, atoms={}, CASRN='', AvailableMethods=False, Method=None):
r'''This function handles the retrieval or calculation of a chemical's
Lower Flammability Limit. Lookup is based on CASRNs. Two predictive methods
are currently implemented. Will automatically select a data source to use
if no Method is provided; returns None if the data is not available.
    Preferred source is 'IEC 60079-20-1 (2010)' [1]_, with the secondary source
'NFPA 497 (2008)' [2]_ having very similar data. If the heat of combustion
is provided, the estimation method `Suzuki_LFL` can be used. If the atoms
of the molecule are available, the method `Crowl_Louvar_LFL` can be used.
Examples
--------
>>> LFL(CASRN='71-43-2')
0.012
Parameters
----------
Hc : float, optional
Heat of combustion of gas [J/mol]
atoms : dict, optional
Dictionary of atoms and atom counts
CASRN : string, optional
CASRN [-]
Returns
-------
LFL : float
Lower flammability limit of the gas in an atmosphere at STP, [mole fraction]
methods : list, only returned if AvailableMethods == True
List of methods which can be used to obtain LFL with the
given inputs
Other Parameters
----------------
Method : string, optional
A string for the method name to use, as defined by constants in
LFL_methods
AvailableMethods : bool, optional
If True, function will determine which methods can be used to obtain
the Lower Flammability Limit for the desired chemical, and will return
methods instead of Lower Flammability Limit.
Notes
-----
References
----------
.. [1] IEC. “IEC 60079-20-1:2010 Explosive atmospheres - Part 20-1:
Material characteristics for gas and vapour classification - Test
methods and data.” https://webstore.iec.ch/publication/635. See also
https://law.resource.org/pub/in/bis/S05/is.iec.60079.20.1.2010.pdf
.. [2] National Fire Protection Association. NFPA 497: Recommended
Practice for the Classification of Flammable Liquids, Gases, or Vapors
and of Hazardous. NFPA, 2008.
'''
def list_methods():
methods = []
if CASRN in IEC_2010.index and not np.isnan(IEC_2010.at[CASRN, 'LFL']):
methods.append(IEC)
if CASRN in NFPA_2008.index and not np.isnan(NFPA_2008.at[CASRN, 'LFL']):
methods.append(NFPA)
if Hc:
methods.append(SUZUKI)
if atoms:
methods.append(CROWLLOUVAR)
methods.append(NONE)
return methods
if AvailableMethods:
return list_methods()
if not Method:
Method = list_methods()[0]
if Method == IEC:
return float(IEC_2010.at[CASRN, 'LFL'])
elif Method == NFPA:
return float(NFPA_2008.at[CASRN, 'LFL'])
elif Method == SUZUKI:
return Suzuki_LFL(Hc=Hc)
elif Method == CROWLLOUVAR:
return Crowl_Louvar_LFL(atoms=atoms)
elif Method == NONE:
return None
else:
        raise Exception('Failure in LFL function')
constant[This function handles the retrieval or calculation of a chemical's
Lower Flammability Limit. Lookup is based on CASRNs. Two predictive methods
are currently implemented. Will automatically select a data source to use
if no Method is provided; returns None if the data is not available.
    Preferred source is 'IEC 60079-20-1 (2010)' [1]_, with the secondary source
'NFPA 497 (2008)' [2]_ having very similar data. If the heat of combustion
is provided, the estimation method `Suzuki_LFL` can be used. If the atoms
of the molecule are available, the method `Crowl_Louvar_LFL` can be used.
Examples
--------
>>> LFL(CASRN='71-43-2')
0.012
Parameters
----------
Hc : float, optional
Heat of combustion of gas [J/mol]
atoms : dict, optional
Dictionary of atoms and atom counts
CASRN : string, optional
CASRN [-]
Returns
-------
LFL : float
Lower flammability limit of the gas in an atmosphere at STP, [mole fraction]
methods : list, only returned if AvailableMethods == True
List of methods which can be used to obtain LFL with the
given inputs
Other Parameters
----------------
Method : string, optional
A string for the method name to use, as defined by constants in
LFL_methods
AvailableMethods : bool, optional
If True, function will determine which methods can be used to obtain
the Lower Flammability Limit for the desired chemical, and will return
methods instead of Lower Flammability Limit.
Notes
-----
References
----------
.. [1] IEC. “IEC 60079-20-1:2010 Explosive atmospheres - Part 20-1:
Material characteristics for gas and vapour classification - Test
methods and data.” https://webstore.iec.ch/publication/635. See also
https://law.resource.org/pub/in/bis/S05/is.iec.60079.20.1.2010.pdf
.. [2] National Fire Protection Association. NFPA 497: Recommended
Practice for the Classification of Flammable Liquids, Gases, or Vapors
and of Hazardous. NFPA, 2008.
]
def function[list_methods, parameter[]]:
variable[methods] assign[=] list[[]]
if <ast.BoolOp object at 0x7da204566f20> begin[:]
call[name[methods].append, parameter[name[IEC]]]
if <ast.BoolOp object at 0x7da204566410> begin[:]
call[name[methods].append, parameter[name[NFPA]]]
if name[Hc] begin[:]
call[name[methods].append, parameter[name[SUZUKI]]]
if name[atoms] begin[:]
call[name[methods].append, parameter[name[CROWLLOUVAR]]]
call[name[methods].append, parameter[name[NONE]]]
return[name[methods]]
if name[AvailableMethods] begin[:]
return[call[name[list_methods], parameter[]]]
if <ast.UnaryOp object at 0x7da204567670> begin[:]
variable[Method] assign[=] call[call[name[list_methods], parameter[]]][constant[0]]
if compare[name[Method] equal[==] name[IEC]] begin[:]
return[call[name[float], parameter[call[name[IEC_2010].at][tuple[[<ast.Name object at 0x7da204565060>, <ast.Constant object at 0x7da204564190>]]]]]] | keyword[def] identifier[LFL] ( identifier[Hc] = keyword[None] , identifier[atoms] ={}, identifier[CASRN] = literal[string] , identifier[AvailableMethods] = keyword[False] , identifier[Method] = keyword[None] ):
literal[string]
keyword[def] identifier[list_methods] ():
identifier[methods] =[]
keyword[if] identifier[CASRN] keyword[in] identifier[IEC_2010] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[IEC_2010] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[IEC] )
keyword[if] identifier[CASRN] keyword[in] identifier[NFPA_2008] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[NFPA_2008] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[NFPA] )
keyword[if] identifier[Hc] :
identifier[methods] . identifier[append] ( identifier[SUZUKI] )
keyword[if] identifier[atoms] :
identifier[methods] . identifier[append] ( identifier[CROWLLOUVAR] )
identifier[methods] . identifier[append] ( identifier[NONE] )
keyword[return] identifier[methods]
keyword[if] identifier[AvailableMethods] :
keyword[return] identifier[list_methods] ()
keyword[if] keyword[not] identifier[Method] :
identifier[Method] = identifier[list_methods] ()[ literal[int] ]
keyword[if] identifier[Method] == identifier[IEC] :
keyword[return] identifier[float] ( identifier[IEC_2010] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[NFPA] :
keyword[return] identifier[float] ( identifier[NFPA_2008] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[SUZUKI] :
keyword[return] identifier[Suzuki_LFL] ( identifier[Hc] = identifier[Hc] )
keyword[elif] identifier[Method] == identifier[CROWLLOUVAR] :
keyword[return] identifier[Crowl_Louvar_LFL] ( identifier[atoms] = identifier[atoms] )
keyword[elif] identifier[Method] == identifier[NONE] :
keyword[return] keyword[None]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] ) | def LFL(Hc=None, atoms={}, CASRN='', AvailableMethods=False, Method=None):
"""This function handles the retrieval or calculation of a chemical's
Lower Flammability Limit. Lookup is based on CASRNs. Two predictive methods
are currently implemented. Will automatically select a data source to use
if no Method is provided; returns None if the data is not available.
    Preferred source is 'IEC 60079-20-1 (2010)' [1]_, with the secondary source
'NFPA 497 (2008)' [2]_ having very similar data. If the heat of combustion
is provided, the estimation method `Suzuki_LFL` can be used. If the atoms
of the molecule are available, the method `Crowl_Louvar_LFL` can be used.
Examples
--------
>>> LFL(CASRN='71-43-2')
0.012
Parameters
----------
Hc : float, optional
Heat of combustion of gas [J/mol]
atoms : dict, optional
Dictionary of atoms and atom counts
CASRN : string, optional
CASRN [-]
Returns
-------
LFL : float
Lower flammability limit of the gas in an atmosphere at STP, [mole fraction]
methods : list, only returned if AvailableMethods == True
List of methods which can be used to obtain LFL with the
given inputs
Other Parameters
----------------
Method : string, optional
A string for the method name to use, as defined by constants in
LFL_methods
AvailableMethods : bool, optional
If True, function will determine which methods can be used to obtain
the Lower Flammability Limit for the desired chemical, and will return
methods instead of Lower Flammability Limit.
Notes
-----
References
----------
.. [1] IEC. “IEC 60079-20-1:2010 Explosive atmospheres - Part 20-1:
Material characteristics for gas and vapour classification - Test
methods and data.” https://webstore.iec.ch/publication/635. See also
https://law.resource.org/pub/in/bis/S05/is.iec.60079.20.1.2010.pdf
.. [2] National Fire Protection Association. NFPA 497: Recommended
Practice for the Classification of Flammable Liquids, Gases, or Vapors
and of Hazardous. NFPA, 2008.
"""
def list_methods():
methods = []
if CASRN in IEC_2010.index and (not np.isnan(IEC_2010.at[CASRN, 'LFL'])):
methods.append(IEC) # depends on [control=['if'], data=[]]
if CASRN in NFPA_2008.index and (not np.isnan(NFPA_2008.at[CASRN, 'LFL'])):
methods.append(NFPA) # depends on [control=['if'], data=[]]
if Hc:
methods.append(SUZUKI) # depends on [control=['if'], data=[]]
if atoms:
methods.append(CROWLLOUVAR) # depends on [control=['if'], data=[]]
methods.append(NONE)
return methods
if AvailableMethods:
return list_methods() # depends on [control=['if'], data=[]]
if not Method:
Method = list_methods()[0] # depends on [control=['if'], data=[]]
if Method == IEC:
return float(IEC_2010.at[CASRN, 'LFL']) # depends on [control=['if'], data=[]]
elif Method == NFPA:
return float(NFPA_2008.at[CASRN, 'LFL']) # depends on [control=['if'], data=[]]
elif Method == SUZUKI:
return Suzuki_LFL(Hc=Hc) # depends on [control=['if'], data=[]]
elif Method == CROWLLOUVAR:
return Crowl_Louvar_LFL(atoms=atoms) # depends on [control=['if'], data=[]]
elif Method == NONE:
return None # depends on [control=['if'], data=[]]
else:
        raise Exception('Failure in LFL function')
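Sketches of the lookup and estimation routes (the CASRN matches the docstring; the heat of combustion and atom dictionary are illustrative):

LFL(CASRN='71-43-2')                         # 0.012, tabulated (IEC 60079-20-1)
LFL(Hc=-764464.0)                            # Suzuki estimation from Hc [J/mol]
LFL(atoms={'C': 1, 'H': 4})                  # Crowl-Louvar estimation (methane)
LFL(CASRN='71-43-2', AvailableMethods=True)  # inspect which methods apply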
def writeAnn(filename, catalog, fmt):
"""
Write an annotation file that can be read by Kvis (.ann) or DS9 (.reg).
Uses ra/dec from catalog.
Draws ellipses if bmaj/bmin/pa are in catalog. Draws 30" circles otherwise.
Only :class:`AegeanTools.models.OutputSource` will appear in the annotation file
unless there are none, in which case :class:`AegeanTools.models.SimpleSource` (if present)
will be written. If any :class:`AegeanTools.models.IslandSource` objects are present then
an island contours file will be written.
Parameters
----------
filename : str
Output filename base.
catalog : list
List of sources.
fmt : ['ann', 'reg']
Output file type.
Returns
-------
None
See Also
--------
AegeanTools.catalogs.writeIslandContours
"""
if fmt not in ['reg', 'ann']:
log.warning("Format not supported for island boxes{0}".format(fmt))
return # fmt not supported
components, islands, simples = classify_catalog(catalog)
if len(components) > 0:
cat = sorted(components)
suffix = "comp"
elif len(simples) > 0:
cat = simples
suffix = "simp"
else:
cat = []
if len(cat) > 0:
ras = [a.ra for a in cat]
decs = [a.dec for a in cat]
if not hasattr(cat[0], 'a'): # a being the variable that I used for bmaj.
bmajs = [30 / 3600.0 for a in cat]
bmins = bmajs
pas = [0 for a in cat]
else:
bmajs = [a.a / 3600.0 for a in cat]
bmins = [a.b / 3600.0 for a in cat]
pas = [a.pa for a in cat]
names = [a.__repr__() for a in cat]
if fmt == 'ann':
new_file = re.sub('.ann$', '_{0}.ann'.format(suffix), filename)
out = open(new_file, 'w')
print("#Aegean version {0}-({1})".format(__version__, __date__), file=out)
print('PA SKY', file=out)
print('FONT hershey12', file=out)
print('COORD W', file=out)
formatter = "ELLIPSE W {0} {1} {2} {3} {4:+07.3f} #{5}\nTEXT W {0} {1} {5}"
else: # reg
new_file = re.sub('.reg$', '_{0}.reg'.format(suffix), filename)
out = open(new_file, 'w')
print("#Aegean version {0}-({1})".format(__version__, __date__), file=out)
print("fk5", file=out)
formatter = 'ellipse {0} {1} {2:.9f}d {3:.9f}d {4:+07.3f}d # text="{5}"'
# DS9 has some strange ideas about position angle
pas = [a - 90 for a in pas]
for ra, dec, bmaj, bmin, pa, name in zip(ras, decs, bmajs, bmins, pas, names):
# comment out lines that have invalid or stupid entries
if np.nan in [ra, dec, bmaj, bmin, pa] or bmaj >= 180:
print('#', end=' ', file=out)
print(formatter.format(ra, dec, bmaj, bmin, pa, name), file=out)
out.close()
log.info("wrote {0}".format(new_file))
if len(islands) > 0:
if fmt == 'reg':
new_file = re.sub('.reg$', '_isle.reg', filename)
elif fmt == 'ann':
log.warning('kvis islands are currently not working')
return
else:
log.warning('format {0} not supported for island annotations'.format(fmt))
return
writeIslandContours(new_file, islands, fmt)
log.info("wrote {0}".format(new_file))
return | def function[writeAnn, parameter[filename, catalog, fmt]]:
constant[
Write an annotation file that can be read by Kvis (.ann) or DS9 (.reg).
Uses ra/dec from catalog.
Draws ellipses if bmaj/bmin/pa are in catalog. Draws 30" circles otherwise.
Only :class:`AegeanTools.models.OutputSource` will appear in the annotation file
unless there are none, in which case :class:`AegeanTools.models.SimpleSource` (if present)
will be written. If any :class:`AegeanTools.models.IslandSource` objects are present then
an island contours file will be written.
Parameters
----------
filename : str
Output filename base.
catalog : list
List of sources.
fmt : ['ann', 'reg']
Output file type.
Returns
-------
None
See Also
--------
AegeanTools.catalogs.writeIslandContours
]
if compare[name[fmt] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da2041da9e0>, <ast.Constant object at 0x7da2041d8f40>]]] begin[:]
        call[name[log].warning, parameter[call[constant[Format not supported for island boxes: {0}].format, parameter[name[fmt]]]]]
return[None]
<ast.Tuple object at 0x7da2041dbfa0> assign[=] call[name[classify_catalog], parameter[name[catalog]]]
if compare[call[name[len], parameter[name[components]]] greater[>] constant[0]] begin[:]
variable[cat] assign[=] call[name[sorted], parameter[name[components]]]
variable[suffix] assign[=] constant[comp]
if compare[call[name[len], parameter[name[cat]]] greater[>] constant[0]] begin[:]
variable[ras] assign[=] <ast.ListComp object at 0x7da2041d9fc0>
variable[decs] assign[=] <ast.ListComp object at 0x7da2041da1d0>
if <ast.UnaryOp object at 0x7da2041db6a0> begin[:]
variable[bmajs] assign[=] <ast.ListComp object at 0x7da2041db040>
variable[bmins] assign[=] name[bmajs]
variable[pas] assign[=] <ast.ListComp object at 0x7da2041db880>
variable[names] assign[=] <ast.ListComp object at 0x7da2041d8e80>
if compare[name[fmt] equal[==] constant[ann]] begin[:]
variable[new_file] assign[=] call[name[re].sub, parameter[constant[.ann$], call[constant[_{0}.ann].format, parameter[name[suffix]]], name[filename]]]
variable[out] assign[=] call[name[open], parameter[name[new_file], constant[w]]]
call[name[print], parameter[call[constant[#Aegean version {0}-({1})].format, parameter[name[__version__], name[__date__]]]]]
call[name[print], parameter[constant[PA SKY]]]
call[name[print], parameter[constant[FONT hershey12]]]
call[name[print], parameter[constant[COORD W]]]
variable[formatter] assign[=] constant[ELLIPSE W {0} {1} {2} {3} {4:+07.3f} #{5}
TEXT W {0} {1} {5}]
for taget[tuple[[<ast.Name object at 0x7da2047e8310>, <ast.Name object at 0x7da2047e9f90>, <ast.Name object at 0x7da20ec06ef0>, <ast.Name object at 0x7da20ec05c90>, <ast.Name object at 0x7da20ec06260>, <ast.Name object at 0x7da20ec062c0>]]] in starred[call[name[zip], parameter[name[ras], name[decs], name[bmajs], name[bmins], name[pas], name[names]]]] begin[:]
if <ast.BoolOp object at 0x7da20e74be50> begin[:]
call[name[print], parameter[constant[#]]]
call[name[print], parameter[call[name[formatter].format, parameter[name[ra], name[dec], name[bmaj], name[bmin], name[pa], name[name]]]]]
call[name[out].close, parameter[]]
call[name[log].info, parameter[call[constant[wrote {0}].format, parameter[name[new_file]]]]]
if compare[call[name[len], parameter[name[islands]]] greater[>] constant[0]] begin[:]
if compare[name[fmt] equal[==] constant[reg]] begin[:]
variable[new_file] assign[=] call[name[re].sub, parameter[constant[.reg$], constant[_isle.reg], name[filename]]]
call[name[writeIslandContours], parameter[name[new_file], name[islands], name[fmt]]]
call[name[log].info, parameter[call[constant[wrote {0}].format, parameter[name[new_file]]]]]
return[None] | keyword[def] identifier[writeAnn] ( identifier[filename] , identifier[catalog] , identifier[fmt] ):
literal[string]
keyword[if] identifier[fmt] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[fmt] ))
keyword[return]
identifier[components] , identifier[islands] , identifier[simples] = identifier[classify_catalog] ( identifier[catalog] )
keyword[if] identifier[len] ( identifier[components] )> literal[int] :
identifier[cat] = identifier[sorted] ( identifier[components] )
identifier[suffix] = literal[string]
keyword[elif] identifier[len] ( identifier[simples] )> literal[int] :
identifier[cat] = identifier[simples]
identifier[suffix] = literal[string]
keyword[else] :
identifier[cat] =[]
keyword[if] identifier[len] ( identifier[cat] )> literal[int] :
identifier[ras] =[ identifier[a] . identifier[ra] keyword[for] identifier[a] keyword[in] identifier[cat] ]
identifier[decs] =[ identifier[a] . identifier[dec] keyword[for] identifier[a] keyword[in] identifier[cat] ]
keyword[if] keyword[not] identifier[hasattr] ( identifier[cat] [ literal[int] ], literal[string] ):
identifier[bmajs] =[ literal[int] / literal[int] keyword[for] identifier[a] keyword[in] identifier[cat] ]
identifier[bmins] = identifier[bmajs]
identifier[pas] =[ literal[int] keyword[for] identifier[a] keyword[in] identifier[cat] ]
keyword[else] :
identifier[bmajs] =[ identifier[a] . identifier[a] / literal[int] keyword[for] identifier[a] keyword[in] identifier[cat] ]
identifier[bmins] =[ identifier[a] . identifier[b] / literal[int] keyword[for] identifier[a] keyword[in] identifier[cat] ]
identifier[pas] =[ identifier[a] . identifier[pa] keyword[for] identifier[a] keyword[in] identifier[cat] ]
identifier[names] =[ identifier[a] . identifier[__repr__] () keyword[for] identifier[a] keyword[in] identifier[cat] ]
keyword[if] identifier[fmt] == literal[string] :
identifier[new_file] = identifier[re] . identifier[sub] ( literal[string] , literal[string] . identifier[format] ( identifier[suffix] ), identifier[filename] )
identifier[out] = identifier[open] ( identifier[new_file] , literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[__version__] , identifier[__date__] ), identifier[file] = identifier[out] )
identifier[print] ( literal[string] , identifier[file] = identifier[out] )
identifier[print] ( literal[string] , identifier[file] = identifier[out] )
identifier[print] ( literal[string] , identifier[file] = identifier[out] )
identifier[formatter] = literal[string]
keyword[else] :
identifier[new_file] = identifier[re] . identifier[sub] ( literal[string] , literal[string] . identifier[format] ( identifier[suffix] ), identifier[filename] )
identifier[out] = identifier[open] ( identifier[new_file] , literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[__version__] , identifier[__date__] ), identifier[file] = identifier[out] )
identifier[print] ( literal[string] , identifier[file] = identifier[out] )
identifier[formatter] = literal[string]
identifier[pas] =[ identifier[a] - literal[int] keyword[for] identifier[a] keyword[in] identifier[pas] ]
keyword[for] identifier[ra] , identifier[dec] , identifier[bmaj] , identifier[bmin] , identifier[pa] , identifier[name] keyword[in] identifier[zip] ( identifier[ras] , identifier[decs] , identifier[bmajs] , identifier[bmins] , identifier[pas] , identifier[names] ):
keyword[if] identifier[np] . identifier[nan] keyword[in] [ identifier[ra] , identifier[dec] , identifier[bmaj] , identifier[bmin] , identifier[pa] ] keyword[or] identifier[bmaj] >= literal[int] :
identifier[print] ( literal[string] , identifier[end] = literal[string] , identifier[file] = identifier[out] )
identifier[print] ( identifier[formatter] . identifier[format] ( identifier[ra] , identifier[dec] , identifier[bmaj] , identifier[bmin] , identifier[pa] , identifier[name] ), identifier[file] = identifier[out] )
identifier[out] . identifier[close] ()
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[new_file] ))
keyword[if] identifier[len] ( identifier[islands] )> literal[int] :
keyword[if] identifier[fmt] == literal[string] :
identifier[new_file] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[filename] )
keyword[elif] identifier[fmt] == literal[string] :
identifier[log] . identifier[warning] ( literal[string] )
keyword[return]
keyword[else] :
identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[fmt] ))
keyword[return]
identifier[writeIslandContours] ( identifier[new_file] , identifier[islands] , identifier[fmt] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[new_file] ))
keyword[return] | def writeAnn(filename, catalog, fmt):
"""
Write an annotation file that can be read by Kvis (.ann) or DS9 (.reg).
Uses ra/dec from catalog.
Draws ellipses if bmaj/bmin/pa are in catalog. Draws 30" circles otherwise.
Only :class:`AegeanTools.models.OutputSource` will appear in the annotation file
unless there are none, in which case :class:`AegeanTools.models.SimpleSource` (if present)
will be written. If any :class:`AegeanTools.models.IslandSource` objects are present then
an island contours file will be written.
Parameters
----------
filename : str
Output filename base.
catalog : list
List of sources.
fmt : ['ann', 'reg']
Output file type.
Returns
-------
None
See Also
--------
AegeanTools.catalogs.writeIslandContours
"""
if fmt not in ['reg', 'ann']:
        log.warning('Format not supported for island boxes: {0}'.format(fmt))
return # fmt not supported # depends on [control=['if'], data=['fmt']]
(components, islands, simples) = classify_catalog(catalog)
if len(components) > 0:
cat = sorted(components)
suffix = 'comp' # depends on [control=['if'], data=[]]
elif len(simples) > 0:
cat = simples
suffix = 'simp' # depends on [control=['if'], data=[]]
else:
cat = []
if len(cat) > 0:
ras = [a.ra for a in cat]
decs = [a.dec for a in cat]
if not hasattr(cat[0], 'a'): # a being the variable that I used for bmaj.
bmajs = [30 / 3600.0 for a in cat]
bmins = bmajs
pas = [0 for a in cat] # depends on [control=['if'], data=[]]
else:
bmajs = [a.a / 3600.0 for a in cat]
bmins = [a.b / 3600.0 for a in cat]
pas = [a.pa for a in cat]
names = [a.__repr__() for a in cat]
if fmt == 'ann':
new_file = re.sub('.ann$', '_{0}.ann'.format(suffix), filename)
out = open(new_file, 'w')
print('#Aegean version {0}-({1})'.format(__version__, __date__), file=out)
print('PA SKY', file=out)
print('FONT hershey12', file=out)
print('COORD W', file=out)
formatter = 'ELLIPSE W {0} {1} {2} {3} {4:+07.3f} #{5}\nTEXT W {0} {1} {5}' # depends on [control=['if'], data=[]]
else: # reg
new_file = re.sub('.reg$', '_{0}.reg'.format(suffix), filename)
out = open(new_file, 'w')
print('#Aegean version {0}-({1})'.format(__version__, __date__), file=out)
print('fk5', file=out)
formatter = 'ellipse {0} {1} {2:.9f}d {3:.9f}d {4:+07.3f}d # text="{5}"'
# DS9 has some strange ideas about position angle
pas = [a - 90 for a in pas]
for (ra, dec, bmaj, bmin, pa, name) in zip(ras, decs, bmajs, bmins, pas, names):
# comment out lines that have invalid or stupid entries
if np.nan in [ra, dec, bmaj, bmin, pa] or bmaj >= 180:
print('#', end=' ', file=out) # depends on [control=['if'], data=[]]
print(formatter.format(ra, dec, bmaj, bmin, pa, name), file=out) # depends on [control=['for'], data=[]]
out.close()
log.info('wrote {0}'.format(new_file)) # depends on [control=['if'], data=[]]
if len(islands) > 0:
if fmt == 'reg':
new_file = re.sub('.reg$', '_isle.reg', filename) # depends on [control=['if'], data=[]]
elif fmt == 'ann':
log.warning('kvis islands are currently not working')
return # depends on [control=['if'], data=[]]
else:
log.warning('format {0} not supported for island annotations'.format(fmt))
return
writeIslandContours(new_file, islands, fmt)
log.info('wrote {0}'.format(new_file)) # depends on [control=['if'], data=[]]
return |
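A sketch of writing a DS9 region file from a fitted catalogue (hypothetical source list):

writeAnn("out.reg", catalog=sources, fmt="reg")
# components land in out_comp.reg; if IslandSource objects are present,
# island contours are written to out_isle.reg as well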
def makeFontBoundingBox(self):
"""
Make a bounding box for the font.
**This should not be called externally.** Subclasses
may override this method to handle the bounds creation
in a different way if desired.
"""
if not hasattr(self, "glyphBoundingBoxes"):
self.glyphBoundingBoxes = self.makeGlyphsBoundingBoxes()
fontBox = None
for glyphName, glyphBox in self.glyphBoundingBoxes.items():
if glyphBox is None:
continue
if fontBox is None:
fontBox = glyphBox
else:
fontBox = unionRect(fontBox, glyphBox)
if fontBox is None: # unlikely
fontBox = BoundingBox(0, 0, 0, 0)
return fontBox | def function[makeFontBoundingBox, parameter[self]]:
constant[
Make a bounding box for the font.
**This should not be called externally.** Subclasses
may override this method to handle the bounds creation
in a different way if desired.
]
if <ast.UnaryOp object at 0x7da1b0e04bb0> begin[:]
name[self].glyphBoundingBoxes assign[=] call[name[self].makeGlyphsBoundingBoxes, parameter[]]
variable[fontBox] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da1b0e06710>, <ast.Name object at 0x7da1b0e05cc0>]]] in starred[call[name[self].glyphBoundingBoxes.items, parameter[]]] begin[:]
if compare[name[glyphBox] is constant[None]] begin[:]
continue
if compare[name[fontBox] is constant[None]] begin[:]
variable[fontBox] assign[=] name[glyphBox]
if compare[name[fontBox] is constant[None]] begin[:]
variable[fontBox] assign[=] call[name[BoundingBox], parameter[constant[0], constant[0], constant[0], constant[0]]]
return[name[fontBox]] | keyword[def] identifier[makeFontBoundingBox] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[glyphBoundingBoxes] = identifier[self] . identifier[makeGlyphsBoundingBoxes] ()
identifier[fontBox] = keyword[None]
keyword[for] identifier[glyphName] , identifier[glyphBox] keyword[in] identifier[self] . identifier[glyphBoundingBoxes] . identifier[items] ():
keyword[if] identifier[glyphBox] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[fontBox] keyword[is] keyword[None] :
identifier[fontBox] = identifier[glyphBox]
keyword[else] :
identifier[fontBox] = identifier[unionRect] ( identifier[fontBox] , identifier[glyphBox] )
keyword[if] identifier[fontBox] keyword[is] keyword[None] :
identifier[fontBox] = identifier[BoundingBox] ( literal[int] , literal[int] , literal[int] , literal[int] )
keyword[return] identifier[fontBox] | def makeFontBoundingBox(self):
"""
Make a bounding box for the font.
**This should not be called externally.** Subclasses
may override this method to handle the bounds creation
in a different way if desired.
"""
if not hasattr(self, 'glyphBoundingBoxes'):
self.glyphBoundingBoxes = self.makeGlyphsBoundingBoxes() # depends on [control=['if'], data=[]]
fontBox = None
for (glyphName, glyphBox) in self.glyphBoundingBoxes.items():
if glyphBox is None:
continue # depends on [control=['if'], data=[]]
if fontBox is None:
fontBox = glyphBox # depends on [control=['if'], data=['fontBox']]
else:
fontBox = unionRect(fontBox, glyphBox) # depends on [control=['for'], data=[]]
if fontBox is None: # unlikely
fontBox = BoundingBox(0, 0, 0, 0) # depends on [control=['if'], data=['fontBox']]
return fontBox |
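The union fold can be exercised by stubbing the per-glyph boxes (hypothetical subclass; BaseFont stands in for the real superclass, with BoundingBox and unionRect as assumed by the method):

class StubFont(BaseFont):  # hypothetical base class
    def makeGlyphsBoundingBoxes(self):
        return {"A": BoundingBox(0, 0, 500, 700), "space": None}

box = StubFont().makeFontBoundingBox()  # -> BoundingBox(0, 0, 500, 700)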
def contextMenuEvent(self, event):
""" Creates and executes the context menu for the tree view
"""
menu = QtWidgets.QMenu(self)
for action in self.actions():
menu.addAction(action)
openAsMenu = self.createOpenAsMenu(parent=menu)
menu.insertMenu(self.closeItemAction, openAsMenu)
menu.exec_(event.globalPos()) | def function[contextMenuEvent, parameter[self, event]]:
constant[ Creates and executes the context menu for the tree view
]
variable[menu] assign[=] call[name[QtWidgets].QMenu, parameter[name[self]]]
for taget[name[action]] in starred[call[name[self].actions, parameter[]]] begin[:]
call[name[menu].addAction, parameter[name[action]]]
variable[openAsMenu] assign[=] call[name[self].createOpenAsMenu, parameter[]]
call[name[menu].insertMenu, parameter[name[self].closeItemAction, name[openAsMenu]]]
call[name[menu].exec_, parameter[call[name[event].globalPos, parameter[]]]] | keyword[def] identifier[contextMenuEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[menu] = identifier[QtWidgets] . identifier[QMenu] ( identifier[self] )
keyword[for] identifier[action] keyword[in] identifier[self] . identifier[actions] ():
identifier[menu] . identifier[addAction] ( identifier[action] )
identifier[openAsMenu] = identifier[self] . identifier[createOpenAsMenu] ( identifier[parent] = identifier[menu] )
identifier[menu] . identifier[insertMenu] ( identifier[self] . identifier[closeItemAction] , identifier[openAsMenu] )
identifier[menu] . identifier[exec_] ( identifier[event] . identifier[globalPos] ()) | def contextMenuEvent(self, event):
""" Creates and executes the context menu for the tree view
"""
menu = QtWidgets.QMenu(self)
for action in self.actions():
menu.addAction(action) # depends on [control=['for'], data=['action']]
openAsMenu = self.createOpenAsMenu(parent=menu)
menu.insertMenu(self.closeItemAction, openAsMenu)
menu.exec_(event.globalPos()) |
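Qt invokes the override automatically on right-click, so the menu simply mirrors whatever was registered beforehand; a sketch (hypothetical tree view and action):

tree.addAction(copy_action)  # hypothetical QAction; appears in the context menu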
def _decorate(flush=True, attempts=1, only_authenticate=False):
"""
Wraps the given function such that conn.login() or conn.authenticate() is
executed.
Doing the real work for autologin and autoauthenticate to minimize code
duplication.
:type flush: bool
:param flush: Whether to flush the last prompt from the buffer.
:type attempts: int
:param attempts: The number of login attempts if login fails.
:type only_authenticate: bool
:param only_authenticate: login or only authenticate (don't authorize)?
:rtype: function
:return: The wrapped function.
"""
def decorator(function):
def decorated(job, host, conn, *args, **kwargs):
failed = 0
while True:
try:
if only_authenticate:
conn.authenticate(flush=flush)
else:
conn.login(flush=flush)
except LoginFailure as e:
failed += 1
if failed >= attempts:
raise
continue
break
return function(job, host, conn, *args, **kwargs)
copy_labels(function, decorated)
return decorated
return decorator | def function[_decorate, parameter[flush, attempts, only_authenticate]]:
constant[
Wraps the given function such that conn.login() or conn.authenticate() is
executed.
Doing the real work for autologin and autoauthenticate to minimize code
duplication.
:type flush: bool
:param flush: Whether to flush the last prompt from the buffer.
:type attempts: int
:param attempts: The number of login attempts if login fails.
:type only_authenticate: bool
:param only_authenticate: login or only authenticate (don't authorize)?
:rtype: function
:return: The wrapped function.
]
def function[decorator, parameter[function]]:
def function[decorated, parameter[job, host, conn]]:
variable[failed] assign[=] constant[0]
while constant[True] begin[:]
<ast.Try object at 0x7da1b06778b0>
break
return[call[name[function], parameter[name[job], name[host], name[conn], <ast.Starred object at 0x7da1b0516770>]]]
call[name[copy_labels], parameter[name[function], name[decorated]]]
return[name[decorated]]
return[name[decorator]] | keyword[def] identifier[_decorate] ( identifier[flush] = keyword[True] , identifier[attempts] = literal[int] , identifier[only_authenticate] = keyword[False] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[function] ):
keyword[def] identifier[decorated] ( identifier[job] , identifier[host] , identifier[conn] ,* identifier[args] ,** identifier[kwargs] ):
identifier[failed] = literal[int]
keyword[while] keyword[True] :
keyword[try] :
keyword[if] identifier[only_authenticate] :
identifier[conn] . identifier[authenticate] ( identifier[flush] = identifier[flush] )
keyword[else] :
identifier[conn] . identifier[login] ( identifier[flush] = identifier[flush] )
keyword[except] identifier[LoginFailure] keyword[as] identifier[e] :
identifier[failed] += literal[int]
keyword[if] identifier[failed] >= identifier[attempts] :
keyword[raise]
keyword[continue]
keyword[break]
keyword[return] identifier[function] ( identifier[job] , identifier[host] , identifier[conn] ,* identifier[args] ,** identifier[kwargs] )
identifier[copy_labels] ( identifier[function] , identifier[decorated] )
keyword[return] identifier[decorated]
keyword[return] identifier[decorator] | def _decorate(flush=True, attempts=1, only_authenticate=False):
"""
Wraps the given function such that conn.login() or conn.authenticate() is
executed.
Doing the real work for autologin and autoauthenticate to minimize code
duplication.
:type flush: bool
:param flush: Whether to flush the last prompt from the buffer.
:type attempts: int
:param attempts: The number of login attempts if login fails.
:type only_authenticate: bool
:param only_authenticate: login or only authenticate (don't authorize)?
:rtype: function
:return: The wrapped function.
"""
def decorator(function):
def decorated(job, host, conn, *args, **kwargs):
failed = 0
while True:
try:
if only_authenticate:
conn.authenticate(flush=flush) # depends on [control=['if'], data=[]]
else:
conn.login(flush=flush) # depends on [control=['try'], data=[]]
except LoginFailure as e:
failed += 1
if failed >= attempts:
raise # depends on [control=['if'], data=[]]
continue # depends on [control=['except'], data=[]]
break # depends on [control=['while'], data=[]]
return function(job, host, conn, *args, **kwargs)
copy_labels(function, decorated)
return decorated
return decorator |
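A hedged usage sketch for the decorator factory above. LoginFailure and copy_labels are stubbed minimally here, and FakeConn is a hypothetical connection object mimicking the (job, host, conn) calling convention from the record.

class LoginFailure(Exception):
    pass  # minimal stand-in for the real exception type

def copy_labels(src, dst):
    dst.__name__ = getattr(src, '__name__', 'decorated')  # stub helper

class FakeConn:
    def __init__(self, fail_times):
        self.fail_times = fail_times
    def login(self, flush=True):
        # Fail the first `fail_times` attempts, then succeed silently.
        if self.fail_times > 0:
            self.fail_times -= 1
            raise LoginFailure('bad credentials')

@_decorate(attempts=3)
def show_version(job, host, conn):
    return 'logged in'

print(show_version(None, None, FakeConn(fail_times=2)))  # -> 'logged in'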
def show_first_and_last_lines(result):
"""Just print the first and last five lines of (pypi) output"""
lines = result.split('\n')
if len(lines) < 11:
for line in lines:
print(line)
return
print('Showing first few lines...')
for line in lines[:5]:
print(line)
print('...')
print('Showing last few lines...')
for line in lines[-5:]:
print(line) | def function[show_first_and_last_lines, parameter[result]]:
constant[Just print the first and last five lines of (pypi) output]
variable[lines] assign[=] call[name[result].split, parameter[constant[
]]]
if compare[call[name[len], parameter[name[lines]]] less[<] constant[11]] begin[:]
for taget[name[line]] in starred[name[lines]] begin[:]
call[name[print], parameter[name[line]]]
return[None]
call[name[print], parameter[constant[Showing first few lines...]]]
for taget[name[line]] in starred[call[name[lines]][<ast.Slice object at 0x7da1b1601ba0>]] begin[:]
call[name[print], parameter[name[line]]]
call[name[print], parameter[constant[...]]]
call[name[print], parameter[constant[Showing last few lines...]]]
for taget[name[line]] in starred[call[name[lines]][<ast.Slice object at 0x7da1b1435030>]] begin[:]
call[name[print], parameter[name[line]]] | keyword[def] identifier[show_first_and_last_lines] ( identifier[result] ):
literal[string]
identifier[lines] = identifier[result] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[lines] )< literal[int] :
keyword[for] identifier[line] keyword[in] identifier[lines] :
identifier[print] ( identifier[line] )
keyword[return]
identifier[print] ( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[lines] [: literal[int] ]:
identifier[print] ( identifier[line] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[lines] [- literal[int] :]:
identifier[print] ( identifier[line] ) | def show_first_and_last_lines(result):
"""Just print the first and last five lines of (pypi) output"""
lines = result.split('\n')
if len(lines) < 11:
for line in lines:
print(line) # depends on [control=['for'], data=['line']]
return # depends on [control=['if'], data=[]]
print('Showing first few lines...')
for line in lines[:5]:
print(line) # depends on [control=['for'], data=['line']]
print('...')
print('Showing last few lines...')
for line in lines[-5:]:
print(line) # depends on [control=['for'], data=['line']] |
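A quick demonstration of the truncation threshold above: eleven or more lines trigger the head/tail display, shorter output is printed in full.

output = '\n'.join('line %d' % i for i in range(20))
show_first_and_last_lines(output)
# Prints 'line 0'..'line 4', the '...' marker, then 'line 15'..'line 19'.
show_first_and_last_lines('short\noutput')  # fewer than 11 lines: printed whole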
def _write_mef(self, key, extlist, outfile):
"""Write out regular multi-extension FITS data."""
channel = self.fv.get_channel(self.chname)
with fits.open(outfile, mode='update') as pf:
# Process each modified data extension
for idx in extlist:
k = '{0}[{1}]'.format(key, self._format_extname(idx))
image = channel.datasrc[k]
# Insert data and header into output HDU
pf[idx].data = image.get_data()
self._write_header(image, pf[idx])
# Write history to PRIMARY
self._write_history(key, pf['PRIMARY']) | def function[_write_mef, parameter[self, key, extlist, outfile]]:
constant[Write out regular multi-extension FITS data.]
variable[channel] assign[=] call[name[self].fv.get_channel, parameter[name[self].chname]]
with call[name[fits].open, parameter[name[outfile]]] begin[:]
for taget[name[idx]] in starred[name[extlist]] begin[:]
variable[k] assign[=] call[constant[{0}[{1}]].format, parameter[name[key], call[name[self]._format_extname, parameter[name[idx]]]]]
variable[image] assign[=] call[name[channel].datasrc][name[k]]
call[name[pf]][name[idx]].data assign[=] call[name[image].get_data, parameter[]]
call[name[self]._write_header, parameter[name[image], call[name[pf]][name[idx]]]]
call[name[self]._write_history, parameter[name[key], call[name[pf]][constant[PRIMARY]]]] | keyword[def] identifier[_write_mef] ( identifier[self] , identifier[key] , identifier[extlist] , identifier[outfile] ):
literal[string]
identifier[channel] = identifier[self] . identifier[fv] . identifier[get_channel] ( identifier[self] . identifier[chname] )
keyword[with] identifier[fits] . identifier[open] ( identifier[outfile] , identifier[mode] = literal[string] ) keyword[as] identifier[pf] :
keyword[for] identifier[idx] keyword[in] identifier[extlist] :
identifier[k] = literal[string] . identifier[format] ( identifier[key] , identifier[self] . identifier[_format_extname] ( identifier[idx] ))
identifier[image] = identifier[channel] . identifier[datasrc] [ identifier[k] ]
identifier[pf] [ identifier[idx] ]. identifier[data] = identifier[image] . identifier[get_data] ()
identifier[self] . identifier[_write_header] ( identifier[image] , identifier[pf] [ identifier[idx] ])
identifier[self] . identifier[_write_history] ( identifier[key] , identifier[pf] [ literal[string] ]) | def _write_mef(self, key, extlist, outfile):
"""Write out regular multi-extension FITS data."""
channel = self.fv.get_channel(self.chname)
with fits.open(outfile, mode='update') as pf:
# Process each modified data extension
for idx in extlist:
k = '{0}[{1}]'.format(key, self._format_extname(idx))
image = channel.datasrc[k]
# Insert data and header into output HDU
pf[idx].data = image.get_data()
self._write_header(image, pf[idx]) # depends on [control=['for'], data=['idx']]
# Write history to PRIMARY
self._write_history(key, pf['PRIMARY']) # depends on [control=['with'], data=['pf']] |
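For reference, a sketch of the astropy.io.fits update pattern the method above relies on: opening in 'update' mode and assigning to an HDU's data persists the change when the context manager closes. The file name and extension index are placeholders.

import numpy as np
from astropy.io import fits

with fits.open('example_mef.fits', mode='update') as pf:  # hypothetical file
    pf[1].data = np.zeros_like(pf[1].data)                # overwrite extension 1
    pf['PRIMARY'].header['HISTORY'] = 'zeroed extension 1'
# Changes are flushed to disk when the 'with' block exits.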
def insert(self, loc, item, value, allow_duplicates=False):
"""
Insert item at selected position.
Parameters
----------
loc : int
item : hashable
value : array_like
allow_duplicates: bool
If False, trying to insert non-unique item will raise
"""
if not allow_duplicates and item in self.items:
# Should this be a different kind of error??
raise ValueError('cannot insert {}, already exists'.format(item))
if not isinstance(loc, int):
raise TypeError("loc must be int")
# insert to the axis; this could possibly raise a TypeError
new_axis = self.items.insert(loc, item)
block = make_block(values=value, ndim=self.ndim,
placement=slice(loc, loc + 1))
for blkno, count in _fast_count_smallints(self._blknos[loc:]):
blk = self.blocks[blkno]
if count == len(blk.mgr_locs):
blk.mgr_locs = blk.mgr_locs.add(1)
else:
new_mgr_locs = blk.mgr_locs.as_array.copy()
new_mgr_locs[new_mgr_locs >= loc] += 1
blk.mgr_locs = new_mgr_locs
if loc == self._blklocs.shape[0]:
# np.append is a lot faster, let's use it if we can.
self._blklocs = np.append(self._blklocs, 0)
self._blknos = np.append(self._blknos, len(self.blocks))
else:
self._blklocs = np.insert(self._blklocs, loc, 0)
self._blknos = np.insert(self._blknos, loc, len(self.blocks))
self.axes[0] = new_axis
self.blocks += (block,)
self._shape = None
self._known_consolidated = False
if len(self.blocks) > 100:
self._consolidate_inplace() | def function[insert, parameter[self, loc, item, value, allow_duplicates]]:
constant[
Insert item at selected position.
Parameters
----------
loc : int
item : hashable
value : array_like
allow_duplicates: bool
If False, trying to insert non-unique item will raise
]
if <ast.BoolOp object at 0x7da18f00ee90> begin[:]
<ast.Raise object at 0x7da18f00c820>
if <ast.UnaryOp object at 0x7da18f00e0b0> begin[:]
<ast.Raise object at 0x7da18f00e5f0>
variable[new_axis] assign[=] call[name[self].items.insert, parameter[name[loc], name[item]]]
variable[block] assign[=] call[name[make_block], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20e954cd0>, <ast.Name object at 0x7da20e956050>]]] in starred[call[name[_fast_count_smallints], parameter[call[name[self]._blknos][<ast.Slice object at 0x7da20e956aa0>]]]] begin[:]
variable[blk] assign[=] call[name[self].blocks][name[blkno]]
if compare[name[count] equal[==] call[name[len], parameter[name[blk].mgr_locs]]] begin[:]
name[blk].mgr_locs assign[=] call[name[blk].mgr_locs.add, parameter[constant[1]]]
if compare[name[loc] equal[==] call[name[self]._blklocs.shape][constant[0]]] begin[:]
name[self]._blklocs assign[=] call[name[np].append, parameter[name[self]._blklocs, constant[0]]]
name[self]._blknos assign[=] call[name[np].append, parameter[name[self]._blknos, call[name[len], parameter[name[self].blocks]]]]
call[name[self].axes][constant[0]] assign[=] name[new_axis]
<ast.AugAssign object at 0x7da20c6c66e0>
name[self]._shape assign[=] constant[None]
name[self]._known_consolidated assign[=] constant[False]
if compare[call[name[len], parameter[name[self].blocks]] greater[>] constant[100]] begin[:]
call[name[self]._consolidate_inplace, parameter[]] | keyword[def] identifier[insert] ( identifier[self] , identifier[loc] , identifier[item] , identifier[value] , identifier[allow_duplicates] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[allow_duplicates] keyword[and] identifier[item] keyword[in] identifier[self] . identifier[items] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[item] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[loc] , identifier[int] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[new_axis] = identifier[self] . identifier[items] . identifier[insert] ( identifier[loc] , identifier[item] )
identifier[block] = identifier[make_block] ( identifier[values] = identifier[value] , identifier[ndim] = identifier[self] . identifier[ndim] ,
identifier[placement] = identifier[slice] ( identifier[loc] , identifier[loc] + literal[int] ))
keyword[for] identifier[blkno] , identifier[count] keyword[in] identifier[_fast_count_smallints] ( identifier[self] . identifier[_blknos] [ identifier[loc] :]):
identifier[blk] = identifier[self] . identifier[blocks] [ identifier[blkno] ]
keyword[if] identifier[count] == identifier[len] ( identifier[blk] . identifier[mgr_locs] ):
identifier[blk] . identifier[mgr_locs] = identifier[blk] . identifier[mgr_locs] . identifier[add] ( literal[int] )
keyword[else] :
identifier[new_mgr_locs] = identifier[blk] . identifier[mgr_locs] . identifier[as_array] . identifier[copy] ()
identifier[new_mgr_locs] [ identifier[new_mgr_locs] >= identifier[loc] ]+= literal[int]
identifier[blk] . identifier[mgr_locs] = identifier[new_mgr_locs]
keyword[if] identifier[loc] == identifier[self] . identifier[_blklocs] . identifier[shape] [ literal[int] ]:
identifier[self] . identifier[_blklocs] = identifier[np] . identifier[append] ( identifier[self] . identifier[_blklocs] , literal[int] )
identifier[self] . identifier[_blknos] = identifier[np] . identifier[append] ( identifier[self] . identifier[_blknos] , identifier[len] ( identifier[self] . identifier[blocks] ))
keyword[else] :
identifier[self] . identifier[_blklocs] = identifier[np] . identifier[insert] ( identifier[self] . identifier[_blklocs] , identifier[loc] , literal[int] )
identifier[self] . identifier[_blknos] = identifier[np] . identifier[insert] ( identifier[self] . identifier[_blknos] , identifier[loc] , identifier[len] ( identifier[self] . identifier[blocks] ))
identifier[self] . identifier[axes] [ literal[int] ]= identifier[new_axis]
identifier[self] . identifier[blocks] +=( identifier[block] ,)
identifier[self] . identifier[_shape] = keyword[None]
identifier[self] . identifier[_known_consolidated] = keyword[False]
keyword[if] identifier[len] ( identifier[self] . identifier[blocks] )> literal[int] :
identifier[self] . identifier[_consolidate_inplace] () | def insert(self, loc, item, value, allow_duplicates=False):
"""
Insert item at selected position.
Parameters
----------
loc : int
item : hashable
value : array_like
allow_duplicates: bool
If False, trying to insert non-unique item will raise
"""
if not allow_duplicates and item in self.items:
# Should this be a different kind of error??
raise ValueError('cannot insert {}, already exists'.format(item)) # depends on [control=['if'], data=[]]
if not isinstance(loc, int):
raise TypeError('loc must be int') # depends on [control=['if'], data=[]]
# insert to the axis; this could possibly raise a TypeError
new_axis = self.items.insert(loc, item)
block = make_block(values=value, ndim=self.ndim, placement=slice(loc, loc + 1))
for (blkno, count) in _fast_count_smallints(self._blknos[loc:]):
blk = self.blocks[blkno]
if count == len(blk.mgr_locs):
blk.mgr_locs = blk.mgr_locs.add(1) # depends on [control=['if'], data=[]]
else:
new_mgr_locs = blk.mgr_locs.as_array.copy()
new_mgr_locs[new_mgr_locs >= loc] += 1
blk.mgr_locs = new_mgr_locs # depends on [control=['for'], data=[]]
if loc == self._blklocs.shape[0]:
# np.append is a lot faster, let's use it if we can.
self._blklocs = np.append(self._blklocs, 0)
self._blknos = np.append(self._blknos, len(self.blocks)) # depends on [control=['if'], data=[]]
else:
self._blklocs = np.insert(self._blklocs, loc, 0)
self._blknos = np.insert(self._blknos, loc, len(self.blocks))
self.axes[0] = new_axis
self.blocks += (block,)
self._shape = None
self._known_consolidated = False
if len(self.blocks) > 100:
self._consolidate_inplace() # depends on [control=['if'], data=[]] |
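The internal method above backs the public DataFrame.insert API; a short demonstration of the position argument and the duplicate check it enforces:

import pandas as pd

df = pd.DataFrame({'a': [1, 2], 'c': [5, 6]})
df.insert(1, 'b', [3, 4])        # 'b' lands between 'a' and 'c'
try:
    df.insert(0, 'b', [0, 0])    # non-unique item raises by default
except ValueError as exc:
    print(exc)                   # cannot insert b, already exists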
def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
"""Substitute construction variables in a string (or list or other
object) and separate the arguments into a command list.
The companion scons_subst() function (above) handles basic
substitutions within strings, so see that function instead
if that's what you're looking for.
"""
class ListSubber(collections.UserList):
"""A class to construct the results of a scons_subst_list() call.
Like StringSubber, this class binds a specific construction
environment, mode, target and source with two methods
(substitute() and expand()) that handle the expansion.
In addition, however, this class is used to track the state of
the result(s) we're gathering so we can do the appropriate thing
whenever we have to append another word to the result--start a new
line, start a new word, append to the current word, etc. We do
this by setting the "append" attribute to the right method so
that our wrapper methods only need ever call ListSubber.append(),
and the rest of the object takes care of doing the right thing
internally.
"""
def __init__(self, env, mode, conv, gvars):
collections.UserList.__init__(self, [])
self.env = env
self.mode = mode
self.conv = conv
self.gvars = gvars
if self.mode == SUBST_RAW:
self.add_strip = lambda x: self.append(x)
else:
self.add_strip = lambda x: None
self.in_strip = None
self.next_line()
def expand(self, s, lvars, within_list):
"""Expand a single "token" as necessary, appending the
expansion to the current result.
This handles expanding different types of things (strings,
lists, callables) appropriately. It calls the wrapper
substitute() method to re-expand things as necessary, so that
the results of expansions of side-by-side strings still get
re-evaluated separately, not smushed together.
"""
if is_String(s):
try:
s0, s1 = s[:2]
except (IndexError, ValueError):
self.append(s)
return
if s0 != '$':
self.append(s)
return
if s1 == '$':
self.append('$')
elif s1 == '(':
self.open_strip('$(')
elif s1 == ')':
self.close_strip('$)')
else:
key = s[1:]
if key[0] == '{' or key.find('.') >= 0:
if key[0] == '{':
key = key[1:-1]
try:
s = eval(key, self.gvars, lvars)
except KeyboardInterrupt:
raise
except Exception as e:
if e.__class__ in AllowableExceptions:
return
raise_exception(e, lvars['TARGETS'], s)
else:
if key in lvars:
s = lvars[key]
elif key in self.gvars:
s = self.gvars[key]
elif not NameError in AllowableExceptions:
raise_exception(NameError(), lvars['TARGETS'], s)
else:
return
# Before re-expanding the result, handle
# recursive expansion by copying the local
# variable dictionary and overwriting a null
# string for the value of the variable name
# we just expanded.
lv = lvars.copy()
var = key.split('.')[0]
lv[var] = ''
self.substitute(s, lv, 0)
self.this_word()
elif is_Sequence(s):
for a in s:
self.substitute(a, lvars, 1)
self.next_word()
elif callable(s):
try:
s = s(target=lvars['TARGETS'],
source=lvars['SOURCES'],
env=self.env,
for_signature=(self.mode != SUBST_CMD))
except TypeError:
# This probably indicates that it's a callable
# object that doesn't match our calling arguments
# (like an Action).
if self.mode == SUBST_RAW:
self.append(s)
return
s = self.conv(s)
self.substitute(s, lvars, within_list)
elif s is None:
self.this_word()
else:
self.append(s)
def substitute(self, args, lvars, within_list):
"""Substitute expansions in an argument or list of arguments.
This serves as a wrapper for splitting up a string into
separate tokens.
"""
if is_String(args) and not isinstance(args, CmdStringHolder):
args = str(args) # In case it's a UserString.
args = _separate_args.findall(args)
for a in args:
if a[0] in ' \t\n\r\f\v':
if '\n' in a:
self.next_line()
elif within_list:
self.append(a)
else:
self.next_word()
else:
self.expand(a, lvars, within_list)
else:
self.expand(args, lvars, within_list)
def next_line(self):
"""Arrange for the next word to start a new line. This
is like starting a new word, except that we have to append
another line to the result."""
collections.UserList.append(self, [])
self.next_word()
def this_word(self):
"""Arrange for the next word to append to the end of the
current last word in the result."""
self.append = self.add_to_current_word
def next_word(self):
"""Arrange for the next word to start a new word."""
self.append = self.add_new_word
def add_to_current_word(self, x):
"""Append the string x to the end of the current last word
in the result. If that is not possible, then just add
it as a new word. Make sure the entire concatenated string
inherits the object attributes of x (in particular, the
escape function) by wrapping it as CmdStringHolder."""
if not self.in_strip or self.mode != SUBST_SIG:
try:
current_word = self[-1][-1]
except IndexError:
self.add_new_word(x)
else:
# All right, this is a hack and it should probably
# be refactored out of existence in the future.
# The issue is that we want to smoosh words together
# and make one file name that gets escaped if
# we're expanding something like foo$EXTENSION,
# but we don't want to smoosh them together if
# it's something like >$TARGET, because then we'll
# treat the '>' like it's part of the file name.
# So for now, just hard-code looking for the special
# command-line redirection characters...
try:
last_char = str(current_word)[-1]
except IndexError:
last_char = '\0'
if last_char in '<>|':
self.add_new_word(x)
else:
y = current_word + x
# We used to treat a word appended to a literal
# as a literal itself, but this caused problems
# with interpreting quotes around space-separated
# targets on command lines. Removing this makes
# none of the "substantive" end-to-end tests fail,
# so we'll take this out but leave it commented
# for now in case there's a problem not covered
# by the test cases and we need to resurrect this.
#literal1 = self.literal(self[-1][-1])
#literal2 = self.literal(x)
y = self.conv(y)
if is_String(y):
#y = CmdStringHolder(y, literal1 or literal2)
y = CmdStringHolder(y, None)
self[-1][-1] = y
def add_new_word(self, x):
if not self.in_strip or self.mode != SUBST_SIG:
literal = self.literal(x)
x = self.conv(x)
if is_String(x):
x = CmdStringHolder(x, literal)
self[-1].append(x)
self.append = self.add_to_current_word
def literal(self, x):
try:
l = x.is_literal
except AttributeError:
return None
else:
return l()
def open_strip(self, x):
"""Handle the "open strip" $( token."""
self.add_strip(x)
self.in_strip = 1
def close_strip(self, x):
"""Handle the "close strip" $) token."""
self.add_strip(x)
self.in_strip = None
if conv is None:
conv = _strconv[mode]
# Doing this every time is a bit of a waste, since the Executor
# has typically already populated the OverrideEnvironment with
# $TARGET/$SOURCE variables. We're keeping this (for now), though,
# because it supports existing behavior that allows us to call
# an Action directly with an arbitrary target+source pair, which
# we use in Tool/tex.py to handle calling $BIBTEX when necessary.
# If we dropped that behavior (or found another way to cover it),
# we could get rid of this call completely and just rely on the
# Executor setting the variables.
if 'TARGET' not in lvars:
d = subst_dict(target, source)
if d:
lvars = lvars.copy()
lvars.update(d)
# We're (most likely) going to eval() things. If Python doesn't
# find a __builtins__ value in the global dictionary used for eval(),
# it copies the current global values for you. Avoid this by
# setting it explicitly and then deleting, so we don't pollute the
# construction environment Dictionary(ies) that are typically used
# for expansion.
gvars['__builtins__'] = __builtins__
ls = ListSubber(env, mode, conv, gvars)
ls.substitute(strSubst, lvars, 0)
try:
del gvars['__builtins__']
except KeyError:
pass
return ls.data | def function[scons_subst_list, parameter[strSubst, env, mode, target, source, gvars, lvars, conv]]:
constant[Substitute construction variables in a string (or list or other
object) and separate the arguments into a command list.
The companion scons_subst() function (above) handles basic
substitutions within strings, so see that function instead
if that's what you're looking for.
]
class class[ListSubber, parameter[]] begin[:]
constant[A class to construct the results of a scons_subst_list() call.
Like StringSubber, this class binds a specific construction
environment, mode, target and source with two methods
(substitute() and expand()) that handle the expansion.
In addition, however, this class is used to track the state of
the result(s) we're gathering so we can do the appropriate thing
whenever we have to append another word to the result--start a new
line, start a new word, append to the current word, etc. We do
this by setting the "append" attribute to the right method so
that our wrapper methods only need ever call ListSubber.append(),
and the rest of the object takes care of doing the right thing
internally.
]
def function[__init__, parameter[self, env, mode, conv, gvars]]:
call[name[collections].UserList.__init__, parameter[name[self], list[[]]]]
name[self].env assign[=] name[env]
name[self].mode assign[=] name[mode]
name[self].conv assign[=] name[conv]
name[self].gvars assign[=] name[gvars]
if compare[name[self].mode equal[==] name[SUBST_RAW]] begin[:]
name[self].add_strip assign[=] <ast.Lambda object at 0x7da204345270>
name[self].in_strip assign[=] constant[None]
call[name[self].next_line, parameter[]]
def function[expand, parameter[self, s, lvars, within_list]]:
constant[Expand a single "token" as necessary, appending the
expansion to the current result.
This handles expanding different types of things (strings,
lists, callables) appropriately. It calls the wrapper
substitute() method to re-expand things as necessary, so that
the results of expansions of side-by-side strings still get
re-evaluated separately, not smushed together.
]
if call[name[is_String], parameter[name[s]]] begin[:]
<ast.Try object at 0x7da204344ca0>
if compare[name[s0] not_equal[!=] constant[$]] begin[:]
call[name[self].append, parameter[name[s]]]
return[None]
if compare[name[s1] equal[==] constant[$]] begin[:]
call[name[self].append, parameter[constant[$]]]
def function[substitute, parameter[self, args, lvars, within_list]]:
constant[Substitute expansions in an argument or list of arguments.
This serves as a wrapper for splitting up a string into
separate tokens.
]
if <ast.BoolOp object at 0x7da2047e99f0> begin[:]
variable[args] assign[=] call[name[str], parameter[name[args]]]
variable[args] assign[=] call[name[_separate_args].findall, parameter[name[args]]]
for taget[name[a]] in starred[name[args]] begin[:]
if compare[call[name[a]][constant[0]] in constant[
]] begin[:]
if compare[constant[
] in name[a]] begin[:]
call[name[self].next_line, parameter[]]
def function[next_line, parameter[self]]:
constant[Arrange for the next word to start a new line. This
is like starting a new word, except that we have to append
another line to the result.]
call[name[collections].UserList.append, parameter[name[self], list[[]]]]
call[name[self].next_word, parameter[]]
def function[this_word, parameter[self]]:
constant[Arrange for the next word to append to the end of the
current last word in the result.]
name[self].append assign[=] name[self].add_to_current_word
def function[next_word, parameter[self]]:
constant[Arrange for the next word to start a new word.]
name[self].append assign[=] name[self].add_new_word
def function[add_to_current_word, parameter[self, x]]:
constant[Append the string x to the end of the current last word
in the result. If that is not possible, then just add
it as a new word. Make sure the entire concatenated string
inherits the object attributes of x (in particular, the
escape function) by wrapping it as CmdStringHolder.]
if <ast.BoolOp object at 0x7da20e9b3d00> begin[:]
<ast.Try object at 0x7da20e9b0550>
def function[add_new_word, parameter[self, x]]:
if <ast.BoolOp object at 0x7da20e9b1d50> begin[:]
variable[literal] assign[=] call[name[self].literal, parameter[name[x]]]
variable[x] assign[=] call[name[self].conv, parameter[name[x]]]
if call[name[is_String], parameter[name[x]]] begin[:]
variable[x] assign[=] call[name[CmdStringHolder], parameter[name[x], name[literal]]]
call[call[name[self]][<ast.UnaryOp object at 0x7da20e9b3a90>].append, parameter[name[x]]]
name[self].append assign[=] name[self].add_to_current_word
def function[literal, parameter[self, x]]:
<ast.Try object at 0x7da20e9b1f30>
def function[open_strip, parameter[self, x]]:
constant[Handle the "open strip" $( token.]
call[name[self].add_strip, parameter[name[x]]]
name[self].in_strip assign[=] constant[1]
def function[close_strip, parameter[self, x]]:
constant[Handle the "close strip" $) token.]
call[name[self].add_strip, parameter[name[x]]]
name[self].in_strip assign[=] constant[None]
if compare[name[conv] is constant[None]] begin[:]
variable[conv] assign[=] call[name[_strconv]][name[mode]]
if compare[constant[TARGET] <ast.NotIn object at 0x7da2590d7190> name[lvars]] begin[:]
variable[d] assign[=] call[name[subst_dict], parameter[name[target], name[source]]]
if name[d] begin[:]
variable[lvars] assign[=] call[name[lvars].copy, parameter[]]
call[name[lvars].update, parameter[name[d]]]
call[name[gvars]][constant[__builtins__]] assign[=] name[__builtins__]
variable[ls] assign[=] call[name[ListSubber], parameter[name[env], name[mode], name[conv], name[gvars]]]
call[name[ls].substitute, parameter[name[strSubst], name[lvars], constant[0]]]
<ast.Try object at 0x7da2041d86a0>
return[name[ls].data] | keyword[def] identifier[scons_subst_list] ( identifier[strSubst] , identifier[env] , identifier[mode] = identifier[SUBST_RAW] , identifier[target] = keyword[None] , identifier[source] = keyword[None] , identifier[gvars] ={}, identifier[lvars] ={}, identifier[conv] = keyword[None] ):
literal[string]
keyword[class] identifier[ListSubber] ( identifier[collections] . identifier[UserList] ):
literal[string]
keyword[def] identifier[__init__] ( identifier[self] , identifier[env] , identifier[mode] , identifier[conv] , identifier[gvars] ):
identifier[collections] . identifier[UserList] . identifier[__init__] ( identifier[self] ,[])
identifier[self] . identifier[env] = identifier[env]
identifier[self] . identifier[mode] = identifier[mode]
identifier[self] . identifier[conv] = identifier[conv]
identifier[self] . identifier[gvars] = identifier[gvars]
keyword[if] identifier[self] . identifier[mode] == identifier[SUBST_RAW] :
identifier[self] . identifier[add_strip] = keyword[lambda] identifier[x] : identifier[self] . identifier[append] ( identifier[x] )
keyword[else] :
identifier[self] . identifier[add_strip] = keyword[lambda] identifier[x] : keyword[None]
identifier[self] . identifier[in_strip] = keyword[None]
identifier[self] . identifier[next_line] ()
keyword[def] identifier[expand] ( identifier[self] , identifier[s] , identifier[lvars] , identifier[within_list] ):
literal[string]
keyword[if] identifier[is_String] ( identifier[s] ):
keyword[try] :
identifier[s0] , identifier[s1] = identifier[s] [: literal[int] ]
keyword[except] ( identifier[IndexError] , identifier[ValueError] ):
identifier[self] . identifier[append] ( identifier[s] )
keyword[return]
keyword[if] identifier[s0] != literal[string] :
identifier[self] . identifier[append] ( identifier[s] )
keyword[return]
keyword[if] identifier[s1] == literal[string] :
identifier[self] . identifier[append] ( literal[string] )
keyword[elif] identifier[s1] == literal[string] :
identifier[self] . identifier[open_strip] ( literal[string] )
keyword[elif] identifier[s1] == literal[string] :
identifier[self] . identifier[close_strip] ( literal[string] )
keyword[else] :
identifier[key] = identifier[s] [ literal[int] :]
keyword[if] identifier[key] [ literal[int] ]== literal[string] keyword[or] identifier[key] . identifier[find] ( literal[string] )>= literal[int] :
keyword[if] identifier[key] [ literal[int] ]== literal[string] :
identifier[key] = identifier[key] [ literal[int] :- literal[int] ]
keyword[try] :
identifier[s] = identifier[eval] ( identifier[key] , identifier[self] . identifier[gvars] , identifier[lvars] )
keyword[except] identifier[KeyboardInterrupt] :
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[__class__] keyword[in] identifier[AllowableExceptions] :
keyword[return]
identifier[raise_exception] ( identifier[e] , identifier[lvars] [ literal[string] ], identifier[s] )
keyword[else] :
keyword[if] identifier[key] keyword[in] identifier[lvars] :
identifier[s] = identifier[lvars] [ identifier[key] ]
keyword[elif] identifier[key] keyword[in] identifier[self] . identifier[gvars] :
identifier[s] = identifier[self] . identifier[gvars] [ identifier[key] ]
keyword[elif] keyword[not] identifier[NameError] keyword[in] identifier[AllowableExceptions] :
identifier[raise_exception] ( identifier[NameError] (), identifier[lvars] [ literal[string] ], identifier[s] )
keyword[else] :
keyword[return]
identifier[lv] = identifier[lvars] . identifier[copy] ()
identifier[var] = identifier[key] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[lv] [ identifier[var] ]= literal[string]
identifier[self] . identifier[substitute] ( identifier[s] , identifier[lv] , literal[int] )
identifier[self] . identifier[this_word] ()
keyword[elif] identifier[is_Sequence] ( identifier[s] ):
keyword[for] identifier[a] keyword[in] identifier[s] :
identifier[self] . identifier[substitute] ( identifier[a] , identifier[lvars] , literal[int] )
identifier[self] . identifier[next_word] ()
keyword[elif] identifier[callable] ( identifier[s] ):
keyword[try] :
identifier[s] = identifier[s] ( identifier[target] = identifier[lvars] [ literal[string] ],
identifier[source] = identifier[lvars] [ literal[string] ],
identifier[env] = identifier[self] . identifier[env] ,
identifier[for_signature] =( identifier[self] . identifier[mode] != identifier[SUBST_CMD] ))
keyword[except] identifier[TypeError] :
keyword[if] identifier[self] . identifier[mode] == identifier[SUBST_RAW] :
identifier[self] . identifier[append] ( identifier[s] )
keyword[return]
identifier[s] = identifier[self] . identifier[conv] ( identifier[s] )
identifier[self] . identifier[substitute] ( identifier[s] , identifier[lvars] , identifier[within_list] )
keyword[elif] identifier[s] keyword[is] keyword[None] :
identifier[self] . identifier[this_word] ()
keyword[else] :
identifier[self] . identifier[append] ( identifier[s] )
keyword[def] identifier[substitute] ( identifier[self] , identifier[args] , identifier[lvars] , identifier[within_list] ):
literal[string]
keyword[if] identifier[is_String] ( identifier[args] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[args] , identifier[CmdStringHolder] ):
identifier[args] = identifier[str] ( identifier[args] )
identifier[args] = identifier[_separate_args] . identifier[findall] ( identifier[args] )
keyword[for] identifier[a] keyword[in] identifier[args] :
keyword[if] identifier[a] [ literal[int] ] keyword[in] literal[string] :
keyword[if] literal[string] keyword[in] identifier[a] :
identifier[self] . identifier[next_line] ()
keyword[elif] identifier[within_list] :
identifier[self] . identifier[append] ( identifier[a] )
keyword[else] :
identifier[self] . identifier[next_word] ()
keyword[else] :
identifier[self] . identifier[expand] ( identifier[a] , identifier[lvars] , identifier[within_list] )
keyword[else] :
identifier[self] . identifier[expand] ( identifier[args] , identifier[lvars] , identifier[within_list] )
keyword[def] identifier[next_line] ( identifier[self] ):
literal[string]
identifier[collections] . identifier[UserList] . identifier[append] ( identifier[self] ,[])
identifier[self] . identifier[next_word] ()
keyword[def] identifier[this_word] ( identifier[self] ):
literal[string]
identifier[self] . identifier[append] = identifier[self] . identifier[add_to_current_word]
keyword[def] identifier[next_word] ( identifier[self] ):
literal[string]
identifier[self] . identifier[append] = identifier[self] . identifier[add_new_word]
keyword[def] identifier[add_to_current_word] ( identifier[self] , identifier[x] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[in_strip] keyword[or] identifier[self] . identifier[mode] != identifier[SUBST_SIG] :
keyword[try] :
identifier[current_word] = identifier[self] [- literal[int] ][- literal[int] ]
keyword[except] identifier[IndexError] :
identifier[self] . identifier[add_new_word] ( identifier[x] )
keyword[else] :
keyword[try] :
identifier[last_char] = identifier[str] ( identifier[current_word] )[- literal[int] ]
keyword[except] identifier[IndexError] :
identifier[last_char] = literal[string]
keyword[if] identifier[last_char] keyword[in] literal[string] :
identifier[self] . identifier[add_new_word] ( identifier[x] )
keyword[else] :
identifier[y] = identifier[current_word] + identifier[x]
identifier[y] = identifier[self] . identifier[conv] ( identifier[y] )
keyword[if] identifier[is_String] ( identifier[y] ):
identifier[y] = identifier[CmdStringHolder] ( identifier[y] , keyword[None] )
identifier[self] [- literal[int] ][- literal[int] ]= identifier[y]
keyword[def] identifier[add_new_word] ( identifier[self] , identifier[x] ):
keyword[if] keyword[not] identifier[self] . identifier[in_strip] keyword[or] identifier[self] . identifier[mode] != identifier[SUBST_SIG] :
identifier[literal] = identifier[self] . identifier[literal] ( identifier[x] )
identifier[x] = identifier[self] . identifier[conv] ( identifier[x] )
keyword[if] identifier[is_String] ( identifier[x] ):
identifier[x] = identifier[CmdStringHolder] ( identifier[x] , identifier[literal] )
identifier[self] [- literal[int] ]. identifier[append] ( identifier[x] )
identifier[self] . identifier[append] = identifier[self] . identifier[add_to_current_word]
keyword[def] identifier[literal] ( identifier[self] , identifier[x] ):
keyword[try] :
identifier[l] = identifier[x] . identifier[is_literal]
keyword[except] identifier[AttributeError] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[l] ()
keyword[def] identifier[open_strip] ( identifier[self] , identifier[x] ):
literal[string]
identifier[self] . identifier[add_strip] ( identifier[x] )
identifier[self] . identifier[in_strip] = literal[int]
keyword[def] identifier[close_strip] ( identifier[self] , identifier[x] ):
literal[string]
identifier[self] . identifier[add_strip] ( identifier[x] )
identifier[self] . identifier[in_strip] = keyword[None]
keyword[if] identifier[conv] keyword[is] keyword[None] :
identifier[conv] = identifier[_strconv] [ identifier[mode] ]
keyword[if] literal[string] keyword[not] keyword[in] identifier[lvars] :
identifier[d] = identifier[subst_dict] ( identifier[target] , identifier[source] )
keyword[if] identifier[d] :
identifier[lvars] = identifier[lvars] . identifier[copy] ()
identifier[lvars] . identifier[update] ( identifier[d] )
identifier[gvars] [ literal[string] ]= identifier[__builtins__]
identifier[ls] = identifier[ListSubber] ( identifier[env] , identifier[mode] , identifier[conv] , identifier[gvars] )
identifier[ls] . identifier[substitute] ( identifier[strSubst] , identifier[lvars] , literal[int] )
keyword[try] :
keyword[del] identifier[gvars] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[return] identifier[ls] . identifier[data] | def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
"""Substitute construction variables in a string (or list or other
object) and separate the arguments into a command list.
The companion scons_subst() function (above) handles basic
substitutions within strings, so see that function instead
if that's what you're looking for.
"""
class ListSubber(collections.UserList):
"""A class to construct the results of a scons_subst_list() call.
Like StringSubber, this class binds a specific construction
environment, mode, target and source with two methods
(substitute() and expand()) that handle the expansion.
In addition, however, this class is used to track the state of
the result(s) we're gathering so we can do the appropriate thing
whenever we have to append another word to the result--start a new
line, start a new word, append to the current word, etc. We do
this by setting the "append" attribute to the right method so
that our wrapper methods only need ever call ListSubber.append(),
and the rest of the object takes care of doing the right thing
internally.
"""
def __init__(self, env, mode, conv, gvars):
collections.UserList.__init__(self, [])
self.env = env
self.mode = mode
self.conv = conv
self.gvars = gvars
if self.mode == SUBST_RAW:
self.add_strip = lambda x: self.append(x) # depends on [control=['if'], data=[]]
else:
self.add_strip = lambda x: None
self.in_strip = None
self.next_line()
def expand(self, s, lvars, within_list):
"""Expand a single "token" as necessary, appending the
expansion to the current result.
This handles expanding different types of things (strings,
lists, callables) appropriately. It calls the wrapper
substitute() method to re-expand things as necessary, so that
the results of expansions of side-by-side strings still get
re-evaluated separately, not smushed together.
"""
if is_String(s):
try:
(s0, s1) = s[:2] # depends on [control=['try'], data=[]]
except (IndexError, ValueError):
self.append(s)
return # depends on [control=['except'], data=[]]
if s0 != '$':
self.append(s)
return # depends on [control=['if'], data=[]]
if s1 == '$':
self.append('$') # depends on [control=['if'], data=[]]
elif s1 == '(':
self.open_strip('$(') # depends on [control=['if'], data=[]]
elif s1 == ')':
self.close_strip('$)') # depends on [control=['if'], data=[]]
else:
key = s[1:]
if key[0] == '{' or key.find('.') >= 0:
if key[0] == '{':
key = key[1:-1] # depends on [control=['if'], data=[]]
try:
s = eval(key, self.gvars, lvars) # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
raise # depends on [control=['except'], data=[]]
except Exception as e:
if e.__class__ in AllowableExceptions:
return # depends on [control=['if'], data=[]]
raise_exception(e, lvars['TARGETS'], s) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
elif key in lvars:
s = lvars[key] # depends on [control=['if'], data=['key', 'lvars']]
elif key in self.gvars:
s = self.gvars[key] # depends on [control=['if'], data=['key']]
elif not NameError in AllowableExceptions:
raise_exception(NameError(), lvars['TARGETS'], s) # depends on [control=['if'], data=[]]
else:
return
# Before re-expanding the result, handle
# recursive expansion by copying the local
# variable dictionary and overwriting a null
# string for the value of the variable name
# we just expanded.
lv = lvars.copy()
var = key.split('.')[0]
lv[var] = ''
self.substitute(s, lv, 0)
self.this_word() # depends on [control=['if'], data=[]]
elif is_Sequence(s):
for a in s:
self.substitute(a, lvars, 1)
self.next_word() # depends on [control=['for'], data=['a']] # depends on [control=['if'], data=[]]
elif callable(s):
try:
s = s(target=lvars['TARGETS'], source=lvars['SOURCES'], env=self.env, for_signature=self.mode != SUBST_CMD) # depends on [control=['try'], data=[]]
except TypeError:
# This probably indicates that it's a callable
# object that doesn't match our calling arguments
# (like an Action).
if self.mode == SUBST_RAW:
self.append(s)
return # depends on [control=['if'], data=[]]
s = self.conv(s) # depends on [control=['except'], data=[]]
self.substitute(s, lvars, within_list) # depends on [control=['if'], data=[]]
elif s is None:
self.this_word() # depends on [control=['if'], data=[]]
else:
self.append(s)
def substitute(self, args, lvars, within_list):
"""Substitute expansions in an argument or list of arguments.
This serves as a wrapper for splitting up a string into
separate tokens.
"""
if is_String(args) and (not isinstance(args, CmdStringHolder)):
args = str(args) # In case it's a UserString.
args = _separate_args.findall(args)
for a in args:
if a[0] in ' \t\n\r\x0c\x0b':
if '\n' in a:
self.next_line() # depends on [control=['if'], data=[]]
elif within_list:
self.append(a) # depends on [control=['if'], data=[]]
else:
self.next_word() # depends on [control=['if'], data=[]]
else:
self.expand(a, lvars, within_list) # depends on [control=['for'], data=['a']] # depends on [control=['if'], data=[]]
else:
self.expand(args, lvars, within_list)
def next_line(self):
"""Arrange for the next word to start a new line. This
is like starting a new word, except that we have to append
another line to the result."""
collections.UserList.append(self, [])
self.next_word()
def this_word(self):
"""Arrange for the next word to append to the end of the
current last word in the result."""
self.append = self.add_to_current_word
def next_word(self):
"""Arrange for the next word to start a new word."""
self.append = self.add_new_word
def add_to_current_word(self, x):
"""Append the string x to the end of the current last word
in the result. If that is not possible, then just add
it as a new word. Make sure the entire concatenated string
inherits the object attributes of x (in particular, the
escape function) by wrapping it as CmdStringHolder."""
if not self.in_strip or self.mode != SUBST_SIG:
try:
current_word = self[-1][-1] # depends on [control=['try'], data=[]]
except IndexError:
self.add_new_word(x) # depends on [control=['except'], data=[]]
else:
# All right, this is a hack and it should probably
# be refactored out of existence in the future.
# The issue is that we want to smoosh words together
# and make one file name that gets escaped if
# we're expanding something like foo$EXTENSION,
# but we don't want to smoosh them together if
# it's something like >$TARGET, because then we'll
# treat the '>' like it's part of the file name.
# So for now, just hard-code looking for the special
# command-line redirection characters...
try:
last_char = str(current_word)[-1] # depends on [control=['try'], data=[]]
except IndexError:
last_char = '\x00' # depends on [control=['except'], data=[]]
if last_char in '<>|':
self.add_new_word(x) # depends on [control=['if'], data=[]]
else:
y = current_word + x
# We used to treat a word appended to a literal
# as a literal itself, but this caused problems
# with interpreting quotes around space-separated
# targets on command lines. Removing this makes
# none of the "substantive" end-to-end tests fail,
# so we'll take this out but leave it commented
# for now in case there's a problem not covered
# by the test cases and we need to resurrect this.
#literal1 = self.literal(self[-1][-1])
#literal2 = self.literal(x)
y = self.conv(y)
if is_String(y):
#y = CmdStringHolder(y, literal1 or literal2)
y = CmdStringHolder(y, None) # depends on [control=['if'], data=[]]
self[-1][-1] = y # depends on [control=['if'], data=[]]
def add_new_word(self, x):
if not self.in_strip or self.mode != SUBST_SIG:
literal = self.literal(x)
x = self.conv(x)
if is_String(x):
x = CmdStringHolder(x, literal) # depends on [control=['if'], data=[]]
self[-1].append(x) # depends on [control=['if'], data=[]]
self.append = self.add_to_current_word
def literal(self, x):
try:
l = x.is_literal # depends on [control=['try'], data=[]]
except AttributeError:
return None # depends on [control=['except'], data=[]]
else:
return l()
def open_strip(self, x):
"""Handle the "open strip" $( token."""
self.add_strip(x)
self.in_strip = 1
def close_strip(self, x):
"""Handle the "close strip" $) token."""
self.add_strip(x)
self.in_strip = None
if conv is None:
conv = _strconv[mode] # depends on [control=['if'], data=['conv']]
# Doing this every time is a bit of a waste, since the Executor
# has typically already populated the OverrideEnvironment with
# $TARGET/$SOURCE variables. We're keeping this (for now), though,
# because it supports existing behavior that allows us to call
# an Action directly with an arbitrary target+source pair, which
# we use in Tool/tex.py to handle calling $BIBTEX when necessary.
# If we dropped that behavior (or found another way to cover it),
# we could get rid of this call completely and just rely on the
# Executor setting the variables.
if 'TARGET' not in lvars:
d = subst_dict(target, source)
if d:
lvars = lvars.copy()
lvars.update(d) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['lvars']]
# We're (most likely) going to eval() things. If Python doesn't
# find a __builtins__ value in the global dictionary used for eval(),
# it copies the current global values for you. Avoid this by
# setting it explicitly and then deleting, so we don't pollute the
# construction environment Dictionary(ies) that are typically used
# for expansion.
gvars['__builtins__'] = __builtins__
ls = ListSubber(env, mode, conv, gvars)
ls.substitute(strSubst, lvars, 0)
try:
del gvars['__builtins__'] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
return ls.data |
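In practice this function is usually reached through Environment.subst_list; a hedged sketch of the shape of its result, a list of command lines each split into words (the compiler and flag values are illustrative only):

import SCons.Environment

env = SCons.Environment.Environment(CC='gcc', CCFLAGS='-O2 -Wall')
print(env.subst_list('$CC $CCFLAGS -c foo.c'))
# Expected shape: [['gcc', '-O2', '-Wall', '-c', 'foo.c']] -- one command
# line, word-separated, with each word wrapped for escaping as above.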
def sleep_if_necessary(cls, user, token, endpoint='search', msg=''):
"""Sleep a little if hit github recently to honor rate limit.
"""
my_kw = {'auth': (user, token)} if user else {}
info = requests.get('https://api.github.com/rate_limit', **my_kw)
info_dict = info.json()
remaining = info_dict['resources'][endpoint]['remaining']
logging.debug('Search remaining on github is at %s', remaining)
if remaining <= 5:
sleep_time = 120
else:
sleep_time = 0
if sleep_time:
logging.warning('Sleep %i since github requests remaining = %i%s',
sleep_time, remaining, msg)
time.sleep(sleep_time)
return True
return False | def function[sleep_if_necessary, parameter[cls, user, token, endpoint, msg]]:
constant[Sleep a little if we hit GitHub recently, to honor the rate limit.
]
variable[my_kw] assign[=] <ast.IfExp object at 0x7da204961600>
variable[info] assign[=] call[name[requests].get, parameter[constant[https://api.github.com/rate_limit]]]
variable[info_dict] assign[=] call[name[info].json, parameter[]]
variable[remaining] assign[=] call[call[call[name[info_dict]][constant[resources]]][name[endpoint]]][constant[remaining]]
call[name[logging].debug, parameter[constant[Search remaining on github is at %s], name[remaining]]]
if compare[name[remaining] less_or_equal[<=] constant[5]] begin[:]
variable[sleep_time] assign[=] constant[120]
if name[sleep_time] begin[:]
call[name[logging].warning, parameter[constant[Sleep %i since github requests remaining = %i%s], name[sleep_time], name[remaining], name[msg]]]
call[name[time].sleep, parameter[name[sleep_time]]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[sleep_if_necessary] ( identifier[cls] , identifier[user] , identifier[token] , identifier[endpoint] = literal[string] , identifier[msg] = literal[string] ):
literal[string]
identifier[my_kw] ={ literal[string] :( identifier[user] , identifier[token] )} keyword[if] identifier[user] keyword[else] {}
identifier[info] = identifier[requests] . identifier[get] ( literal[string] ,** identifier[my_kw] )
identifier[info_dict] = identifier[info] . identifier[json] ()
identifier[remaining] = identifier[info_dict] [ literal[string] ][ identifier[endpoint] ][ literal[string] ]
identifier[logging] . identifier[debug] ( literal[string] , identifier[remaining] )
keyword[if] identifier[remaining] <= literal[int] :
identifier[sleep_time] = literal[int]
keyword[else] :
identifier[sleep_time] = literal[int]
keyword[if] identifier[sleep_time] :
identifier[logging] . identifier[warning] ( literal[string] ,
identifier[sleep_time] , identifier[remaining] , identifier[msg] )
identifier[time] . identifier[sleep] ( identifier[sleep_time] )
keyword[return] keyword[True]
keyword[return] keyword[False] | def sleep_if_necessary(cls, user, token, endpoint='search', msg=''):
"""Sleep a little if hit github recently to honor rate limit.
"""
my_kw = {'auth': (user, token)} if user else {}
info = requests.get('https://api.github.com/rate_limit', **my_kw)
info_dict = info.json()
remaining = info_dict['resources'][endpoint]['remaining']
logging.debug('Search remaining on github is at %s', remaining)
if remaining <= 5:
sleep_time = 120 # depends on [control=['if'], data=[]]
else:
sleep_time = 0
if sleep_time:
logging.warning('Sleep %i since github requests remaining = %i%s', sleep_time, remaining, msg)
time.sleep(sleep_time)
return True # depends on [control=['if'], data=[]]
return False |
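An illustrative call site for the rate-limit guard above, checking the search budget before a burst of queries. GitHubSearcher is a hypothetical class exposing the classmethod, and the credentials are placeholders.

import logging

# GitHubSearcher is assumed to expose sleep_if_necessary as a classmethod.
if GitHubSearcher.sleep_if_necessary('my-user', 'MY_TOKEN', endpoint='search',
                                     msg=' before issuing queries'):
    logging.info('Search budget was low; resumed after back-off.')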
def reset(self):
"""Clear the internal statistics to initial state."""
if getattr(self, 'num', None) is None:
self.num_inst = 0
self.sum_metric = 0.0
else:
self.num_inst = [0] * self.num
self.sum_metric = [0.0] * self.num
self.records = dict()
self.counts = dict() | def function[reset, parameter[self]]:
constant[Clear the internal statistics to initial state.]
if compare[call[name[getattr], parameter[name[self], constant[num], constant[None]]] is constant[None]] begin[:]
name[self].num_inst assign[=] constant[0]
name[self].sum_metric assign[=] constant[0.0]
name[self].records assign[=] call[name[dict], parameter[]]
name[self].counts assign[=] call[name[dict], parameter[]] | keyword[def] identifier[reset] ( identifier[self] ):
literal[string]
keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ) keyword[is] keyword[None] :
identifier[self] . identifier[num_inst] = literal[int]
identifier[self] . identifier[sum_metric] = literal[int]
keyword[else] :
identifier[self] . identifier[num_inst] =[ literal[int] ]* identifier[self] . identifier[num]
identifier[self] . identifier[sum_metric] =[ literal[int] ]* identifier[self] . identifier[num]
identifier[self] . identifier[records] = identifier[dict] ()
identifier[self] . identifier[counts] = identifier[dict] () | def reset(self):
"""Clear the internal statistics to initial state."""
if getattr(self, 'num', None) is None:
self.num_inst = 0
self.sum_metric = 0.0 # depends on [control=['if'], data=[]]
else:
self.num_inst = [0] * self.num
self.sum_metric = [0.0] * self.num
self.records = dict()
self.counts = dict() |
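A sketch of the reset semantics above, assuming an MXNet-style EvalMetric subclass: after reset() the accumulated counts return to their initial state and get() starts fresh.

import mxnet as mx

metric = mx.metric.Accuracy()                  # stand-in EvalMetric
labels = [mx.nd.array([0, 1, 1])]
preds = [mx.nd.array([[0.9, 0.1], [0.2, 0.8], [0.6, 0.4]])]
metric.update(labels, preds)
print(metric.get())                            # ('accuracy', ~0.667)
metric.reset()                                 # back to the initial state
print(metric.get())                            # ('accuracy', nan)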
def arp():
'''
Return the arp table from the minion
.. versionchanged:: 2015.8.0
Added support for SunOS
CLI Example:
.. code-block:: bash
salt '*' network.arp
'''
ret = {}
out = __salt__['cmd.run']('arp -an')
for line in out.splitlines():
comps = line.split()
if len(comps) < 4:
continue
if __grains__['kernel'] == 'SunOS':
if ':' not in comps[-1]:
continue
ret[comps[-1]] = comps[1]
elif __grains__['kernel'] == 'OpenBSD':
if comps[0] == 'Host' or comps[1] == '(incomplete)':
continue
ret[comps[1]] = comps[0]
elif __grains__['kernel'] == 'AIX':
if comps[0] in ('bucket', 'There'):
continue
ret[comps[3]] = comps[1].strip('(').strip(')')
else:
ret[comps[3]] = comps[1].strip('(').strip(')')
return ret | def function[arp, parameter[]]:
constant[
Return the arp table from the minion
.. versionchanged:: 2015.8.0
Added support for SunOS
CLI Example:
.. code-block:: bash
salt '*' network.arp
]
variable[ret] assign[=] dictionary[[], []]
variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[constant[arp -an]]]
for taget[name[line]] in starred[call[name[out].splitlines, parameter[]]] begin[:]
variable[comps] assign[=] call[name[line].split, parameter[]]
if compare[call[name[len], parameter[name[comps]]] less[<] constant[4]] begin[:]
continue
if compare[call[name[__grains__]][constant[kernel]] equal[==] constant[SunOS]] begin[:]
if compare[constant[:] <ast.NotIn object at 0x7da2590d7190> call[name[comps]][<ast.UnaryOp object at 0x7da20c7945b0>]] begin[:]
continue
call[name[ret]][call[name[comps]][<ast.UnaryOp object at 0x7da20c7947f0>]] assign[=] call[name[comps]][constant[1]]
return[name[ret]] | keyword[def] identifier[arp] ():
literal[string]
identifier[ret] ={}
identifier[out] = identifier[__salt__] [ literal[string] ]( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[out] . identifier[splitlines] ():
identifier[comps] = identifier[line] . identifier[split] ()
keyword[if] identifier[len] ( identifier[comps] )< literal[int] :
keyword[continue]
keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[comps] [- literal[int] ]:
keyword[continue]
identifier[ret] [ identifier[comps] [- literal[int] ]]= identifier[comps] [ literal[int] ]
keyword[elif] identifier[__grains__] [ literal[string] ]== literal[string] :
keyword[if] identifier[comps] [ literal[int] ]== literal[string] keyword[or] identifier[comps] [ literal[int] ]== literal[string] :
keyword[continue]
identifier[ret] [ identifier[comps] [ literal[int] ]]= identifier[comps] [ literal[int] ]
keyword[elif] identifier[__grains__] [ literal[string] ]== literal[string] :
keyword[if] identifier[comps] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
keyword[continue]
identifier[ret] [ identifier[comps] [ literal[int] ]]= identifier[comps] [ literal[int] ]. identifier[strip] ( literal[string] ). identifier[strip] ( literal[string] )
keyword[else] :
identifier[ret] [ identifier[comps] [ literal[int] ]]= identifier[comps] [ literal[int] ]. identifier[strip] ( literal[string] ). identifier[strip] ( literal[string] )
keyword[return] identifier[ret] | def arp():
"""
Return the arp table from the minion
.. versionchanged:: 2015.8.0
Added support for SunOS
CLI Example:
.. code-block:: bash
salt '*' network.arp
"""
ret = {}
out = __salt__['cmd.run']('arp -an')
for line in out.splitlines():
comps = line.split()
if len(comps) < 4:
continue # depends on [control=['if'], data=[]]
if __grains__['kernel'] == 'SunOS':
if ':' not in comps[-1]:
continue # depends on [control=['if'], data=[]]
ret[comps[-1]] = comps[1] # depends on [control=['if'], data=[]]
elif __grains__['kernel'] == 'OpenBSD':
if comps[0] == 'Host' or comps[1] == '(incomplete)':
continue # depends on [control=['if'], data=[]]
ret[comps[1]] = comps[0] # depends on [control=['if'], data=[]]
elif __grains__['kernel'] == 'AIX':
if comps[0] in ('bucket', 'There'):
continue # depends on [control=['if'], data=[]]
ret[comps[3]] = comps[1].strip('(').strip(')') # depends on [control=['if'], data=[]]
else:
ret[comps[3]] = comps[1].strip('(').strip(')') # depends on [control=['for'], data=['line']]
return ret |
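Illustrative invocations of the execution module function above, both from the salt CLI (as in the docstring) and from Python via salt's local client. The minion name, MAC, and address are documentation placeholders.

# CLI, as in the docstring:
#   salt '*' network.arp
import salt.client

local = salt.client.LocalClient()
result = local.cmd('*', 'network.arp')
# e.g. {'minion1': {'00:00:5e:00:53:01': '192.0.2.1'}} -- which field keys
# the dict depends on the kernel branch taken above (MAC -> address here).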
def run_ding0(self, session, mv_grid_districts_no=None, debug=False, export_figures=False):
""" Let DING0 run by shouting at this method (or just call
it from NetworkDing0 instance). This method is a wrapper
for the main functionality of DING0.
Parameters
----------
session : sqlalchemy.orm.session.Session
Database session
mv_grid_districts_no : List of Integers
List of MV grid_districts/stations to be imported (if empty,
all grid_districts & stations are imported)
debug : bool, defaults to False
If True, information is printed during process
export_figures : bool, defaults to False
If True, figures are shown or exported (default path: ~/.ding0/) during run.
Returns
-------
msg : str
Message of invalidity of a grid district
Notes
-----
The steps performed in this method are to be kept in the given order
since there are hard dependencies between them. Short description of
all steps performed:
* STEP 1: Import MV Grid Districts and subjacent objects
Imports MV Grid Districts, HV-MV stations, Load Areas, LV Grid Districts
and MV-LV stations, instantiates and initiates objects.
* STEP 2: Import generators
Conventional and renewable generators of voltage levels 4..7 are imported
and added to corresponding grid.
* STEP 3: Parametrize grid
Parameters of MV grid are set such as voltage level and cable/line types
according to MV Grid District's characteristics.
* STEP 4: Validate MV Grid Districts
Tests MV grid districts for validity concerning imported data such as
count of Load Areas.
* STEP 5: Build LV grids
Builds LV grids for every non-aggregated LA in every MV Grid District
using model grids.
* STEP 6: Build MV grids
Builds MV grid by performing a routing on Load Area centres to build
ring topology.
* STEP 7: Connect MV and LV generators
Generators are connected to grids, used approach depends on voltage
level.
* STEP 8: Set IDs for all branches in MV and LV grids
While IDs of imported objects can be derived from dataset's ID, branches
are created in steps 5+6 and need unique IDs (e.g. for PF calculation).
* STEP 9: Relocate switch disconnectors in MV grid
Switch disconnectors are set during routing process (step 6) according
to the load distribution within a ring. After further modifications of
the grid within step 6+7 they have to be relocated (note: switch
disconnectors are called circuit breakers in DING0 for historical reasons).
* STEP 10: Open all switch disconnectors in MV grid
Under normal conditions, rings are operated in open state (half-rings).
Furthermore, this is required to allow powerflow for MV grid.
* STEP 11: Do power flow analysis of MV grid
The technically working MV grid created in step 6 was extended by satellite
loads and generators. It is finally tested again using powerflow calculation.
* STEP 12: Reinforce MV grid
            MV grid is eventually reinforced pursuant to results from step 11.
        * STEP 13: Close all switch disconnectors in MV grid
The rings are finally closed to hold a complete graph (if the SDs are open,
the edges adjacent to a SD will not be exported!)
"""
if debug:
start = time.time()
# STEP 1: Import MV Grid Districts and subjacent objects
self.import_mv_grid_districts(session,
mv_grid_districts_no=mv_grid_districts_no)
# STEP 2: Import generators
self.import_generators(session, debug=debug)
# STEP 3: Parametrize MV grid
self.mv_parametrize_grid(debug=debug)
# STEP 4: Validate MV Grid Districts
msg = self.validate_grid_districts()
# STEP 5: Build LV grids
self.build_lv_grids()
# STEP 6: Build MV grids
self.mv_routing(debug=False)
if export_figures:
grid = self._mv_grid_districts[0].mv_grid
plot_mv_topology(grid, subtitle='Routing completed', filename='1_routing_completed.png')
# STEP 7: Connect MV and LV generators
self.connect_generators(debug=False)
if export_figures:
plot_mv_topology(grid, subtitle='Generators connected', filename='2_generators_connected.png')
# STEP 8: Set IDs for all branches in MV and LV grids
self.set_branch_ids()
# STEP 9: Relocate switch disconnectors in MV grid
self.set_circuit_breakers(debug=debug)
if export_figures:
plot_mv_topology(grid, subtitle='Circuit breakers relocated', filename='3_circuit_breakers_relocated.png')
# STEP 10: Open all switch disconnectors in MV grid
self.control_circuit_breakers(mode='open')
# STEP 11: Do power flow analysis of MV grid
self.run_powerflow(session, method='onthefly', export_pypsa=False, debug=debug)
if export_figures:
plot_mv_topology(grid, subtitle='PF result (load case)',
filename='4_PF_result_load.png',
line_color='loading', node_color='voltage', testcase='load')
plot_mv_topology(grid, subtitle='PF result (feedin case)',
filename='5_PF_result_feedin.png',
line_color='loading', node_color='voltage', testcase='feedin')
# STEP 12: Reinforce MV grid
self.reinforce_grid()
# STEP 13: Close all switch disconnectors in MV grid
self.control_circuit_breakers(mode='close')
if export_figures:
plot_mv_topology(grid, subtitle='Final grid PF result (load case)',
filename='6_final_grid_PF_result_load.png',
line_color='loading', node_color='voltage', testcase='load')
plot_mv_topology(grid, subtitle='Final grid PF result (feedin case)',
filename='7_final_grid_PF_result_feedin.png',
line_color='loading', node_color='voltage', testcase='feedin')
if debug:
logger.info('Elapsed time for {0} MV Grid Districts (seconds): {1}'.format(
str(len(mv_grid_districts_no)), time.time() - start))
return msg | def function[run_ding0, parameter[self, session, mv_grid_districts_no, debug, export_figures]]:
constant[ Let DING0 run by shouting at this method (or just call
it from NetworkDing0 instance). This method is a wrapper
for the main functionality of DING0.
Parameters
----------
session : sqlalchemy.orm.session.Session
Database session
mv_grid_districts_no : List of Integers
List of MV grid_districts/stations to be imported (if empty,
all grid_districts & stations are imported)
debug : bool, defaults to False
If True, information is printed during process
export_figures : bool, defaults to False
If True, figures are shown or exported (default path: ~/.ding0/) during run.
Returns
-------
msg : str
Message of invalidity of a grid district
Notes
-----
The steps performed in this method are to be kept in the given order
since there are hard dependencies between them. Short description of
all steps performed:
* STEP 1: Import MV Grid Districts and subjacent objects
Imports MV Grid Districts, HV-MV stations, Load Areas, LV Grid Districts
and MV-LV stations, instantiates and initiates objects.
* STEP 2: Import generators
Conventional and renewable generators of voltage levels 4..7 are imported
and added to corresponding grid.
* STEP 3: Parametrize grid
Parameters of MV grid are set such as voltage level and cable/line types
according to MV Grid District's characteristics.
* STEP 4: Validate MV Grid Districts
Tests MV grid districts for validity concerning imported data such as
count of Load Areas.
* STEP 5: Build LV grids
Builds LV grids for every non-aggregated LA in every MV Grid District
using model grids.
* STEP 6: Build MV grids
Builds MV grid by performing a routing on Load Area centres to build
ring topology.
* STEP 7: Connect MV and LV generators
Generators are connected to grids, used approach depends on voltage
level.
* STEP 8: Set IDs for all branches in MV and LV grids
While IDs of imported objects can be derived from dataset's ID, branches
are created in steps 5+6 and need unique IDs (e.g. for PF calculation).
* STEP 9: Relocate switch disconnectors in MV grid
Switch disconnectors are set during routing process (step 6) according
to the load distribution within a ring. After further modifications of
the grid within step 6+7 they have to be relocated (note: switch
disconnectors are called circuit breakers in DING0 for historical reasons).
* STEP 10: Open all switch disconnectors in MV grid
Under normal conditions, rings are operated in open state (half-rings).
Furthermore, this is required to allow powerflow for MV grid.
* STEP 11: Do power flow analysis of MV grid
The technically working MV grid created in step 6 was extended by satellite
loads and generators. It is finally tested again using powerflow calculation.
* STEP 12: Reinforce MV grid
            MV grid is eventually reinforced pursuant to results from step 11.
        * STEP 13: Close all switch disconnectors in MV grid
The rings are finally closed to hold a complete graph (if the SDs are open,
the edges adjacent to a SD will not be exported!)
]
if name[debug] begin[:]
variable[start] assign[=] call[name[time].time, parameter[]]
call[name[self].import_mv_grid_districts, parameter[name[session]]]
call[name[self].import_generators, parameter[name[session]]]
call[name[self].mv_parametrize_grid, parameter[]]
variable[msg] assign[=] call[name[self].validate_grid_districts, parameter[]]
call[name[self].build_lv_grids, parameter[]]
call[name[self].mv_routing, parameter[]]
if name[export_figures] begin[:]
variable[grid] assign[=] call[name[self]._mv_grid_districts][constant[0]].mv_grid
call[name[plot_mv_topology], parameter[name[grid]]]
call[name[self].connect_generators, parameter[]]
if name[export_figures] begin[:]
call[name[plot_mv_topology], parameter[name[grid]]]
call[name[self].set_branch_ids, parameter[]]
call[name[self].set_circuit_breakers, parameter[]]
if name[export_figures] begin[:]
call[name[plot_mv_topology], parameter[name[grid]]]
call[name[self].control_circuit_breakers, parameter[]]
call[name[self].run_powerflow, parameter[name[session]]]
if name[export_figures] begin[:]
call[name[plot_mv_topology], parameter[name[grid]]]
call[name[plot_mv_topology], parameter[name[grid]]]
call[name[self].reinforce_grid, parameter[]]
call[name[self].control_circuit_breakers, parameter[]]
if name[export_figures] begin[:]
call[name[plot_mv_topology], parameter[name[grid]]]
call[name[plot_mv_topology], parameter[name[grid]]]
if name[debug] begin[:]
call[name[logger].info, parameter[call[constant[Elapsed time for {0} MV Grid Districts (seconds): {1}].format, parameter[call[name[str], parameter[call[name[len], parameter[name[mv_grid_districts_no]]]]], binary_operation[call[name[time].time, parameter[]] - name[start]]]]]]
return[name[msg]] | keyword[def] identifier[run_ding0] ( identifier[self] , identifier[session] , identifier[mv_grid_districts_no] = keyword[None] , identifier[debug] = keyword[False] , identifier[export_figures] = keyword[False] ):
literal[string]
keyword[if] identifier[debug] :
identifier[start] = identifier[time] . identifier[time] ()
identifier[self] . identifier[import_mv_grid_districts] ( identifier[session] ,
identifier[mv_grid_districts_no] = identifier[mv_grid_districts_no] )
identifier[self] . identifier[import_generators] ( identifier[session] , identifier[debug] = identifier[debug] )
identifier[self] . identifier[mv_parametrize_grid] ( identifier[debug] = identifier[debug] )
identifier[msg] = identifier[self] . identifier[validate_grid_districts] ()
identifier[self] . identifier[build_lv_grids] ()
identifier[self] . identifier[mv_routing] ( identifier[debug] = keyword[False] )
keyword[if] identifier[export_figures] :
identifier[grid] = identifier[self] . identifier[_mv_grid_districts] [ literal[int] ]. identifier[mv_grid]
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] , identifier[filename] = literal[string] )
identifier[self] . identifier[connect_generators] ( identifier[debug] = keyword[False] )
keyword[if] identifier[export_figures] :
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] , identifier[filename] = literal[string] )
identifier[self] . identifier[set_branch_ids] ()
identifier[self] . identifier[set_circuit_breakers] ( identifier[debug] = identifier[debug] )
keyword[if] identifier[export_figures] :
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] , identifier[filename] = literal[string] )
identifier[self] . identifier[control_circuit_breakers] ( identifier[mode] = literal[string] )
identifier[self] . identifier[run_powerflow] ( identifier[session] , identifier[method] = literal[string] , identifier[export_pypsa] = keyword[False] , identifier[debug] = identifier[debug] )
keyword[if] identifier[export_figures] :
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] ,
identifier[filename] = literal[string] ,
identifier[line_color] = literal[string] , identifier[node_color] = literal[string] , identifier[testcase] = literal[string] )
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] ,
identifier[filename] = literal[string] ,
identifier[line_color] = literal[string] , identifier[node_color] = literal[string] , identifier[testcase] = literal[string] )
identifier[self] . identifier[reinforce_grid] ()
identifier[self] . identifier[control_circuit_breakers] ( identifier[mode] = literal[string] )
keyword[if] identifier[export_figures] :
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] ,
identifier[filename] = literal[string] ,
identifier[line_color] = literal[string] , identifier[node_color] = literal[string] , identifier[testcase] = literal[string] )
identifier[plot_mv_topology] ( identifier[grid] , identifier[subtitle] = literal[string] ,
identifier[filename] = literal[string] ,
identifier[line_color] = literal[string] , identifier[node_color] = literal[string] , identifier[testcase] = literal[string] )
keyword[if] identifier[debug] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] (
identifier[str] ( identifier[len] ( identifier[mv_grid_districts_no] )), identifier[time] . identifier[time] ()- identifier[start] ))
keyword[return] identifier[msg] | def run_ding0(self, session, mv_grid_districts_no=None, debug=False, export_figures=False):
""" Let DING0 run by shouting at this method (or just call
it from NetworkDing0 instance). This method is a wrapper
for the main functionality of DING0.
Parameters
----------
session : sqlalchemy.orm.session.Session
Database session
mv_grid_districts_no : List of Integers
List of MV grid_districts/stations to be imported (if empty,
all grid_districts & stations are imported)
debug : bool, defaults to False
If True, information is printed during process
export_figures : bool, defaults to False
If True, figures are shown or exported (default path: ~/.ding0/) during run.
Returns
-------
msg : str
Message of invalidity of a grid district
Notes
-----
The steps performed in this method are to be kept in the given order
since there are hard dependencies between them. Short description of
all steps performed:
* STEP 1: Import MV Grid Districts and subjacent objects
Imports MV Grid Districts, HV-MV stations, Load Areas, LV Grid Districts
and MV-LV stations, instantiates and initiates objects.
* STEP 2: Import generators
Conventional and renewable generators of voltage levels 4..7 are imported
and added to corresponding grid.
* STEP 3: Parametrize grid
Parameters of MV grid are set such as voltage level and cable/line types
according to MV Grid District's characteristics.
* STEP 4: Validate MV Grid Districts
Tests MV grid districts for validity concerning imported data such as
count of Load Areas.
* STEP 5: Build LV grids
Builds LV grids for every non-aggregated LA in every MV Grid District
using model grids.
* STEP 6: Build MV grids
Builds MV grid by performing a routing on Load Area centres to build
ring topology.
* STEP 7: Connect MV and LV generators
Generators are connected to grids, used approach depends on voltage
level.
* STEP 8: Set IDs for all branches in MV and LV grids
While IDs of imported objects can be derived from dataset's ID, branches
are created in steps 5+6 and need unique IDs (e.g. for PF calculation).
* STEP 9: Relocate switch disconnectors in MV grid
Switch disconnectors are set during routing process (step 6) according
to the load distribution within a ring. After further modifications of
the grid within step 6+7 they have to be relocated (note: switch
disconnectors are called circuit breakers in DING0 for historical reasons).
* STEP 10: Open all switch disconnectors in MV grid
Under normal conditions, rings are operated in open state (half-rings).
Furthermore, this is required to allow powerflow for MV grid.
* STEP 11: Do power flow analysis of MV grid
The technically working MV grid created in step 6 was extended by satellite
loads and generators. It is finally tested again using powerflow calculation.
* STEP 12: Reinforce MV grid
            MV grid is eventually reinforced pursuant to results from step 11.
        * STEP 13: Close all switch disconnectors in MV grid
The rings are finally closed to hold a complete graph (if the SDs are open,
the edges adjacent to a SD will not be exported!)
"""
if debug:
start = time.time() # depends on [control=['if'], data=[]]
# STEP 1: Import MV Grid Districts and subjacent objects
self.import_mv_grid_districts(session, mv_grid_districts_no=mv_grid_districts_no)
# STEP 2: Import generators
self.import_generators(session, debug=debug)
# STEP 3: Parametrize MV grid
self.mv_parametrize_grid(debug=debug)
# STEP 4: Validate MV Grid Districts
msg = self.validate_grid_districts()
# STEP 5: Build LV grids
self.build_lv_grids()
# STEP 6: Build MV grids
self.mv_routing(debug=False)
if export_figures:
grid = self._mv_grid_districts[0].mv_grid
plot_mv_topology(grid, subtitle='Routing completed', filename='1_routing_completed.png') # depends on [control=['if'], data=[]]
# STEP 7: Connect MV and LV generators
self.connect_generators(debug=False)
if export_figures:
plot_mv_topology(grid, subtitle='Generators connected', filename='2_generators_connected.png') # depends on [control=['if'], data=[]]
# STEP 8: Set IDs for all branches in MV and LV grids
self.set_branch_ids()
# STEP 9: Relocate switch disconnectors in MV grid
self.set_circuit_breakers(debug=debug)
if export_figures:
plot_mv_topology(grid, subtitle='Circuit breakers relocated', filename='3_circuit_breakers_relocated.png') # depends on [control=['if'], data=[]]
# STEP 10: Open all switch disconnectors in MV grid
self.control_circuit_breakers(mode='open')
# STEP 11: Do power flow analysis of MV grid
self.run_powerflow(session, method='onthefly', export_pypsa=False, debug=debug)
if export_figures:
plot_mv_topology(grid, subtitle='PF result (load case)', filename='4_PF_result_load.png', line_color='loading', node_color='voltage', testcase='load')
plot_mv_topology(grid, subtitle='PF result (feedin case)', filename='5_PF_result_feedin.png', line_color='loading', node_color='voltage', testcase='feedin') # depends on [control=['if'], data=[]]
# STEP 12: Reinforce MV grid
self.reinforce_grid()
# STEP 13: Close all switch disconnectors in MV grid
self.control_circuit_breakers(mode='close')
if export_figures:
plot_mv_topology(grid, subtitle='Final grid PF result (load case)', filename='6_final_grid_PF_result_load.png', line_color='loading', node_color='voltage', testcase='load')
plot_mv_topology(grid, subtitle='Final grid PF result (feedin case)', filename='7_final_grid_PF_result_feedin.png', line_color='loading', node_color='voltage', testcase='feedin') # depends on [control=['if'], data=[]]
if debug:
logger.info('Elapsed time for {0} MV Grid Districts (seconds): {1}'.format(str(len(mv_grid_districts_no)), time.time() - start)) # depends on [control=['if'], data=[]]
return msg |
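# Hedged usage sketch for run_ding0(); the import path, DSN and grid
# district id below are assumptions based on the ding0 project layout,
# not taken from this document.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from ding0.core import NetworkDing0  # assumed import path

engine = create_engine("postgresql://user:password@host/oedb")  # placeholder DSN
session = sessionmaker(bind=engine)()
nd = NetworkDing0(name="network")
msg = nd.run_ding0(session, mv_grid_districts_no=[3545], export_figures=True)
if msg:
    print("invalid grid district:", msg)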
def notification_selected_sm_changed(self, model, prop_name, info):
"""If a new state machine is selected, make sure the tab is open"""
selected_state_machine_id = self.model.selected_state_machine_id
if selected_state_machine_id is None:
return
page_id = self.get_page_num(selected_state_machine_id)
# to retrieve the current tab colors
number_of_pages = self.view["notebook"].get_n_pages()
old_label_colors = list(range(number_of_pages))
for p in range(number_of_pages):
page = self.view["notebook"].get_nth_page(p)
label = self.view["notebook"].get_tab_label(page).get_child().get_children()[0]
# old_label_colors[p] = label.get_style().fg[Gtk.StateType.NORMAL]
old_label_colors[p] = label.get_style_context().get_color(Gtk.StateType.NORMAL)
if not self.view.notebook.get_current_page() == page_id:
self.view.notebook.set_current_page(page_id)
# set the old colors
for p in range(number_of_pages):
page = self.view["notebook"].get_nth_page(p)
label = self.view["notebook"].get_tab_label(page).get_child().get_children()[0]
# Gtk TODO
style = label.get_style_context() | def function[notification_selected_sm_changed, parameter[self, model, prop_name, info]]:
constant[If a new state machine is selected, make sure the tab is open]
variable[selected_state_machine_id] assign[=] name[self].model.selected_state_machine_id
if compare[name[selected_state_machine_id] is constant[None]] begin[:]
return[None]
variable[page_id] assign[=] call[name[self].get_page_num, parameter[name[selected_state_machine_id]]]
variable[number_of_pages] assign[=] call[call[name[self].view][constant[notebook]].get_n_pages, parameter[]]
variable[old_label_colors] assign[=] call[name[list], parameter[call[name[range], parameter[name[number_of_pages]]]]]
for taget[name[p]] in starred[call[name[range], parameter[name[number_of_pages]]]] begin[:]
variable[page] assign[=] call[call[name[self].view][constant[notebook]].get_nth_page, parameter[name[p]]]
variable[label] assign[=] call[call[call[call[call[name[self].view][constant[notebook]].get_tab_label, parameter[name[page]]].get_child, parameter[]].get_children, parameter[]]][constant[0]]
call[name[old_label_colors]][name[p]] assign[=] call[call[name[label].get_style_context, parameter[]].get_color, parameter[name[Gtk].StateType.NORMAL]]
if <ast.UnaryOp object at 0x7da20c76d960> begin[:]
call[name[self].view.notebook.set_current_page, parameter[name[page_id]]]
for taget[name[p]] in starred[call[name[range], parameter[name[number_of_pages]]]] begin[:]
variable[page] assign[=] call[call[name[self].view][constant[notebook]].get_nth_page, parameter[name[p]]]
variable[label] assign[=] call[call[call[call[call[name[self].view][constant[notebook]].get_tab_label, parameter[name[page]]].get_child, parameter[]].get_children, parameter[]]][constant[0]]
variable[style] assign[=] call[name[label].get_style_context, parameter[]] | keyword[def] identifier[notification_selected_sm_changed] ( identifier[self] , identifier[model] , identifier[prop_name] , identifier[info] ):
literal[string]
identifier[selected_state_machine_id] = identifier[self] . identifier[model] . identifier[selected_state_machine_id]
keyword[if] identifier[selected_state_machine_id] keyword[is] keyword[None] :
keyword[return]
identifier[page_id] = identifier[self] . identifier[get_page_num] ( identifier[selected_state_machine_id] )
identifier[number_of_pages] = identifier[self] . identifier[view] [ literal[string] ]. identifier[get_n_pages] ()
identifier[old_label_colors] = identifier[list] ( identifier[range] ( identifier[number_of_pages] ))
keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[number_of_pages] ):
identifier[page] = identifier[self] . identifier[view] [ literal[string] ]. identifier[get_nth_page] ( identifier[p] )
identifier[label] = identifier[self] . identifier[view] [ literal[string] ]. identifier[get_tab_label] ( identifier[page] ). identifier[get_child] (). identifier[get_children] ()[ literal[int] ]
identifier[old_label_colors] [ identifier[p] ]= identifier[label] . identifier[get_style_context] (). identifier[get_color] ( identifier[Gtk] . identifier[StateType] . identifier[NORMAL] )
keyword[if] keyword[not] identifier[self] . identifier[view] . identifier[notebook] . identifier[get_current_page] ()== identifier[page_id] :
identifier[self] . identifier[view] . identifier[notebook] . identifier[set_current_page] ( identifier[page_id] )
keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[number_of_pages] ):
identifier[page] = identifier[self] . identifier[view] [ literal[string] ]. identifier[get_nth_page] ( identifier[p] )
identifier[label] = identifier[self] . identifier[view] [ literal[string] ]. identifier[get_tab_label] ( identifier[page] ). identifier[get_child] (). identifier[get_children] ()[ literal[int] ]
identifier[style] = identifier[label] . identifier[get_style_context] () | def notification_selected_sm_changed(self, model, prop_name, info):
"""If a new state machine is selected, make sure the tab is open"""
selected_state_machine_id = self.model.selected_state_machine_id
if selected_state_machine_id is None:
return # depends on [control=['if'], data=[]]
page_id = self.get_page_num(selected_state_machine_id)
# to retrieve the current tab colors
number_of_pages = self.view['notebook'].get_n_pages()
old_label_colors = list(range(number_of_pages))
for p in range(number_of_pages):
page = self.view['notebook'].get_nth_page(p)
label = self.view['notebook'].get_tab_label(page).get_child().get_children()[0]
# old_label_colors[p] = label.get_style().fg[Gtk.StateType.NORMAL]
old_label_colors[p] = label.get_style_context().get_color(Gtk.StateType.NORMAL) # depends on [control=['for'], data=['p']]
if not self.view.notebook.get_current_page() == page_id:
self.view.notebook.set_current_page(page_id) # depends on [control=['if'], data=[]]
# set the old colors
for p in range(number_of_pages):
page = self.view['notebook'].get_nth_page(p)
label = self.view['notebook'].get_tab_label(page).get_child().get_children()[0]
# Gtk TODO
style = label.get_style_context() # depends on [control=['for'], data=['p']] |
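# Hedged sketch of how the "Gtk TODO" above might be completed under
# GTK 3 / PyGObject: re-apply the saved Gdk.RGBA values to each tab label.
# override_color() is deprecated in GTK 3 but still functional; the widget
# traversal mirrors the loop in the method above.
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk

def restore_tab_colors(notebook, old_label_colors):
    for p in range(notebook.get_n_pages()):
        page = notebook.get_nth_page(p)
        label = notebook.get_tab_label(page).get_child().get_children()[0]
        label.override_color(Gtk.StateFlags.NORMAL, old_label_colors[p])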
def components(self) -> List['DAGCircuit']:
"""Split DAGCircuit into independent components"""
comps = nx.weakly_connected_component_subgraphs(self.graph)
return [DAGCircuit(comp) for comp in comps] | def function[components, parameter[self]]:
constant[Split DAGCircuit into independent components]
variable[comps] assign[=] call[name[nx].weakly_connected_component_subgraphs, parameter[name[self].graph]]
return[<ast.ListComp object at 0x7da20c6c4700>] | keyword[def] identifier[components] ( identifier[self] )-> identifier[List] [ literal[string] ]:
literal[string]
identifier[comps] = identifier[nx] . identifier[weakly_connected_component_subgraphs] ( identifier[self] . identifier[graph] )
keyword[return] [ identifier[DAGCircuit] ( identifier[comp] ) keyword[for] identifier[comp] keyword[in] identifier[comps] ] | def components(self) -> List['DAGCircuit']:
"""Split DAGCircuit into independent components"""
comps = nx.weakly_connected_component_subgraphs(self.graph)
return [DAGCircuit(comp) for comp in comps] |
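# The graph operation behind components(), shown with plain networkx.
# Note that weakly_connected_component_subgraphs(), used above, was removed
# in networkx 2.4; this self-contained sketch uses the surviving API.
import networkx as nx

g = nx.DiGraph()
g.add_edge("a", "b")  # first weakly connected component
g.add_edge("c", "d")  # second component, no edges to the first
subgraphs = [g.subgraph(c).copy() for c in nx.weakly_connected_components(g)]
print([sorted(sg.nodes) for sg in subgraphs])  # [['a', 'b'], ['c', 'd']]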
def fromDataFrameRDD(cls, rdd, sql_ctx):
"""Construct a DataFrame from an RDD of DataFrames.
No checking or validation occurs."""
result = DataFrame(None, sql_ctx)
return result.from_rdd_of_dataframes(rdd) | def function[fromDataFrameRDD, parameter[cls, rdd, sql_ctx]]:
constant[Construct a DataFrame from an RDD of DataFrames.
No checking or validation occurs.]
variable[result] assign[=] call[name[DataFrame], parameter[constant[None], name[sql_ctx]]]
return[call[name[result].from_rdd_of_dataframes, parameter[name[rdd]]]] | keyword[def] identifier[fromDataFrameRDD] ( identifier[cls] , identifier[rdd] , identifier[sql_ctx] ):
literal[string]
identifier[result] = identifier[DataFrame] ( keyword[None] , identifier[sql_ctx] )
keyword[return] identifier[result] . identifier[from_rdd_of_dataframes] ( identifier[rdd] ) | def fromDataFrameRDD(cls, rdd, sql_ctx):
"""Construct a DataFrame from an RDD of DataFrames.
No checking or validation occurs."""
result = DataFrame(None, sql_ctx)
return result.from_rdd_of_dataframes(rdd) |
def alias_asset(self, asset_id, alias_id):
"""Adds an ``Id`` to an ``Asset`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Asset`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another asset, it is
reassigned to the given asset ``Id``.
arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``asset_id`` not found
raise: NullArgument - ``asset_id`` or ``alias_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.alias_resources_template
self._alias_id(primary_id=asset_id, equivalent_id=alias_id) | def function[alias_asset, parameter[self, asset_id, alias_id]]:
constant[Adds an ``Id`` to an ``Asset`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Asset`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another asset, it is
reassigned to the given asset ``Id``.
arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``asset_id`` not found
raise: NullArgument - ``asset_id`` or ``alias_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
call[name[self]._alias_id, parameter[]] | keyword[def] identifier[alias_asset] ( identifier[self] , identifier[asset_id] , identifier[alias_id] ):
literal[string]
identifier[self] . identifier[_alias_id] ( identifier[primary_id] = identifier[asset_id] , identifier[equivalent_id] = identifier[alias_id] ) | def alias_asset(self, asset_id, alias_id):
"""Adds an ``Id`` to an ``Asset`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Asset`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another asset, it is
reassigned to the given asset ``Id``.
arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``asset_id`` not found
raise: NullArgument - ``asset_id`` or ``alias_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.alias_resources_template
self._alias_id(primary_id=asset_id, equivalent_id=alias_id) |
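# Toy illustration (not the osid implementation) of the alias semantics
# described in the docstring above: an alias Id is a pointer to a primary
# Id, and aliasing again simply re-targets the pointer.
aliases = {}

def alias(asset_id, alias_id):
    aliases[alias_id] = asset_id  # reassigned if it pointed elsewhere

alias("asset-1", "legacy-7")
alias("asset-2", "legacy-7")  # the alias now resolves to asset-2
print(aliases)  # {'legacy-7': 'asset-2'}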
def update_source(ident, data):
    '''Update a harvest source'''
source = get_source(ident)
source.modify(**data)
signals.harvest_source_updated.send(source)
return source | def function[update_source, parameter[ident, data]]:
    constant[Update a harvest source]
variable[source] assign[=] call[name[get_source], parameter[name[ident]]]
call[name[source].modify, parameter[]]
call[name[signals].harvest_source_updated.send, parameter[name[source]]]
return[name[source]] | keyword[def] identifier[update_source] ( identifier[ident] , identifier[data] ):
literal[string]
identifier[source] = identifier[get_source] ( identifier[ident] )
identifier[source] . identifier[modify] (** identifier[data] )
identifier[signals] . identifier[harvest_source_updated] . identifier[send] ( identifier[source] )
keyword[return] identifier[source] | def update_source(ident, data):
"""Update an harvest source"""
source = get_source(ident)
source.modify(**data)
signals.harvest_source_updated.send(source)
return source |
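# Hedged usage sketch; the identifier and payload keys are illustrative
# assumptions about the harvest source model, not taken from this document.
source = update_source('city-portal', {'name': 'City open data portal'})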
def keywords(self) -> Set[str]:
"""A set of all keywords of all handled devices.
In addition to attribute access via device names, |Nodes| and
|Elements| objects allow for attribute access via keywords,
allowing for an efficient search of certain groups of devices.
Let us use the example from above, where the nodes `na` and `nb`
        have no keywords, but each of the other three nodes belongs both
        to either `group_a` or `group_b` and to either `group_1` or `group_2`:
>>> from hydpy import Node, Nodes
>>> nodes = Nodes('na',
... Node('nb', variable='W'),
... Node('nc', keywords=('group_a', 'group_1')),
... Node('nd', keywords=('group_a', 'group_2')),
... Node('ne', keywords=('group_b', 'group_1')))
>>> nodes
Nodes("na", "nb", "nc", "nd", "ne")
>>> sorted(nodes.keywords)
['group_1', 'group_2', 'group_a', 'group_b']
If you are interested in inspecting all devices belonging to
`group_a`, select them via this keyword:
>>> subgroup = nodes.group_1
>>> subgroup
Nodes("nc", "ne")
You can further restrict the search by also selecting the devices
        belonging to `group_b`, which holds only for node "ne", in the given
example:
>>> subsubgroup = subgroup.group_b
>>> subsubgroup
Node("ne", variable="Q",
keywords=["group_1", "group_b"])
Note that the keywords already used for building a device subgroup
are not informative anymore (as they hold for each device) and are
thus not shown anymore:
>>> sorted(subgroup.keywords)
['group_a', 'group_b']
The latter might be confusing if you intend to work with a device
subgroup for a longer time. After copying the subgroup, all
keywords of the contained devices are available again:
>>> from copy import copy
>>> newgroup = copy(subgroup)
>>> sorted(newgroup.keywords)
['group_1', 'group_a', 'group_b']
"""
return set(keyword for device in self
for keyword in device.keywords if
keyword not in self._shadowed_keywords) | def function[keywords, parameter[self]]:
constant[A set of all keywords of all handled devices.
In addition to attribute access via device names, |Nodes| and
|Elements| objects allow for attribute access via keywords,
allowing for an efficient search of certain groups of devices.
Let us use the example from above, where the nodes `na` and `nb`
        have no keywords, but each of the other three nodes belongs both
        to either `group_a` or `group_b` and to either `group_1` or `group_2`:
>>> from hydpy import Node, Nodes
>>> nodes = Nodes('na',
... Node('nb', variable='W'),
... Node('nc', keywords=('group_a', 'group_1')),
... Node('nd', keywords=('group_a', 'group_2')),
... Node('ne', keywords=('group_b', 'group_1')))
>>> nodes
Nodes("na", "nb", "nc", "nd", "ne")
>>> sorted(nodes.keywords)
['group_1', 'group_2', 'group_a', 'group_b']
If you are interested in inspecting all devices belonging to
`group_a`, select them via this keyword:
>>> subgroup = nodes.group_1
>>> subgroup
Nodes("nc", "ne")
You can further restrict the search by also selecting the devices
        belonging to `group_b`, which holds only for node "ne", in the given
example:
>>> subsubgroup = subgroup.group_b
>>> subsubgroup
Node("ne", variable="Q",
keywords=["group_1", "group_b"])
Note that the keywords already used for building a device subgroup
are not informative anymore (as they hold for each device) and are
thus not shown anymore:
>>> sorted(subgroup.keywords)
['group_a', 'group_b']
The latter might be confusing if you intend to work with a device
subgroup for a longer time. After copying the subgroup, all
keywords of the contained devices are available again:
>>> from copy import copy
>>> newgroup = copy(subgroup)
>>> sorted(newgroup.keywords)
['group_1', 'group_a', 'group_b']
]
return[call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c7cb310>]]] | keyword[def] identifier[keywords] ( identifier[self] )-> identifier[Set] [ identifier[str] ]:
literal[string]
keyword[return] identifier[set] ( identifier[keyword] keyword[for] identifier[device] keyword[in] identifier[self]
keyword[for] identifier[keyword] keyword[in] identifier[device] . identifier[keywords] keyword[if]
identifier[keyword] keyword[not] keyword[in] identifier[self] . identifier[_shadowed_keywords] ) | def keywords(self) -> Set[str]:
"""A set of all keywords of all handled devices.
In addition to attribute access via device names, |Nodes| and
|Elements| objects allow for attribute access via keywords,
allowing for an efficient search of certain groups of devices.
Let us use the example from above, where the nodes `na` and `nb`
        have no keywords, but each of the other three nodes belongs both
        to either `group_a` or `group_b` and to either `group_1` or `group_2`:
>>> from hydpy import Node, Nodes
>>> nodes = Nodes('na',
... Node('nb', variable='W'),
... Node('nc', keywords=('group_a', 'group_1')),
... Node('nd', keywords=('group_a', 'group_2')),
... Node('ne', keywords=('group_b', 'group_1')))
>>> nodes
Nodes("na", "nb", "nc", "nd", "ne")
>>> sorted(nodes.keywords)
['group_1', 'group_2', 'group_a', 'group_b']
If you are interested in inspecting all devices belonging to
`group_a`, select them via this keyword:
>>> subgroup = nodes.group_1
>>> subgroup
Nodes("nc", "ne")
You can further restrict the search by also selecting the devices
        belonging to `group_b`, which holds only for node "ne", in the given
example:
>>> subsubgroup = subgroup.group_b
>>> subsubgroup
Node("ne", variable="Q",
keywords=["group_1", "group_b"])
Note that the keywords already used for building a device subgroup
are not informative anymore (as they hold for each device) and are
thus not shown anymore:
>>> sorted(subgroup.keywords)
['group_a', 'group_b']
The latter might be confusing if you intend to work with a device
subgroup for a longer time. After copying the subgroup, all
keywords of the contained devices are available again:
>>> from copy import copy
>>> newgroup = copy(subgroup)
>>> sorted(newgroup.keywords)
['group_1', 'group_a', 'group_b']
"""
return set((keyword for device in self for keyword in device.keywords if keyword not in self._shadowed_keywords)) |
def _proj_l1_sortsum(v, gamma, axis=None):
r"""Projection operator of the :math:`\ell_1` norm. The solution is
computed via the method of :cite:`duchi-2008-efficient`.
Parameters
----------
v : array_like
Input array :math:`\mathbf{v}`
gamma : float
Parameter :math:`\gamma`
axis : None or int, optional (default None)
Axes of `v` over which to compute the :math:`\ell_1` norm. If
`None`, an entire multi-dimensional array is treated as a
vector. If axes are specified, then distinct norm values are
computed over the indices of the remaining axes of input array
`v`. **Note:** specifying a tuple of ints is not supported by
this function.
Returns
-------
x : ndarray
Output array
"""
if axis is None and norm_l1(v) <= gamma:
return v
if axis is not None and axis < 0:
axis = v.ndim + axis
av = np.abs(v)
vs = np.sort(av, axis=axis)
if axis is None:
N = v.size
c = 1.0 / np.arange(1, N + 1, dtype=v.dtype).reshape(v.shape)
vs = vs[::-1].reshape(v.shape)
else:
N = v.shape[axis]
ns = [v.shape[k] if k == axis else 1 for k in range(v.ndim)]
c = 1.0 / np.arange(1, N + 1, dtype=v.dtype).reshape(ns)
vs = vs[(slice(None),) * axis + (slice(None, None, -1),)]
t = c * (np.cumsum(vs, axis=axis).reshape(v.shape) - gamma)
K = np.sum(vs >= t, axis=axis, keepdims=True)
t = (np.sum(vs * (vs >= t), axis=axis, keepdims=True) - gamma) / K
t = np.asarray(np.maximum(0, t), dtype=v.dtype)
return np.sign(v) * np.where(av > t, av - t, 0) | def function[_proj_l1_sortsum, parameter[v, gamma, axis]]:
constant[Projection operator of the :math:`\ell_1` norm. The solution is
computed via the method of :cite:`duchi-2008-efficient`.
Parameters
----------
v : array_like
Input array :math:`\mathbf{v}`
gamma : float
Parameter :math:`\gamma`
axis : None or int, optional (default None)
Axes of `v` over which to compute the :math:`\ell_1` norm. If
`None`, an entire multi-dimensional array is treated as a
vector. If axes are specified, then distinct norm values are
computed over the indices of the remaining axes of input array
`v`. **Note:** specifying a tuple of ints is not supported by
this function.
Returns
-------
x : ndarray
Output array
]
if <ast.BoolOp object at 0x7da1b06996c0> begin[:]
return[name[v]]
if <ast.BoolOp object at 0x7da1b069afe0> begin[:]
variable[axis] assign[=] binary_operation[name[v].ndim + name[axis]]
variable[av] assign[=] call[name[np].abs, parameter[name[v]]]
variable[vs] assign[=] call[name[np].sort, parameter[name[av]]]
if compare[name[axis] is constant[None]] begin[:]
variable[N] assign[=] name[v].size
variable[c] assign[=] binary_operation[constant[1.0] / call[call[name[np].arange, parameter[constant[1], binary_operation[name[N] + constant[1]]]].reshape, parameter[name[v].shape]]]
variable[vs] assign[=] call[call[name[vs]][<ast.Slice object at 0x7da1b0699390>].reshape, parameter[name[v].shape]]
variable[t] assign[=] binary_operation[name[c] * binary_operation[call[call[name[np].cumsum, parameter[name[vs]]].reshape, parameter[name[v].shape]] - name[gamma]]]
variable[K] assign[=] call[name[np].sum, parameter[compare[name[vs] greater_or_equal[>=] name[t]]]]
variable[t] assign[=] binary_operation[binary_operation[call[name[np].sum, parameter[binary_operation[name[vs] * compare[name[vs] greater_or_equal[>=] name[t]]]]] - name[gamma]] / name[K]]
variable[t] assign[=] call[name[np].asarray, parameter[call[name[np].maximum, parameter[constant[0], name[t]]]]]
return[binary_operation[call[name[np].sign, parameter[name[v]]] * call[name[np].where, parameter[compare[name[av] greater[>] name[t]], binary_operation[name[av] - name[t]], constant[0]]]]] | keyword[def] identifier[_proj_l1_sortsum] ( identifier[v] , identifier[gamma] , identifier[axis] = keyword[None] ):
literal[string]
keyword[if] identifier[axis] keyword[is] keyword[None] keyword[and] identifier[norm_l1] ( identifier[v] )<= identifier[gamma] :
keyword[return] identifier[v]
keyword[if] identifier[axis] keyword[is] keyword[not] keyword[None] keyword[and] identifier[axis] < literal[int] :
identifier[axis] = identifier[v] . identifier[ndim] + identifier[axis]
identifier[av] = identifier[np] . identifier[abs] ( identifier[v] )
identifier[vs] = identifier[np] . identifier[sort] ( identifier[av] , identifier[axis] = identifier[axis] )
keyword[if] identifier[axis] keyword[is] keyword[None] :
identifier[N] = identifier[v] . identifier[size]
identifier[c] = literal[int] / identifier[np] . identifier[arange] ( literal[int] , identifier[N] + literal[int] , identifier[dtype] = identifier[v] . identifier[dtype] ). identifier[reshape] ( identifier[v] . identifier[shape] )
identifier[vs] = identifier[vs] [::- literal[int] ]. identifier[reshape] ( identifier[v] . identifier[shape] )
keyword[else] :
identifier[N] = identifier[v] . identifier[shape] [ identifier[axis] ]
identifier[ns] =[ identifier[v] . identifier[shape] [ identifier[k] ] keyword[if] identifier[k] == identifier[axis] keyword[else] literal[int] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[v] . identifier[ndim] )]
identifier[c] = literal[int] / identifier[np] . identifier[arange] ( literal[int] , identifier[N] + literal[int] , identifier[dtype] = identifier[v] . identifier[dtype] ). identifier[reshape] ( identifier[ns] )
identifier[vs] = identifier[vs] [( identifier[slice] ( keyword[None] ),)* identifier[axis] +( identifier[slice] ( keyword[None] , keyword[None] ,- literal[int] ),)]
identifier[t] = identifier[c] *( identifier[np] . identifier[cumsum] ( identifier[vs] , identifier[axis] = identifier[axis] ). identifier[reshape] ( identifier[v] . identifier[shape] )- identifier[gamma] )
identifier[K] = identifier[np] . identifier[sum] ( identifier[vs] >= identifier[t] , identifier[axis] = identifier[axis] , identifier[keepdims] = keyword[True] )
identifier[t] =( identifier[np] . identifier[sum] ( identifier[vs] *( identifier[vs] >= identifier[t] ), identifier[axis] = identifier[axis] , identifier[keepdims] = keyword[True] )- identifier[gamma] )/ identifier[K]
identifier[t] = identifier[np] . identifier[asarray] ( identifier[np] . identifier[maximum] ( literal[int] , identifier[t] ), identifier[dtype] = identifier[v] . identifier[dtype] )
keyword[return] identifier[np] . identifier[sign] ( identifier[v] )* identifier[np] . identifier[where] ( identifier[av] > identifier[t] , identifier[av] - identifier[t] , literal[int] ) | def _proj_l1_sortsum(v, gamma, axis=None):
"""Projection operator of the :math:`\\ell_1` norm. The solution is
computed via the method of :cite:`duchi-2008-efficient`.
Parameters
----------
v : array_like
Input array :math:`\\mathbf{v}`
gamma : float
Parameter :math:`\\gamma`
axis : None or int, optional (default None)
Axes of `v` over which to compute the :math:`\\ell_1` norm. If
`None`, an entire multi-dimensional array is treated as a
vector. If axes are specified, then distinct norm values are
computed over the indices of the remaining axes of input array
`v`. **Note:** specifying a tuple of ints is not supported by
this function.
Returns
-------
x : ndarray
Output array
"""
if axis is None and norm_l1(v) <= gamma:
return v # depends on [control=['if'], data=[]]
if axis is not None and axis < 0:
axis = v.ndim + axis # depends on [control=['if'], data=[]]
av = np.abs(v)
vs = np.sort(av, axis=axis)
if axis is None:
N = v.size
c = 1.0 / np.arange(1, N + 1, dtype=v.dtype).reshape(v.shape)
vs = vs[::-1].reshape(v.shape) # depends on [control=['if'], data=[]]
else:
N = v.shape[axis]
ns = [v.shape[k] if k == axis else 1 for k in range(v.ndim)]
c = 1.0 / np.arange(1, N + 1, dtype=v.dtype).reshape(ns)
vs = vs[(slice(None),) * axis + (slice(None, None, -1),)]
t = c * (np.cumsum(vs, axis=axis).reshape(v.shape) - gamma)
K = np.sum(vs >= t, axis=axis, keepdims=True)
t = (np.sum(vs * (vs >= t), axis=axis, keepdims=True) - gamma) / K
t = np.asarray(np.maximum(0, t), dtype=v.dtype)
return np.sign(v) * np.where(av > t, av - t, 0) |
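# Worked example for _proj_l1_sortsum(). norm_l1 is assumed to be a module
# helper; a minimal stand-in is defined here so the snippet runs.
import numpy as np

def norm_l1(x):  # assumed helper used by the function above
    return np.abs(x).sum()

v = np.array([3.0, -1.0, 0.5])
x = _proj_l1_sortsum(v, gamma=2.0)
print(x, norm_l1(x))  # [ 2. -0.  0.] 2.0 -- v projected onto the l1 ball of radius 2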
def soap_action(self, service, action, payloadbody):
"""Do a soap request """
payload = self.soapenvelope.format(body=payloadbody).encode('utf-8')
headers = ['SOAPAction: ' + action,
'Content-Type: application/soap+xml; charset=UTF-8',
'Content-Length: ' + str(len(payload))]
try:
curl = pycurl.Curl()
curl.setopt(pycurl.SSL_CIPHER_LIST, "AES256-SHA")
curl.setopt(pycurl.SSLVERSION, pycurl.SSLVERSION_TLSv1_0)
# self.curl.setopt(pycurl.CAINFO,'ihc.crt')
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
curl.setopt(pycurl.SSL_VERIFYHOST, 0)
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.HEADERFUNCTION, IHCCurlConnection._write_header)
curl.setopt(pycurl.HTTPHEADER, headers)
inbuffer = BytesIO(payload)
curl.setopt(pycurl.READDATA, inbuffer)
buffer = BytesIO()
curl.setopt(pycurl.WRITEDATA, buffer)
curl.setopt(pycurl.URL, self.url + service)
curl.setopt(pycurl.COOKIE, IHCCurlConnection.cookies)
# curl.setopt(pycurl.VERBOSE,1)
curl.perform()
body = buffer.getvalue().decode('utf-8')
code = curl.getinfo(pycurl.HTTP_CODE)
curl.close()
except Exception as exp:
return False
if code != 200:
return False
try:
xdoc = xml.etree.ElementTree.fromstring(body)
except xml.etree.ElementTree.ParseError:
return False
return xdoc | def function[soap_action, parameter[self, service, action, payloadbody]]:
constant[Do a soap request ]
variable[payload] assign[=] call[call[name[self].soapenvelope.format, parameter[]].encode, parameter[constant[utf-8]]]
variable[headers] assign[=] list[[<ast.BinOp object at 0x7da1b23e5900>, <ast.Constant object at 0x7da1b23e4a60>, <ast.BinOp object at 0x7da1b23e6350>]]
<ast.Try object at 0x7da1b23e7280>
if compare[name[code] not_equal[!=] constant[200]] begin[:]
return[constant[False]]
<ast.Try object at 0x7da1b23e5ea0>
return[name[xdoc]] | keyword[def] identifier[soap_action] ( identifier[self] , identifier[service] , identifier[action] , identifier[payloadbody] ):
literal[string]
identifier[payload] = identifier[self] . identifier[soapenvelope] . identifier[format] ( identifier[body] = identifier[payloadbody] ). identifier[encode] ( literal[string] )
identifier[headers] =[ literal[string] + identifier[action] ,
literal[string] ,
literal[string] + identifier[str] ( identifier[len] ( identifier[payload] ))]
keyword[try] :
identifier[curl] = identifier[pycurl] . identifier[Curl] ()
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[SSL_CIPHER_LIST] , literal[string] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[SSLVERSION] , identifier[pycurl] . identifier[SSLVERSION_TLSv1_0] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[SSL_VERIFYPEER] , literal[int] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[SSL_VERIFYHOST] , literal[int] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[POST] , literal[int] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[HEADERFUNCTION] , identifier[IHCCurlConnection] . identifier[_write_header] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[HTTPHEADER] , identifier[headers] )
identifier[inbuffer] = identifier[BytesIO] ( identifier[payload] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[READDATA] , identifier[inbuffer] )
identifier[buffer] = identifier[BytesIO] ()
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[WRITEDATA] , identifier[buffer] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[URL] , identifier[self] . identifier[url] + identifier[service] )
identifier[curl] . identifier[setopt] ( identifier[pycurl] . identifier[COOKIE] , identifier[IHCCurlConnection] . identifier[cookies] )
identifier[curl] . identifier[perform] ()
identifier[body] = identifier[buffer] . identifier[getvalue] (). identifier[decode] ( literal[string] )
identifier[code] = identifier[curl] . identifier[getinfo] ( identifier[pycurl] . identifier[HTTP_CODE] )
identifier[curl] . identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[exp] :
keyword[return] keyword[False]
keyword[if] identifier[code] != literal[int] :
keyword[return] keyword[False]
keyword[try] :
identifier[xdoc] = identifier[xml] . identifier[etree] . identifier[ElementTree] . identifier[fromstring] ( identifier[body] )
keyword[except] identifier[xml] . identifier[etree] . identifier[ElementTree] . identifier[ParseError] :
keyword[return] keyword[False]
keyword[return] identifier[xdoc] | def soap_action(self, service, action, payloadbody):
"""Do a soap request """
payload = self.soapenvelope.format(body=payloadbody).encode('utf-8')
headers = ['SOAPAction: ' + action, 'Content-Type: application/soap+xml; charset=UTF-8', 'Content-Length: ' + str(len(payload))]
try:
curl = pycurl.Curl()
curl.setopt(pycurl.SSL_CIPHER_LIST, 'AES256-SHA')
curl.setopt(pycurl.SSLVERSION, pycurl.SSLVERSION_TLSv1_0)
# self.curl.setopt(pycurl.CAINFO,'ihc.crt')
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
curl.setopt(pycurl.SSL_VERIFYHOST, 0)
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.HEADERFUNCTION, IHCCurlConnection._write_header)
curl.setopt(pycurl.HTTPHEADER, headers)
inbuffer = BytesIO(payload)
curl.setopt(pycurl.READDATA, inbuffer)
buffer = BytesIO()
curl.setopt(pycurl.WRITEDATA, buffer)
curl.setopt(pycurl.URL, self.url + service)
curl.setopt(pycurl.COOKIE, IHCCurlConnection.cookies)
# curl.setopt(pycurl.VERBOSE,1)
curl.perform()
body = buffer.getvalue().decode('utf-8')
code = curl.getinfo(pycurl.HTTP_CODE)
curl.close() # depends on [control=['try'], data=[]]
except Exception as exp:
return False # depends on [control=['except'], data=[]]
if code != 200:
return False # depends on [control=['if'], data=[]]
try:
xdoc = xml.etree.ElementTree.fromstring(body) # depends on [control=['try'], data=[]]
except xml.etree.ElementTree.ParseError:
return False # depends on [control=['except'], data=[]]
return xdoc |
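# Hedged usage sketch; the controller URL, service path, SOAP action and
# payload body are placeholders, not taken from this document.
conn = IHCCurlConnection("https://192.168.1.3")  # constructor args assumed
body = "<authenticate1 xmlns=\"utcs\">...</authenticate1>"  # placeholder payload
xdoc = conn.soap_action("/ws/AuthenticationService", "authenticate", body)
if xdoc is False:
    print("SOAP request failed")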
def override_ssh_auth_env():
"""Override the `$SSH_AUTH_SOCK `env variable to mock the absence of an SSH agent."""
ssh_auth_sock = "SSH_AUTH_SOCK"
    # pop() avoids a KeyError when SSH_AUTH_SOCK is not set to begin with
    old_ssh_auth_sock = os.environ.pop(ssh_auth_sock, None)
yield
if old_ssh_auth_sock:
os.environ[ssh_auth_sock] = old_ssh_auth_sock | def function[override_ssh_auth_env, parameter[]]:
    constant[Override the `$SSH_AUTH_SOCK` env variable to mock the absence of an SSH agent.]
variable[ssh_auth_sock] assign[=] constant[SSH_AUTH_SOCK]
variable[old_ssh_auth_sock] assign[=] call[name[os].environ.get, parameter[name[ssh_auth_sock]]]
<ast.Delete object at 0x7da18f7229e0>
<ast.Yield object at 0x7da18f7211b0>
if name[old_ssh_auth_sock] begin[:]
call[name[os].environ][name[ssh_auth_sock]] assign[=] name[old_ssh_auth_sock] | keyword[def] identifier[override_ssh_auth_env] ():
literal[string]
identifier[ssh_auth_sock] = literal[string]
identifier[old_ssh_auth_sock] = identifier[os] . identifier[environ] . identifier[get] ( identifier[ssh_auth_sock] )
keyword[del] identifier[os] . identifier[environ] [ identifier[ssh_auth_sock] ]
keyword[yield]
keyword[if] identifier[old_ssh_auth_sock] :
identifier[os] . identifier[environ] [ identifier[ssh_auth_sock] ]= identifier[old_ssh_auth_sock] | def override_ssh_auth_env():
"""Override the `$SSH_AUTH_SOCK `env variable to mock the absence of an SSH agent."""
ssh_auth_sock = 'SSH_AUTH_SOCK'
old_ssh_auth_sock = os.environ.get(ssh_auth_sock)
del os.environ[ssh_auth_sock]
yield
if old_ssh_auth_sock:
os.environ[ssh_auth_sock] = old_ssh_auth_sock # depends on [control=['if'], data=[]] |
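# Hedged usage sketch: the generator above is presumably registered as a
# pytest fixture or contextmanager elsewhere; this shows the contextmanager
# case.
import os
from contextlib import contextmanager

no_agent = contextmanager(override_ssh_auth_env)

with no_agent():
    assert "SSH_AUTH_SOCK" not in os.environ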
def rst_blocks(script_blocks, output_blocks, file_conf, gallery_conf):
"""Generates the rst string containing the script prose, code and output
Parameters
----------
script_blocks : list
(label, content, line_number)
List where each element is a tuple with the label ('text' or 'code'),
the corresponding content string of block and the leading line number
output_blocks : list
List of strings where each element is the restructured text
representation of the output of each block
file_conf : dict
File-specific settings given in source file comments as:
``# sphinx_gallery_<name> = <value>``
gallery_conf : dict
Contains the configuration of Sphinx-Gallery
Returns
-------
out : str
rst notebook
"""
# A simple example has two blocks: one for the
# example introduction/explanation and one for the code
is_example_notebook_like = len(script_blocks) > 2
example_rst = u"" # there can be unicode content
for (blabel, bcontent, lineno), code_output in \
zip(script_blocks, output_blocks):
if blabel == 'code':
if not file_conf.get('line_numbers',
gallery_conf.get('line_numbers', False)):
lineno = None
code_rst = codestr2rst(bcontent, lang=gallery_conf['lang'],
lineno=lineno) + '\n'
if is_example_notebook_like:
example_rst += code_rst
example_rst += code_output
else:
example_rst += code_output
if 'sphx-glr-script-out' in code_output:
# Add some vertical space after output
example_rst += "\n\n|\n\n"
example_rst += code_rst
else:
block_separator = '\n\n' if not bcontent.endswith('\n') else '\n'
example_rst += bcontent + block_separator
return example_rst | def function[rst_blocks, parameter[script_blocks, output_blocks, file_conf, gallery_conf]]:
constant[Generates the rst string containing the script prose, code and output
Parameters
----------
script_blocks : list
(label, content, line_number)
List where each element is a tuple with the label ('text' or 'code'),
the corresponding content string of block and the leading line number
output_blocks : list
List of strings where each element is the restructured text
representation of the output of each block
file_conf : dict
File-specific settings given in source file comments as:
``# sphinx_gallery_<name> = <value>``
gallery_conf : dict
Contains the configuration of Sphinx-Gallery
Returns
-------
out : str
rst notebook
]
variable[is_example_notebook_like] assign[=] compare[call[name[len], parameter[name[script_blocks]]] greater[>] constant[2]]
variable[example_rst] assign[=] constant[]
for taget[tuple[[<ast.Tuple object at 0x7da1b26ac790>, <ast.Name object at 0x7da1b26ae890>]]] in starred[call[name[zip], parameter[name[script_blocks], name[output_blocks]]]] begin[:]
if compare[name[blabel] equal[==] constant[code]] begin[:]
if <ast.UnaryOp object at 0x7da1b26ae7a0> begin[:]
variable[lineno] assign[=] constant[None]
variable[code_rst] assign[=] binary_operation[call[name[codestr2rst], parameter[name[bcontent]]] + constant[
]]
if name[is_example_notebook_like] begin[:]
<ast.AugAssign object at 0x7da1b26ac040>
<ast.AugAssign object at 0x7da1b26ad630>
return[name[example_rst]] | keyword[def] identifier[rst_blocks] ( identifier[script_blocks] , identifier[output_blocks] , identifier[file_conf] , identifier[gallery_conf] ):
literal[string]
identifier[is_example_notebook_like] = identifier[len] ( identifier[script_blocks] )> literal[int]
identifier[example_rst] = literal[string]
keyword[for] ( identifier[blabel] , identifier[bcontent] , identifier[lineno] ), identifier[code_output] keyword[in] identifier[zip] ( identifier[script_blocks] , identifier[output_blocks] ):
keyword[if] identifier[blabel] == literal[string] :
keyword[if] keyword[not] identifier[file_conf] . identifier[get] ( literal[string] ,
identifier[gallery_conf] . identifier[get] ( literal[string] , keyword[False] )):
identifier[lineno] = keyword[None]
identifier[code_rst] = identifier[codestr2rst] ( identifier[bcontent] , identifier[lang] = identifier[gallery_conf] [ literal[string] ],
identifier[lineno] = identifier[lineno] )+ literal[string]
keyword[if] identifier[is_example_notebook_like] :
identifier[example_rst] += identifier[code_rst]
identifier[example_rst] += identifier[code_output]
keyword[else] :
identifier[example_rst] += identifier[code_output]
keyword[if] literal[string] keyword[in] identifier[code_output] :
identifier[example_rst] += literal[string]
identifier[example_rst] += identifier[code_rst]
keyword[else] :
identifier[block_separator] = literal[string] keyword[if] keyword[not] identifier[bcontent] . identifier[endswith] ( literal[string] ) keyword[else] literal[string]
identifier[example_rst] += identifier[bcontent] + identifier[block_separator]
keyword[return] identifier[example_rst] | def rst_blocks(script_blocks, output_blocks, file_conf, gallery_conf):
"""Generates the rst string containing the script prose, code and output
Parameters
----------
script_blocks : list
(label, content, line_number)
List where each element is a tuple with the label ('text' or 'code'),
the corresponding content string of block and the leading line number
output_blocks : list
List of strings where each element is the restructured text
representation of the output of each block
file_conf : dict
File-specific settings given in source file comments as:
``# sphinx_gallery_<name> = <value>``
gallery_conf : dict
Contains the configuration of Sphinx-Gallery
Returns
-------
out : str
rst notebook
"""
# A simple example has two blocks: one for the
# example introduction/explanation and one for the code
is_example_notebook_like = len(script_blocks) > 2
example_rst = u'' # there can be unicode content
for ((blabel, bcontent, lineno), code_output) in zip(script_blocks, output_blocks):
if blabel == 'code':
if not file_conf.get('line_numbers', gallery_conf.get('line_numbers', False)):
lineno = None # depends on [control=['if'], data=[]]
code_rst = codestr2rst(bcontent, lang=gallery_conf['lang'], lineno=lineno) + '\n'
if is_example_notebook_like:
example_rst += code_rst
example_rst += code_output # depends on [control=['if'], data=[]]
else:
example_rst += code_output
if 'sphx-glr-script-out' in code_output:
# Add some vertical space after output
example_rst += '\n\n|\n\n' # depends on [control=['if'], data=[]]
example_rst += code_rst # depends on [control=['if'], data=[]]
else:
block_separator = '\n\n' if not bcontent.endswith('\n') else '\n'
example_rst += bcontent + block_separator # depends on [control=['for'], data=[]]
return example_rst |
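# Usage sketch for rst_blocks above; the block tuples and the rST output
# string are invented, and codestr2rst (a sphinx-gallery helper) is assumed
# to be importable in the same module:
script_blocks = [('text', 'A short intro paragraph.\n', 1),
                 ('code', "print('hello')\n", 4)]
output_blocks = ['', '\n.. rst-class:: sphx-glr-script-out\n\n Out::\n\n    hello\n']
rst = rst_blocks(script_blocks, output_blocks, file_conf={},
                 gallery_conf={'lang': 'python', 'line_numbers': False})
print(rst)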
def read_model(self):
""" Read the model and the couplings from the model file.
"""
if self.verbosity > 0:
settings.m(0,'reading model',self.model)
# read model
boolRules = []
for line in open(self.model):
if line.startswith('#') and 'modelType =' in line:
keyval = line
if '|' in line:
keyval, type = line.split('|')[:2]
self.modelType = keyval.split('=')[1].strip()
if line.startswith('#') and 'invTimeStep =' in line:
keyval = line
if '|' in line:
keyval, type = line.split('|')[:2]
self.invTimeStep = float(keyval.split('=')[1].strip())
if not line.startswith('#'):
boolRules.append([s.strip() for s in line.split('=')])
if line.startswith('# coupling list:'):
break
self.dim = len(boolRules)
self.boolRules = collections.OrderedDict(boolRules)
self.varNames = collections.OrderedDict([(s, i)
for i, s in enumerate(self.boolRules.keys())])
names = self.varNames
# read couplings via names
self.Coupl = np.zeros((self.dim, self.dim))
boolContinue = True
for line in open(self.model): # open(self.model.replace('/model','/couplList')):
if line.startswith('# coupling list:'):
boolContinue = False
if boolContinue:
continue
if not line.startswith('#'):
gps, gs, val = line.strip().split()
self.Coupl[int(names[gps]), int(names[gs])] = float(val)
    # adjacency matrices
self.Adj_signed = np.sign(self.Coupl)
self.Adj = np.abs(np.array(self.Adj_signed))
# build bool coefficients (necessary for odefy type
# version of the discrete model)
self.build_boolCoeff() | def function[read_model, parameter[self]]:
constant[ Read the model and the couplings from the model file.
]
if compare[name[self].verbosity greater[>] constant[0]] begin[:]
call[name[settings].m, parameter[constant[0], constant[reading model], name[self].model]]
variable[boolRules] assign[=] list[[]]
for taget[name[line]] in starred[call[name[open], parameter[name[self].model]]] begin[:]
if <ast.BoolOp object at 0x7da20c6c44c0> begin[:]
variable[keyval] assign[=] name[line]
if compare[constant[|] in name[line]] begin[:]
<ast.Tuple object at 0x7da20c6c6650> assign[=] call[call[name[line].split, parameter[constant[|]]]][<ast.Slice object at 0x7da20c6c7550>]
name[self].modelType assign[=] call[call[call[name[keyval].split, parameter[constant[=]]]][constant[1]].strip, parameter[]]
if <ast.BoolOp object at 0x7da20c6c5840> begin[:]
variable[keyval] assign[=] name[line]
if compare[constant[|] in name[line]] begin[:]
<ast.Tuple object at 0x7da20c6c70d0> assign[=] call[call[name[line].split, parameter[constant[|]]]][<ast.Slice object at 0x7da20c6c7a00>]
name[self].invTimeStep assign[=] call[name[float], parameter[call[call[call[name[keyval].split, parameter[constant[=]]]][constant[1]].strip, parameter[]]]]
if <ast.UnaryOp object at 0x7da20c6c6830> begin[:]
call[name[boolRules].append, parameter[<ast.ListComp object at 0x7da20c6c6470>]]
if call[name[line].startswith, parameter[constant[# coupling list:]]] begin[:]
break
name[self].dim assign[=] call[name[len], parameter[name[boolRules]]]
name[self].boolRules assign[=] call[name[collections].OrderedDict, parameter[name[boolRules]]]
name[self].varNames assign[=] call[name[collections].OrderedDict, parameter[<ast.ListComp object at 0x7da1b2344b50>]]
variable[names] assign[=] name[self].varNames
name[self].Coupl assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da1b2346d70>, <ast.Attribute object at 0x7da1b2345630>]]]]
variable[boolContinue] assign[=] constant[True]
for taget[name[line]] in starred[call[name[open], parameter[name[self].model]]] begin[:]
if call[name[line].startswith, parameter[constant[# coupling list:]]] begin[:]
variable[boolContinue] assign[=] constant[False]
if name[boolContinue] begin[:]
continue
if <ast.UnaryOp object at 0x7da1b2347850> begin[:]
<ast.Tuple object at 0x7da204567bb0> assign[=] call[call[name[line].strip, parameter[]].split, parameter[]]
call[name[self].Coupl][tuple[[<ast.Call object at 0x7da204567e50>, <ast.Call object at 0x7da2054a5840>]]] assign[=] call[name[float], parameter[name[val]]]
name[self].Adj_signed assign[=] call[name[np].sign, parameter[name[self].Coupl]]
name[self].Adj assign[=] call[name[np].abs, parameter[call[name[np].array, parameter[name[self].Adj_signed]]]]
call[name[self].build_boolCoeff, parameter[]] | keyword[def] identifier[read_model] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[verbosity] > literal[int] :
identifier[settings] . identifier[m] ( literal[int] , literal[string] , identifier[self] . identifier[model] )
identifier[boolRules] =[]
keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[self] . identifier[model] ):
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[line] :
identifier[keyval] = identifier[line]
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[keyval] , identifier[type] = identifier[line] . identifier[split] ( literal[string] )[: literal[int] ]
identifier[self] . identifier[modelType] = identifier[keyval] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ()
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[line] :
identifier[keyval] = identifier[line]
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[keyval] , identifier[type] = identifier[line] . identifier[split] ( literal[string] )[: literal[int] ]
identifier[self] . identifier[invTimeStep] = identifier[float] ( identifier[keyval] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ())
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[boolRules] . identifier[append] ([ identifier[s] . identifier[strip] () keyword[for] identifier[s] keyword[in] identifier[line] . identifier[split] ( literal[string] )])
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[break]
identifier[self] . identifier[dim] = identifier[len] ( identifier[boolRules] )
identifier[self] . identifier[boolRules] = identifier[collections] . identifier[OrderedDict] ( identifier[boolRules] )
identifier[self] . identifier[varNames] = identifier[collections] . identifier[OrderedDict] ([( identifier[s] , identifier[i] )
keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[self] . identifier[boolRules] . identifier[keys] ())])
identifier[names] = identifier[self] . identifier[varNames]
identifier[self] . identifier[Coupl] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[dim] , identifier[self] . identifier[dim] ))
identifier[boolContinue] = keyword[True]
keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[self] . identifier[model] ):
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[boolContinue] = keyword[False]
keyword[if] identifier[boolContinue] :
keyword[continue]
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[gps] , identifier[gs] , identifier[val] = identifier[line] . identifier[strip] (). identifier[split] ()
identifier[self] . identifier[Coupl] [ identifier[int] ( identifier[names] [ identifier[gps] ]), identifier[int] ( identifier[names] [ identifier[gs] ])]= identifier[float] ( identifier[val] )
identifier[self] . identifier[Adj_signed] = identifier[np] . identifier[sign] ( identifier[self] . identifier[Coupl] )
identifier[self] . identifier[Adj] = identifier[np] . identifier[abs] ( identifier[np] . identifier[array] ( identifier[self] . identifier[Adj_signed] ))
identifier[self] . identifier[build_boolCoeff] () | def read_model(self):
""" Read the model and the couplings from the model file.
"""
if self.verbosity > 0:
settings.m(0, 'reading model', self.model) # depends on [control=['if'], data=[]]
# read model
boolRules = []
for line in open(self.model):
if line.startswith('#') and 'modelType =' in line:
keyval = line
if '|' in line:
(keyval, type) = line.split('|')[:2] # depends on [control=['if'], data=['line']]
self.modelType = keyval.split('=')[1].strip() # depends on [control=['if'], data=[]]
if line.startswith('#') and 'invTimeStep =' in line:
keyval = line
if '|' in line:
(keyval, type) = line.split('|')[:2] # depends on [control=['if'], data=['line']]
self.invTimeStep = float(keyval.split('=')[1].strip()) # depends on [control=['if'], data=[]]
if not line.startswith('#'):
boolRules.append([s.strip() for s in line.split('=')]) # depends on [control=['if'], data=[]]
if line.startswith('# coupling list:'):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
self.dim = len(boolRules)
self.boolRules = collections.OrderedDict(boolRules)
self.varNames = collections.OrderedDict([(s, i) for (i, s) in enumerate(self.boolRules.keys())])
names = self.varNames
# read couplings via names
self.Coupl = np.zeros((self.dim, self.dim))
boolContinue = True
for line in open(self.model): # open(self.model.replace('/model','/couplList')):
if line.startswith('# coupling list:'):
boolContinue = False # depends on [control=['if'], data=[]]
if boolContinue:
continue # depends on [control=['if'], data=[]]
if not line.startswith('#'):
(gps, gs, val) = line.strip().split()
self.Coupl[int(names[gps]), int(names[gs])] = float(val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
    # adjacency matrices
self.Adj_signed = np.sign(self.Coupl)
self.Adj = np.abs(np.array(self.Adj_signed))
# build bool coefficients (necessary for odefy type
# version of the discrete model)
self.build_boolCoeff() |
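# A minimal model file in the layout read_model() parses: '#' header lines
# with '|'-separated type hints, boolean update rules, then the coupling
# section. The rules and coupling values below are invented:
example_model = """\
# modelType = hill | str
# invTimeStep = 0.02 | float
X = Y
Y = X and not Z
Z = X
# coupling list:
X Y 1.0
Y Z -1.0
"""
with open('example_model.txt', 'w') as f:  # hypothetical path
    f.write(example_model)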
def formatar_cep(cep):
"""Formata CEP, removendo qualquer caractere não numérico.
Arguments:
cep {str} -- CEP a ser formatado.
Raises:
ValueError -- Quando a string esta vazia ou não contem numeros.
Returns:
str -- string contendo o CEP formatado.
"""
if not isinstance(cep, str) or not cep:
raise ValueError('CEP deve ser uma string não vazia '
'contendo somente numeros')
return CARACTERES_NUMERICOS.sub('', cep) | def function[formatar_cep, parameter[cep]]:
    constant[Format a CEP (Brazilian postal code), removing any non-numeric character.
    Arguments:
        cep {str} -- CEP to be formatted.
    Raises:
        ValueError -- When the string is empty or does not contain numbers.
    Returns:
        str -- string containing the formatted CEP.
    ]
if <ast.BoolOp object at 0x7da1b1139c90> begin[:]
<ast.Raise object at 0x7da1b1139300>
return[call[name[CARACTERES_NUMERICOS].sub, parameter[constant[], name[cep]]]] | keyword[def] identifier[formatar_cep] ( identifier[cep] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[cep] , identifier[str] ) keyword[or] keyword[not] identifier[cep] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[return] identifier[CARACTERES_NUMERICOS] . identifier[sub] ( literal[string] , identifier[cep] ) | def formatar_cep(cep):
"""Formata CEP, removendo qualquer caractere não numérico.
Arguments:
cep {str} -- CEP a ser formatado.
Raises:
ValueError -- Quando a string esta vazia ou não contem numeros.
Returns:
str -- string contendo o CEP formatado.
"""
if not isinstance(cep, str) or not cep:
raise ValueError('CEP deve ser uma string não vazia contendo somente numeros') # depends on [control=['if'], data=[]]
return CARACTERES_NUMERICOS.sub('', cep) |
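# Quick check, assuming CARACTERES_NUMERICOS is a compiled pattern matching
# every non-digit character (its module-level definition is not shown here):
import re
CARACTERES_NUMERICOS = re.compile(r'[^0-9]')
print(formatar_cep('01310-100'))  # -> '01310100'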
def common_arg_parser():
"""
Create an argparse.ArgumentParser for run_mujoco.py.
"""
parser = arg_parser()
parser.add_argument('--env', help='environment ID', type=str, default='Reacher-v2')
parser.add_argument('--env_type', help='type of environment, used when the environment type cannot be automatically determined', type=str)
parser.add_argument('--seed', help='RNG seed', type=int, default=None)
parser.add_argument('--alg', help='Algorithm', type=str, default='ppo2')
    parser.add_argument('--num_timesteps', type=float, default=1e6)
parser.add_argument('--network', help='network type (mlp, cnn, lstm, cnn_lstm, conv_only)', default=None)
parser.add_argument('--gamestate', help='game state to load (so far only used in retro games)', default=None)
parser.add_argument('--num_env', help='Number of environment copies being run in parallel. When not specified, set to number of cpus for Atari, and to 1 for Mujoco', default=None, type=int)
parser.add_argument('--reward_scale', help='Reward scale factor. Default: 1.0', default=1.0, type=float)
parser.add_argument('--save_path', help='Path to save trained model to', default=None, type=str)
parser.add_argument('--save_video_interval', help='Save video every x steps (0 = disabled)', default=0, type=int)
parser.add_argument('--save_video_length', help='Length of recorded video. Default: 200', default=200, type=int)
parser.add_argument('--play', default=False, action='store_true')
return parser | def function[common_arg_parser, parameter[]]:
constant[
Create an argparse.ArgumentParser for run_mujoco.py.
]
variable[parser] assign[=] call[name[arg_parser], parameter[]]
call[name[parser].add_argument, parameter[constant[--env]]]
call[name[parser].add_argument, parameter[constant[--env_type]]]
call[name[parser].add_argument, parameter[constant[--seed]]]
call[name[parser].add_argument, parameter[constant[--alg]]]
tuple[[<ast.Call object at 0x7da18dc9b640>]]
call[name[parser].add_argument, parameter[constant[--network]]]
call[name[parser].add_argument, parameter[constant[--gamestate]]]
call[name[parser].add_argument, parameter[constant[--num_env]]]
call[name[parser].add_argument, parameter[constant[--reward_scale]]]
call[name[parser].add_argument, parameter[constant[--save_path]]]
call[name[parser].add_argument, parameter[constant[--save_video_interval]]]
call[name[parser].add_argument, parameter[constant[--save_video_length]]]
call[name[parser].add_argument, parameter[constant[--play]]]
return[name[parser]] | keyword[def] identifier[common_arg_parser] ():
literal[string]
identifier[parser] = identifier[arg_parser] ()
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[type] = identifier[str] , identifier[default] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[type] = identifier[str] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[type] = identifier[int] , identifier[default] = keyword[None] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[type] = identifier[str] , identifier[default] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[float] , identifier[default] = literal[int] ),
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = keyword[None] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = keyword[None] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = keyword[None] , identifier[type] = identifier[int] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = literal[int] , identifier[type] = identifier[float] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = keyword[None] , identifier[type] = identifier[str] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = literal[int] , identifier[type] = identifier[int] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[default] = literal[int] , identifier[type] = identifier[int] )
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] )
keyword[return] identifier[parser] | def common_arg_parser():
"""
Create an argparse.ArgumentParser for run_mujoco.py.
"""
parser = arg_parser()
parser.add_argument('--env', help='environment ID', type=str, default='Reacher-v2')
parser.add_argument('--env_type', help='type of environment, used when the environment type cannot be automatically determined', type=str)
parser.add_argument('--seed', help='RNG seed', type=int, default=None)
parser.add_argument('--alg', help='Algorithm', type=str, default='ppo2')
    parser.add_argument('--num_timesteps', type=float, default=1000000.0)
parser.add_argument('--network', help='network type (mlp, cnn, lstm, cnn_lstm, conv_only)', default=None)
parser.add_argument('--gamestate', help='game state to load (so far only used in retro games)', default=None)
parser.add_argument('--num_env', help='Number of environment copies being run in parallel. When not specified, set to number of cpus for Atari, and to 1 for Mujoco', default=None, type=int)
parser.add_argument('--reward_scale', help='Reward scale factor. Default: 1.0', default=1.0, type=float)
parser.add_argument('--save_path', help='Path to save trained model to', default=None, type=str)
parser.add_argument('--save_video_interval', help='Save video every x steps (0 = disabled)', default=0, type=int)
parser.add_argument('--save_video_length', help='Length of recorded video. Default: 200', default=200, type=int)
parser.add_argument('--play', default=False, action='store_true')
return parser |
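# Example invocation; arg_parser is assumed to be a thin wrapper around
# argparse.ArgumentParser (as in baselines.common.cmd_util), stubbed here:
import argparse
arg_parser = argparse.ArgumentParser  # stand-in for the real helper
parser = common_arg_parser()
args = parser.parse_args(['--env', 'Hopper-v2', '--num_timesteps', '2e6', '--play'])
print(args.env, args.num_timesteps, args.play)  # Hopper-v2 2000000.0 True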
def from_netcdf(filename):
"""Initialize object from a netcdf file.
Expects that the file will have groups, each of which can be loaded by xarray.
Parameters
----------
filename : str
location of netcdf file
Returns
-------
InferenceData object
"""
groups = {}
with nc.Dataset(filename, mode="r") as data:
data_groups = list(data.groups)
for group in data_groups:
with xr.open_dataset(filename, group=group) as data:
groups[group] = data
return InferenceData(**groups) | def function[from_netcdf, parameter[filename]]:
constant[Initialize object from a netcdf file.
Expects that the file will have groups, each of which can be loaded by xarray.
Parameters
----------
filename : str
location of netcdf file
Returns
-------
InferenceData object
]
variable[groups] assign[=] dictionary[[], []]
with call[name[nc].Dataset, parameter[name[filename]]] begin[:]
variable[data_groups] assign[=] call[name[list], parameter[name[data].groups]]
for taget[name[group]] in starred[name[data_groups]] begin[:]
with call[name[xr].open_dataset, parameter[name[filename]]] begin[:]
call[name[groups]][name[group]] assign[=] name[data]
return[call[name[InferenceData], parameter[]]] | keyword[def] identifier[from_netcdf] ( identifier[filename] ):
literal[string]
identifier[groups] ={}
keyword[with] identifier[nc] . identifier[Dataset] ( identifier[filename] , identifier[mode] = literal[string] ) keyword[as] identifier[data] :
identifier[data_groups] = identifier[list] ( identifier[data] . identifier[groups] )
keyword[for] identifier[group] keyword[in] identifier[data_groups] :
keyword[with] identifier[xr] . identifier[open_dataset] ( identifier[filename] , identifier[group] = identifier[group] ) keyword[as] identifier[data] :
identifier[groups] [ identifier[group] ]= identifier[data]
keyword[return] identifier[InferenceData] (** identifier[groups] ) | def from_netcdf(filename):
"""Initialize object from a netcdf file.
Expects that the file will have groups, each of which can be loaded by xarray.
Parameters
----------
filename : str
location of netcdf file
Returns
-------
InferenceData object
"""
groups = {}
with nc.Dataset(filename, mode='r') as data:
data_groups = list(data.groups) # depends on [control=['with'], data=['data']]
for group in data_groups:
with xr.open_dataset(filename, group=group) as data:
groups[group] = data # depends on [control=['with'], data=['data']] # depends on [control=['for'], data=['group']]
return InferenceData(**groups) |
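# Round-trip sketch: write a single 'posterior' group with xarray, then load
# it back; 'trace.nc' is an arbitrary name, and the netCDF4 backend plus the
# module-level nc/xr/InferenceData imports are assumed to be available:
import xarray
xarray.Dataset({'theta': ('draw', [0.1, 0.2, 0.3])}).to_netcdf(
    'trace.nc', mode='w', group='posterior')
idata = from_netcdf('trace.nc')
print(idata.posterior)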
def deregister_image(self, image_id, delete_snapshot=False):
"""
Unregister an AMI.
:type image_id: string
:param image_id: the ID of the Image to unregister
:type delete_snapshot: bool
:param delete_snapshot: Set to True if we should delete the
snapshot associated with an EBS volume
mounted at /dev/sda1
:rtype: bool
:return: True if successful
"""
snapshot_id = None
if delete_snapshot:
image = self.get_image(image_id)
for key in image.block_device_mapping:
if key == "/dev/sda1":
snapshot_id = image.block_device_mapping[key].snapshot_id
break
result = self.get_status('DeregisterImage',
{'ImageId':image_id}, verb='POST')
if result and snapshot_id:
return result and self.delete_snapshot(snapshot_id)
return result | def function[deregister_image, parameter[self, image_id, delete_snapshot]]:
constant[
Unregister an AMI.
:type image_id: string
:param image_id: the ID of the Image to unregister
:type delete_snapshot: bool
:param delete_snapshot: Set to True if we should delete the
snapshot associated with an EBS volume
mounted at /dev/sda1
:rtype: bool
:return: True if successful
]
variable[snapshot_id] assign[=] constant[None]
if name[delete_snapshot] begin[:]
variable[image] assign[=] call[name[self].get_image, parameter[name[image_id]]]
for taget[name[key]] in starred[name[image].block_device_mapping] begin[:]
if compare[name[key] equal[==] constant[/dev/sda1]] begin[:]
variable[snapshot_id] assign[=] call[name[image].block_device_mapping][name[key]].snapshot_id
break
variable[result] assign[=] call[name[self].get_status, parameter[constant[DeregisterImage], dictionary[[<ast.Constant object at 0x7da1b2650220>], [<ast.Name object at 0x7da1b26508b0>]]]]
if <ast.BoolOp object at 0x7da1b26513f0> begin[:]
return[<ast.BoolOp object at 0x7da1b2650070>]
return[name[result]] | keyword[def] identifier[deregister_image] ( identifier[self] , identifier[image_id] , identifier[delete_snapshot] = keyword[False] ):
literal[string]
identifier[snapshot_id] = keyword[None]
keyword[if] identifier[delete_snapshot] :
identifier[image] = identifier[self] . identifier[get_image] ( identifier[image_id] )
keyword[for] identifier[key] keyword[in] identifier[image] . identifier[block_device_mapping] :
keyword[if] identifier[key] == literal[string] :
identifier[snapshot_id] = identifier[image] . identifier[block_device_mapping] [ identifier[key] ]. identifier[snapshot_id]
keyword[break]
identifier[result] = identifier[self] . identifier[get_status] ( literal[string] ,
{ literal[string] : identifier[image_id] }, identifier[verb] = literal[string] )
keyword[if] identifier[result] keyword[and] identifier[snapshot_id] :
keyword[return] identifier[result] keyword[and] identifier[self] . identifier[delete_snapshot] ( identifier[snapshot_id] )
keyword[return] identifier[result] | def deregister_image(self, image_id, delete_snapshot=False):
"""
Unregister an AMI.
:type image_id: string
:param image_id: the ID of the Image to unregister
:type delete_snapshot: bool
:param delete_snapshot: Set to True if we should delete the
snapshot associated with an EBS volume
mounted at /dev/sda1
:rtype: bool
:return: True if successful
"""
snapshot_id = None
if delete_snapshot:
image = self.get_image(image_id)
for key in image.block_device_mapping:
if key == '/dev/sda1':
snapshot_id = image.block_device_mapping[key].snapshot_id
break # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
result = self.get_status('DeregisterImage', {'ImageId': image_id}, verb='POST')
if result and snapshot_id:
return result and self.delete_snapshot(snapshot_id) # depends on [control=['if'], data=[]]
return result |
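# Hypothetical usage (live AWS call): needs boto credentials configured and
# a real AMI id in place of the placeholder:
import boto
conn = boto.connect_ec2()
conn.deregister_image('ami-1a2b3c4d', delete_snapshot=True)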
def resolve_reference(self, ref):
"""
Resolve a JSON Pointer object reference to the object itself.
:param ref: Reference string (`#/foo/bar`, for instance)
:return: The object, if found
:raises jsonschema.exceptions.RefResolutionError: if there is trouble resolving the reference
"""
url, resolved = self.resolver.resolve(ref)
return resolved | def function[resolve_reference, parameter[self, ref]]:
constant[
Resolve a JSON Pointer object reference to the object itself.
:param ref: Reference string (`#/foo/bar`, for instance)
:return: The object, if found
:raises jsonschema.exceptions.RefResolutionError: if there is trouble resolving the reference
]
<ast.Tuple object at 0x7da1b2346950> assign[=] call[name[self].resolver.resolve, parameter[name[ref]]]
return[name[resolved]] | keyword[def] identifier[resolve_reference] ( identifier[self] , identifier[ref] ):
literal[string]
identifier[url] , identifier[resolved] = identifier[self] . identifier[resolver] . identifier[resolve] ( identifier[ref] )
keyword[return] identifier[resolved] | def resolve_reference(self, ref):
"""
Resolve a JSON Pointer object reference to the object itself.
:param ref: Reference string (`#/foo/bar`, for instance)
:return: The object, if found
:raises jsonschema.exceptions.RefResolutionError: if there is trouble resolving the reference
"""
(url, resolved) = self.resolver.resolve(ref)
return resolved |
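# The same resolution step shown standalone with jsonschema.RefResolver,
# which is what self.resolver is assumed to be; the schema is made up:
from jsonschema import RefResolver
schema = {'definitions': {'name': {'type': 'string'}}}
resolver = RefResolver.from_schema(schema)
url, resolved = resolver.resolve('#/definitions/name')
print(resolved)  # {'type': 'string'}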
def stop(self):
"""
Stop the monitoring thread of the plugin.
The super-class will send the stop signal on the monitor-IP queue,
which prompts the loop to stop.
"""
super(Multi, self).stop()
self.monitor_thread.join()
logging.info("Multi-plugin health monitor: Stopping plugins")
for p in self.plugins:
p.stop()
logging.info("Multi-plugin health monitor: Stopped") | def function[stop, parameter[self]]:
constant[
Stop the monitoring thread of the plugin.
The super-class will send the stop signal on the monitor-IP queue,
which prompts the loop to stop.
]
call[call[name[super], parameter[name[Multi], name[self]]].stop, parameter[]]
call[name[self].monitor_thread.join, parameter[]]
call[name[logging].info, parameter[constant[Multi-plugin health monitor: Stopping plugins]]]
for taget[name[p]] in starred[name[self].plugins] begin[:]
call[name[p].stop, parameter[]]
call[name[logging].info, parameter[constant[Multi-plugin health monitor: Stopped]]] | keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[Multi] , identifier[self] ). identifier[stop] ()
identifier[self] . identifier[monitor_thread] . identifier[join] ()
identifier[logging] . identifier[info] ( literal[string] )
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[plugins] :
identifier[p] . identifier[stop] ()
identifier[logging] . identifier[info] ( literal[string] ) | def stop(self):
"""
Stop the monitoring thread of the plugin.
The super-class will send the stop signal on the monitor-IP queue,
which prompts the loop to stop.
"""
super(Multi, self).stop()
self.monitor_thread.join()
logging.info('Multi-plugin health monitor: Stopping plugins')
for p in self.plugins:
p.stop() # depends on [control=['for'], data=['p']]
logging.info('Multi-plugin health monitor: Stopped') |
def get_queryset(self):
'''
Only serve site-specific languages
'''
request = self.request
return (Languages.for_site(request.site)
.languages.filter().order_by('pk')) | def function[get_queryset, parameter[self]]:
constant[
Only serve site-specific languages
]
variable[request] assign[=] name[self].request
return[call[call[call[name[Languages].for_site, parameter[name[request].site]].languages.filter, parameter[]].order_by, parameter[constant[pk]]]] | keyword[def] identifier[get_queryset] ( identifier[self] ):
literal[string]
identifier[request] = identifier[self] . identifier[request]
keyword[return] ( identifier[Languages] . identifier[for_site] ( identifier[request] . identifier[site] )
. identifier[languages] . identifier[filter] (). identifier[order_by] ( literal[string] )) | def get_queryset(self):
"""
Only serve site-specific languages
"""
request = self.request
return Languages.for_site(request.site).languages.filter().order_by('pk') |
def add_to_emails(self, *emails):
"""
:calls: `POST /user/emails <http://developer.github.com/v3/users/emails>`_
:param email: string
:rtype: None
"""
assert all(isinstance(element, (str, unicode)) for element in emails), emails
post_parameters = emails
headers, data = self._requester.requestJsonAndCheck(
"POST",
"/user/emails",
input=post_parameters
) | def function[add_to_emails, parameter[self]]:
constant[
:calls: `POST /user/emails <http://developer.github.com/v3/users/emails>`_
:param email: string
:rtype: None
]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b1f486a0>]]]
variable[post_parameters] assign[=] name[emails]
<ast.Tuple object at 0x7da1b1f49480> assign[=] call[name[self]._requester.requestJsonAndCheck, parameter[constant[POST], constant[/user/emails]]] | keyword[def] identifier[add_to_emails] ( identifier[self] ,* identifier[emails] ):
literal[string]
keyword[assert] identifier[all] ( identifier[isinstance] ( identifier[element] ,( identifier[str] , identifier[unicode] )) keyword[for] identifier[element] keyword[in] identifier[emails] ), identifier[emails]
identifier[post_parameters] = identifier[emails]
identifier[headers] , identifier[data] = identifier[self] . identifier[_requester] . identifier[requestJsonAndCheck] (
literal[string] ,
literal[string] ,
identifier[input] = identifier[post_parameters]
) | def add_to_emails(self, *emails):
"""
:calls: `POST /user/emails <http://developer.github.com/v3/users/emails>`_
:param email: string
:rtype: None
"""
assert all((isinstance(element, (str, unicode)) for element in emails)), emails
post_parameters = emails
(headers, data) = self._requester.requestJsonAndCheck('POST', '/user/emails', input=post_parameters) |
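# Hypothetical call against the live GitHub API; requires a valid token,
# and the address is a placeholder:
from github import Github
user = Github('<personal-access-token>').get_user()
user.add_to_emails('dev@example.com')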
def run_with_reloader(main_func, extra_files=None, interval=1, reloader_type="auto"):
"""Run the given function in an independent python interpreter."""
import signal
reloader = reloader_loops[reloader_type](extra_files, interval)
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
try:
if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
ensure_echo_on()
t = threading.Thread(target=main_func, args=())
t.setDaemon(True)
t.start()
reloader.run()
else:
sys.exit(reloader.restart_with_reloader())
except KeyboardInterrupt:
pass | def function[run_with_reloader, parameter[main_func, extra_files, interval, reloader_type]]:
constant[Run the given function in an independent python interpreter.]
import module[signal]
variable[reloader] assign[=] call[call[name[reloader_loops]][name[reloader_type]], parameter[name[extra_files], name[interval]]]
call[name[signal].signal, parameter[name[signal].SIGTERM, <ast.Lambda object at 0x7da204622260>]]
<ast.Try object at 0x7da204623d30> | keyword[def] identifier[run_with_reloader] ( identifier[main_func] , identifier[extra_files] = keyword[None] , identifier[interval] = literal[int] , identifier[reloader_type] = literal[string] ):
literal[string]
keyword[import] identifier[signal]
identifier[reloader] = identifier[reloader_loops] [ identifier[reloader_type] ]( identifier[extra_files] , identifier[interval] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGTERM] , keyword[lambda] * identifier[args] : identifier[sys] . identifier[exit] ( literal[int] ))
keyword[try] :
keyword[if] identifier[os] . identifier[environ] . identifier[get] ( literal[string] )== literal[string] :
identifier[ensure_echo_on] ()
identifier[t] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[main_func] , identifier[args] =())
identifier[t] . identifier[setDaemon] ( keyword[True] )
identifier[t] . identifier[start] ()
identifier[reloader] . identifier[run] ()
keyword[else] :
identifier[sys] . identifier[exit] ( identifier[reloader] . identifier[restart_with_reloader] ())
keyword[except] identifier[KeyboardInterrupt] :
keyword[pass] | def run_with_reloader(main_func, extra_files=None, interval=1, reloader_type='auto'):
"""Run the given function in an independent python interpreter."""
import signal
reloader = reloader_loops[reloader_type](extra_files, interval)
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
try:
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
ensure_echo_on()
t = threading.Thread(target=main_func, args=())
t.setDaemon(True)
t.start()
reloader.run() # depends on [control=['if'], data=[]]
else:
sys.exit(reloader.restart_with_reloader()) # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
pass # depends on [control=['except'], data=[]] |
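# The restart half of the protocol, sketched roughly as werkzeug implements
# it: the parent re-runs the script with WERKZEUG_RUN_MAIN=true and treats
# the conventional exit code 3 as "source changed, restart":
import os
import subprocess
import sys
def _restart_with_reloader():
    while True:
        env = dict(os.environ, WERKZEUG_RUN_MAIN='true')
        exit_code = subprocess.call([sys.executable] + sys.argv, env=env)
        if exit_code != 3:
            return exit_code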
def _get_default_letters(model_admin=None):
"""
Returns the set of letters defined in the configuration variable
DEFAULT_ALPHABET. DEFAULT_ALPHABET can be a callable, string, tuple, or
list and returns a set.
If a ModelAdmin class is passed, it will look for a DEFAULT_ALPHABET
attribute and use it instead.
"""
from django.conf import settings
import string
default_ltrs = string.digits + string.ascii_uppercase
default_letters = getattr(settings, 'DEFAULT_ALPHABET', default_ltrs)
if model_admin and hasattr(model_admin, 'DEFAULT_ALPHABET'):
default_letters = model_admin.DEFAULT_ALPHABET
if callable(default_letters):
return set(default_letters())
    elif isinstance(default_letters, str):
        return set([x for x in default_letters])
    elif isinstance(default_letters, bytes):
        # the original second str branch was unreachable; bytes need decoding
        return set([x for x in default_letters.decode('utf8')])
elif isinstance(default_letters, (tuple, list)):
return set(default_letters) | def function[_get_default_letters, parameter[model_admin]]:
constant[
Returns the set of letters defined in the configuration variable
DEFAULT_ALPHABET. DEFAULT_ALPHABET can be a callable, string, tuple, or
list and returns a set.
If a ModelAdmin class is passed, it will look for a DEFAULT_ALPHABET
attribute and use it instead.
]
from relative_module[django.conf] import module[settings]
import module[string]
variable[default_ltrs] assign[=] binary_operation[name[string].digits + name[string].ascii_uppercase]
variable[default_letters] assign[=] call[name[getattr], parameter[name[settings], constant[DEFAULT_ALPHABET], name[default_ltrs]]]
if <ast.BoolOp object at 0x7da18f09e7d0> begin[:]
variable[default_letters] assign[=] name[model_admin].DEFAULT_ALPHABET
if call[name[callable], parameter[name[default_letters]]] begin[:]
return[call[name[set], parameter[call[name[default_letters], parameter[]]]]] | keyword[def] identifier[_get_default_letters] ( identifier[model_admin] = keyword[None] ):
literal[string]
keyword[from] identifier[django] . identifier[conf] keyword[import] identifier[settings]
keyword[import] identifier[string]
identifier[default_ltrs] = identifier[string] . identifier[digits] + identifier[string] . identifier[ascii_uppercase]
identifier[default_letters] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[default_ltrs] )
keyword[if] identifier[model_admin] keyword[and] identifier[hasattr] ( identifier[model_admin] , literal[string] ):
identifier[default_letters] = identifier[model_admin] . identifier[DEFAULT_ALPHABET]
keyword[if] identifier[callable] ( identifier[default_letters] ):
keyword[return] identifier[set] ( identifier[default_letters] ())
keyword[elif] identifier[isinstance] ( identifier[default_letters] , identifier[str] ):
keyword[return] identifier[set] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[default_letters] ])
 keyword[elif] identifier[isinstance] ( identifier[default_letters] , identifier[bytes] ):
keyword[return] identifier[set] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[default_letters] . identifier[decode] ( literal[string] )])
keyword[elif] identifier[isinstance] ( identifier[default_letters] ,( identifier[tuple] , identifier[list] )):
keyword[return] identifier[set] ( identifier[default_letters] ) | def _get_default_letters(model_admin=None):
"""
Returns the set of letters defined in the configuration variable
DEFAULT_ALPHABET. DEFAULT_ALPHABET can be a callable, string, tuple, or
list and returns a set.
If a ModelAdmin class is passed, it will look for a DEFAULT_ALPHABET
attribute and use it instead.
"""
from django.conf import settings
import string
default_ltrs = string.digits + string.ascii_uppercase
default_letters = getattr(settings, 'DEFAULT_ALPHABET', default_ltrs)
if model_admin and hasattr(model_admin, 'DEFAULT_ALPHABET'):
default_letters = model_admin.DEFAULT_ALPHABET # depends on [control=['if'], data=[]]
if callable(default_letters):
return set(default_letters()) # depends on [control=['if'], data=[]]
elif isinstance(default_letters, str):
return set([x for x in default_letters]) # depends on [control=['if'], data=[]]
    elif isinstance(default_letters, bytes):
        return set([x for x in default_letters.decode('utf8')]) # depends on [control=['if'], data=[]]
elif isinstance(default_letters, (tuple, list)):
return set(default_letters) # depends on [control=['if'], data=[]] |
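# The normalization rules in isolation, without Django settings (mirroring
# the branch order above, including the decoded-bytes case):
def _normalize_letters(letters):
    if callable(letters):
        return set(letters())
    if isinstance(letters, str):
        return set(letters)
    if isinstance(letters, bytes):
        return set(letters.decode('utf8'))
    return set(letters)  # tuple or list
print(sorted(_normalize_letters('CBA')))  # ['A', 'B', 'C']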
def get_agg_data(cls, obj, category=None):
"""
Reduces any Overlay or NdOverlay of Elements into a single
xarray Dataset that can be aggregated.
"""
paths = []
if isinstance(obj, Graph):
obj = obj.edgepaths
kdims = list(obj.kdims)
vdims = list(obj.vdims)
dims = obj.dimensions()[:2]
if isinstance(obj, Path):
glyph = 'line'
for p in obj.split(datatype='dataframe'):
paths.append(p)
elif isinstance(obj, CompositeOverlay):
element = None
for key, el in obj.data.items():
x, y, element, glyph = cls.get_agg_data(el)
dims = (x, y)
df = PandasInterface.as_dframe(element)
if isinstance(obj, NdOverlay):
df = df.assign(**dict(zip(obj.dimensions('key', True), key)))
paths.append(df)
if element is None:
dims = None
else:
kdims += element.kdims
vdims = element.vdims
elif isinstance(obj, Element):
glyph = 'line' if isinstance(obj, Curve) else 'points'
paths.append(PandasInterface.as_dframe(obj))
if dims is None or len(dims) != 2:
return None, None, None, None
else:
x, y = dims
if len(paths) > 1:
if glyph == 'line':
path = paths[0][:1]
if isinstance(path, dd.DataFrame):
path = path.compute()
empty = path.copy()
empty.iloc[0, :] = (np.NaN,) * empty.shape[1]
paths = [elem for p in paths for elem in (p, empty)][:-1]
if all(isinstance(path, dd.DataFrame) for path in paths):
df = dd.concat(paths)
else:
paths = [p.compute() if isinstance(p, dd.DataFrame) else p for p in paths]
df = pd.concat(paths)
else:
df = paths[0] if paths else pd.DataFrame([], columns=[x.name, y.name])
if category and df[category].dtype.name != 'category':
df[category] = df[category].astype('category')
is_dask = isinstance(df, dd.DataFrame)
if any((not is_dask and len(df[d.name]) and isinstance(df[d.name].values[0], cftime_types)) or
df[d.name].dtype.kind == 'M' for d in (x, y)):
df = df.copy()
for d in (x, y):
vals = df[d.name]
if not is_dask and len(vals) and isinstance(vals.values[0], cftime_types):
vals = cftime_to_timestamp(vals, 'ns')
elif df[d.name].dtype.kind == 'M':
vals = vals.astype('datetime64[ns]')
else:
continue
df[d.name] = vals.astype('int64')
return x, y, Dataset(df, kdims=kdims, vdims=vdims), glyph | def function[get_agg_data, parameter[cls, obj, category]]:
constant[
Reduces any Overlay or NdOverlay of Elements into a single
xarray Dataset that can be aggregated.
]
variable[paths] assign[=] list[[]]
if call[name[isinstance], parameter[name[obj], name[Graph]]] begin[:]
variable[obj] assign[=] name[obj].edgepaths
variable[kdims] assign[=] call[name[list], parameter[name[obj].kdims]]
variable[vdims] assign[=] call[name[list], parameter[name[obj].vdims]]
variable[dims] assign[=] call[call[name[obj].dimensions, parameter[]]][<ast.Slice object at 0x7da18bcc9a50>]
if call[name[isinstance], parameter[name[obj], name[Path]]] begin[:]
variable[glyph] assign[=] constant[line]
for taget[name[p]] in starred[call[name[obj].split, parameter[]]] begin[:]
call[name[paths].append, parameter[name[p]]]
if <ast.BoolOp object at 0x7da18bcc9b70> begin[:]
return[tuple[[<ast.Constant object at 0x7da18bccbd90>, <ast.Constant object at 0x7da18bccac20>, <ast.Constant object at 0x7da18bcc8610>, <ast.Constant object at 0x7da18bccb670>]]]
if compare[call[name[len], parameter[name[paths]]] greater[>] constant[1]] begin[:]
if compare[name[glyph] equal[==] constant[line]] begin[:]
variable[path] assign[=] call[call[name[paths]][constant[0]]][<ast.Slice object at 0x7da18bcc96f0>]
if call[name[isinstance], parameter[name[path], name[dd].DataFrame]] begin[:]
variable[path] assign[=] call[name[path].compute, parameter[]]
variable[empty] assign[=] call[name[path].copy, parameter[]]
call[name[empty].iloc][tuple[[<ast.Constant object at 0x7da18bcc8b50>, <ast.Slice object at 0x7da18bcc8d60>]]] assign[=] binary_operation[tuple[[<ast.Attribute object at 0x7da18bcca950>]] * call[name[empty].shape][constant[1]]]
variable[paths] assign[=] call[<ast.ListComp object at 0x7da18bcc9750>][<ast.Slice object at 0x7da18bcca5f0>]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da18bccbf10>]] begin[:]
variable[df] assign[=] call[name[dd].concat, parameter[name[paths]]]
if <ast.BoolOp object at 0x7da18bcc97b0> begin[:]
call[name[df]][name[category]] assign[=] call[call[name[df]][name[category]].astype, parameter[constant[category]]]
variable[is_dask] assign[=] call[name[isinstance], parameter[name[df], name[dd].DataFrame]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18bcc81f0>]] begin[:]
variable[df] assign[=] call[name[df].copy, parameter[]]
for taget[name[d]] in starred[tuple[[<ast.Name object at 0x7da20c9928c0>, <ast.Name object at 0x7da20c992770>]]] begin[:]
variable[vals] assign[=] call[name[df]][name[d].name]
if <ast.BoolOp object at 0x7da20c993ee0> begin[:]
variable[vals] assign[=] call[name[cftime_to_timestamp], parameter[name[vals], constant[ns]]]
call[name[df]][name[d].name] assign[=] call[name[vals].astype, parameter[constant[int64]]]
return[tuple[[<ast.Name object at 0x7da20c992f80>, <ast.Name object at 0x7da20c993f10>, <ast.Call object at 0x7da20c9912d0>, <ast.Name object at 0x7da20c993310>]]] | keyword[def] identifier[get_agg_data] ( identifier[cls] , identifier[obj] , identifier[category] = keyword[None] ):
literal[string]
identifier[paths] =[]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Graph] ):
identifier[obj] = identifier[obj] . identifier[edgepaths]
identifier[kdims] = identifier[list] ( identifier[obj] . identifier[kdims] )
identifier[vdims] = identifier[list] ( identifier[obj] . identifier[vdims] )
identifier[dims] = identifier[obj] . identifier[dimensions] ()[: literal[int] ]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Path] ):
identifier[glyph] = literal[string]
keyword[for] identifier[p] keyword[in] identifier[obj] . identifier[split] ( identifier[datatype] = literal[string] ):
identifier[paths] . identifier[append] ( identifier[p] )
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[CompositeOverlay] ):
identifier[element] = keyword[None]
keyword[for] identifier[key] , identifier[el] keyword[in] identifier[obj] . identifier[data] . identifier[items] ():
identifier[x] , identifier[y] , identifier[element] , identifier[glyph] = identifier[cls] . identifier[get_agg_data] ( identifier[el] )
identifier[dims] =( identifier[x] , identifier[y] )
identifier[df] = identifier[PandasInterface] . identifier[as_dframe] ( identifier[element] )
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[NdOverlay] ):
identifier[df] = identifier[df] . identifier[assign] (** identifier[dict] ( identifier[zip] ( identifier[obj] . identifier[dimensions] ( literal[string] , keyword[True] ), identifier[key] )))
identifier[paths] . identifier[append] ( identifier[df] )
keyword[if] identifier[element] keyword[is] keyword[None] :
identifier[dims] = keyword[None]
keyword[else] :
identifier[kdims] += identifier[element] . identifier[kdims]
identifier[vdims] = identifier[element] . identifier[vdims]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[Element] ):
identifier[glyph] = literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Curve] ) keyword[else] literal[string]
identifier[paths] . identifier[append] ( identifier[PandasInterface] . identifier[as_dframe] ( identifier[obj] ))
keyword[if] identifier[dims] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[dims] )!= literal[int] :
keyword[return] keyword[None] , keyword[None] , keyword[None] , keyword[None]
keyword[else] :
identifier[x] , identifier[y] = identifier[dims]
keyword[if] identifier[len] ( identifier[paths] )> literal[int] :
keyword[if] identifier[glyph] == literal[string] :
identifier[path] = identifier[paths] [ literal[int] ][: literal[int] ]
keyword[if] identifier[isinstance] ( identifier[path] , identifier[dd] . identifier[DataFrame] ):
identifier[path] = identifier[path] . identifier[compute] ()
identifier[empty] = identifier[path] . identifier[copy] ()
identifier[empty] . identifier[iloc] [ literal[int] ,:]=( identifier[np] . identifier[NaN] ,)* identifier[empty] . identifier[shape] [ literal[int] ]
identifier[paths] =[ identifier[elem] keyword[for] identifier[p] keyword[in] identifier[paths] keyword[for] identifier[elem] keyword[in] ( identifier[p] , identifier[empty] )][:- literal[int] ]
keyword[if] identifier[all] ( identifier[isinstance] ( identifier[path] , identifier[dd] . identifier[DataFrame] ) keyword[for] identifier[path] keyword[in] identifier[paths] ):
identifier[df] = identifier[dd] . identifier[concat] ( identifier[paths] )
keyword[else] :
identifier[paths] =[ identifier[p] . identifier[compute] () keyword[if] identifier[isinstance] ( identifier[p] , identifier[dd] . identifier[DataFrame] ) keyword[else] identifier[p] keyword[for] identifier[p] keyword[in] identifier[paths] ]
identifier[df] = identifier[pd] . identifier[concat] ( identifier[paths] )
keyword[else] :
identifier[df] = identifier[paths] [ literal[int] ] keyword[if] identifier[paths] keyword[else] identifier[pd] . identifier[DataFrame] ([], identifier[columns] =[ identifier[x] . identifier[name] , identifier[y] . identifier[name] ])
keyword[if] identifier[category] keyword[and] identifier[df] [ identifier[category] ]. identifier[dtype] . identifier[name] != literal[string] :
identifier[df] [ identifier[category] ]= identifier[df] [ identifier[category] ]. identifier[astype] ( literal[string] )
identifier[is_dask] = identifier[isinstance] ( identifier[df] , identifier[dd] . identifier[DataFrame] )
keyword[if] identifier[any] (( keyword[not] identifier[is_dask] keyword[and] identifier[len] ( identifier[df] [ identifier[d] . identifier[name] ]) keyword[and] identifier[isinstance] ( identifier[df] [ identifier[d] . identifier[name] ]. identifier[values] [ literal[int] ], identifier[cftime_types] )) keyword[or]
identifier[df] [ identifier[d] . identifier[name] ]. identifier[dtype] . identifier[kind] == literal[string] keyword[for] identifier[d] keyword[in] ( identifier[x] , identifier[y] )):
identifier[df] = identifier[df] . identifier[copy] ()
keyword[for] identifier[d] keyword[in] ( identifier[x] , identifier[y] ):
identifier[vals] = identifier[df] [ identifier[d] . identifier[name] ]
keyword[if] keyword[not] identifier[is_dask] keyword[and] identifier[len] ( identifier[vals] ) keyword[and] identifier[isinstance] ( identifier[vals] . identifier[values] [ literal[int] ], identifier[cftime_types] ):
identifier[vals] = identifier[cftime_to_timestamp] ( identifier[vals] , literal[string] )
keyword[elif] identifier[df] [ identifier[d] . identifier[name] ]. identifier[dtype] . identifier[kind] == literal[string] :
identifier[vals] = identifier[vals] . identifier[astype] ( literal[string] )
keyword[else] :
keyword[continue]
identifier[df] [ identifier[d] . identifier[name] ]= identifier[vals] . identifier[astype] ( literal[string] )
keyword[return] identifier[x] , identifier[y] , identifier[Dataset] ( identifier[df] , identifier[kdims] = identifier[kdims] , identifier[vdims] = identifier[vdims] ), identifier[glyph] | def get_agg_data(cls, obj, category=None):
"""
Reduces any Overlay or NdOverlay of Elements into a single
xarray Dataset that can be aggregated.
"""
paths = []
if isinstance(obj, Graph):
obj = obj.edgepaths # depends on [control=['if'], data=[]]
kdims = list(obj.kdims)
vdims = list(obj.vdims)
dims = obj.dimensions()[:2]
if isinstance(obj, Path):
glyph = 'line'
for p in obj.split(datatype='dataframe'):
paths.append(p) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]]
elif isinstance(obj, CompositeOverlay):
element = None
for (key, el) in obj.data.items():
(x, y, element, glyph) = cls.get_agg_data(el)
dims = (x, y)
df = PandasInterface.as_dframe(element)
if isinstance(obj, NdOverlay):
df = df.assign(**dict(zip(obj.dimensions('key', True), key))) # depends on [control=['if'], data=[]]
paths.append(df) # depends on [control=['for'], data=[]]
if element is None:
dims = None # depends on [control=['if'], data=[]]
else:
kdims += element.kdims
vdims = element.vdims # depends on [control=['if'], data=[]]
elif isinstance(obj, Element):
glyph = 'line' if isinstance(obj, Curve) else 'points'
paths.append(PandasInterface.as_dframe(obj)) # depends on [control=['if'], data=[]]
if dims is None or len(dims) != 2:
return (None, None, None, None) # depends on [control=['if'], data=[]]
else:
(x, y) = dims
if len(paths) > 1:
if glyph == 'line':
path = paths[0][:1]
if isinstance(path, dd.DataFrame):
path = path.compute() # depends on [control=['if'], data=[]]
empty = path.copy()
empty.iloc[0, :] = (np.NaN,) * empty.shape[1]
paths = [elem for p in paths for elem in (p, empty)][:-1] # depends on [control=['if'], data=[]]
if all((isinstance(path, dd.DataFrame) for path in paths)):
df = dd.concat(paths) # depends on [control=['if'], data=[]]
else:
paths = [p.compute() if isinstance(p, dd.DataFrame) else p for p in paths]
df = pd.concat(paths) # depends on [control=['if'], data=[]]
else:
df = paths[0] if paths else pd.DataFrame([], columns=[x.name, y.name])
if category and df[category].dtype.name != 'category':
df[category] = df[category].astype('category') # depends on [control=['if'], data=[]]
is_dask = isinstance(df, dd.DataFrame)
if any((not is_dask and len(df[d.name]) and isinstance(df[d.name].values[0], cftime_types) or df[d.name].dtype.kind == 'M' for d in (x, y))):
df = df.copy() # depends on [control=['if'], data=[]]
for d in (x, y):
vals = df[d.name]
if not is_dask and len(vals) and isinstance(vals.values[0], cftime_types):
vals = cftime_to_timestamp(vals, 'ns') # depends on [control=['if'], data=[]]
elif df[d.name].dtype.kind == 'M':
vals = vals.astype('datetime64[ns]') # depends on [control=['if'], data=[]]
else:
continue
df[d.name] = vals.astype('int64') # depends on [control=['for'], data=['d']]
return (x, y, Dataset(df, kdims=kdims, vdims=vdims), glyph) |
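# The NaN-separator trick used above for 'line' glyphs, in isolation: a NaN
# row spliced between per-path frames makes datashader break the line there,
# so many paths can be aggregated as one concatenated frame:
import numpy as np
import pandas as pd
paths = [pd.DataFrame({'x': [0.0, 1.0], 'y': [0.0, 1.0]}),
         pd.DataFrame({'x': [2.0, 3.0], 'y': [0.0, 1.0]})]
empty = paths[0][:1].copy()
empty.iloc[0, :] = (np.nan,) * empty.shape[1]
joined = pd.concat([elem for p in paths for elem in (p, empty)][:-1])
print(joined)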
def models(self, model=None):
"""
Returns the tables that this query is referencing.
:return [ <subclass of Table>, .. ]
"""
for query in self.__queries:
if isinstance(query, orb.Query):
yield query.model(model)
else:
for model in query.models(model):
yield model | def function[models, parameter[self, model]]:
constant[
Returns the tables that this query is referencing.
:return [ <subclass of Table>, .. ]
]
for taget[name[query]] in starred[name[self].__queries] begin[:]
if call[name[isinstance], parameter[name[query], name[orb].Query]] begin[:]
<ast.Yield object at 0x7da1b255fe50> | keyword[def] identifier[models] ( identifier[self] , identifier[model] = keyword[None] ):
literal[string]
keyword[for] identifier[query] keyword[in] identifier[self] . identifier[__queries] :
keyword[if] identifier[isinstance] ( identifier[query] , identifier[orb] . identifier[Query] ):
keyword[yield] identifier[query] . identifier[model] ( identifier[model] )
keyword[else] :
keyword[for] identifier[model] keyword[in] identifier[query] . identifier[models] ( identifier[model] ):
keyword[yield] identifier[model] | def models(self, model=None):
"""
Returns the tables that this query is referencing.
:return [ <subclass of Table>, .. ]
"""
for query in self.__queries:
if isinstance(query, orb.Query):
yield query.model(model) # depends on [control=['if'], data=[]]
else:
for model in query.models(model):
yield model # depends on [control=['for'], data=['model']] # depends on [control=['for'], data=['query']] |
def complete_opt_display(self, text, *_):
""" Autocomplete for display option """
return [t + " " for t in DISPLAYS if t.startswith(text)] | def function[complete_opt_display, parameter[self, text]]:
constant[ Autocomplete for display option ]
return[<ast.ListComp object at 0x7da1b0ccb940>] | keyword[def] identifier[complete_opt_display] ( identifier[self] , identifier[text] ,* identifier[_] ):
literal[string]
keyword[return] [ identifier[t] + literal[string] keyword[for] identifier[t] keyword[in] identifier[DISPLAYS] keyword[if] identifier[t] . identifier[startswith] ( identifier[text] )] | def complete_opt_display(self, text, *_):
""" Autocomplete for display option """
return [t + ' ' for t in DISPLAYS if t.startswith(text)] |
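# Tiny check; DISPLAYS is assumed to be a module-level sequence of option
# names, and since self is unused None suffices here:
DISPLAYS = ('json', 'raw', 'table')
print(complete_opt_display(None, 'ta'))  # ['table ']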
def argparse_dict(default_dict_, lbl=None, verbose=None,
only_specified=False, force_keys={}, type_hint=None,
alias_dict={}):
r"""
Gets values for a dict based on the command line
Args:
default_dict_ (?):
only_specified (bool): if True only returns keys that are specified on commandline. no defaults.
Returns:
dict_: dict_ - a dictionary
CommandLine:
python -m utool.util_arg --test-argparse_dict
python -m utool.util_arg --test-argparse_dict --foo=3
python -m utool.util_arg --test-argparse_dict --flag1
python -m utool.util_arg --test-argparse_dict --flag2
python -m utool.util_arg --test-argparse_dict --noflag2
python -m utool.util_arg --test-argparse_dict --thresh=43
python -m utool.util_arg --test-argparse_dict --bins=-10
python -m utool.util_arg --test-argparse_dict --bins=-10 --only-specified --helpx
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_arg import * # NOQA
>>> import utool as ut
>>> # build test data
>>> default_dict_ = {
... 'bins': 8,
... 'foo': None,
... 'flag1': False,
... 'flag2': True,
... 'max': 0.2,
... 'neg': -5,
... 'thresh': -5.333,
... }
>>> # execute function
>>> only_specified = ut.get_argflag('--only-specified')
>>> dict_ = argparse_dict(default_dict_, only_specified=only_specified)
>>> # verify results
>>> result = ut.repr4(dict_, sorted_=True)
>>> print(result)
"""
if verbose is None:
verbose = VERBOSE_ARGPARSE
def make_argstrs(key, prefix_list):
for prefix in prefix_list:
yield prefix + key
yield prefix + key.replace('-', '_')
yield prefix + key.replace('_', '-')
def get_dictkey_cmdline_val(key, default, type_hint):
# see if the user gave a commandline value for this dict key
defaulttype_ = None if default is None else type(default)
if type_hint is None:
type_ = defaulttype_
elif isinstance(type_hint, dict):
type_ = type_hint.get(key, defaulttype_)
elif isinstance(type_hint, type):
type_ = type_hint
else:
raise NotImplementedError('Unknown type of type_hint=%r' % (type_hint,))
was_specified = False
if isinstance(default, bool):
val = default
if default is True:
falsekeys = list(set(make_argstrs(key, ['--no', '--no-'])))
notval, was_specified = get_argflag(falsekeys, return_specified=True)
val = not notval
if not was_specified:
truekeys = list(set(make_argstrs(key, ['--'])))
val_, was_specified = get_argflag(truekeys, return_specified=True)
if was_specified:
val = val_
elif default is False:
truekeys = list(set(make_argstrs(key, ['--'])))
val, was_specified = get_argflag(truekeys, return_specified=True)
else:
argtup = list(set(make_argstrs(key, ['--'])))
#if key == 'species':
# import utool as ut
# ut.embed()
val, was_specified = get_argval(argtup, type_=type_,
default=default,
return_specified=True)
return val, was_specified
dict_ = {}
num_specified = 0
for key, default in six.iteritems(default_dict_):
val, was_specified = get_dictkey_cmdline_val(key, default, type_hint)
if not was_specified:
alias_keys = meta_util_iter.ensure_iterable(alias_dict.get(key, []))
for alias_key in alias_keys:
val, was_specified = get_dictkey_cmdline_val(alias_key, default,
type_hint)
if was_specified:
break
if VERBOSE_ARGPARSE:
if was_specified:
num_specified += 1
print('[argparse_dict] Specified key=%r, val=%r' % (key, val))
#if key == 'foo':
# import utool as ut
# ut.embed()
if not only_specified or was_specified or key in force_keys:
dict_[key] = val
if VERBOSE_ARGPARSE:
print('[argparse_dict] num_specified = %r' % (num_specified,))
print('[argparse_dict] force_keys = %r' % (force_keys,))
#dict_ = {key: get_dictkey_cmdline_val(key, default) for key, default in
#six.iteritems(default_dict_)}
if verbose:
for key in dict_:
if dict_[key] != default_dict_[key]:
print('[argparse_dict] GOT ARGUMENT: cfgdict[%r] = %r' % (key, dict_[key]))
do_helpx = get_argflag('--helpx',
help_='Specifies that argparse_dict should print help and quit')
if get_argflag(('--help', '--help2')) or do_helpx:
import utool as ut
print('COMMAND LINE IS ACCEPTING THESE PARAMS WITH DEFAULTS:')
if lbl is not None:
print(lbl)
#print(ut.align(ut.repr4(dict_, sorted_=True), ':'))
print(ut.align(ut.repr4(default_dict_, sorted_=True), ':'))
if do_helpx:
sys.exit(1)
return dict_ | def function[argparse_dict, parameter[default_dict_, lbl, verbose, only_specified, force_keys, type_hint, alias_dict]]:
constant[
Gets values for a dict based on the command line
Args:
default_dict_ (?):
        only_specified (bool): if True, only returns keys that are specified on the command line; no defaults.
Returns:
dict_: dict_ - a dictionary
CommandLine:
python -m utool.util_arg --test-argparse_dict
python -m utool.util_arg --test-argparse_dict --foo=3
python -m utool.util_arg --test-argparse_dict --flag1
python -m utool.util_arg --test-argparse_dict --flag2
python -m utool.util_arg --test-argparse_dict --noflag2
python -m utool.util_arg --test-argparse_dict --thresh=43
python -m utool.util_arg --test-argparse_dict --bins=-10
python -m utool.util_arg --test-argparse_dict --bins=-10 --only-specified --helpx
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_arg import * # NOQA
>>> import utool as ut
>>> # build test data
>>> default_dict_ = {
... 'bins': 8,
... 'foo': None,
... 'flag1': False,
... 'flag2': True,
... 'max': 0.2,
... 'neg': -5,
... 'thresh': -5.333,
... }
>>> # execute function
>>> only_specified = ut.get_argflag('--only-specified')
>>> dict_ = argparse_dict(default_dict_, only_specified=only_specified)
>>> # verify results
>>> result = ut.repr4(dict_, sorted_=True)
>>> print(result)
]
if compare[name[verbose] is constant[None]] begin[:]
variable[verbose] assign[=] name[VERBOSE_ARGPARSE]
def function[make_argstrs, parameter[key, prefix_list]]:
for taget[name[prefix]] in starred[name[prefix_list]] begin[:]
<ast.Yield object at 0x7da1b2425bd0>
<ast.Yield object at 0x7da1b2425a50>
<ast.Yield object at 0x7da1b2425c90>
def function[get_dictkey_cmdline_val, parameter[key, default, type_hint]]:
variable[defaulttype_] assign[=] <ast.IfExp object at 0x7da1b2426410>
if compare[name[type_hint] is constant[None]] begin[:]
variable[type_] assign[=] name[defaulttype_]
variable[was_specified] assign[=] constant[False]
if call[name[isinstance], parameter[name[default], name[bool]]] begin[:]
variable[val] assign[=] name[default]
if compare[name[default] is constant[True]] begin[:]
variable[falsekeys] assign[=] call[name[list], parameter[call[name[set], parameter[call[name[make_argstrs], parameter[name[key], list[[<ast.Constant object at 0x7da1b245d300>, <ast.Constant object at 0x7da1b245fd60>]]]]]]]]
<ast.Tuple object at 0x7da1b245eb90> assign[=] call[name[get_argflag], parameter[name[falsekeys]]]
variable[val] assign[=] <ast.UnaryOp object at 0x7da1b245fee0>
if <ast.UnaryOp object at 0x7da1b245fd90> begin[:]
variable[truekeys] assign[=] call[name[list], parameter[call[name[set], parameter[call[name[make_argstrs], parameter[name[key], list[[<ast.Constant object at 0x7da1b245f700>]]]]]]]]
<ast.Tuple object at 0x7da1b245f1f0> assign[=] call[name[get_argflag], parameter[name[truekeys]]]
if name[was_specified] begin[:]
variable[val] assign[=] name[val_]
return[tuple[[<ast.Name object at 0x7da1b245df30>, <ast.Name object at 0x7da1b245f8e0>]]]
variable[dict_] assign[=] dictionary[[], []]
variable[num_specified] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b245dea0>, <ast.Name object at 0x7da1b245c3a0>]]] in starred[call[name[six].iteritems, parameter[name[default_dict_]]]] begin[:]
<ast.Tuple object at 0x7da1b245ee60> assign[=] call[name[get_dictkey_cmdline_val], parameter[name[key], name[default], name[type_hint]]]
if <ast.UnaryOp object at 0x7da1b245d030> begin[:]
variable[alias_keys] assign[=] call[name[meta_util_iter].ensure_iterable, parameter[call[name[alias_dict].get, parameter[name[key], list[[]]]]]]
for taget[name[alias_key]] in starred[name[alias_keys]] begin[:]
<ast.Tuple object at 0x7da1b245caf0> assign[=] call[name[get_dictkey_cmdline_val], parameter[name[alias_key], name[default], name[type_hint]]]
if name[was_specified] begin[:]
break
if name[VERBOSE_ARGPARSE] begin[:]
if name[was_specified] begin[:]
<ast.AugAssign object at 0x7da1b245e3b0>
call[name[print], parameter[binary_operation[constant[[argparse_dict] Specified key=%r, val=%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b245ffa0>, <ast.Name object at 0x7da1b245ef50>]]]]]
if <ast.BoolOp object at 0x7da1b24456f0> begin[:]
call[name[dict_]][name[key]] assign[=] name[val]
if name[VERBOSE_ARGPARSE] begin[:]
call[name[print], parameter[binary_operation[constant[[argparse_dict] num_specified = %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2445270>]]]]]
call[name[print], parameter[binary_operation[constant[[argparse_dict] force_keys = %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2445180>]]]]]
if name[verbose] begin[:]
for taget[name[key]] in starred[name[dict_]] begin[:]
if compare[call[name[dict_]][name[key]] not_equal[!=] call[name[default_dict_]][name[key]]] begin[:]
call[name[print], parameter[binary_operation[constant[[argparse_dict] GOT ARGUMENT: cfgdict[%r] = %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b24442e0>, <ast.Subscript object at 0x7da1b2444340>]]]]]
variable[do_helpx] assign[=] call[name[get_argflag], parameter[constant[--helpx]]]
if <ast.BoolOp object at 0x7da1b2445000> begin[:]
import module[utool] as alias[ut]
call[name[print], parameter[constant[COMMAND LINE IS ACCEPTING THESE PARAMS WITH DEFAULTS:]]]
if compare[name[lbl] is_not constant[None]] begin[:]
call[name[print], parameter[name[lbl]]]
call[name[print], parameter[call[name[ut].align, parameter[call[name[ut].repr4, parameter[name[default_dict_]]], constant[:]]]]]
if name[do_helpx] begin[:]
call[name[sys].exit, parameter[constant[1]]]
return[name[dict_]] | keyword[def] identifier[argparse_dict] ( identifier[default_dict_] , identifier[lbl] = keyword[None] , identifier[verbose] = keyword[None] ,
identifier[only_specified] = keyword[False] , identifier[force_keys] ={}, identifier[type_hint] = keyword[None] ,
identifier[alias_dict] ={}):
literal[string]
keyword[if] identifier[verbose] keyword[is] keyword[None] :
identifier[verbose] = identifier[VERBOSE_ARGPARSE]
keyword[def] identifier[make_argstrs] ( identifier[key] , identifier[prefix_list] ):
keyword[for] identifier[prefix] keyword[in] identifier[prefix_list] :
keyword[yield] identifier[prefix] + identifier[key]
keyword[yield] identifier[prefix] + identifier[key] . identifier[replace] ( literal[string] , literal[string] )
keyword[yield] identifier[prefix] + identifier[key] . identifier[replace] ( literal[string] , literal[string] )
keyword[def] identifier[get_dictkey_cmdline_val] ( identifier[key] , identifier[default] , identifier[type_hint] ):
identifier[defaulttype_] = keyword[None] keyword[if] identifier[default] keyword[is] keyword[None] keyword[else] identifier[type] ( identifier[default] )
keyword[if] identifier[type_hint] keyword[is] keyword[None] :
identifier[type_] = identifier[defaulttype_]
keyword[elif] identifier[isinstance] ( identifier[type_hint] , identifier[dict] ):
identifier[type_] = identifier[type_hint] . identifier[get] ( identifier[key] , identifier[defaulttype_] )
keyword[elif] identifier[isinstance] ( identifier[type_hint] , identifier[type] ):
identifier[type_] = identifier[type_hint]
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] %( identifier[type_hint] ,))
identifier[was_specified] = keyword[False]
keyword[if] identifier[isinstance] ( identifier[default] , identifier[bool] ):
identifier[val] = identifier[default]
keyword[if] identifier[default] keyword[is] keyword[True] :
identifier[falsekeys] = identifier[list] ( identifier[set] ( identifier[make_argstrs] ( identifier[key] ,[ literal[string] , literal[string] ])))
identifier[notval] , identifier[was_specified] = identifier[get_argflag] ( identifier[falsekeys] , identifier[return_specified] = keyword[True] )
identifier[val] = keyword[not] identifier[notval]
keyword[if] keyword[not] identifier[was_specified] :
identifier[truekeys] = identifier[list] ( identifier[set] ( identifier[make_argstrs] ( identifier[key] ,[ literal[string] ])))
identifier[val_] , identifier[was_specified] = identifier[get_argflag] ( identifier[truekeys] , identifier[return_specified] = keyword[True] )
keyword[if] identifier[was_specified] :
identifier[val] = identifier[val_]
keyword[elif] identifier[default] keyword[is] keyword[False] :
identifier[truekeys] = identifier[list] ( identifier[set] ( identifier[make_argstrs] ( identifier[key] ,[ literal[string] ])))
identifier[val] , identifier[was_specified] = identifier[get_argflag] ( identifier[truekeys] , identifier[return_specified] = keyword[True] )
keyword[else] :
identifier[argtup] = identifier[list] ( identifier[set] ( identifier[make_argstrs] ( identifier[key] ,[ literal[string] ])))
identifier[val] , identifier[was_specified] = identifier[get_argval] ( identifier[argtup] , identifier[type_] = identifier[type_] ,
identifier[default] = identifier[default] ,
identifier[return_specified] = keyword[True] )
keyword[return] identifier[val] , identifier[was_specified]
identifier[dict_] ={}
identifier[num_specified] = literal[int]
keyword[for] identifier[key] , identifier[default] keyword[in] identifier[six] . identifier[iteritems] ( identifier[default_dict_] ):
identifier[val] , identifier[was_specified] = identifier[get_dictkey_cmdline_val] ( identifier[key] , identifier[default] , identifier[type_hint] )
keyword[if] keyword[not] identifier[was_specified] :
identifier[alias_keys] = identifier[meta_util_iter] . identifier[ensure_iterable] ( identifier[alias_dict] . identifier[get] ( identifier[key] ,[]))
keyword[for] identifier[alias_key] keyword[in] identifier[alias_keys] :
identifier[val] , identifier[was_specified] = identifier[get_dictkey_cmdline_val] ( identifier[alias_key] , identifier[default] ,
identifier[type_hint] )
keyword[if] identifier[was_specified] :
keyword[break]
keyword[if] identifier[VERBOSE_ARGPARSE] :
keyword[if] identifier[was_specified] :
identifier[num_specified] += literal[int]
identifier[print] ( literal[string] %( identifier[key] , identifier[val] ))
keyword[if] keyword[not] identifier[only_specified] keyword[or] identifier[was_specified] keyword[or] identifier[key] keyword[in] identifier[force_keys] :
identifier[dict_] [ identifier[key] ]= identifier[val]
keyword[if] identifier[VERBOSE_ARGPARSE] :
identifier[print] ( literal[string] %( identifier[num_specified] ,))
identifier[print] ( literal[string] %( identifier[force_keys] ,))
keyword[if] identifier[verbose] :
keyword[for] identifier[key] keyword[in] identifier[dict_] :
keyword[if] identifier[dict_] [ identifier[key] ]!= identifier[default_dict_] [ identifier[key] ]:
identifier[print] ( literal[string] %( identifier[key] , identifier[dict_] [ identifier[key] ]))
identifier[do_helpx] = identifier[get_argflag] ( literal[string] ,
identifier[help_] = literal[string] )
keyword[if] identifier[get_argflag] (( literal[string] , literal[string] )) keyword[or] identifier[do_helpx] :
keyword[import] identifier[utool] keyword[as] identifier[ut]
identifier[print] ( literal[string] )
keyword[if] identifier[lbl] keyword[is] keyword[not] keyword[None] :
identifier[print] ( identifier[lbl] )
identifier[print] ( identifier[ut] . identifier[align] ( identifier[ut] . identifier[repr4] ( identifier[default_dict_] , identifier[sorted_] = keyword[True] ), literal[string] ))
keyword[if] identifier[do_helpx] :
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[dict_] | def argparse_dict(default_dict_, lbl=None, verbose=None, only_specified=False, force_keys={}, type_hint=None, alias_dict={}):
"""
Gets values for a dict based on the command line
Args:
default_dict_ (?):
        only_specified (bool): if True, only returns keys that are specified on the command line; no defaults.
Returns:
dict_: dict_ - a dictionary
CommandLine:
python -m utool.util_arg --test-argparse_dict
python -m utool.util_arg --test-argparse_dict --foo=3
python -m utool.util_arg --test-argparse_dict --flag1
python -m utool.util_arg --test-argparse_dict --flag2
python -m utool.util_arg --test-argparse_dict --noflag2
python -m utool.util_arg --test-argparse_dict --thresh=43
python -m utool.util_arg --test-argparse_dict --bins=-10
python -m utool.util_arg --test-argparse_dict --bins=-10 --only-specified --helpx
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_arg import * # NOQA
>>> import utool as ut
>>> # build test data
>>> default_dict_ = {
... 'bins': 8,
... 'foo': None,
... 'flag1': False,
... 'flag2': True,
... 'max': 0.2,
... 'neg': -5,
... 'thresh': -5.333,
... }
>>> # execute function
>>> only_specified = ut.get_argflag('--only-specified')
>>> dict_ = argparse_dict(default_dict_, only_specified=only_specified)
>>> # verify results
>>> result = ut.repr4(dict_, sorted_=True)
>>> print(result)
"""
if verbose is None:
verbose = VERBOSE_ARGPARSE # depends on [control=['if'], data=['verbose']]
def make_argstrs(key, prefix_list):
for prefix in prefix_list:
yield (prefix + key)
yield (prefix + key.replace('-', '_'))
yield (prefix + key.replace('_', '-')) # depends on [control=['for'], data=['prefix']]
def get_dictkey_cmdline_val(key, default, type_hint):
# see if the user gave a commandline value for this dict key
defaulttype_ = None if default is None else type(default)
if type_hint is None:
type_ = defaulttype_ # depends on [control=['if'], data=[]]
elif isinstance(type_hint, dict):
type_ = type_hint.get(key, defaulttype_) # depends on [control=['if'], data=[]]
elif isinstance(type_hint, type):
type_ = type_hint # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('Unknown type of type_hint=%r' % (type_hint,))
was_specified = False
if isinstance(default, bool):
val = default
if default is True:
falsekeys = list(set(make_argstrs(key, ['--no', '--no-'])))
(notval, was_specified) = get_argflag(falsekeys, return_specified=True)
val = not notval
if not was_specified:
truekeys = list(set(make_argstrs(key, ['--'])))
(val_, was_specified) = get_argflag(truekeys, return_specified=True)
if was_specified:
val = val_ # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif default is False:
truekeys = list(set(make_argstrs(key, ['--'])))
(val, was_specified) = get_argflag(truekeys, return_specified=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
argtup = list(set(make_argstrs(key, ['--'])))
#if key == 'species':
# import utool as ut
# ut.embed()
(val, was_specified) = get_argval(argtup, type_=type_, default=default, return_specified=True)
return (val, was_specified)
dict_ = {}
num_specified = 0
for (key, default) in six.iteritems(default_dict_):
(val, was_specified) = get_dictkey_cmdline_val(key, default, type_hint)
if not was_specified:
alias_keys = meta_util_iter.ensure_iterable(alias_dict.get(key, []))
for alias_key in alias_keys:
(val, was_specified) = get_dictkey_cmdline_val(alias_key, default, type_hint)
if was_specified:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['alias_key']] # depends on [control=['if'], data=[]]
if VERBOSE_ARGPARSE:
if was_specified:
num_specified += 1
print('[argparse_dict] Specified key=%r, val=%r' % (key, val)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
#if key == 'foo':
# import utool as ut
# ut.embed()
if not only_specified or was_specified or key in force_keys:
dict_[key] = val # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if VERBOSE_ARGPARSE:
print('[argparse_dict] num_specified = %r' % (num_specified,))
print('[argparse_dict] force_keys = %r' % (force_keys,)) # depends on [control=['if'], data=[]]
#dict_ = {key: get_dictkey_cmdline_val(key, default) for key, default in
#six.iteritems(default_dict_)}
if verbose:
for key in dict_:
if dict_[key] != default_dict_[key]:
print('[argparse_dict] GOT ARGUMENT: cfgdict[%r] = %r' % (key, dict_[key])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
do_helpx = get_argflag('--helpx', help_='Specifies that argparse_dict should print help and quit')
if get_argflag(('--help', '--help2')) or do_helpx:
import utool as ut
print('COMMAND LINE IS ACCEPTING THESE PARAMS WITH DEFAULTS:')
if lbl is not None:
print(lbl) # depends on [control=['if'], data=['lbl']]
#print(ut.align(ut.repr4(dict_, sorted_=True), ':'))
print(ut.align(ut.repr4(default_dict_, sorted_=True), ':'))
if do_helpx:
sys.exit(1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return dict_ |
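A hypothetical usage sketch, assuming utool is installed and the script is invoked as `python demo.py --bins=16 --flag1`:

from utool.util_arg import argparse_dict  # assumes utool is available

defaults = {'bins': 8, 'flag1': False, 'thresh': -5.333}
cfg = argparse_dict(defaults)
# With the flags above this prints {'bins': 16, 'flag1': True, 'thresh': -5.333};
# with no flags it simply echoes the defaults.
print(cfg)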
def installed(cls):
"""
Used in ``yacms.pages.views.page`` to ensure
``PageMiddleware`` or a subclass has been installed. We cache
the result on the ``PageMiddleware._installed`` to only run
this once. Short path is to just check for the dotted path to
``PageMiddleware`` in ``MIDDLEWARE_CLASSES`` - if not found,
we need to load each middleware class to match a subclass.
"""
try:
return cls._installed
except AttributeError:
name = "yacms.pages.middleware.PageMiddleware"
mw_setting = get_middleware_setting()
installed = name in mw_setting
if not installed:
for name in mw_setting:
if issubclass(import_dotted_path(name), cls):
installed = True
break
setattr(cls, "_installed", installed)
return installed | def function[installed, parameter[cls]]:
constant[
Used in ``yacms.pages.views.page`` to ensure
``PageMiddleware`` or a subclass has been installed. We cache
the result on the ``PageMiddleware._installed`` to only run
this once. Short path is to just check for the dotted path to
``PageMiddleware`` in ``MIDDLEWARE_CLASSES`` - if not found,
we need to load each middleware class to match a subclass.
]
<ast.Try object at 0x7da1b15f2140> | keyword[def] identifier[installed] ( identifier[cls] ):
literal[string]
keyword[try] :
keyword[return] identifier[cls] . identifier[_installed]
keyword[except] identifier[AttributeError] :
identifier[name] = literal[string]
identifier[mw_setting] = identifier[get_middleware_setting] ()
identifier[installed] = identifier[name] keyword[in] identifier[mw_setting]
keyword[if] keyword[not] identifier[installed] :
keyword[for] identifier[name] keyword[in] identifier[mw_setting] :
keyword[if] identifier[issubclass] ( identifier[import_dotted_path] ( identifier[name] ), identifier[cls] ):
identifier[installed] = keyword[True]
keyword[break]
identifier[setattr] ( identifier[cls] , literal[string] , identifier[installed] )
keyword[return] identifier[installed] | def installed(cls):
"""
Used in ``yacms.pages.views.page`` to ensure
``PageMiddleware`` or a subclass has been installed. We cache
the result on the ``PageMiddleware._installed`` to only run
this once. Short path is to just check for the dotted path to
``PageMiddleware`` in ``MIDDLEWARE_CLASSES`` - if not found,
we need to load each middleware class to match a subclass.
"""
try:
return cls._installed # depends on [control=['try'], data=[]]
except AttributeError:
name = 'yacms.pages.middleware.PageMiddleware'
mw_setting = get_middleware_setting()
installed = name in mw_setting
if not installed:
for name in mw_setting:
if issubclass(import_dotted_path(name), cls):
installed = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] # depends on [control=['if'], data=[]]
setattr(cls, '_installed', installed)
return installed # depends on [control=['except'], data=[]] |
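The subclass fallback above leans on an import_dotted_path helper; a minimal standard-library sketch of that resolver (the package ships its own equivalent):

from importlib import import_module

def import_dotted_path(path):
    # Resolve "pkg.module.Attr" to the Attr object.
    module_path, attr = path.rsplit(".", 1)
    return getattr(import_module(module_path), attr)

from collections import OrderedDict
assert import_dotted_path("collections.OrderedDict") is OrderedDict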
def get_frame_list():
"""
Create the list of frames
"""
# TODO: use this function in IPS below (less code duplication)
frame_info_list = []
frame_list = []
frame = inspect.currentframe()
while frame is not None:
frame_list.append(frame)
info = inspect.getframeinfo(frame)
frame_info_list.append(info)
frame = frame.f_back
frame_info_list.reverse()
frame_list.reverse()
frame_info_str_list = [format_frameinfo(fi) for fi in frame_info_list]
return frame_list, frame_info_list, frame_info_str_list | def function[get_frame_list, parameter[]]:
constant[
Create the list of frames
]
variable[frame_info_list] assign[=] list[[]]
variable[frame_list] assign[=] list[[]]
variable[frame] assign[=] call[name[inspect].currentframe, parameter[]]
while compare[name[frame] is_not constant[None]] begin[:]
call[name[frame_list].append, parameter[name[frame]]]
variable[info] assign[=] call[name[inspect].getframeinfo, parameter[name[frame]]]
call[name[frame_info_list].append, parameter[name[info]]]
variable[frame] assign[=] name[frame].f_back
call[name[frame_info_list].reverse, parameter[]]
call[name[frame_list].reverse, parameter[]]
variable[frame_info_str_list] assign[=] <ast.ListComp object at 0x7da2054a7c70>
return[tuple[[<ast.Name object at 0x7da2054a5f90>, <ast.Name object at 0x7da2054a69b0>, <ast.Name object at 0x7da2054a52d0>]]] | keyword[def] identifier[get_frame_list] ():
literal[string]
identifier[frame_info_list] =[]
identifier[frame_list] =[]
identifier[frame] = identifier[inspect] . identifier[currentframe] ()
keyword[while] identifier[frame] keyword[is] keyword[not] keyword[None] :
identifier[frame_list] . identifier[append] ( identifier[frame] )
identifier[info] = identifier[inspect] . identifier[getframeinfo] ( identifier[frame] )
identifier[frame_info_list] . identifier[append] ( identifier[info] )
identifier[frame] = identifier[frame] . identifier[f_back]
identifier[frame_info_list] . identifier[reverse] ()
identifier[frame_list] . identifier[reverse] ()
identifier[frame_info_str_list] =[ identifier[format_frameinfo] ( identifier[fi] ) keyword[for] identifier[fi] keyword[in] identifier[frame_info_list] ]
keyword[return] identifier[frame_list] , identifier[frame_info_list] , identifier[frame_info_str_list] | def get_frame_list():
"""
Create the list of frames
"""
# TODO: use this function in IPS below (less code duplication)
frame_info_list = []
frame_list = []
frame = inspect.currentframe()
while frame is not None:
frame_list.append(frame)
info = inspect.getframeinfo(frame)
frame_info_list.append(info)
frame = frame.f_back # depends on [control=['while'], data=['frame']]
frame_info_list.reverse()
frame_list.reverse()
frame_info_str_list = [format_frameinfo(fi) for fi in frame_info_list]
return (frame_list, frame_info_list, frame_info_str_list) |
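Called a few frames deep, the same walk yields one summary per frame, outermost caller first. A self-contained variant with the formatting inlined, since format_frameinfo belongs to the surrounding module:

import inspect

def frame_summaries():
    frame, out = inspect.currentframe(), []
    while frame is not None:
        info = inspect.getframeinfo(frame)
        out.append("%s:%d in %s" % (info.filename, info.lineno, info.function))
        frame = frame.f_back
    return list(reversed(out))  # outermost first, as above

def inner():
    return frame_summaries()

print("\n".join(inner()))  # the last two entries are inner and frame_summaries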
def compute_slice_bounds(slices, scs, shape):
"""
Given a 2D selection consisting of slices/coordinates, a
    SheetCoordinateSystem and the shape of the array, returns a new
BoundingBox representing the sliced region.
"""
xidx, yidx = slices
ys, xs = shape
l, b, r, t = scs.bounds.lbrt()
xdensity, ydensity = scs.xdensity, scs.ydensity
xunit = (1./xdensity)
yunit = (1./ydensity)
if isinstance(l, datetime_types):
xunit = np.timedelta64(int(round(xunit)), scs._time_unit)
if isinstance(b, datetime_types):
yunit = np.timedelta64(int(round(yunit)), scs._time_unit)
if isinstance(xidx, slice):
l = l if xidx.start is None else max(l, xidx.start)
r = r if xidx.stop is None else min(r, xidx.stop)
if isinstance(yidx, slice):
b = b if yidx.start is None else max(b, yidx.start)
t = t if yidx.stop is None else min(t, yidx.stop)
bounds = BoundingBox(points=((l, b), (r, t)))
# Apply new bounds
slc = Slice(bounds, scs)
# Apply scalar and list indices
l, b, r, t = slc.compute_bounds(scs).lbrt()
if not isinstance(xidx, slice):
if not isinstance(xidx, (list, set)): xidx = [xidx]
if len(xidx) > 1:
xdensity = xdensity*(float(len(xidx))/xs)
ls, rs = [], []
for idx in xidx:
xc, _ = scs.closest_cell_center(idx, b)
ls.append(xc-xunit/2)
rs.append(xc+xunit/2)
l, r = np.min(ls), np.max(rs)
elif not isinstance(yidx, slice):
if not isinstance(yidx, (set, list)): yidx = [yidx]
if len(yidx) > 1:
ydensity = ydensity*(float(len(yidx))/ys)
bs, ts = [], []
for idx in yidx:
_, yc = scs.closest_cell_center(l, idx)
bs.append(yc-yunit/2)
ts.append(yc+yunit/2)
b, t = np.min(bs), np.max(ts)
return BoundingBox(points=((l, b), (r, t))) | def function[compute_slice_bounds, parameter[slices, scs, shape]]:
constant[
Given a 2D selection consisting of slices/coordinates, a
    SheetCoordinateSystem and the shape of the array, returns a new
BoundingBox representing the sliced region.
]
<ast.Tuple object at 0x7da1b1bed1b0> assign[=] name[slices]
<ast.Tuple object at 0x7da1b1bec3a0> assign[=] name[shape]
<ast.Tuple object at 0x7da1b1bef5e0> assign[=] call[name[scs].bounds.lbrt, parameter[]]
<ast.Tuple object at 0x7da1b1bece20> assign[=] tuple[[<ast.Attribute object at 0x7da1b1bef8e0>, <ast.Attribute object at 0x7da1b1beca60>]]
variable[xunit] assign[=] binary_operation[constant[1.0] / name[xdensity]]
variable[yunit] assign[=] binary_operation[constant[1.0] / name[ydensity]]
if call[name[isinstance], parameter[name[l], name[datetime_types]]] begin[:]
variable[xunit] assign[=] call[name[np].timedelta64, parameter[call[name[int], parameter[call[name[round], parameter[name[xunit]]]]], name[scs]._time_unit]]
if call[name[isinstance], parameter[name[b], name[datetime_types]]] begin[:]
variable[yunit] assign[=] call[name[np].timedelta64, parameter[call[name[int], parameter[call[name[round], parameter[name[yunit]]]]], name[scs]._time_unit]]
if call[name[isinstance], parameter[name[xidx], name[slice]]] begin[:]
variable[l] assign[=] <ast.IfExp object at 0x7da1b1bec910>
variable[r] assign[=] <ast.IfExp object at 0x7da1b1bec100>
if call[name[isinstance], parameter[name[yidx], name[slice]]] begin[:]
variable[b] assign[=] <ast.IfExp object at 0x7da1b1bef070>
variable[t] assign[=] <ast.IfExp object at 0x7da1b1bedb70>
variable[bounds] assign[=] call[name[BoundingBox], parameter[]]
variable[slc] assign[=] call[name[Slice], parameter[name[bounds], name[scs]]]
<ast.Tuple object at 0x7da1b1bed120> assign[=] call[call[name[slc].compute_bounds, parameter[name[scs]]].lbrt, parameter[]]
if <ast.UnaryOp object at 0x7da1b1bef7c0> begin[:]
if <ast.UnaryOp object at 0x7da1b1bef400> begin[:]
variable[xidx] assign[=] list[[<ast.Name object at 0x7da1b1beeef0>]]
if compare[call[name[len], parameter[name[xidx]]] greater[>] constant[1]] begin[:]
variable[xdensity] assign[=] binary_operation[name[xdensity] * binary_operation[call[name[float], parameter[call[name[len], parameter[name[xidx]]]]] / name[xs]]]
<ast.Tuple object at 0x7da1b1bee800> assign[=] tuple[[<ast.List object at 0x7da1b1bec520>, <ast.List object at 0x7da1b1bedb10>]]
for taget[name[idx]] in starred[name[xidx]] begin[:]
<ast.Tuple object at 0x7da1b1bee1d0> assign[=] call[name[scs].closest_cell_center, parameter[name[idx], name[b]]]
call[name[ls].append, parameter[binary_operation[name[xc] - binary_operation[name[xunit] / constant[2]]]]]
call[name[rs].append, parameter[binary_operation[name[xc] + binary_operation[name[xunit] / constant[2]]]]]
<ast.Tuple object at 0x7da1b1bed8a0> assign[=] tuple[[<ast.Call object at 0x7da1b1beccd0>, <ast.Call object at 0x7da1b1bee320>]]
return[call[name[BoundingBox], parameter[]]] | keyword[def] identifier[compute_slice_bounds] ( identifier[slices] , identifier[scs] , identifier[shape] ):
literal[string]
identifier[xidx] , identifier[yidx] = identifier[slices]
identifier[ys] , identifier[xs] = identifier[shape]
identifier[l] , identifier[b] , identifier[r] , identifier[t] = identifier[scs] . identifier[bounds] . identifier[lbrt] ()
identifier[xdensity] , identifier[ydensity] = identifier[scs] . identifier[xdensity] , identifier[scs] . identifier[ydensity]
identifier[xunit] =( literal[int] / identifier[xdensity] )
identifier[yunit] =( literal[int] / identifier[ydensity] )
keyword[if] identifier[isinstance] ( identifier[l] , identifier[datetime_types] ):
identifier[xunit] = identifier[np] . identifier[timedelta64] ( identifier[int] ( identifier[round] ( identifier[xunit] )), identifier[scs] . identifier[_time_unit] )
keyword[if] identifier[isinstance] ( identifier[b] , identifier[datetime_types] ):
identifier[yunit] = identifier[np] . identifier[timedelta64] ( identifier[int] ( identifier[round] ( identifier[yunit] )), identifier[scs] . identifier[_time_unit] )
keyword[if] identifier[isinstance] ( identifier[xidx] , identifier[slice] ):
identifier[l] = identifier[l] keyword[if] identifier[xidx] . identifier[start] keyword[is] keyword[None] keyword[else] identifier[max] ( identifier[l] , identifier[xidx] . identifier[start] )
identifier[r] = identifier[r] keyword[if] identifier[xidx] . identifier[stop] keyword[is] keyword[None] keyword[else] identifier[min] ( identifier[r] , identifier[xidx] . identifier[stop] )
keyword[if] identifier[isinstance] ( identifier[yidx] , identifier[slice] ):
identifier[b] = identifier[b] keyword[if] identifier[yidx] . identifier[start] keyword[is] keyword[None] keyword[else] identifier[max] ( identifier[b] , identifier[yidx] . identifier[start] )
identifier[t] = identifier[t] keyword[if] identifier[yidx] . identifier[stop] keyword[is] keyword[None] keyword[else] identifier[min] ( identifier[t] , identifier[yidx] . identifier[stop] )
identifier[bounds] = identifier[BoundingBox] ( identifier[points] =(( identifier[l] , identifier[b] ),( identifier[r] , identifier[t] )))
identifier[slc] = identifier[Slice] ( identifier[bounds] , identifier[scs] )
identifier[l] , identifier[b] , identifier[r] , identifier[t] = identifier[slc] . identifier[compute_bounds] ( identifier[scs] ). identifier[lbrt] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[xidx] , identifier[slice] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[xidx] ,( identifier[list] , identifier[set] )): identifier[xidx] =[ identifier[xidx] ]
keyword[if] identifier[len] ( identifier[xidx] )> literal[int] :
identifier[xdensity] = identifier[xdensity] *( identifier[float] ( identifier[len] ( identifier[xidx] ))/ identifier[xs] )
identifier[ls] , identifier[rs] =[],[]
keyword[for] identifier[idx] keyword[in] identifier[xidx] :
identifier[xc] , identifier[_] = identifier[scs] . identifier[closest_cell_center] ( identifier[idx] , identifier[b] )
identifier[ls] . identifier[append] ( identifier[xc] - identifier[xunit] / literal[int] )
identifier[rs] . identifier[append] ( identifier[xc] + identifier[xunit] / literal[int] )
identifier[l] , identifier[r] = identifier[np] . identifier[min] ( identifier[ls] ), identifier[np] . identifier[max] ( identifier[rs] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[yidx] , identifier[slice] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[yidx] ,( identifier[set] , identifier[list] )): identifier[yidx] =[ identifier[yidx] ]
keyword[if] identifier[len] ( identifier[yidx] )> literal[int] :
identifier[ydensity] = identifier[ydensity] *( identifier[float] ( identifier[len] ( identifier[yidx] ))/ identifier[ys] )
identifier[bs] , identifier[ts] =[],[]
keyword[for] identifier[idx] keyword[in] identifier[yidx] :
identifier[_] , identifier[yc] = identifier[scs] . identifier[closest_cell_center] ( identifier[l] , identifier[idx] )
identifier[bs] . identifier[append] ( identifier[yc] - identifier[yunit] / literal[int] )
identifier[ts] . identifier[append] ( identifier[yc] + identifier[yunit] / literal[int] )
identifier[b] , identifier[t] = identifier[np] . identifier[min] ( identifier[bs] ), identifier[np] . identifier[max] ( identifier[ts] )
keyword[return] identifier[BoundingBox] ( identifier[points] =(( identifier[l] , identifier[b] ),( identifier[r] , identifier[t] ))) | def compute_slice_bounds(slices, scs, shape):
"""
Given a 2D selection consisting of slices/coordinates, a
    SheetCoordinateSystem and the shape of the array, returns a new
BoundingBox representing the sliced region.
"""
(xidx, yidx) = slices
(ys, xs) = shape
(l, b, r, t) = scs.bounds.lbrt()
(xdensity, ydensity) = (scs.xdensity, scs.ydensity)
xunit = 1.0 / xdensity
yunit = 1.0 / ydensity
if isinstance(l, datetime_types):
xunit = np.timedelta64(int(round(xunit)), scs._time_unit) # depends on [control=['if'], data=[]]
if isinstance(b, datetime_types):
yunit = np.timedelta64(int(round(yunit)), scs._time_unit) # depends on [control=['if'], data=[]]
if isinstance(xidx, slice):
l = l if xidx.start is None else max(l, xidx.start)
r = r if xidx.stop is None else min(r, xidx.stop) # depends on [control=['if'], data=[]]
if isinstance(yidx, slice):
b = b if yidx.start is None else max(b, yidx.start)
t = t if yidx.stop is None else min(t, yidx.stop) # depends on [control=['if'], data=[]]
bounds = BoundingBox(points=((l, b), (r, t)))
# Apply new bounds
slc = Slice(bounds, scs)
# Apply scalar and list indices
(l, b, r, t) = slc.compute_bounds(scs).lbrt()
if not isinstance(xidx, slice):
if not isinstance(xidx, (list, set)):
xidx = [xidx] # depends on [control=['if'], data=[]]
if len(xidx) > 1:
xdensity = xdensity * (float(len(xidx)) / xs) # depends on [control=['if'], data=[]]
(ls, rs) = ([], [])
for idx in xidx:
(xc, _) = scs.closest_cell_center(idx, b)
ls.append(xc - xunit / 2)
rs.append(xc + xunit / 2) # depends on [control=['for'], data=['idx']]
(l, r) = (np.min(ls), np.max(rs)) # depends on [control=['if'], data=[]]
elif not isinstance(yidx, slice):
if not isinstance(yidx, (set, list)):
yidx = [yidx] # depends on [control=['if'], data=[]]
if len(yidx) > 1:
ydensity = ydensity * (float(len(yidx)) / ys) # depends on [control=['if'], data=[]]
(bs, ts) = ([], [])
for idx in yidx:
(_, yc) = scs.closest_cell_center(l, idx)
bs.append(yc - yunit / 2)
ts.append(yc + yunit / 2) # depends on [control=['for'], data=['idx']]
(b, t) = (np.min(bs), np.max(ts)) # depends on [control=['if'], data=[]]
return BoundingBox(points=((l, b), (r, t))) |
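Stripped of the holoviews classes, the per-axis arithmetic clamps slice endpoints to the axis bounds and widens scalar or list selections by half a cell. A one-axis sketch under those assumptions (the snap to the closest cell centre is omitted):

def slice_axis_bounds(lo, hi, density, idx):
    unit = 1.0 / density  # width of one cell
    if isinstance(idx, slice):
        lo = lo if idx.start is None else max(lo, idx.start)
        hi = hi if idx.stop is None else min(hi, idx.stop)
        return lo, hi
    coords = idx if isinstance(idx, (list, set)) else [idx]
    return (min(c - unit / 2 for c in coords),
            max(c + unit / 2 for c in coords))

print(slice_axis_bounds(0.0, 1.0, 4, slice(0.25, 2.0)))  # (0.25, 1.0)
print(slice_axis_bounds(0.0, 1.0, 4, 0.5))               # (0.375, 0.625)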
def _idx_to_bits(self, i):
"""Convert an group index to its bit representation."""
bits = bin(i)[2:].zfill(self.nb_hyperplanes) # Pad the bits str with 0
return [-1.0 if b == "0" else 1.0 for b in bits] | def function[_idx_to_bits, parameter[self, i]]:
constant[Convert an group index to its bit representation.]
variable[bits] assign[=] call[call[call[name[bin], parameter[name[i]]]][<ast.Slice object at 0x7da20c6e4850>].zfill, parameter[name[self].nb_hyperplanes]]
return[<ast.ListComp object at 0x7da20c6e52a0>] | keyword[def] identifier[_idx_to_bits] ( identifier[self] , identifier[i] ):
literal[string]
identifier[bits] = identifier[bin] ( identifier[i] )[ literal[int] :]. identifier[zfill] ( identifier[self] . identifier[nb_hyperplanes] )
keyword[return] [- literal[int] keyword[if] identifier[b] == literal[string] keyword[else] literal[int] keyword[for] identifier[b] keyword[in] identifier[bits] ] | def _idx_to_bits(self, i):
"""Convert an group index to its bit representation."""
bits = bin(i)[2:].zfill(self.nb_hyperplanes) # Pad the bits str with 0
return [-1.0 if b == '0' else 1.0 for b in bits] |
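For instance, with three hyperplanes index 5 maps onto the sign pattern of its binary digits; a standalone version:

def idx_to_bits(i, nb_hyperplanes):
    bits = bin(i)[2:].zfill(nb_hyperplanes)  # e.g. 5 -> '101'
    return [-1.0 if b == "0" else 1.0 for b in bits]

print(idx_to_bits(5, 3))  # [1.0, -1.0, 1.0]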
def _find_cont_fitfunc_regions(fluxes, ivars, contmask, deg, ranges, ffunc,
n_proc=1):
""" Run fit_cont, dealing with spectrum in regions or chunks
This is useful if a spectrum has gaps.
Parameters
----------
fluxes: ndarray of shape (nstars, npixels)
training set or test set pixel intensities
ivars: numpy ndarray of shape (nstars, npixels)
inverse variances, parallel to fluxes
contmask: numpy ndarray of length (npixels)
boolean pixel mask, True indicates that pixel is continuum
deg: int
degree of fitting function
ffunc: str
type of fitting function, chebyshev or sinusoid
Returns
-------
cont: numpy ndarray of shape (nstars, npixels)
the continuum, parallel to fluxes
"""
nstars = fluxes.shape[0]
npixels = fluxes.shape[1]
cont = np.zeros(fluxes.shape)
for chunk in ranges:
start = chunk[0]
stop = chunk[1]
if ffunc=="chebyshev":
output = _find_cont_fitfunc(fluxes[:,start:stop],
ivars[:,start:stop],
contmask[start:stop],
deg=deg, ffunc="chebyshev",
n_proc=n_proc)
elif ffunc=="sinusoid":
output = _find_cont_fitfunc(fluxes[:,start:stop],
ivars[:,start:stop],
contmask[start:stop],
deg=deg, ffunc="sinusoid",
n_proc=n_proc)
cont[:, start:stop] = output
return cont | def function[_find_cont_fitfunc_regions, parameter[fluxes, ivars, contmask, deg, ranges, ffunc, n_proc]]:
    constant[ Run fit_cont, dealing with the spectrum in regions or chunks
This is useful if a spectrum has gaps.
Parameters
----------
fluxes: ndarray of shape (nstars, npixels)
training set or test set pixel intensities
ivars: numpy ndarray of shape (nstars, npixels)
inverse variances, parallel to fluxes
contmask: numpy ndarray of length (npixels)
boolean pixel mask, True indicates that pixel is continuum
deg: int
degree of fitting function
ffunc: str
type of fitting function, chebyshev or sinusoid
Returns
-------
cont: numpy ndarray of shape (nstars, npixels)
the continuum, parallel to fluxes
]
variable[nstars] assign[=] call[name[fluxes].shape][constant[0]]
variable[npixels] assign[=] call[name[fluxes].shape][constant[1]]
variable[cont] assign[=] call[name[np].zeros, parameter[name[fluxes].shape]]
for taget[name[chunk]] in starred[name[ranges]] begin[:]
variable[start] assign[=] call[name[chunk]][constant[0]]
variable[stop] assign[=] call[name[chunk]][constant[1]]
if compare[name[ffunc] equal[==] constant[chebyshev]] begin[:]
variable[output] assign[=] call[name[_find_cont_fitfunc], parameter[call[name[fluxes]][tuple[[<ast.Slice object at 0x7da1b10e78b0>, <ast.Slice object at 0x7da1b10e4790>]]], call[name[ivars]][tuple[[<ast.Slice object at 0x7da1b10e7f10>, <ast.Slice object at 0x7da1b10e4580>]]], call[name[contmask]][<ast.Slice object at 0x7da1b10e5270>]]]
call[name[cont]][tuple[[<ast.Slice object at 0x7da1b10e5240>, <ast.Slice object at 0x7da1b10e5300>]]] assign[=] name[output]
return[name[cont]] | keyword[def] identifier[_find_cont_fitfunc_regions] ( identifier[fluxes] , identifier[ivars] , identifier[contmask] , identifier[deg] , identifier[ranges] , identifier[ffunc] ,
identifier[n_proc] = literal[int] ):
literal[string]
identifier[nstars] = identifier[fluxes] . identifier[shape] [ literal[int] ]
identifier[npixels] = identifier[fluxes] . identifier[shape] [ literal[int] ]
identifier[cont] = identifier[np] . identifier[zeros] ( identifier[fluxes] . identifier[shape] )
keyword[for] identifier[chunk] keyword[in] identifier[ranges] :
identifier[start] = identifier[chunk] [ literal[int] ]
identifier[stop] = identifier[chunk] [ literal[int] ]
keyword[if] identifier[ffunc] == literal[string] :
identifier[output] = identifier[_find_cont_fitfunc] ( identifier[fluxes] [:, identifier[start] : identifier[stop] ],
identifier[ivars] [:, identifier[start] : identifier[stop] ],
identifier[contmask] [ identifier[start] : identifier[stop] ],
identifier[deg] = identifier[deg] , identifier[ffunc] = literal[string] ,
identifier[n_proc] = identifier[n_proc] )
keyword[elif] identifier[ffunc] == literal[string] :
identifier[output] = identifier[_find_cont_fitfunc] ( identifier[fluxes] [:, identifier[start] : identifier[stop] ],
identifier[ivars] [:, identifier[start] : identifier[stop] ],
identifier[contmask] [ identifier[start] : identifier[stop] ],
identifier[deg] = identifier[deg] , identifier[ffunc] = literal[string] ,
identifier[n_proc] = identifier[n_proc] )
identifier[cont] [:, identifier[start] : identifier[stop] ]= identifier[output]
keyword[return] identifier[cont] | def _find_cont_fitfunc_regions(fluxes, ivars, contmask, deg, ranges, ffunc, n_proc=1):
""" Run fit_cont, dealing with spectrum in regions or chunks
This is useful if a spectrum has gaps.
Parameters
----------
fluxes: ndarray of shape (nstars, npixels)
training set or test set pixel intensities
ivars: numpy ndarray of shape (nstars, npixels)
inverse variances, parallel to fluxes
contmask: numpy ndarray of length (npixels)
boolean pixel mask, True indicates that pixel is continuum
deg: int
degree of fitting function
ffunc: str
type of fitting function, chebyshev or sinusoid
Returns
-------
cont: numpy ndarray of shape (nstars, npixels)
the continuum, parallel to fluxes
"""
nstars = fluxes.shape[0]
npixels = fluxes.shape[1]
cont = np.zeros(fluxes.shape)
for chunk in ranges:
start = chunk[0]
stop = chunk[1]
if ffunc == 'chebyshev':
output = _find_cont_fitfunc(fluxes[:, start:stop], ivars[:, start:stop], contmask[start:stop], deg=deg, ffunc='chebyshev', n_proc=n_proc) # depends on [control=['if'], data=[]]
elif ffunc == 'sinusoid':
output = _find_cont_fitfunc(fluxes[:, start:stop], ivars[:, start:stop], contmask[start:stop], deg=deg, ffunc='sinusoid', n_proc=n_proc) # depends on [control=['if'], data=[]]
cont[:, start:stop] = output # depends on [control=['for'], data=['chunk']]
return cont |
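The chunking pattern generalises to any per-chunk fitter. A minimal numpy sketch with the fitter injected, where fit_chunk stands in for _find_cont_fitfunc and pixels in the gap stay zero:

import numpy as np

def fit_in_ranges(fluxes, ranges, fit_chunk):
    cont = np.zeros(fluxes.shape)
    for start, stop in ranges:
        cont[:, start:stop] = fit_chunk(fluxes[:, start:stop])
    return cont

flux = np.random.rand(2, 10)
out = fit_in_ranges(flux, [(0, 4), (6, 10)],
                    lambda chunk: chunk.mean(axis=1, keepdims=True))
print(out[:, 4:6])  # the gap between the two ranges is left at zero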
def identify_ibids(line):
"""Find IBIDs within the line, record their position and length,
and replace them with underscores.
@param line: (string) the working reference line
    @return: (tuple) containing a dictionary and a string:
Dictionary: matched IBID text: (Key: position of IBID in
line; Value: matched IBID text)
String: working line with matched IBIDs removed
"""
ibid_match_txt = {}
# Record details of each matched ibid:
for m_ibid in re_ibid.finditer(line):
ibid_match_txt[m_ibid.start()] = m_ibid.group(0)
# Replace matched text in line with underscores:
line = line[0:m_ibid.start()] + \
"_" * len(m_ibid.group(0)) + \
line[m_ibid.end():]
return ibid_match_txt, line | def function[identify_ibids, parameter[line]]:
constant[Find IBIDs within the line, record their position and length,
and replace them with underscores.
@param line: (string) the working reference line
    @return: (tuple) containing a dictionary and a string:
Dictionary: matched IBID text: (Key: position of IBID in
line; Value: matched IBID text)
String: working line with matched IBIDs removed
]
variable[ibid_match_txt] assign[=] dictionary[[], []]
for taget[name[m_ibid]] in starred[call[name[re_ibid].finditer, parameter[name[line]]]] begin[:]
call[name[ibid_match_txt]][call[name[m_ibid].start, parameter[]]] assign[=] call[name[m_ibid].group, parameter[constant[0]]]
variable[line] assign[=] binary_operation[binary_operation[call[name[line]][<ast.Slice object at 0x7da1b1393c70>] + binary_operation[constant[_] * call[name[len], parameter[call[name[m_ibid].group, parameter[constant[0]]]]]]] + call[name[line]][<ast.Slice object at 0x7da1b1390220>]]
return[tuple[[<ast.Name object at 0x7da1b1390d00>, <ast.Name object at 0x7da1b1390880>]]] | keyword[def] identifier[identify_ibids] ( identifier[line] ):
literal[string]
identifier[ibid_match_txt] ={}
keyword[for] identifier[m_ibid] keyword[in] identifier[re_ibid] . identifier[finditer] ( identifier[line] ):
identifier[ibid_match_txt] [ identifier[m_ibid] . identifier[start] ()]= identifier[m_ibid] . identifier[group] ( literal[int] )
identifier[line] = identifier[line] [ literal[int] : identifier[m_ibid] . identifier[start] ()]+ literal[string] * identifier[len] ( identifier[m_ibid] . identifier[group] ( literal[int] ))+ identifier[line] [ identifier[m_ibid] . identifier[end] ():]
keyword[return] identifier[ibid_match_txt] , identifier[line] | def identify_ibids(line):
"""Find IBIDs within the line, record their position and length,
and replace them with underscores.
@param line: (string) the working reference line
    @return: (tuple) containing a dictionary and a string:
Dictionary: matched IBID text: (Key: position of IBID in
line; Value: matched IBID text)
String: working line with matched IBIDs removed
"""
ibid_match_txt = {}
# Record details of each matched ibid:
for m_ibid in re_ibid.finditer(line):
ibid_match_txt[m_ibid.start()] = m_ibid.group(0)
# Replace matched text in line with underscores:
line = line[0:m_ibid.start()] + '_' * len(m_ibid.group(0)) + line[m_ibid.end():] # depends on [control=['for'], data=['m_ibid']]
return (ibid_match_txt, line) |
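A runnable sketch, pasting the function above into the same module and assuming a deliberately simple re_ibid pattern (the real module compiles a more elaborate one):

import re

re_ibid = re.compile(r"\bibid\.?", re.IGNORECASE)  # hypothetical pattern

line = "Phys. Rev. 12 (1950); ibid. 13 (1951)"
matches, masked = identify_ibids(line)
print(matches)  # {22: 'ibid.'}
print(masked)   # 'Phys. Rev. 12 (1950); _____ 13 (1951)'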
def call(self, iface_name, func_name, params):
"""
Implements the call() function with same signature as Client.call(). Raises
a RpcException if send() has already been called on this batch. Otherwise
appends the request to an internal list.
This method is not commonly called directly.
"""
if self.sent:
raise Exception("Batch already sent. Cannot add more calls.")
else:
req = self.client.to_request(iface_name, func_name, params)
self.req_list.append(req) | def function[call, parameter[self, iface_name, func_name, params]]:
constant[
        Implements the call() function with the same signature as Client.call(). Raises
        an RpcException if send() has already been called on this batch. Otherwise
appends the request to an internal list.
This method is not commonly called directly.
]
if name[self].sent begin[:]
<ast.Raise object at 0x7da1b207e260> | keyword[def] identifier[call] ( identifier[self] , identifier[iface_name] , identifier[func_name] , identifier[params] ):
literal[string]
keyword[if] identifier[self] . identifier[sent] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[else] :
identifier[req] = identifier[self] . identifier[client] . identifier[to_request] ( identifier[iface_name] , identifier[func_name] , identifier[params] )
identifier[self] . identifier[req_list] . identifier[append] ( identifier[req] ) | def call(self, iface_name, func_name, params):
"""
        Implements the call() function with the same signature as Client.call(). Raises
        an RpcException if send() has already been called on this batch. Otherwise
appends the request to an internal list.
This method is not commonly called directly.
"""
if self.sent:
raise Exception('Batch already sent. Cannot add more calls.') # depends on [control=['if'], data=[]]
else:
req = self.client.to_request(iface_name, func_name, params)
self.req_list.append(req) |
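The accumulate-then-send shape in miniature, with the client's request building stubbed out as a plain dict:

class Batch:
    def __init__(self):
        self.req_list, self.sent = [], False

    def call(self, iface_name, func_name, params):
        # Same guard as above: a sent batch accepts no further calls.
        if self.sent:
            raise Exception("Batch already sent. Cannot add more calls.")
        self.req_list.append({"method": "%s.%s" % (iface_name, func_name),
                              "params": params})

b = Batch()
b.call("CalcService", "add", [1, 2])
print(len(b.req_list))  # 1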
def euler_tour(G, node=None, seen=None, visited=None):
"""
definition from
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.192.8615&rep=rep1&type=pdf
Example:
>>> # DISABLE_DOCTEST
>>> from utool.experimental.euler_tour_tree_avl import * # NOQA
>>> edges = [
>>> ('R', 'A'), ('R', 'B'),
>>> ('B', 'C'), ('C', 'D'), ('C', 'E'),
>>> ('B', 'F'), ('B', 'G'),
>>> ]
>>> G = nx.Graph(edges)
>>> node = list(G.nodes())[0]
>>> et1 = euler_tour(G, node)
>>> et2 = euler_tour_dfs(G, node)
"""
if node is None:
        node = next(iter(G.nodes()))  # NodeView is iterable but not an iterator
if visited is None:
assert nx.is_tree(G)
visited = []
if seen is None:
seen = set([])
visited.append(node)
for c in G.neighbors(node):
if c in seen:
continue
seen.add(c)
euler_tour(G, c, seen, visited)
visited.append(node)
return visited | def function[euler_tour, parameter[G, node, seen, visited]]:
constant[
definition from
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.192.8615&rep=rep1&type=pdf
Example:
>>> # DISABLE_DOCTEST
>>> from utool.experimental.euler_tour_tree_avl import * # NOQA
>>> edges = [
>>> ('R', 'A'), ('R', 'B'),
>>> ('B', 'C'), ('C', 'D'), ('C', 'E'),
>>> ('B', 'F'), ('B', 'G'),
>>> ]
>>> G = nx.Graph(edges)
>>> node = list(G.nodes())[0]
>>> et1 = euler_tour(G, node)
>>> et2 = euler_tour_dfs(G, node)
]
if compare[name[node] is constant[None]] begin[:]
        variable[node] assign[=] call[name[next], parameter[call[name[iter], parameter[call[name[G].nodes, parameter[]]]]]]
if compare[name[visited] is constant[None]] begin[:]
assert[call[name[nx].is_tree, parameter[name[G]]]]
variable[visited] assign[=] list[[]]
if compare[name[seen] is constant[None]] begin[:]
variable[seen] assign[=] call[name[set], parameter[list[[]]]]
call[name[visited].append, parameter[name[node]]]
for taget[name[c]] in starred[call[name[G].neighbors, parameter[name[node]]]] begin[:]
if compare[name[c] in name[seen]] begin[:]
continue
call[name[seen].add, parameter[name[c]]]
call[name[euler_tour], parameter[name[G], name[c], name[seen], name[visited]]]
call[name[visited].append, parameter[name[node]]]
return[name[visited]] | keyword[def] identifier[euler_tour] ( identifier[G] , identifier[node] = keyword[None] , identifier[seen] = keyword[None] , identifier[visited] = keyword[None] ):
literal[string]
keyword[if] identifier[node] keyword[is] keyword[None] :
        identifier[node] = identifier[next] ( identifier[iter] ( identifier[G] . identifier[nodes] ()))
keyword[if] identifier[visited] keyword[is] keyword[None] :
keyword[assert] identifier[nx] . identifier[is_tree] ( identifier[G] )
identifier[visited] =[]
keyword[if] identifier[seen] keyword[is] keyword[None] :
identifier[seen] = identifier[set] ([])
identifier[visited] . identifier[append] ( identifier[node] )
keyword[for] identifier[c] keyword[in] identifier[G] . identifier[neighbors] ( identifier[node] ):
keyword[if] identifier[c] keyword[in] identifier[seen] :
keyword[continue]
identifier[seen] . identifier[add] ( identifier[c] )
identifier[euler_tour] ( identifier[G] , identifier[c] , identifier[seen] , identifier[visited] )
identifier[visited] . identifier[append] ( identifier[node] )
keyword[return] identifier[visited] | def euler_tour(G, node=None, seen=None, visited=None):
"""
definition from
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.192.8615&rep=rep1&type=pdf
Example:
>>> # DISABLE_DOCTEST
>>> from utool.experimental.euler_tour_tree_avl import * # NOQA
>>> edges = [
>>> ('R', 'A'), ('R', 'B'),
>>> ('B', 'C'), ('C', 'D'), ('C', 'E'),
>>> ('B', 'F'), ('B', 'G'),
>>> ]
>>> G = nx.Graph(edges)
>>> node = list(G.nodes())[0]
>>> et1 = euler_tour(G, node)
>>> et2 = euler_tour_dfs(G, node)
"""
if node is None:
        node = next(iter(G.nodes())) # depends on [control=['if'], data=['node']]
if visited is None:
assert nx.is_tree(G)
visited = [] # depends on [control=['if'], data=['visited']]
if seen is None:
seen = set([]) # depends on [control=['if'], data=['seen']]
visited.append(node)
for c in G.neighbors(node):
if c in seen:
continue # depends on [control=['if'], data=[]]
seen.add(c)
euler_tour(G, c, seen, visited)
visited.append(node) # depends on [control=['for'], data=['c']]
return visited |
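Running the doctest's tree end to end (requires networkx; the exact ordering follows each node's insertion-ordered adjacency, and note the start node is never added to seen, so the tour may re-enter it from a child):

import networkx as nx

edges = [('R', 'A'), ('R', 'B'), ('B', 'C'), ('C', 'D'),
         ('C', 'E'), ('B', 'F'), ('B', 'G')]
G = nx.Graph(edges)
print(euler_tour(G, 'R'))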
def get(self, setting_name, warn_only_if_overridden=False,
accept_deprecated='', suppress_warnings=False,
enforce_type=None, check_if_setting_deprecated=True,
warning_stacklevel=3):
"""
Returns a setting value for the setting named by ``setting_name``. The
returned value is actually a reference to the original setting value,
so care should be taken to avoid setting the result to a different
value.
:param setting_name:
The name of the app setting for which a value is required.
:type setting_name: str (e.g. "SETTING_NAME")
:param warn_only_if_overridden:
If the setting named by ``setting_name`` is deprecated, a value of
``True`` can be provided to silence the immediate deprecation
warning that is otherwise raised by default. Instead, a
(differently worded) deprecation warning will be raised, but only
when the setting is overriden.
:type warn_only_if_overridden: bool
:param accept_deprecated:
If the setting named by ``setting_name`` replaces multiple
deprecated settings, the ``accept_deprecated`` keyword argument can
be used to specify which of those deprecated settings to accept as
an override value.
Where the requested setting replaces only a single deprecated
setting, override values for that deprecated setting will be
accepted automatically, without having to specify anything.
:type accept_deprecated: str (e.g. "DEPRECATED_SETTING_NAME")
:param suppress_warnings:
Use this to prevent the raising of any deprecation warnings that
might otherwise be raised. It may be more useful to use
``warn_only_if_overridden`` instead.
:type suppress_warnings: bool
:param enforce_type:
When a setting value of a specific type is required, this can be
used to apply some basic validation at the time of retrieval. If
            supplied, and the setting value is found not to be an instance of the
supplied type, a ``SettingValueTypeInvalid`` error will be raised.
In cases where more than one type of value is accepted, a tuple of
acceptable types can be provided.
:type enforce_type: A type (class), or tuple of types
:param check_if_setting_deprecated:
Can be used to disable the check that usually happens at the
beginning of the method to identify whether the setting named by
``setting_name`` is deprecated, and conditionally raise a warning.
This can help to improve efficiency where the same check has
already been made.
:type check_if_setting_deprecated: bool
:param warning_stacklevel:
When raising deprecation warnings related to the request, this
value is passed on as ``stacklevel`` to Python's
``warnings.warn()`` method, to help give a more accurate indication
of the code that caused the warning to be raised.
:type warning_stacklevel: int
:raises: UnknownSettingNameError, SettingValueTypeInvalid
Instead of calling this method directly, developers are generally
encouraged to use the direct attribute shortcut, which is a
syntactically much cleaner way to request values using the default
        options. For example, the following lines are equivalent::
appsettingshelper.SETTING_NAME
appsettingshelper.get('SETTING_NAME')
"""
if check_if_setting_deprecated:
self._warn_if_deprecated_setting_value_requested(
setting_name, warn_only_if_overridden, suppress_warnings,
warning_stacklevel)
cache_key = self._make_cache_key(setting_name, accept_deprecated)
if cache_key in self._raw_cache:
return self._raw_cache[cache_key]
result = self._get_raw_value(
setting_name,
accept_deprecated=accept_deprecated,
warn_if_overridden=warn_only_if_overridden,
suppress_warnings=suppress_warnings,
warning_stacklevel=warning_stacklevel + 1,
)
if enforce_type and not isinstance(result, enforce_type):
if isinstance(enforce_type, tuple):
msg = (
"The value is expected to be one of the following types, "
"but a value of type '{current_type}' was found: "
"{required_types}."
)
text_format_kwargs = dict(
current_type=type(result).__name__,
required_types=enforce_type,
)
else:
msg = (
"The value is expected to be a '{required_type}', but a "
"value of type '{current_type}' was found."
)
text_format_kwargs = dict(
current_type=type(result).__name__,
required_type=enforce_type.__name__,
)
self._raise_setting_value_error(
setting_name=setting_name,
user_value_error_class=OverrideValueTypeInvalid,
default_value_error_class=DefaultValueTypeInvalid,
additional_text=msg,
**text_format_kwargs
)
self._raw_cache[cache_key] = result
return result | def function[get, parameter[self, setting_name, warn_only_if_overridden, accept_deprecated, suppress_warnings, enforce_type, check_if_setting_deprecated, warning_stacklevel]]:
constant[
Returns a setting value for the setting named by ``setting_name``. The
returned value is actually a reference to the original setting value,
so care should be taken to avoid setting the result to a different
value.
:param setting_name:
The name of the app setting for which a value is required.
:type setting_name: str (e.g. "SETTING_NAME")
:param warn_only_if_overridden:
If the setting named by ``setting_name`` is deprecated, a value of
``True`` can be provided to silence the immediate deprecation
warning that is otherwise raised by default. Instead, a
(differently worded) deprecation warning will be raised, but only
when the setting is overriden.
:type warn_only_if_overridden: bool
:param accept_deprecated:
If the setting named by ``setting_name`` replaces multiple
deprecated settings, the ``accept_deprecated`` keyword argument can
be used to specify which of those deprecated settings to accept as
an override value.
Where the requested setting replaces only a single deprecated
setting, override values for that deprecated setting will be
accepted automatically, without having to specify anything.
:type accept_deprecated: str (e.g. "DEPRECATED_SETTING_NAME")
:param suppress_warnings:
Use this to prevent the raising of any deprecation warnings that
might otherwise be raised. It may be more useful to use
``warn_only_if_overridden`` instead.
:type suppress_warnings: bool
:param enforce_type:
When a setting value of a specific type is required, this can be
used to apply some basic validation at the time of retrieval. If
supplied, and setting value is found not to be an instance of the
supplied type, a ``SettingValueTypeInvalid`` error will be raised.
In cases where more than one type of value is accepted, a tuple of
acceptable types can be provided.
:type enforce_type: A type (class), or tuple of types
:param check_if_setting_deprecated:
Can be used to disable the check that usually happens at the
beginning of the method to identify whether the setting named by
``setting_name`` is deprecated, and conditionally raise a warning.
This can help to improve efficiency where the same check has
already been made.
:type check_if_setting_deprecated: bool
:param warning_stacklevel:
When raising deprecation warnings related to the request, this
value is passed on as ``stacklevel`` to Python's
``warnings.warn()`` method, to help give a more accurate indication
of the code that caused the warning to be raised.
:type warning_stacklevel: int
:raises: UnknownSettingNameError, SettingValueTypeInvalid
Instead of calling this method directly, developers are generally
encouraged to use the direct attribute shortcut, which is a
syntactically much cleaner way to request values using the default
    options. For example, the following lines are equivalent::
appsettingshelper.SETTING_NAME
appsettingshelper.get('SETTING_NAME')
]
if name[check_if_setting_deprecated] begin[:]
call[name[self]._warn_if_deprecated_setting_value_requested, parameter[name[setting_name], name[warn_only_if_overridden], name[suppress_warnings], name[warning_stacklevel]]]
variable[cache_key] assign[=] call[name[self]._make_cache_key, parameter[name[setting_name], name[accept_deprecated]]]
if compare[name[cache_key] in name[self]._raw_cache] begin[:]
return[call[name[self]._raw_cache][name[cache_key]]]
variable[result] assign[=] call[name[self]._get_raw_value, parameter[name[setting_name]]]
if <ast.BoolOp object at 0x7da20c6a8b50> begin[:]
if call[name[isinstance], parameter[name[enforce_type], name[tuple]]] begin[:]
variable[msg] assign[=] constant[The value is expected to be one of the following types, but a value of type '{current_type}' was found: {required_types}.]
variable[text_format_kwargs] assign[=] call[name[dict], parameter[]]
call[name[self]._raise_setting_value_error, parameter[]]
call[name[self]._raw_cache][name[cache_key]] assign[=] name[result]
return[name[result]] | keyword[def] identifier[get] ( identifier[self] , identifier[setting_name] , identifier[warn_only_if_overridden] = keyword[False] ,
identifier[accept_deprecated] = literal[string] , identifier[suppress_warnings] = keyword[False] ,
identifier[enforce_type] = keyword[None] , identifier[check_if_setting_deprecated] = keyword[True] ,
identifier[warning_stacklevel] = literal[int] ):
literal[string]
keyword[if] identifier[check_if_setting_deprecated] :
identifier[self] . identifier[_warn_if_deprecated_setting_value_requested] (
identifier[setting_name] , identifier[warn_only_if_overridden] , identifier[suppress_warnings] ,
identifier[warning_stacklevel] )
identifier[cache_key] = identifier[self] . identifier[_make_cache_key] ( identifier[setting_name] , identifier[accept_deprecated] )
keyword[if] identifier[cache_key] keyword[in] identifier[self] . identifier[_raw_cache] :
keyword[return] identifier[self] . identifier[_raw_cache] [ identifier[cache_key] ]
identifier[result] = identifier[self] . identifier[_get_raw_value] (
identifier[setting_name] ,
identifier[accept_deprecated] = identifier[accept_deprecated] ,
identifier[warn_if_overridden] = identifier[warn_only_if_overridden] ,
identifier[suppress_warnings] = identifier[suppress_warnings] ,
identifier[warning_stacklevel] = identifier[warning_stacklevel] + literal[int] ,
)
keyword[if] identifier[enforce_type] keyword[and] keyword[not] identifier[isinstance] ( identifier[result] , identifier[enforce_type] ):
keyword[if] identifier[isinstance] ( identifier[enforce_type] , identifier[tuple] ):
identifier[msg] =(
literal[string]
literal[string]
literal[string]
)
identifier[text_format_kwargs] = identifier[dict] (
identifier[current_type] = identifier[type] ( identifier[result] ). identifier[__name__] ,
identifier[required_types] = identifier[enforce_type] ,
)
keyword[else] :
identifier[msg] =(
literal[string]
literal[string]
)
identifier[text_format_kwargs] = identifier[dict] (
identifier[current_type] = identifier[type] ( identifier[result] ). identifier[__name__] ,
identifier[required_type] = identifier[enforce_type] . identifier[__name__] ,
)
identifier[self] . identifier[_raise_setting_value_error] (
identifier[setting_name] = identifier[setting_name] ,
identifier[user_value_error_class] = identifier[OverrideValueTypeInvalid] ,
identifier[default_value_error_class] = identifier[DefaultValueTypeInvalid] ,
identifier[additional_text] = identifier[msg] ,
** identifier[text_format_kwargs]
)
identifier[self] . identifier[_raw_cache] [ identifier[cache_key] ]= identifier[result]
keyword[return] identifier[result] | def get(self, setting_name, warn_only_if_overridden=False, accept_deprecated='', suppress_warnings=False, enforce_type=None, check_if_setting_deprecated=True, warning_stacklevel=3):
"""
Returns a setting value for the setting named by ``setting_name``. The
returned value is actually a reference to the original setting value,
so care should be taken to avoid setting the result to a different
value.
:param setting_name:
The name of the app setting for which a value is required.
:type setting_name: str (e.g. "SETTING_NAME")
:param warn_only_if_overridden:
If the setting named by ``setting_name`` is deprecated, a value of
``True`` can be provided to silence the immediate deprecation
warning that is otherwise raised by default. Instead, a
(differently worded) deprecation warning will be raised, but only
        when the setting is overridden.
:type warn_only_if_overridden: bool
:param accept_deprecated:
If the setting named by ``setting_name`` replaces multiple
deprecated settings, the ``accept_deprecated`` keyword argument can
be used to specify which of those deprecated settings to accept as
an override value.
Where the requested setting replaces only a single deprecated
setting, override values for that deprecated setting will be
accepted automatically, without having to specify anything.
:type accept_deprecated: str (e.g. "DEPRECATED_SETTING_NAME")
:param suppress_warnings:
Use this to prevent the raising of any deprecation warnings that
might otherwise be raised. It may be more useful to use
``warn_only_if_overridden`` instead.
:type suppress_warnings: bool
:param enforce_type:
When a setting value of a specific type is required, this can be
used to apply some basic validation at the time of retrieval. If
supplied, and setting value is found not to be an instance of the
supplied type, a ``SettingValueTypeInvalid`` error will be raised.
In cases where more than one type of value is accepted, a tuple of
acceptable types can be provided.
:type enforce_type: A type (class), or tuple of types
:param check_if_setting_deprecated:
Can be used to disable the check that usually happens at the
beginning of the method to identify whether the setting named by
``setting_name`` is deprecated, and conditionally raise a warning.
This can help to improve efficiency where the same check has
already been made.
:type check_if_setting_deprecated: bool
:param warning_stacklevel:
When raising deprecation warnings related to the request, this
value is passed on as ``stacklevel`` to Python's
``warnings.warn()`` method, to help give a more accurate indication
of the code that caused the warning to be raised.
:type warning_stacklevel: int
:raises: UnknownSettingNameError, SettingValueTypeInvalid
Instead of calling this method directly, developers are generally
encouraged to use the direct attribute shortcut, which is a
syntactically much cleaner way to request values using the default
        options. For example, the following lines are equivalent::
appsettingshelper.SETTING_NAME
appsettingshelper.get('SETTING_NAME')
"""
if check_if_setting_deprecated:
self._warn_if_deprecated_setting_value_requested(setting_name, warn_only_if_overridden, suppress_warnings, warning_stacklevel) # depends on [control=['if'], data=[]]
cache_key = self._make_cache_key(setting_name, accept_deprecated)
if cache_key in self._raw_cache:
return self._raw_cache[cache_key] # depends on [control=['if'], data=['cache_key']]
result = self._get_raw_value(setting_name, accept_deprecated=accept_deprecated, warn_if_overridden=warn_only_if_overridden, suppress_warnings=suppress_warnings, warning_stacklevel=warning_stacklevel + 1)
if enforce_type and (not isinstance(result, enforce_type)):
if isinstance(enforce_type, tuple):
msg = "The value is expected to be one of the following types, but a value of type '{current_type}' was found: {required_types}."
text_format_kwargs = dict(current_type=type(result).__name__, required_types=enforce_type) # depends on [control=['if'], data=[]]
else:
msg = "The value is expected to be a '{required_type}', but a value of type '{current_type}' was found."
text_format_kwargs = dict(current_type=type(result).__name__, required_type=enforce_type.__name__)
self._raise_setting_value_error(setting_name=setting_name, user_value_error_class=OverrideValueTypeInvalid, default_value_error_class=DefaultValueTypeInvalid, additional_text=msg, **text_format_kwargs) # depends on [control=['if'], data=[]]
self._raw_cache[cache_key] = result
return result |
def estimategaps(args):
"""
%prog estimategaps JM-4 chr1 JMMale-1
Illustrate ALLMAPS gap estimation algorithm.
"""
p = OptionParser(estimategaps.__doc__)
opts, args, iopts = p.set_image_options(args, figsize="6x6", dpi=300)
if len(args) != 3:
sys.exit(not p.print_help())
pf, seqid, mlg = args
bedfile = pf + ".lifted.bed"
agpfile = pf + ".agp"
function = lambda x: x.cm
cc = Map(bedfile, scaffold_info=True, function=function)
agp = AGP(agpfile)
g = GapEstimator(cc, agp, seqid, mlg, function=function)
pp, chrsize, mlgsize = g.pp, g.chrsize, g.mlgsize
spl, spld = g.spl, g.spld
g.compute_all_gaps(verbose=False)
fig = plt.figure(1, (iopts.w, iopts.h))
root = fig.add_axes([0, 0, 1, 1])
# Panel A
xstart, ystart = .15, .65
w, h = .7, .3
t = np.linspace(0, chrsize, 1000)
ax = fig.add_axes([xstart, ystart, w, h])
mx, my = zip(*g.scatter_data)
rho = spearmanr(mx, my)
dsg = "g"
ax.vlines(pp, 0, mlgsize, colors="beige")
ax.plot(mx, my, ".", color=set2[3])
ax.plot(t, spl(t), "-", color=dsg)
ax.text(.05, .95, mlg, va="top", transform=ax.transAxes)
normalize_lms_axis(ax, xlim=chrsize, ylim=mlgsize,
ylabel="Genetic distance (cM)")
if rho < 0:
ax.invert_yaxis()
# Panel B
ystart -= .28
h = .25
ax = fig.add_axes([xstart, ystart, w, h])
ax.vlines(pp, 0, mlgsize, colors="beige")
ax.plot(t, spld(t), "-", lw=2, color=dsg)
ax.plot(pp, spld(pp), "o", mfc="w", mec=dsg, ms=5)
normalize_lms_axis(ax, xlim=chrsize, ylim=25 * 1e-6,
xfactor=1e-6, xlabel="Physical position (Mb)",
yfactor=1000000, ylabel="Recomb. rate\n(cM / Mb)")
ax.xaxis.grid(False)
# Panel C (specific to JMMale-1)
a, b = "scaffold_1076", "scaffold_861"
sizes = dict((x.component_id, (x.object_beg, x.object_end,
x.component_span, x.orientation)) \
for x in g.agp if not x.is_gap)
a_beg, a_end, asize, ao = sizes[a]
b_beg, b_end, bsize, bo = sizes[b]
gapsize = g.get_gapsize(a)
total_size = asize + gapsize + bsize
ratio = .6 / total_size
y = .16
pad = .03
pb_ratio = w / chrsize
# Zoom
lsg = "lightslategray"
root.plot((.15 + pb_ratio * a_beg, .2),
(ystart, ystart - .14), ":", color=lsg)
root.plot((.15 + pb_ratio * b_end, .3),
(ystart, ystart - .08), ":", color=lsg)
ends = []
for tag, size, marker, beg in zip((a, b), (asize, bsize), (49213, 81277),
(.2, .2 + (asize + gapsize) * ratio)):
end = beg + size * ratio
marker = beg + marker * ratio
ends.append((beg, end, marker))
root.plot((marker,), (y,), "o", color=lsg)
root.text((beg + end) / 2, y + pad, latex(tag),
ha="center", va="center")
HorizontalChromosome(root, beg, end, y, height=.025, fc='gainsboro')
begs, ends, markers = zip(*ends)
fontprop = dict(color=lsg, ha="center", va="center")
ypos = y + pad * 2
root.plot(markers, (ypos, ypos), "-", lw=2, color=lsg)
root.text(sum(markers) / 2, ypos + pad,
"Distance: 1.29cM $\Leftrightarrow$ 211,824bp (6.1 cM/Mb)", **fontprop)
ypos = y - pad
xx = markers[0], ends[0]
root.plot(xx, (ypos, ypos), "-", lw=2, color=lsg)
root.text(sum(xx) / 2, ypos - pad, "34,115bp", **fontprop)
xx = markers[1], begs[1]
root.plot(xx, (ypos, ypos), "-", lw=2, color=lsg)
root.text(sum(xx) / 2, ypos - pad, "81,276bp", **fontprop)
root.plot((ends[0], begs[1]), (y, y), ":", lw=2, color=lsg)
root.text(sum(markers) / 2, ypos - 3 * pad, r"$\textit{Estimated gap size: 96,433bp}$",
color="r", ha="center", va="center")
labels = ((.05, .95, 'A'), (.05, .6, 'B'), (.05, .27, 'C'))
panel_labels(root, labels)
normalize_axes(root)
pf = "estimategaps"
image_name = pf + "." + iopts.format
savefig(image_name, dpi=iopts.dpi, iopts=iopts) | def function[estimategaps, parameter[args]]:
constant[
%prog estimategaps JM-4 chr1 JMMale-1
Illustrate ALLMAPS gap estimation algorithm.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[estimategaps].__doc__]]
<ast.Tuple object at 0x7da1b0887d30> assign[=] call[name[p].set_image_options, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[3]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b0887910>]]
<ast.Tuple object at 0x7da1b08877f0> assign[=] name[args]
variable[bedfile] assign[=] binary_operation[name[pf] + constant[.lifted.bed]]
variable[agpfile] assign[=] binary_operation[name[pf] + constant[.agp]]
variable[function] assign[=] <ast.Lambda object at 0x7da1b08874c0>
variable[cc] assign[=] call[name[Map], parameter[name[bedfile]]]
variable[agp] assign[=] call[name[AGP], parameter[name[agpfile]]]
variable[g] assign[=] call[name[GapEstimator], parameter[name[cc], name[agp], name[seqid], name[mlg]]]
<ast.Tuple object at 0x7da1b0886f20> assign[=] tuple[[<ast.Attribute object at 0x7da1b0886e30>, <ast.Attribute object at 0x7da1b0886dd0>, <ast.Attribute object at 0x7da1b0886d70>]]
<ast.Tuple object at 0x7da1b0886ce0> assign[=] tuple[[<ast.Attribute object at 0x7da1b0886c20>, <ast.Attribute object at 0x7da1b0886bc0>]]
call[name[g].compute_all_gaps, parameter[]]
variable[fig] assign[=] call[name[plt].figure, parameter[constant[1], tuple[[<ast.Attribute object at 0x7da1b08868c0>, <ast.Attribute object at 0x7da1b0886860>]]]]
variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b08866e0>, <ast.Constant object at 0x7da1b08866b0>, <ast.Constant object at 0x7da1b0886680>, <ast.Constant object at 0x7da1b0886650>]]]]
<ast.Tuple object at 0x7da1b08865f0> assign[=] tuple[[<ast.Constant object at 0x7da1b0886530>, <ast.Constant object at 0x7da1b0886500>]]
<ast.Tuple object at 0x7da1b08864a0> assign[=] tuple[[<ast.Constant object at 0x7da1b08863e0>, <ast.Constant object at 0x7da1b08863b0>]]
variable[t] assign[=] call[name[np].linspace, parameter[constant[0], name[chrsize], constant[1000]]]
variable[ax] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Name object at 0x7da1b08860e0>, <ast.Name object at 0x7da1b08860b0>, <ast.Name object at 0x7da1b0886080>, <ast.Name object at 0x7da1b0886050>]]]]
<ast.Tuple object at 0x7da1b0885ff0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0885f00>]]
variable[rho] assign[=] call[name[spearmanr], parameter[name[mx], name[my]]]
variable[dsg] assign[=] constant[g]
call[name[ax].vlines, parameter[name[pp], constant[0], name[mlgsize]]]
call[name[ax].plot, parameter[name[mx], name[my], constant[.]]]
call[name[ax].plot, parameter[name[t], call[name[spl], parameter[name[t]]], constant[-]]]
call[name[ax].text, parameter[constant[0.05], constant[0.95], name[mlg]]]
call[name[normalize_lms_axis], parameter[name[ax]]]
if compare[name[rho] less[<] constant[0]] begin[:]
call[name[ax].invert_yaxis, parameter[]]
<ast.AugAssign object at 0x7da1b08aac20>
variable[h] assign[=] constant[0.25]
variable[ax] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Name object at 0x7da1b08aa9e0>, <ast.Name object at 0x7da1b08aa9b0>, <ast.Name object at 0x7da1b08aa980>, <ast.Name object at 0x7da1b08aa950>]]]]
call[name[ax].vlines, parameter[name[pp], constant[0], name[mlgsize]]]
call[name[ax].plot, parameter[name[t], call[name[spld], parameter[name[t]]], constant[-]]]
call[name[ax].plot, parameter[name[pp], call[name[spld], parameter[name[pp]]], constant[o]]]
call[name[normalize_lms_axis], parameter[name[ax]]]
call[name[ax].xaxis.grid, parameter[constant[False]]]
<ast.Tuple object at 0x7da1b08a9c60> assign[=] tuple[[<ast.Constant object at 0x7da1b08a9ba0>, <ast.Constant object at 0x7da1b08a9b70>]]
variable[sizes] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b08a9a80>]]
<ast.Tuple object at 0x7da1b08a9660> assign[=] call[name[sizes]][name[a]]
<ast.Tuple object at 0x7da1b08a94b0> assign[=] call[name[sizes]][name[b]]
variable[gapsize] assign[=] call[name[g].get_gapsize, parameter[name[a]]]
variable[total_size] assign[=] binary_operation[binary_operation[name[asize] + name[gapsize]] + name[bsize]]
variable[ratio] assign[=] binary_operation[constant[0.6] / name[total_size]]
variable[y] assign[=] constant[0.16]
variable[pad] assign[=] constant[0.03]
variable[pb_ratio] assign[=] binary_operation[name[w] / name[chrsize]]
variable[lsg] assign[=] constant[lightslategray]
call[name[root].plot, parameter[tuple[[<ast.BinOp object at 0x7da1b08a8c40>, <ast.Constant object at 0x7da1b08a8b50>]], tuple[[<ast.Name object at 0x7da1b08a8af0>, <ast.BinOp object at 0x7da1b08a8ac0>]], constant[:]]]
call[name[root].plot, parameter[tuple[[<ast.BinOp object at 0x7da1b08a8880>, <ast.Constant object at 0x7da1b08a8790>]], tuple[[<ast.Name object at 0x7da1b08a8730>, <ast.BinOp object at 0x7da1b08a8700>]], constant[:]]]
variable[ends] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b08a84c0>, <ast.Name object at 0x7da1b08a8490>, <ast.Name object at 0x7da1b08a8460>, <ast.Name object at 0x7da1b08a8430>]]] in starred[call[name[zip], parameter[tuple[[<ast.Name object at 0x7da1b08a8370>, <ast.Name object at 0x7da1b08a8340>]], tuple[[<ast.Name object at 0x7da1b08a82e0>, <ast.Name object at 0x7da1b08a82b0>]], tuple[[<ast.Constant object at 0x7da1b08a8250>, <ast.Constant object at 0x7da1b08a8220>]], tuple[[<ast.Constant object at 0x7da1b08a81c0>, <ast.BinOp object at 0x7da1b08a8190>]]]]] begin[:]
variable[end] assign[=] binary_operation[name[beg] + binary_operation[name[size] * name[ratio]]]
variable[marker] assign[=] binary_operation[name[beg] + binary_operation[name[marker] * name[ratio]]]
call[name[ends].append, parameter[tuple[[<ast.Name object at 0x7da1b080fc40>, <ast.Name object at 0x7da1b080fb80>, <ast.Name object at 0x7da1b080fbb0>]]]]
call[name[root].plot, parameter[tuple[[<ast.Name object at 0x7da1b080fca0>]], tuple[[<ast.Name object at 0x7da1b080fd90>]], constant[o]]]
call[name[root].text, parameter[binary_operation[binary_operation[name[beg] + name[end]] / constant[2]], binary_operation[name[y] + name[pad]], call[name[latex], parameter[name[tag]]]]]
call[name[HorizontalChromosome], parameter[name[root], name[beg], name[end], name[y]]]
<ast.Tuple object at 0x7da1b080f5b0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b080f160>]]
variable[fontprop] assign[=] call[name[dict], parameter[]]
variable[ypos] assign[=] binary_operation[name[y] + binary_operation[name[pad] * constant[2]]]
call[name[root].plot, parameter[name[markers], tuple[[<ast.Name object at 0x7da1b080e7d0>, <ast.Name object at 0x7da1b080e770>]], constant[-]]]
call[name[root].text, parameter[binary_operation[call[name[sum], parameter[name[markers]]] / constant[2]], binary_operation[name[ypos] + name[pad]], constant[Distance: 1.29cM $\Leftrightarrow$ 211,824bp (6.1 cM/Mb)]]]
variable[ypos] assign[=] binary_operation[name[y] - name[pad]]
variable[xx] assign[=] tuple[[<ast.Subscript object at 0x7da1b080c610>, <ast.Subscript object at 0x7da1b080c520>]]
call[name[root].plot, parameter[name[xx], tuple[[<ast.Name object at 0x7da1b080eda0>, <ast.Name object at 0x7da1b080eef0>]], constant[-]]]
call[name[root].text, parameter[binary_operation[call[name[sum], parameter[name[xx]]] / constant[2]], binary_operation[name[ypos] - name[pad]], constant[34,115bp]]]
variable[xx] assign[=] tuple[[<ast.Subscript object at 0x7da1b080cd60>, <ast.Subscript object at 0x7da1b080ccd0>]]
call[name[root].plot, parameter[name[xx], tuple[[<ast.Name object at 0x7da1b080cca0>, <ast.Name object at 0x7da1b080cc70>]], constant[-]]]
call[name[root].text, parameter[binary_operation[call[name[sum], parameter[name[xx]]] / constant[2]], binary_operation[name[ypos] - name[pad]], constant[81,276bp]]]
call[name[root].plot, parameter[tuple[[<ast.Subscript object at 0x7da1b080cc10>, <ast.Subscript object at 0x7da1b080dea0>]], tuple[[<ast.Name object at 0x7da1b080dc90>, <ast.Name object at 0x7da1b080e080>]], constant[:]]]
call[name[root].text, parameter[binary_operation[call[name[sum], parameter[name[markers]]] / constant[2]], binary_operation[name[ypos] - binary_operation[constant[3] * name[pad]]], constant[$\textit{Estimated gap size: 96,433bp}$]]]
variable[labels] assign[=] tuple[[<ast.Tuple object at 0x7da1b088e140>, <ast.Tuple object at 0x7da1b088e230>, <ast.Tuple object at 0x7da1b088d480>]]
call[name[panel_labels], parameter[name[root], name[labels]]]
call[name[normalize_axes], parameter[name[root]]]
variable[pf] assign[=] constant[estimategaps]
variable[image_name] assign[=] binary_operation[binary_operation[name[pf] + constant[.]] + name[iopts].format]
call[name[savefig], parameter[name[image_name]]] | keyword[def] identifier[estimategaps] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[estimategaps] . identifier[__doc__] )
identifier[opts] , identifier[args] , identifier[iopts] = identifier[p] . identifier[set_image_options] ( identifier[args] , identifier[figsize] = literal[string] , identifier[dpi] = literal[int] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[pf] , identifier[seqid] , identifier[mlg] = identifier[args]
identifier[bedfile] = identifier[pf] + literal[string]
identifier[agpfile] = identifier[pf] + literal[string]
identifier[function] = keyword[lambda] identifier[x] : identifier[x] . identifier[cm]
identifier[cc] = identifier[Map] ( identifier[bedfile] , identifier[scaffold_info] = keyword[True] , identifier[function] = identifier[function] )
identifier[agp] = identifier[AGP] ( identifier[agpfile] )
identifier[g] = identifier[GapEstimator] ( identifier[cc] , identifier[agp] , identifier[seqid] , identifier[mlg] , identifier[function] = identifier[function] )
identifier[pp] , identifier[chrsize] , identifier[mlgsize] = identifier[g] . identifier[pp] , identifier[g] . identifier[chrsize] , identifier[g] . identifier[mlgsize]
identifier[spl] , identifier[spld] = identifier[g] . identifier[spl] , identifier[g] . identifier[spld]
identifier[g] . identifier[compute_all_gaps] ( identifier[verbose] = keyword[False] )
identifier[fig] = identifier[plt] . identifier[figure] ( literal[int] ,( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] ))
identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[xstart] , identifier[ystart] = literal[int] , literal[int]
identifier[w] , identifier[h] = literal[int] , literal[int]
identifier[t] = identifier[np] . identifier[linspace] ( literal[int] , identifier[chrsize] , literal[int] )
identifier[ax] = identifier[fig] . identifier[add_axes] ([ identifier[xstart] , identifier[ystart] , identifier[w] , identifier[h] ])
identifier[mx] , identifier[my] = identifier[zip] (* identifier[g] . identifier[scatter_data] )
identifier[rho] = identifier[spearmanr] ( identifier[mx] , identifier[my] )
identifier[dsg] = literal[string]
identifier[ax] . identifier[vlines] ( identifier[pp] , literal[int] , identifier[mlgsize] , identifier[colors] = literal[string] )
identifier[ax] . identifier[plot] ( identifier[mx] , identifier[my] , literal[string] , identifier[color] = identifier[set2] [ literal[int] ])
identifier[ax] . identifier[plot] ( identifier[t] , identifier[spl] ( identifier[t] ), literal[string] , identifier[color] = identifier[dsg] )
identifier[ax] . identifier[text] ( literal[int] , literal[int] , identifier[mlg] , identifier[va] = literal[string] , identifier[transform] = identifier[ax] . identifier[transAxes] )
identifier[normalize_lms_axis] ( identifier[ax] , identifier[xlim] = identifier[chrsize] , identifier[ylim] = identifier[mlgsize] ,
identifier[ylabel] = literal[string] )
keyword[if] identifier[rho] < literal[int] :
identifier[ax] . identifier[invert_yaxis] ()
identifier[ystart] -= literal[int]
identifier[h] = literal[int]
identifier[ax] = identifier[fig] . identifier[add_axes] ([ identifier[xstart] , identifier[ystart] , identifier[w] , identifier[h] ])
identifier[ax] . identifier[vlines] ( identifier[pp] , literal[int] , identifier[mlgsize] , identifier[colors] = literal[string] )
identifier[ax] . identifier[plot] ( identifier[t] , identifier[spld] ( identifier[t] ), literal[string] , identifier[lw] = literal[int] , identifier[color] = identifier[dsg] )
identifier[ax] . identifier[plot] ( identifier[pp] , identifier[spld] ( identifier[pp] ), literal[string] , identifier[mfc] = literal[string] , identifier[mec] = identifier[dsg] , identifier[ms] = literal[int] )
identifier[normalize_lms_axis] ( identifier[ax] , identifier[xlim] = identifier[chrsize] , identifier[ylim] = literal[int] * literal[int] ,
identifier[xfactor] = literal[int] , identifier[xlabel] = literal[string] ,
identifier[yfactor] = literal[int] , identifier[ylabel] = literal[string] )
identifier[ax] . identifier[xaxis] . identifier[grid] ( keyword[False] )
identifier[a] , identifier[b] = literal[string] , literal[string]
identifier[sizes] = identifier[dict] (( identifier[x] . identifier[component_id] ,( identifier[x] . identifier[object_beg] , identifier[x] . identifier[object_end] ,
identifier[x] . identifier[component_span] , identifier[x] . identifier[orientation] )) keyword[for] identifier[x] keyword[in] identifier[g] . identifier[agp] keyword[if] keyword[not] identifier[x] . identifier[is_gap] )
identifier[a_beg] , identifier[a_end] , identifier[asize] , identifier[ao] = identifier[sizes] [ identifier[a] ]
identifier[b_beg] , identifier[b_end] , identifier[bsize] , identifier[bo] = identifier[sizes] [ identifier[b] ]
identifier[gapsize] = identifier[g] . identifier[get_gapsize] ( identifier[a] )
identifier[total_size] = identifier[asize] + identifier[gapsize] + identifier[bsize]
identifier[ratio] = literal[int] / identifier[total_size]
identifier[y] = literal[int]
identifier[pad] = literal[int]
identifier[pb_ratio] = identifier[w] / identifier[chrsize]
identifier[lsg] = literal[string]
identifier[root] . identifier[plot] (( literal[int] + identifier[pb_ratio] * identifier[a_beg] , literal[int] ),
( identifier[ystart] , identifier[ystart] - literal[int] ), literal[string] , identifier[color] = identifier[lsg] )
identifier[root] . identifier[plot] (( literal[int] + identifier[pb_ratio] * identifier[b_end] , literal[int] ),
( identifier[ystart] , identifier[ystart] - literal[int] ), literal[string] , identifier[color] = identifier[lsg] )
identifier[ends] =[]
keyword[for] identifier[tag] , identifier[size] , identifier[marker] , identifier[beg] keyword[in] identifier[zip] (( identifier[a] , identifier[b] ),( identifier[asize] , identifier[bsize] ),( literal[int] , literal[int] ),
( literal[int] , literal[int] +( identifier[asize] + identifier[gapsize] )* identifier[ratio] )):
identifier[end] = identifier[beg] + identifier[size] * identifier[ratio]
identifier[marker] = identifier[beg] + identifier[marker] * identifier[ratio]
identifier[ends] . identifier[append] (( identifier[beg] , identifier[end] , identifier[marker] ))
identifier[root] . identifier[plot] (( identifier[marker] ,),( identifier[y] ,), literal[string] , identifier[color] = identifier[lsg] )
identifier[root] . identifier[text] (( identifier[beg] + identifier[end] )/ literal[int] , identifier[y] + identifier[pad] , identifier[latex] ( identifier[tag] ),
identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[HorizontalChromosome] ( identifier[root] , identifier[beg] , identifier[end] , identifier[y] , identifier[height] = literal[int] , identifier[fc] = literal[string] )
identifier[begs] , identifier[ends] , identifier[markers] = identifier[zip] (* identifier[ends] )
identifier[fontprop] = identifier[dict] ( identifier[color] = identifier[lsg] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[ypos] = identifier[y] + identifier[pad] * literal[int]
identifier[root] . identifier[plot] ( identifier[markers] ,( identifier[ypos] , identifier[ypos] ), literal[string] , identifier[lw] = literal[int] , identifier[color] = identifier[lsg] )
identifier[root] . identifier[text] ( identifier[sum] ( identifier[markers] )/ literal[int] , identifier[ypos] + identifier[pad] ,
literal[string] ,** identifier[fontprop] )
identifier[ypos] = identifier[y] - identifier[pad]
identifier[xx] = identifier[markers] [ literal[int] ], identifier[ends] [ literal[int] ]
identifier[root] . identifier[plot] ( identifier[xx] ,( identifier[ypos] , identifier[ypos] ), literal[string] , identifier[lw] = literal[int] , identifier[color] = identifier[lsg] )
identifier[root] . identifier[text] ( identifier[sum] ( identifier[xx] )/ literal[int] , identifier[ypos] - identifier[pad] , literal[string] ,** identifier[fontprop] )
identifier[xx] = identifier[markers] [ literal[int] ], identifier[begs] [ literal[int] ]
identifier[root] . identifier[plot] ( identifier[xx] ,( identifier[ypos] , identifier[ypos] ), literal[string] , identifier[lw] = literal[int] , identifier[color] = identifier[lsg] )
identifier[root] . identifier[text] ( identifier[sum] ( identifier[xx] )/ literal[int] , identifier[ypos] - identifier[pad] , literal[string] ,** identifier[fontprop] )
identifier[root] . identifier[plot] (( identifier[ends] [ literal[int] ], identifier[begs] [ literal[int] ]),( identifier[y] , identifier[y] ), literal[string] , identifier[lw] = literal[int] , identifier[color] = identifier[lsg] )
identifier[root] . identifier[text] ( identifier[sum] ( identifier[markers] )/ literal[int] , identifier[ypos] - literal[int] * identifier[pad] , literal[string] ,
identifier[color] = literal[string] , identifier[ha] = literal[string] , identifier[va] = literal[string] )
identifier[labels] =(( literal[int] , literal[int] , literal[string] ),( literal[int] , literal[int] , literal[string] ),( literal[int] , literal[int] , literal[string] ))
identifier[panel_labels] ( identifier[root] , identifier[labels] )
identifier[normalize_axes] ( identifier[root] )
identifier[pf] = literal[string]
identifier[image_name] = identifier[pf] + literal[string] + identifier[iopts] . identifier[format]
identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[iopts] . identifier[dpi] , identifier[iopts] = identifier[iopts] ) | def estimategaps(args):
"""
%prog estimategaps JM-4 chr1 JMMale-1
Illustrate ALLMAPS gap estimation algorithm.
"""
p = OptionParser(estimategaps.__doc__)
(opts, args, iopts) = p.set_image_options(args, figsize='6x6', dpi=300)
if len(args) != 3:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(pf, seqid, mlg) = args
bedfile = pf + '.lifted.bed'
agpfile = pf + '.agp'
function = lambda x: x.cm
cc = Map(bedfile, scaffold_info=True, function=function)
agp = AGP(agpfile)
g = GapEstimator(cc, agp, seqid, mlg, function=function)
(pp, chrsize, mlgsize) = (g.pp, g.chrsize, g.mlgsize)
(spl, spld) = (g.spl, g.spld)
g.compute_all_gaps(verbose=False)
fig = plt.figure(1, (iopts.w, iopts.h))
root = fig.add_axes([0, 0, 1, 1])
# Panel A
(xstart, ystart) = (0.15, 0.65)
(w, h) = (0.7, 0.3)
t = np.linspace(0, chrsize, 1000)
ax = fig.add_axes([xstart, ystart, w, h])
(mx, my) = zip(*g.scatter_data)
rho = spearmanr(mx, my)
dsg = 'g'
ax.vlines(pp, 0, mlgsize, colors='beige')
ax.plot(mx, my, '.', color=set2[3])
ax.plot(t, spl(t), '-', color=dsg)
ax.text(0.05, 0.95, mlg, va='top', transform=ax.transAxes)
normalize_lms_axis(ax, xlim=chrsize, ylim=mlgsize, ylabel='Genetic distance (cM)')
if rho < 0:
ax.invert_yaxis() # depends on [control=['if'], data=[]]
# Panel B
ystart -= 0.28
h = 0.25
ax = fig.add_axes([xstart, ystart, w, h])
ax.vlines(pp, 0, mlgsize, colors='beige')
ax.plot(t, spld(t), '-', lw=2, color=dsg)
ax.plot(pp, spld(pp), 'o', mfc='w', mec=dsg, ms=5)
normalize_lms_axis(ax, xlim=chrsize, ylim=25 * 1e-06, xfactor=1e-06, xlabel='Physical position (Mb)', yfactor=1000000, ylabel='Recomb. rate\n(cM / Mb)')
ax.xaxis.grid(False)
# Panel C (specific to JMMale-1)
(a, b) = ('scaffold_1076', 'scaffold_861')
sizes = dict(((x.component_id, (x.object_beg, x.object_end, x.component_span, x.orientation)) for x in g.agp if not x.is_gap))
(a_beg, a_end, asize, ao) = sizes[a]
(b_beg, b_end, bsize, bo) = sizes[b]
gapsize = g.get_gapsize(a)
total_size = asize + gapsize + bsize
ratio = 0.6 / total_size
y = 0.16
pad = 0.03
pb_ratio = w / chrsize
# Zoom
lsg = 'lightslategray'
root.plot((0.15 + pb_ratio * a_beg, 0.2), (ystart, ystart - 0.14), ':', color=lsg)
root.plot((0.15 + pb_ratio * b_end, 0.3), (ystart, ystart - 0.08), ':', color=lsg)
ends = []
for (tag, size, marker, beg) in zip((a, b), (asize, bsize), (49213, 81277), (0.2, 0.2 + (asize + gapsize) * ratio)):
end = beg + size * ratio
marker = beg + marker * ratio
ends.append((beg, end, marker))
root.plot((marker,), (y,), 'o', color=lsg)
root.text((beg + end) / 2, y + pad, latex(tag), ha='center', va='center')
HorizontalChromosome(root, beg, end, y, height=0.025, fc='gainsboro') # depends on [control=['for'], data=[]]
(begs, ends, markers) = zip(*ends)
fontprop = dict(color=lsg, ha='center', va='center')
ypos = y + pad * 2
root.plot(markers, (ypos, ypos), '-', lw=2, color=lsg)
root.text(sum(markers) / 2, ypos + pad, 'Distance: 1.29cM $\\Leftrightarrow$ 211,824bp (6.1 cM/Mb)', **fontprop)
ypos = y - pad
xx = (markers[0], ends[0])
root.plot(xx, (ypos, ypos), '-', lw=2, color=lsg)
root.text(sum(xx) / 2, ypos - pad, '34,115bp', **fontprop)
xx = (markers[1], begs[1])
root.plot(xx, (ypos, ypos), '-', lw=2, color=lsg)
root.text(sum(xx) / 2, ypos - pad, '81,276bp', **fontprop)
root.plot((ends[0], begs[1]), (y, y), ':', lw=2, color=lsg)
root.text(sum(markers) / 2, ypos - 3 * pad, '$\\textit{Estimated gap size: 96,433bp}$', color='r', ha='center', va='center')
labels = ((0.05, 0.95, 'A'), (0.05, 0.6, 'B'), (0.05, 0.27, 'C'))
panel_labels(root, labels)
normalize_axes(root)
pf = 'estimategaps'
image_name = pf + '.' + iopts.format
savefig(image_name, dpi=iopts.dpi, iopts=iopts) |
def delete_message(self, messageid="", folderid="", stackid=""):
"""Delete a message or a message stack
:param folderid: The folder to delete the message from, defaults to inbox
:param messageid: The message to delete
:param stackid: The stack to delete
"""
if self.standard_grant_type is not "authorization_code":
raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.")
response = self._req('/messages/delete', post_data={
'folderid' : folderid,
'messageid' : messageid,
'stackid' : stackid
})
return response | def function[delete_message, parameter[self, messageid, folderid, stackid]]:
constant[Delete a message or a message stack
:param folderid: The folder to delete the message from, defaults to inbox
:param messageid: The message to delete
:param stackid: The stack to delete
]
if compare[name[self].standard_grant_type is_not constant[authorization_code]] begin[:]
<ast.Raise object at 0x7da18eb576a0>
variable[response] assign[=] call[name[self]._req, parameter[constant[/messages/delete]]]
return[name[response]] | keyword[def] identifier[delete_message] ( identifier[self] , identifier[messageid] = literal[string] , identifier[folderid] = literal[string] , identifier[stackid] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[standard_grant_type] keyword[is] keyword[not] literal[string] :
keyword[raise] identifier[DeviantartError] ( literal[string] )
identifier[response] = identifier[self] . identifier[_req] ( literal[string] , identifier[post_data] ={
literal[string] : identifier[folderid] ,
literal[string] : identifier[messageid] ,
literal[string] : identifier[stackid]
})
keyword[return] identifier[response] | def delete_message(self, messageid='', folderid='', stackid=''):
"""Delete a message or a message stack
:param folderid: The folder to delete the message from, defaults to inbox
:param messageid: The message to delete
:param stackid: The stack to delete
"""
if self.standard_grant_type is not 'authorization_code':
raise DeviantartError('Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.') # depends on [control=['if'], data=[]]
response = self._req('/messages/delete', post_data={'folderid': folderid, 'messageid': messageid, 'stackid': stackid})
return response |
def add_cron(self, name, minute, hour, mday, month, wday, who, command, env=None):
"""
Add an entry to the system crontab.
"""
raise NotImplementedError | def function[add_cron, parameter[self, name, minute, hour, mday, month, wday, who, command, env]]:
constant[
Add an entry to the system crontab.
]
<ast.Raise object at 0x7da1b2658eb0> | keyword[def] identifier[add_cron] ( identifier[self] , identifier[name] , identifier[minute] , identifier[hour] , identifier[mday] , identifier[month] , identifier[wday] , identifier[who] , identifier[command] , identifier[env] = keyword[None] ):
literal[string]
keyword[raise] identifier[NotImplementedError] | def add_cron(self, name, minute, hour, mday, month, wday, who, command, env=None):
"""
Add an entry to the system crontab.
"""
raise NotImplementedError |
def get_job_input(self, job_id):
"""GetJobInput
https://apidocs.joyent.com/manta/api.html#GetJobInput
with the added sugar that it will retrieve the archived job if it has
been archived, per:
https://apidocs.joyent.com/manta/jobs-reference.html#job-completion-and-archival
"""
try:
return RawMantaClient.get_job_input(self, job_id)
except errors.MantaAPIError as ex:
if ex.res.status != 404:
raise
# Job was archived, try to retrieve the archived data.
mpath = "/%s/jobs/%s/in.txt" % (self.account, job_id)
content = self.get_object(mpath)
keys = content.splitlines(False)
return keys | def function[get_job_input, parameter[self, job_id]]:
constant[GetJobInput
https://apidocs.joyent.com/manta/api.html#GetJobInput
with the added sugar that it will retrieve the archived job if it has
been archived, per:
https://apidocs.joyent.com/manta/jobs-reference.html#job-completion-and-archival
]
<ast.Try object at 0x7da1b0493250> | keyword[def] identifier[get_job_input] ( identifier[self] , identifier[job_id] ):
literal[string]
keyword[try] :
keyword[return] identifier[RawMantaClient] . identifier[get_job_input] ( identifier[self] , identifier[job_id] )
keyword[except] identifier[errors] . identifier[MantaAPIError] keyword[as] identifier[ex] :
keyword[if] identifier[ex] . identifier[res] . identifier[status] != literal[int] :
keyword[raise]
identifier[mpath] = literal[string] %( identifier[self] . identifier[account] , identifier[job_id] )
identifier[content] = identifier[self] . identifier[get_object] ( identifier[mpath] )
identifier[keys] = identifier[content] . identifier[splitlines] ( keyword[False] )
keyword[return] identifier[keys] | def get_job_input(self, job_id):
"""GetJobInput
https://apidocs.joyent.com/manta/api.html#GetJobInput
with the added sugar that it will retrieve the archived job if it has
been archived, per:
https://apidocs.joyent.com/manta/jobs-reference.html#job-completion-and-archival
"""
try:
return RawMantaClient.get_job_input(self, job_id) # depends on [control=['try'], data=[]]
except errors.MantaAPIError as ex:
if ex.res.status != 404:
raise # depends on [control=['if'], data=[]]
# Job was archived, try to retrieve the archived data.
mpath = '/%s/jobs/%s/in.txt' % (self.account, job_id)
content = self.get_object(mpath)
keys = content.splitlines(False)
return keys # depends on [control=['except'], data=['ex']] |
def show_hbonds(self):
"""Visualizes hydrogen bonds."""
hbonds = self.plcomplex.hbonds
for group in [['HBondDonor-P', hbonds.prot_don_id],
['HBondAccept-P', hbonds.prot_acc_id]]:
if not len(group[1]) == 0:
self.select_by_ids(group[0], group[1], restrict=self.protname)
for group in [['HBondDonor-L', hbonds.lig_don_id],
['HBondAccept-L', hbonds.lig_acc_id]]:
if not len(group[1]) == 0:
self.select_by_ids(group[0], group[1], restrict=self.ligname)
for i in hbonds.ldon_id:
cmd.select('tmp_bs', 'id %i & %s' % (i[0], self.protname))
cmd.select('tmp_lig', 'id %i & %s' % (i[1], self.ligname))
cmd.distance('HBonds', 'tmp_bs', 'tmp_lig')
for i in hbonds.pdon_id:
cmd.select('tmp_bs', 'id %i & %s' % (i[1], self.protname))
cmd.select('tmp_lig', 'id %i & %s' % (i[0], self.ligname))
cmd.distance('HBonds', 'tmp_bs', 'tmp_lig')
if self.object_exists('HBonds'):
cmd.set('dash_color', 'blue', 'HBonds') | def function[show_hbonds, parameter[self]]:
constant[Visualizes hydrogen bonds.]
variable[hbonds] assign[=] name[self].plcomplex.hbonds
for taget[name[group]] in starred[list[[<ast.List object at 0x7da207f9a740>, <ast.List object at 0x7da207f993f0>]]] begin[:]
if <ast.UnaryOp object at 0x7da207f98a90> begin[:]
call[name[self].select_by_ids, parameter[call[name[group]][constant[0]], call[name[group]][constant[1]]]]
for taget[name[group]] in starred[list[[<ast.List object at 0x7da207f9a2c0>, <ast.List object at 0x7da207f9a140>]]] begin[:]
if <ast.UnaryOp object at 0x7da207f9a800> begin[:]
call[name[self].select_by_ids, parameter[call[name[group]][constant[0]], call[name[group]][constant[1]]]]
for taget[name[i]] in starred[name[hbonds].ldon_id] begin[:]
call[name[cmd].select, parameter[constant[tmp_bs], binary_operation[constant[id %i & %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da207f9b1f0>, <ast.Attribute object at 0x7da207f9b8e0>]]]]]
call[name[cmd].select, parameter[constant[tmp_lig], binary_operation[constant[id %i & %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da207f9b6d0>, <ast.Attribute object at 0x7da207f989d0>]]]]]
call[name[cmd].distance, parameter[constant[HBonds], constant[tmp_bs], constant[tmp_lig]]]
for taget[name[i]] in starred[name[hbonds].pdon_id] begin[:]
call[name[cmd].select, parameter[constant[tmp_bs], binary_operation[constant[id %i & %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da207f9a4d0>, <ast.Attribute object at 0x7da207f9a1a0>]]]]]
call[name[cmd].select, parameter[constant[tmp_lig], binary_operation[constant[id %i & %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da207f9b880>, <ast.Attribute object at 0x7da207f99e10>]]]]]
call[name[cmd].distance, parameter[constant[HBonds], constant[tmp_bs], constant[tmp_lig]]]
if call[name[self].object_exists, parameter[constant[HBonds]]] begin[:]
call[name[cmd].set, parameter[constant[dash_color], constant[blue], constant[HBonds]]] | keyword[def] identifier[show_hbonds] ( identifier[self] ):
literal[string]
identifier[hbonds] = identifier[self] . identifier[plcomplex] . identifier[hbonds]
keyword[for] identifier[group] keyword[in] [[ literal[string] , identifier[hbonds] . identifier[prot_don_id] ],
[ literal[string] , identifier[hbonds] . identifier[prot_acc_id] ]]:
keyword[if] keyword[not] identifier[len] ( identifier[group] [ literal[int] ])== literal[int] :
identifier[self] . identifier[select_by_ids] ( identifier[group] [ literal[int] ], identifier[group] [ literal[int] ], identifier[restrict] = identifier[self] . identifier[protname] )
keyword[for] identifier[group] keyword[in] [[ literal[string] , identifier[hbonds] . identifier[lig_don_id] ],
[ literal[string] , identifier[hbonds] . identifier[lig_acc_id] ]]:
keyword[if] keyword[not] identifier[len] ( identifier[group] [ literal[int] ])== literal[int] :
identifier[self] . identifier[select_by_ids] ( identifier[group] [ literal[int] ], identifier[group] [ literal[int] ], identifier[restrict] = identifier[self] . identifier[ligname] )
keyword[for] identifier[i] keyword[in] identifier[hbonds] . identifier[ldon_id] :
identifier[cmd] . identifier[select] ( literal[string] , literal[string] %( identifier[i] [ literal[int] ], identifier[self] . identifier[protname] ))
identifier[cmd] . identifier[select] ( literal[string] , literal[string] %( identifier[i] [ literal[int] ], identifier[self] . identifier[ligname] ))
identifier[cmd] . identifier[distance] ( literal[string] , literal[string] , literal[string] )
keyword[for] identifier[i] keyword[in] identifier[hbonds] . identifier[pdon_id] :
identifier[cmd] . identifier[select] ( literal[string] , literal[string] %( identifier[i] [ literal[int] ], identifier[self] . identifier[protname] ))
identifier[cmd] . identifier[select] ( literal[string] , literal[string] %( identifier[i] [ literal[int] ], identifier[self] . identifier[ligname] ))
identifier[cmd] . identifier[distance] ( literal[string] , literal[string] , literal[string] )
keyword[if] identifier[self] . identifier[object_exists] ( literal[string] ):
identifier[cmd] . identifier[set] ( literal[string] , literal[string] , literal[string] ) | def show_hbonds(self):
"""Visualizes hydrogen bonds."""
hbonds = self.plcomplex.hbonds
for group in [['HBondDonor-P', hbonds.prot_don_id], ['HBondAccept-P', hbonds.prot_acc_id]]:
if not len(group[1]) == 0:
self.select_by_ids(group[0], group[1], restrict=self.protname) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']]
for group in [['HBondDonor-L', hbonds.lig_don_id], ['HBondAccept-L', hbonds.lig_acc_id]]:
if not len(group[1]) == 0:
self.select_by_ids(group[0], group[1], restrict=self.ligname) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']]
for i in hbonds.ldon_id:
cmd.select('tmp_bs', 'id %i & %s' % (i[0], self.protname))
cmd.select('tmp_lig', 'id %i & %s' % (i[1], self.ligname))
cmd.distance('HBonds', 'tmp_bs', 'tmp_lig') # depends on [control=['for'], data=['i']]
for i in hbonds.pdon_id:
cmd.select('tmp_bs', 'id %i & %s' % (i[1], self.protname))
cmd.select('tmp_lig', 'id %i & %s' % (i[0], self.ligname))
cmd.distance('HBonds', 'tmp_bs', 'tmp_lig') # depends on [control=['for'], data=['i']]
if self.object_exists('HBonds'):
cmd.set('dash_color', 'blue', 'HBonds') # depends on [control=['if'], data=[]] |
def hamming_distance(s1, s2):
"""Return the Hamming distance between equal-length sequences"""
# print(s1,s2)
if len(s1) != len(s2):
raise ValueError("Undefined for sequences of unequal length")
return sum(el1 != el2 for el1, el2 in zip(s1.upper(), s2.upper())) | def function[hamming_distance, parameter[s1, s2]]:
constant[Return the Hamming distance between equal-length sequences]
if compare[call[name[len], parameter[name[s1]]] not_equal[!=] call[name[len], parameter[name[s2]]]] begin[:]
<ast.Raise object at 0x7da1b1ff8430>
return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b1ff9ae0>]]] | keyword[def] identifier[hamming_distance] ( identifier[s1] , identifier[s2] ):
literal[string]
keyword[if] identifier[len] ( identifier[s1] )!= identifier[len] ( identifier[s2] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[sum] ( identifier[el1] != identifier[el2] keyword[for] identifier[el1] , identifier[el2] keyword[in] identifier[zip] ( identifier[s1] . identifier[upper] (), identifier[s2] . identifier[upper] ())) | def hamming_distance(s1, s2):
"""Return the Hamming distance between equal-length sequences"""
# print(s1,s2)
if len(s1) != len(s2):
raise ValueError('Undefined for sequences of unequal length') # depends on [control=['if'], data=[]]
return sum((el1 != el2 for (el1, el2) in zip(s1.upper(), s2.upper()))) |
def remove(self, arr):
"""Removes an array from the list
Parameters
----------
arr: str or :class:`InteractiveBase`
The array name or the data object in this list to remove
Raises
------
ValueError
If no array with the specified array name is in the list"""
name = arr if isinstance(arr, six.string_types) else arr.psy.arr_name
if arr not in self:
raise ValueError(
"Array {0} not in the list".format(name))
for i, arr in enumerate(self):
if arr.psy.arr_name == name:
del self[i]
return
raise ValueError(
"No array found with name {0}".format(name)) | def function[remove, parameter[self, arr]]:
constant[Removes an array from the list
Parameters
----------
arr: str or :class:`InteractiveBase`
The array name or the data object in this list to remove
Raises
------
ValueError
If no array with the specified array name is in the list]
variable[name] assign[=] <ast.IfExp object at 0x7da1b196c550>
if compare[name[arr] <ast.NotIn object at 0x7da2590d7190> name[self]] begin[:]
<ast.Raise object at 0x7da1b196ee00>
for taget[tuple[[<ast.Name object at 0x7da1b196d450>, <ast.Name object at 0x7da1b196eb60>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:]
if compare[name[arr].psy.arr_name equal[==] name[name]] begin[:]
<ast.Delete object at 0x7da1b196ffd0>
return[None]
<ast.Raise object at 0x7da1b196c3a0> | keyword[def] identifier[remove] ( identifier[self] , identifier[arr] ):
literal[string]
identifier[name] = identifier[arr] keyword[if] identifier[isinstance] ( identifier[arr] , identifier[six] . identifier[string_types] ) keyword[else] identifier[arr] . identifier[psy] . identifier[arr_name]
keyword[if] identifier[arr] keyword[not] keyword[in] identifier[self] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[name] ))
keyword[for] identifier[i] , identifier[arr] keyword[in] identifier[enumerate] ( identifier[self] ):
keyword[if] identifier[arr] . identifier[psy] . identifier[arr_name] == identifier[name] :
keyword[del] identifier[self] [ identifier[i] ]
keyword[return]
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[name] )) | def remove(self, arr):
"""Removes an array from the list
Parameters
----------
arr: str or :class:`InteractiveBase`
The array name or the data object in this list to remove
Raises
------
ValueError
If no array with the specified array name is in the list"""
name = arr if isinstance(arr, six.string_types) else arr.psy.arr_name
if arr not in self:
raise ValueError('Array {0} not in the list'.format(name)) # depends on [control=['if'], data=[]]
for (i, arr) in enumerate(self):
if arr.psy.arr_name == name:
del self[i]
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise ValueError('No array found with name {0}'.format(name)) |
def getMyID(self,gist_name):
'''
Getting gistID of a gist in order to make the workflow
easy and uninterrupted.
'''
r = requests.get(
'%s'%BASE_URL+'/users/%s/gists' % self.user,
headers=self.gist.header
)
if (r.status_code == 200):
r_text = json.loads(r.text)
limit = len(r.json())
for g,no in zip(r_text, range(0,limit)):
for ka,va in r.json()[no]['files'].iteritems():
if str(va['filename']) == str(gist_name):
return r.json()[no]['id']
return 0
raise Exception('Username not found') | def function[getMyID, parameter[self, gist_name]]:
constant[
Getting gistID of a gist in order to make the workflow
easy and uninterrupted.
]
variable[r] assign[=] call[name[requests].get, parameter[binary_operation[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[BASE_URL]] + binary_operation[constant[/users/%s/gists] <ast.Mod object at 0x7da2590d6920> name[self].user]]]]
if compare[name[r].status_code equal[==] constant[200]] begin[:]
variable[r_text] assign[=] call[name[json].loads, parameter[name[r].text]]
variable[limit] assign[=] call[name[len], parameter[call[name[r].json, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da1b26add80>, <ast.Name object at 0x7da1b26ae5c0>]]] in starred[call[name[zip], parameter[name[r_text], call[name[range], parameter[constant[0], name[limit]]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b26aefb0>, <ast.Name object at 0x7da1b26ae8c0>]]] in starred[call[call[call[call[name[r].json, parameter[]]][name[no]]][constant[files]].iteritems, parameter[]]] begin[:]
if compare[call[name[str], parameter[call[name[va]][constant[filename]]]] equal[==] call[name[str], parameter[name[gist_name]]]] begin[:]
return[call[call[call[name[r].json, parameter[]]][name[no]]][constant[id]]]
return[constant[0]]
<ast.Raise object at 0x7da1b26aef50> | keyword[def] identifier[getMyID] ( identifier[self] , identifier[gist_name] ):
literal[string]
identifier[r] = identifier[requests] . identifier[get] (
literal[string] % identifier[BASE_URL] + literal[string] % identifier[self] . identifier[user] ,
identifier[headers] = identifier[self] . identifier[gist] . identifier[header]
)
keyword[if] ( identifier[r] . identifier[status_code] == literal[int] ):
identifier[r_text] = identifier[json] . identifier[loads] ( identifier[r] . identifier[text] )
identifier[limit] = identifier[len] ( identifier[r] . identifier[json] ())
keyword[for] identifier[g] , identifier[no] keyword[in] identifier[zip] ( identifier[r_text] , identifier[range] ( literal[int] , identifier[limit] )):
keyword[for] identifier[ka] , identifier[va] keyword[in] identifier[r] . identifier[json] ()[ identifier[no] ][ literal[string] ]. identifier[iteritems] ():
keyword[if] identifier[str] ( identifier[va] [ literal[string] ])== identifier[str] ( identifier[gist_name] ):
keyword[return] identifier[r] . identifier[json] ()[ identifier[no] ][ literal[string] ]
keyword[return] literal[int]
keyword[raise] identifier[Exception] ( literal[string] ) | def getMyID(self, gist_name):
"""
Getting gistID of a gist in order to make the workflow
easy and uninterrupted.
"""
r = requests.get('%s' % BASE_URL + '/users/%s/gists' % self.user, headers=self.gist.header)
if r.status_code == 200:
r_text = json.loads(r.text)
limit = len(r.json())
for (g, no) in zip(r_text, range(0, limit)):
for (ka, va) in r.json()[no]['files'].iteritems():
if str(va['filename']) == str(gist_name):
return r.json()[no]['id'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return 0 # depends on [control=['if'], data=[]]
raise Exception('Username not found') |
def shell():
""" Start application-aware shell """
app = bootstrap.get_app()
context = dict(app=app)
# and push app context
app_context = app.app_context()
app_context.push()
# got ipython?
ipython = importlib.util.find_spec("IPython")
# run now
if ipython:
from IPython import embed
embed(user_ns=context)
else:
import code
code.interact(local=context) | def function[shell, parameter[]]:
constant[ Start application-aware shell ]
variable[app] assign[=] call[name[bootstrap].get_app, parameter[]]
variable[context] assign[=] call[name[dict], parameter[]]
variable[app_context] assign[=] call[name[app].app_context, parameter[]]
call[name[app_context].push, parameter[]]
variable[ipython] assign[=] call[name[importlib].util.find_spec, parameter[constant[IPython]]]
if name[ipython] begin[:]
from relative_module[IPython] import module[embed]
call[name[embed], parameter[]] | keyword[def] identifier[shell] ():
literal[string]
identifier[app] = identifier[bootstrap] . identifier[get_app] ()
identifier[context] = identifier[dict] ( identifier[app] = identifier[app] )
identifier[app_context] = identifier[app] . identifier[app_context] ()
identifier[app_context] . identifier[push] ()
identifier[ipython] = identifier[importlib] . identifier[util] . identifier[find_spec] ( literal[string] )
keyword[if] identifier[ipython] :
keyword[from] identifier[IPython] keyword[import] identifier[embed]
identifier[embed] ( identifier[user_ns] = identifier[context] )
keyword[else] :
keyword[import] identifier[code]
identifier[code] . identifier[interact] ( identifier[local] = identifier[context] ) | def shell():
""" Start application-aware shell """
app = bootstrap.get_app()
context = dict(app=app)
# and push app context
app_context = app.app_context()
app_context.push()
# got ipython?
ipython = importlib.util.find_spec('IPython')
# run now
if ipython:
from IPython import embed
embed(user_ns=context) # depends on [control=['if'], data=[]]
else:
import code
code.interact(local=context) |
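Hedged sketch of the optional-IPython fallback used above; `context` is an arbitrary namespace dict, and only stdlib calls (importlib.util.find_spec, code.interact) are assumed.

import importlib.util

def start_repl(context):
    # Prefer IPython when it is installed; otherwise fall back to the stdlib REPL.
    if importlib.util.find_spec("IPython") is not None:
        from IPython import embed
        embed(user_ns=context)
    else:
        import code
        code.interact(local=context)

# start_repl({"answer": 42})  # interactive; uncomment to try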
def set_attribute(library, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
return library.viSetAttribute(session, attribute, attribute_state) | def function[set_attribute, parameter[library, session, attribute, attribute_state]]:
constant[Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
]
return[call[name[library].viSetAttribute, parameter[name[session], name[attribute], name[attribute_state]]]] | keyword[def] identifier[set_attribute] ( identifier[library] , identifier[session] , identifier[attribute] , identifier[attribute_state] ):
literal[string]
keyword[return] identifier[library] . identifier[viSetAttribute] ( identifier[session] , identifier[attribute] , identifier[attribute_state] ) | def set_attribute(library, session, attribute, attribute_state):
"""Sets the state of an attribute.
Corresponds to viSetAttribute function of the VISA library.
:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param attribute: Attribute for which the state is to be modified. (Attributes.*)
:param attribute_state: The state of the attribute to be set for the specified object.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
"""
return library.viSetAttribute(session, attribute, attribute_state) |
def close(self):
""" Stops the read thread, waits for it to exit cleanly, then closes the underlying serial port """
self.alive = False
self.rxThread.join()
self.serial.close() | def function[close, parameter[self]]:
constant[ Stops the read thread, waits for it to exit cleanly, then closes the underlying serial port ]
name[self].alive assign[=] constant[False]
call[name[self].rxThread.join, parameter[]]
call[name[self].serial.close, parameter[]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[alive] = keyword[False]
identifier[self] . identifier[rxThread] . identifier[join] ()
identifier[self] . identifier[serial] . identifier[close] () | def close(self):
""" Stops the read thread, waits for it to exit cleanly, then closes the underlying serial port """
self.alive = False
self.rxThread.join()
self.serial.close() |
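Hedged miniature of the stop-flag-then-join shutdown above, with a plain polling thread standing in for the serial read loop.

import threading
import time

class Reader:
    def __init__(self):
        self.alive = True
        self.rxThread = threading.Thread(target=self._run)
        self.rxThread.start()

    def _run(self):
        while self.alive:          # loop until close() clears the flag
            time.sleep(0.01)

    def close(self):
        self.alive = False         # signal the thread ...
        self.rxThread.join()       # ... then wait for it to exit cleanly

r = Reader()
r.close()
print(r.rxThread.is_alive())  # -> False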
def get_generated_project_files(self, tool):
""" Get generated project files, the content depends on a tool. Look at tool implementation """
exporter = ToolsSupported().get_tool(tool)
return exporter(self.generated_files[tool], self.settings).get_generated_project_files() | def function[get_generated_project_files, parameter[self, tool]]:
constant[ Get generated project files, the content depends on a tool. Look at tool implementation ]
variable[exporter] assign[=] call[call[name[ToolsSupported], parameter[]].get_tool, parameter[name[tool]]]
return[call[call[name[exporter], parameter[call[name[self].generated_files][name[tool]], name[self].settings]].get_generated_project_files, parameter[]]] | keyword[def] identifier[get_generated_project_files] ( identifier[self] , identifier[tool] ):
literal[string]
identifier[exporter] = identifier[ToolsSupported] (). identifier[get_tool] ( identifier[tool] )
keyword[return] identifier[exporter] ( identifier[self] . identifier[generated_files] [ identifier[tool] ], identifier[self] . identifier[settings] ). identifier[get_generated_project_files] () | def get_generated_project_files(self, tool):
""" Get generated project files, the content depends on a tool. Look at tool implementation """
exporter = ToolsSupported().get_tool(tool)
return exporter(self.generated_files[tool], self.settings).get_generated_project_files() |
def parse_input(args, kwargs=None, condition=True, no_parse=None):
'''
Parse out the args and kwargs from a list of input values. Optionally,
return the args and kwargs without passing them to condition_input().
Don't pull args with key=val apart if they have a newline in them.
'''
if no_parse is None:
no_parse = ()
if kwargs is None:
kwargs = {}
_args = []
_kwargs = {}
for arg in args:
if isinstance(arg, six.string_types):
arg_name, arg_value = parse_kwarg(arg)
if arg_name:
_kwargs[arg_name] = yamlify_arg(arg_value) \
if arg_name not in no_parse \
else arg_value
else:
_args.append(yamlify_arg(arg))
elif isinstance(arg, dict):
# Yes, we're popping this key off and adding it back if
# condition_input is called below, but this is the only way to
# gracefully handle both CLI and API input.
if arg.pop('__kwarg__', False) is True:
_kwargs.update(arg)
else:
_args.append(arg)
else:
_args.append(arg)
_kwargs.update(kwargs)
if condition:
return condition_input(_args, _kwargs)
return _args, _kwargs | def function[parse_input, parameter[args, kwargs, condition, no_parse]]:
constant[
Parse out the args and kwargs from a list of input values. Optionally,
return the args and kwargs without passing them to condition_input().
Don't pull args with key=val apart if they have a newline in them.
]
if compare[name[no_parse] is constant[None]] begin[:]
variable[no_parse] assign[=] tuple[[]]
if compare[name[kwargs] is constant[None]] begin[:]
variable[kwargs] assign[=] dictionary[[], []]
variable[_args] assign[=] list[[]]
variable[_kwargs] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[name[args]] begin[:]
if call[name[isinstance], parameter[name[arg], name[six].string_types]] begin[:]
<ast.Tuple object at 0x7da18c4cd540> assign[=] call[name[parse_kwarg], parameter[name[arg]]]
if name[arg_name] begin[:]
call[name[_kwargs]][name[arg_name]] assign[=] <ast.IfExp object at 0x7da18c4cfaf0>
call[name[_kwargs].update, parameter[name[kwargs]]]
if name[condition] begin[:]
return[call[name[condition_input], parameter[name[_args], name[_kwargs]]]]
return[tuple[[<ast.Name object at 0x7da18f720880>, <ast.Name object at 0x7da18f721f00>]]] | keyword[def] identifier[parse_input] ( identifier[args] , identifier[kwargs] = keyword[None] , identifier[condition] = keyword[True] , identifier[no_parse] = keyword[None] ):
literal[string]
keyword[if] identifier[no_parse] keyword[is] keyword[None] :
identifier[no_parse] =()
keyword[if] identifier[kwargs] keyword[is] keyword[None] :
identifier[kwargs] ={}
identifier[_args] =[]
identifier[_kwargs] ={}
keyword[for] identifier[arg] keyword[in] identifier[args] :
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[six] . identifier[string_types] ):
identifier[arg_name] , identifier[arg_value] = identifier[parse_kwarg] ( identifier[arg] )
keyword[if] identifier[arg_name] :
identifier[_kwargs] [ identifier[arg_name] ]= identifier[yamlify_arg] ( identifier[arg_value] ) keyword[if] identifier[arg_name] keyword[not] keyword[in] identifier[no_parse] keyword[else] identifier[arg_value]
keyword[else] :
identifier[_args] . identifier[append] ( identifier[yamlify_arg] ( identifier[arg] ))
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[dict] ):
keyword[if] identifier[arg] . identifier[pop] ( literal[string] , keyword[False] ) keyword[is] keyword[True] :
identifier[_kwargs] . identifier[update] ( identifier[arg] )
keyword[else] :
identifier[_args] . identifier[append] ( identifier[arg] )
keyword[else] :
identifier[_args] . identifier[append] ( identifier[arg] )
identifier[_kwargs] . identifier[update] ( identifier[kwargs] )
keyword[if] identifier[condition] :
keyword[return] identifier[condition_input] ( identifier[_args] , identifier[_kwargs] )
keyword[return] identifier[_args] , identifier[_kwargs] | def parse_input(args, kwargs=None, condition=True, no_parse=None):
"""
Parse out the args and kwargs from a list of input values. Optionally,
return the args and kwargs without passing them to condition_input().
Don't pull args with key=val apart if they have a newline in them.
"""
if no_parse is None:
no_parse = () # depends on [control=['if'], data=['no_parse']]
if kwargs is None:
kwargs = {} # depends on [control=['if'], data=['kwargs']]
_args = []
_kwargs = {}
for arg in args:
if isinstance(arg, six.string_types):
(arg_name, arg_value) = parse_kwarg(arg)
if arg_name:
_kwargs[arg_name] = yamlify_arg(arg_value) if arg_name not in no_parse else arg_value # depends on [control=['if'], data=[]]
else:
_args.append(yamlify_arg(arg)) # depends on [control=['if'], data=[]]
elif isinstance(arg, dict):
# Yes, we're popping this key off and adding it back if
# condition_input is called below, but this is the only way to
# gracefully handle both CLI and API input.
if arg.pop('__kwarg__', False) is True:
_kwargs.update(arg) # depends on [control=['if'], data=[]]
else:
_args.append(arg) # depends on [control=['if'], data=[]]
else:
_args.append(arg) # depends on [control=['for'], data=['arg']]
_kwargs.update(kwargs)
if condition:
return condition_input(_args, _kwargs) # depends on [control=['if'], data=[]]
return (_args, _kwargs) |
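Hedged stand-in for the arg/kwarg split above: parse_kwarg, yamlify_arg and condition_input are project helpers not shown here, so this sketch models only the "key=val goes to kwargs, everything else to args" rule with a plain partition.

def split_args(raw):
    args, kwargs = [], {}
    for item in raw:
        if isinstance(item, str) and "=" in item and "\n" not in item:
            key, _, value = item.partition("=")   # crude parse_kwarg stand-in
            kwargs[key] = value
        elif isinstance(item, dict) and item.pop("__kwarg__", False) is True:
            kwargs.update(item)
        else:
            args.append(item)
    return args, kwargs

print(split_args(["vim", "name=editor", {"__kwarg__": True, "refresh": True}]))
# -> (['vim'], {'name': 'editor', 'refresh': True})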
def increment(self, key, value=1):
"""
Increment the value of an item in the cache.
:param key: The cache key
:type key: str
:param value: The increment value
:type value: int
:rtype: int or bool
"""
raw = self._get_payload(key)
integer = int(raw['data']) + value
self.put(key, integer, int(raw['time']))
return integer | def function[increment, parameter[self, key, value]]:
constant[
Increment the value of an item in the cache.
:param key: The cache key
:type key: str
:param value: The increment value
:type value: int
:rtype: int or bool
]
variable[raw] assign[=] call[name[self]._get_payload, parameter[name[key]]]
variable[integer] assign[=] binary_operation[call[name[int], parameter[call[name[raw]][constant[data]]]] + name[value]]
call[name[self].put, parameter[name[key], name[integer], call[name[int], parameter[call[name[raw]][constant[time]]]]]]
return[name[integer]] | keyword[def] identifier[increment] ( identifier[self] , identifier[key] , identifier[value] = literal[int] ):
literal[string]
identifier[raw] = identifier[self] . identifier[_get_payload] ( identifier[key] )
identifier[integer] = identifier[int] ( identifier[raw] [ literal[string] ])+ identifier[value]
identifier[self] . identifier[put] ( identifier[key] , identifier[integer] , identifier[int] ( identifier[raw] [ literal[string] ]))
keyword[return] identifier[integer] | def increment(self, key, value=1):
"""
Increment the value of an item in the cache.
:param key: The cache key
:type key: str
:param value: The increment value
:type value: int
:rtype: int or bool
"""
raw = self._get_payload(key)
integer = int(raw['data']) + value
self.put(key, integer, int(raw['time']))
return integer |
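Hedged usage sketch for the read-modify-write increment above; DictCache is a hypothetical in-memory stand-in whose put/_get_payload shapes are inferred from the method body only.

class DictCache:
    def __init__(self):
        self._store = {}

    def put(self, key, value, minutes):
        self._store[key] = {"data": value, "time": minutes}

    def _get_payload(self, key):
        return self._store[key]

    def increment(self, key, value=1):
        raw = self._get_payload(key)
        integer = int(raw["data"]) + value
        self.put(key, integer, int(raw["time"]))   # keep the original TTL
        return integer

cache = DictCache()
cache.put("hits", 1, 10)
print(cache.increment("hits", 4))  # -> 5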
def set_reference(self, refobj, reference):
"""Connect the given reftrack node with the given refernce node
:param refobj: the reftrack node to update
:type refobj: str
:param reference: the reference node
:type reference: str
:returns: None
:rtype: None
:raises: None
"""
refnodeattr = "%s.referencenode" % refobj
if reference:
cmds.connectAttr("%s.message" % reference, refnodeattr, force=True)
ns = cmds.referenceQuery(reference, namespace=True)
cmds.setAttr("%s.namespace" % refobj, ns, type="string")
else:
conns = cmds.listConnections(refnodeattr, plugs=True)
if not conns:
return
for c in conns:
cmds.disconnectAttr(c, refnodeattr) | def function[set_reference, parameter[self, refobj, reference]]:
constant[Connect the given reftrack node with the given reference node
:param refobj: the reftrack node to update
:type refobj: str
:param reference: the reference node
:type reference: str
:returns: None
:rtype: None
:raises: None
]
variable[refnodeattr] assign[=] binary_operation[constant[%s.referencenode] <ast.Mod object at 0x7da2590d6920> name[refobj]]
if name[reference] begin[:]
call[name[cmds].connectAttr, parameter[binary_operation[constant[%s.message] <ast.Mod object at 0x7da2590d6920> name[reference]], name[refnodeattr]]]
variable[ns] assign[=] call[name[cmds].referenceQuery, parameter[name[reference]]]
call[name[cmds].setAttr, parameter[binary_operation[constant[%s.namespace] <ast.Mod object at 0x7da2590d6920> name[refobj]], name[ns]]] | keyword[def] identifier[set_reference] ( identifier[self] , identifier[refobj] , identifier[reference] ):
literal[string]
identifier[refnodeattr] = literal[string] % identifier[refobj]
keyword[if] identifier[reference] :
identifier[cmds] . identifier[connectAttr] ( literal[string] % identifier[reference] , identifier[refnodeattr] , identifier[force] = keyword[True] )
identifier[ns] = identifier[cmds] . identifier[referenceQuery] ( identifier[reference] , identifier[namespace] = keyword[True] )
identifier[cmds] . identifier[setAttr] ( literal[string] % identifier[refobj] , identifier[ns] , identifier[type] = literal[string] )
keyword[else] :
identifier[conns] = identifier[cmds] . identifier[listConnections] ( identifier[refnodeattr] , identifier[plugs] = keyword[True] )
keyword[if] keyword[not] identifier[conns] :
keyword[return]
keyword[for] identifier[c] keyword[in] identifier[conns] :
identifier[cmds] . identifier[disconnectAttr] ( identifier[c] , identifier[refnodeattr] ) | def set_reference(self, refobj, reference):
"""Connect the given reftrack node with the given refernce node
:param refobj: the reftrack node to update
:type refobj: str
:param reference: the reference node
:type reference: str
:returns: None
:rtype: None
:raises: None
"""
refnodeattr = '%s.referencenode' % refobj
if reference:
cmds.connectAttr('%s.message' % reference, refnodeattr, force=True)
ns = cmds.referenceQuery(reference, namespace=True)
cmds.setAttr('%s.namespace' % refobj, ns, type='string') # depends on [control=['if'], data=[]]
else:
conns = cmds.listConnections(refnodeattr, plugs=True)
if not conns:
return # depends on [control=['if'], data=[]]
for c in conns:
cmds.disconnectAttr(c, refnodeattr) # depends on [control=['for'], data=['c']] |
def _numpy_to_python(obj):
""" Convert an nested dict/list/tuple that might contain numpy objects
to their python equivalents. Return converted object.
"""
if isinstance(obj, dict):
return {k: _numpy_to_python(v) for k, v in obj.items()}
elif isinstance(obj, (list, tuple, np.ndarray)):
return [_numpy_to_python(x) for x in obj]
elif isinstance(obj, FormattedFeatureName):
return obj.value
elif isinstance(obj, _numpy_string_types):
return six.text_type(obj)
elif hasattr(obj, 'dtype') and np.isscalar(obj):
if np.issubdtype(obj, np.floating):
return float(obj)
elif np.issubdtype(obj, np.integer):
return int(obj)
elif np.issubdtype(obj, np.bool_):
return bool(obj)
return obj | def function[_numpy_to_python, parameter[obj]]:
constant[ Convert an nested dict/list/tuple that might contain numpy objects
to their python equivalents. Return converted object.
]
if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:]
return[<ast.DictComp object at 0x7da18f09dc00>]
return[name[obj]] | keyword[def] identifier[_numpy_to_python] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
keyword[return] { identifier[k] : identifier[_numpy_to_python] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[obj] . identifier[items] ()}
keyword[elif] identifier[isinstance] ( identifier[obj] ,( identifier[list] , identifier[tuple] , identifier[np] . identifier[ndarray] )):
keyword[return] [ identifier[_numpy_to_python] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[obj] ]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[FormattedFeatureName] ):
keyword[return] identifier[obj] . identifier[value]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[_numpy_string_types] ):
keyword[return] identifier[six] . identifier[text_type] ( identifier[obj] )
keyword[elif] identifier[hasattr] ( identifier[obj] , literal[string] ) keyword[and] identifier[np] . identifier[isscalar] ( identifier[obj] ):
keyword[if] identifier[np] . identifier[issubdtype] ( identifier[obj] , identifier[np] . identifier[floating] ):
keyword[return] identifier[float] ( identifier[obj] )
keyword[elif] identifier[np] . identifier[issubdtype] ( identifier[obj] , identifier[np] . identifier[integer] ):
keyword[return] identifier[int] ( identifier[obj] )
keyword[elif] identifier[np] . identifier[issubdtype] ( identifier[obj] , identifier[np] . identifier[bool_] ):
keyword[return] identifier[bool] ( identifier[obj] )
keyword[return] identifier[obj] | def _numpy_to_python(obj):
""" Convert an nested dict/list/tuple that might contain numpy objects
to their python equivalents. Return converted object.
"""
if isinstance(obj, dict):
return {k: _numpy_to_python(v) for (k, v) in obj.items()} # depends on [control=['if'], data=[]]
elif isinstance(obj, (list, tuple, np.ndarray)):
return [_numpy_to_python(x) for x in obj] # depends on [control=['if'], data=[]]
elif isinstance(obj, FormattedFeatureName):
return obj.value # depends on [control=['if'], data=[]]
elif isinstance(obj, _numpy_string_types):
return six.text_type(obj) # depends on [control=['if'], data=[]]
elif hasattr(obj, 'dtype') and np.isscalar(obj):
if np.issubdtype(obj, np.floating):
return float(obj) # depends on [control=['if'], data=[]]
elif np.issubdtype(obj, np.integer):
return int(obj) # depends on [control=['if'], data=[]]
elif np.issubdtype(obj, np.bool_):
return bool(obj) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return obj |
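Hedged, trimmed re-run of the conversion above: it assumes only numpy and drops the eli5-internal FormattedFeatureName/_numpy_string_types branches.

import numpy as np

def numpy_to_python(obj):
    # Recursively replace numpy containers/scalars with builtin types.
    if isinstance(obj, dict):
        return {k: numpy_to_python(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple, np.ndarray)):
        return [numpy_to_python(x) for x in obj]
    if hasattr(obj, "dtype") and np.isscalar(obj):
        if np.issubdtype(obj.dtype, np.floating):
            return float(obj)
        if np.issubdtype(obj.dtype, np.integer):
            return int(obj)
        if np.issubdtype(obj.dtype, np.bool_):
            return bool(obj)
    return obj

print(numpy_to_python({"w": np.float32(0.5), "idx": np.arange(3)}))
# -> {'w': 0.5, 'idx': [0, 1, 2]}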
def _add_imported_module(self, node, importedmodname):
"""notify an imported module, used to analyze dependencies"""
module_file = node.root().file
context_name = node.root().name
base = os.path.splitext(os.path.basename(module_file))[0]
try:
importedmodname = astroid.modutils.get_module_part(
importedmodname, module_file
)
except ImportError:
pass
if context_name == importedmodname:
self.add_message("import-self", node=node)
elif not astroid.modutils.is_standard_module(importedmodname):
# if this is not a package __init__ module
if base != "__init__" and context_name not in self._module_pkg:
# record the module's parent, or the module itself if this is
# a top level module, as the package it belongs to
self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]
# handle dependencies
importedmodnames = self.stats["dependencies"].setdefault(
importedmodname, set()
)
if context_name not in importedmodnames:
importedmodnames.add(context_name)
# update import graph
self.import_graph[context_name].add(importedmodname)
if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
self._excluded_edges[context_name].add(importedmodname) | def function[_add_imported_module, parameter[self, node, importedmodname]]:
constant[notify an imported module, used to analyze dependencies]
variable[module_file] assign[=] call[name[node].root, parameter[]].file
variable[context_name] assign[=] call[name[node].root, parameter[]].name
variable[base] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[module_file]]]]]][constant[0]]
<ast.Try object at 0x7da1b0383c70>
if compare[name[context_name] equal[==] name[importedmodname]] begin[:]
call[name[self].add_message, parameter[constant[import-self]]] | keyword[def] identifier[_add_imported_module] ( identifier[self] , identifier[node] , identifier[importedmodname] ):
literal[string]
identifier[module_file] = identifier[node] . identifier[root] (). identifier[file]
identifier[context_name] = identifier[node] . identifier[root] (). identifier[name]
identifier[base] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[module_file] ))[ literal[int] ]
keyword[try] :
identifier[importedmodname] = identifier[astroid] . identifier[modutils] . identifier[get_module_part] (
identifier[importedmodname] , identifier[module_file]
)
keyword[except] identifier[ImportError] :
keyword[pass]
keyword[if] identifier[context_name] == identifier[importedmodname] :
identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] )
keyword[elif] keyword[not] identifier[astroid] . identifier[modutils] . identifier[is_standard_module] ( identifier[importedmodname] ):
keyword[if] identifier[base] != literal[string] keyword[and] identifier[context_name] keyword[not] keyword[in] identifier[self] . identifier[_module_pkg] :
identifier[self] . identifier[_module_pkg] [ identifier[context_name] ]= identifier[context_name] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]
identifier[importedmodnames] = identifier[self] . identifier[stats] [ literal[string] ]. identifier[setdefault] (
identifier[importedmodname] , identifier[set] ()
)
keyword[if] identifier[context_name] keyword[not] keyword[in] identifier[importedmodnames] :
identifier[importedmodnames] . identifier[add] ( identifier[context_name] )
identifier[self] . identifier[import_graph] [ identifier[context_name] ]. identifier[add] ( identifier[importedmodname] )
keyword[if] keyword[not] identifier[self] . identifier[linter] . identifier[is_message_enabled] ( literal[string] , identifier[line] = identifier[node] . identifier[lineno] ):
identifier[self] . identifier[_excluded_edges] [ identifier[context_name] ]. identifier[add] ( identifier[importedmodname] ) | def _add_imported_module(self, node, importedmodname):
"""notify an imported module, used to analyze dependencies"""
module_file = node.root().file
context_name = node.root().name
base = os.path.splitext(os.path.basename(module_file))[0]
try:
importedmodname = astroid.modutils.get_module_part(importedmodname, module_file) # depends on [control=['try'], data=[]]
except ImportError:
pass # depends on [control=['except'], data=[]]
if context_name == importedmodname:
self.add_message('import-self', node=node) # depends on [control=['if'], data=[]]
elif not astroid.modutils.is_standard_module(importedmodname):
# if this is not a package __init__ module
if base != '__init__' and context_name not in self._module_pkg:
# record the module's parent, or the module itself if this is
# a top level module, as the package it belongs to
self._module_pkg[context_name] = context_name.rsplit('.', 1)[0] # depends on [control=['if'], data=[]]
# handle dependencies
importedmodnames = self.stats['dependencies'].setdefault(importedmodname, set())
if context_name not in importedmodnames:
importedmodnames.add(context_name) # depends on [control=['if'], data=['context_name', 'importedmodnames']]
# update import graph
self.import_graph[context_name].add(importedmodname)
if not self.linter.is_message_enabled('cyclic-import', line=node.lineno):
self._excluded_edges[context_name].add(importedmodname) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
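Hedged miniature of the dependency bookkeeping above: stats['dependencies'] maps each imported module to the set of modules importing it; the module names below are invented.

from collections import defaultdict

dependencies = defaultdict(set)
edges = [("pkg.a", "os"), ("pkg.b", "pkg.a"), ("pkg.a", "pkg.b")]
for importer, imported in edges:
    dependencies[imported].add(importer)   # same shape as stats['dependencies']

print(sorted(dependencies["pkg.a"]))  # -> ['pkg.b']
print(sorted(dependencies["pkg.b"]))  # -> ['pkg.a']  (a cycle pylint would flag)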
def add_if_none_match(self):
"""
Add the if-none-match option to the request.
"""
option = Option()
option.number = defines.OptionRegistry.IF_NONE_MATCH.number
option.value = None
self.add_option(option) | def function[add_if_none_match, parameter[self]]:
constant[
Add the if-none-match option to the request.
]
variable[option] assign[=] call[name[Option], parameter[]]
name[option].number assign[=] name[defines].OptionRegistry.IF_NONE_MATCH.number
name[option].value assign[=] constant[None]
call[name[self].add_option, parameter[name[option]]] | keyword[def] identifier[add_if_none_match] ( identifier[self] ):
literal[string]
identifier[option] = identifier[Option] ()
identifier[option] . identifier[number] = identifier[defines] . identifier[OptionRegistry] . identifier[IF_NONE_MATCH] . identifier[number]
identifier[option] . identifier[value] = keyword[None]
identifier[self] . identifier[add_option] ( identifier[option] ) | def add_if_none_match(self):
"""
Add the if-none-match option to the request.
"""
option = Option()
option.number = defines.OptionRegistry.IF_NONE_MATCH.number
option.value = None
self.add_option(option) |
def update_hacluster_vip(service, relation_data):
""" Configure VIP resources based on provided configuration
@param service: Name of the service being configured
@param relation_data: Pointer to dictionary of relation data.
"""
cluster_config = get_hacluster_config()
vip_group = []
vips_to_delete = []
for vip in cluster_config['vip'].split():
if is_ipv6(vip):
res_vip = 'ocf:heartbeat:IPv6addr'
vip_params = 'ipv6addr'
else:
res_vip = 'ocf:heartbeat:IPaddr2'
vip_params = 'ip'
iface, netmask, fallback = get_vip_settings(vip)
vip_monitoring = 'op monitor depth="0" timeout="20s" interval="10s"'
if iface is not None:
# NOTE(jamespage): Delete old VIP resources
# Old style naming encoding iface in name
# does not work well in environments where
# interface/subnet wiring is not consistent
vip_key = 'res_{}_{}_vip'.format(service, iface)
if vip_key in vips_to_delete:
vip_key = '{}_{}'.format(vip_key, vip_params)
vips_to_delete.append(vip_key)
vip_key = 'res_{}_{}_vip'.format(
service,
hashlib.sha1(vip.encode('UTF-8')).hexdigest()[:7])
relation_data['resources'][vip_key] = res_vip
# NOTE(jamespage):
# Use option provided vip params if these were used
# instead of auto-detected values
if fallback:
relation_data['resource_params'][vip_key] = (
'params {ip}="{vip}" cidr_netmask="{netmask}" '
'nic="{iface}" {vip_monitoring}'.format(
ip=vip_params,
vip=vip,
iface=iface,
netmask=netmask,
vip_monitoring=vip_monitoring))
else:
# NOTE(jamespage):
# let heartbeat figure out which interface and
# netmask to configure, which works nicely
# when network interface naming is not
# consistent across units.
relation_data['resource_params'][vip_key] = (
'params {ip}="{vip}" {vip_monitoring}'.format(
ip=vip_params,
vip=vip,
vip_monitoring=vip_monitoring))
vip_group.append(vip_key)
if vips_to_delete:
try:
relation_data['delete_resources'].extend(vips_to_delete)
except KeyError:
relation_data['delete_resources'] = vips_to_delete
if len(vip_group) >= 1:
key = VIP_GROUP_NAME.format(service=service)
try:
relation_data['groups'][key] = ' '.join(vip_group)
except KeyError:
relation_data['groups'] = {
key: ' '.join(vip_group)
} | def function[update_hacluster_vip, parameter[service, relation_data]]:
constant[ Configure VIP resources based on provided configuration
@param service: Name of the service being configured
@param relation_data: Pointer to dictionary of relation data.
]
variable[cluster_config] assign[=] call[name[get_hacluster_config], parameter[]]
variable[vip_group] assign[=] list[[]]
variable[vips_to_delete] assign[=] list[[]]
for taget[name[vip]] in starred[call[call[name[cluster_config]][constant[vip]].split, parameter[]]] begin[:]
if call[name[is_ipv6], parameter[name[vip]]] begin[:]
variable[res_vip] assign[=] constant[ocf:heartbeat:IPv6addr]
variable[vip_params] assign[=] constant[ipv6addr]
<ast.Tuple object at 0x7da1b124af20> assign[=] call[name[get_vip_settings], parameter[name[vip]]]
variable[vip_monitoring] assign[=] constant[op monitor depth="0" timeout="20s" interval="10s"]
if compare[name[iface] is_not constant[None]] begin[:]
variable[vip_key] assign[=] call[constant[res_{}_{}_vip].format, parameter[name[service], name[iface]]]
if compare[name[vip_key] in name[vips_to_delete]] begin[:]
variable[vip_key] assign[=] call[constant[{}_{}].format, parameter[name[vip_key], name[vip_params]]]
call[name[vips_to_delete].append, parameter[name[vip_key]]]
variable[vip_key] assign[=] call[constant[res_{}_{}_vip].format, parameter[name[service], call[call[call[name[hashlib].sha1, parameter[call[name[vip].encode, parameter[constant[UTF-8]]]]].hexdigest, parameter[]]][<ast.Slice object at 0x7da1b124b220>]]]
call[call[name[relation_data]][constant[resources]]][name[vip_key]] assign[=] name[res_vip]
if name[fallback] begin[:]
call[call[name[relation_data]][constant[resource_params]]][name[vip_key]] assign[=] call[constant[params {ip}="{vip}" cidr_netmask="{netmask}" nic="{iface}" {vip_monitoring}].format, parameter[]]
call[name[vip_group].append, parameter[name[vip_key]]]
if name[vips_to_delete] begin[:]
<ast.Try object at 0x7da1b12183a0>
if compare[call[name[len], parameter[name[vip_group]]] greater_or_equal[>=] constant[1]] begin[:]
variable[key] assign[=] call[name[VIP_GROUP_NAME].format, parameter[]]
<ast.Try object at 0x7da1b121b670> | keyword[def] identifier[update_hacluster_vip] ( identifier[service] , identifier[relation_data] ):
literal[string]
identifier[cluster_config] = identifier[get_hacluster_config] ()
identifier[vip_group] =[]
identifier[vips_to_delete] =[]
keyword[for] identifier[vip] keyword[in] identifier[cluster_config] [ literal[string] ]. identifier[split] ():
keyword[if] identifier[is_ipv6] ( identifier[vip] ):
identifier[res_vip] = literal[string]
identifier[vip_params] = literal[string]
keyword[else] :
identifier[res_vip] = literal[string]
identifier[vip_params] = literal[string]
identifier[iface] , identifier[netmask] , identifier[fallback] = identifier[get_vip_settings] ( identifier[vip] )
identifier[vip_monitoring] = literal[string]
keyword[if] identifier[iface] keyword[is] keyword[not] keyword[None] :
identifier[vip_key] = literal[string] . identifier[format] ( identifier[service] , identifier[iface] )
keyword[if] identifier[vip_key] keyword[in] identifier[vips_to_delete] :
identifier[vip_key] = literal[string] . identifier[format] ( identifier[vip_key] , identifier[vip_params] )
identifier[vips_to_delete] . identifier[append] ( identifier[vip_key] )
identifier[vip_key] = literal[string] . identifier[format] (
identifier[service] ,
identifier[hashlib] . identifier[sha1] ( identifier[vip] . identifier[encode] ( literal[string] )). identifier[hexdigest] ()[: literal[int] ])
identifier[relation_data] [ literal[string] ][ identifier[vip_key] ]= identifier[res_vip]
keyword[if] identifier[fallback] :
identifier[relation_data] [ literal[string] ][ identifier[vip_key] ]=(
literal[string]
literal[string] . identifier[format] (
identifier[ip] = identifier[vip_params] ,
identifier[vip] = identifier[vip] ,
identifier[iface] = identifier[iface] ,
identifier[netmask] = identifier[netmask] ,
identifier[vip_monitoring] = identifier[vip_monitoring] ))
keyword[else] :
identifier[relation_data] [ literal[string] ][ identifier[vip_key] ]=(
literal[string] . identifier[format] (
identifier[ip] = identifier[vip_params] ,
identifier[vip] = identifier[vip] ,
identifier[vip_monitoring] = identifier[vip_monitoring] ))
identifier[vip_group] . identifier[append] ( identifier[vip_key] )
keyword[if] identifier[vips_to_delete] :
keyword[try] :
identifier[relation_data] [ literal[string] ]. identifier[extend] ( identifier[vips_to_delete] )
keyword[except] identifier[KeyError] :
identifier[relation_data] [ literal[string] ]= identifier[vips_to_delete]
keyword[if] identifier[len] ( identifier[vip_group] )>= literal[int] :
identifier[key] = identifier[VIP_GROUP_NAME] . identifier[format] ( identifier[service] = identifier[service] )
keyword[try] :
identifier[relation_data] [ literal[string] ][ identifier[key] ]= literal[string] . identifier[join] ( identifier[vip_group] )
keyword[except] identifier[KeyError] :
identifier[relation_data] [ literal[string] ]={
identifier[key] : literal[string] . identifier[join] ( identifier[vip_group] )
} | def update_hacluster_vip(service, relation_data):
""" Configure VIP resources based on provided configuration
@param service: Name of the service being configured
@param relation_data: Pointer to dictionary of relation data.
"""
cluster_config = get_hacluster_config()
vip_group = []
vips_to_delete = []
for vip in cluster_config['vip'].split():
if is_ipv6(vip):
res_vip = 'ocf:heartbeat:IPv6addr'
vip_params = 'ipv6addr' # depends on [control=['if'], data=[]]
else:
res_vip = 'ocf:heartbeat:IPaddr2'
vip_params = 'ip'
(iface, netmask, fallback) = get_vip_settings(vip)
vip_monitoring = 'op monitor depth="0" timeout="20s" interval="10s"'
if iface is not None:
# NOTE(jamespage): Delete old VIP resources
# Old style naming encoding iface in name
# does not work well in environments where
# interface/subnet wiring is not consistent
vip_key = 'res_{}_{}_vip'.format(service, iface)
if vip_key in vips_to_delete:
vip_key = '{}_{}'.format(vip_key, vip_params) # depends on [control=['if'], data=['vip_key']]
vips_to_delete.append(vip_key)
vip_key = 'res_{}_{}_vip'.format(service, hashlib.sha1(vip.encode('UTF-8')).hexdigest()[:7])
relation_data['resources'][vip_key] = res_vip
# NOTE(jamespage):
# Use option provided vip params if these were used
# instead of auto-detected values
if fallback:
relation_data['resource_params'][vip_key] = 'params {ip}="{vip}" cidr_netmask="{netmask}" nic="{iface}" {vip_monitoring}'.format(ip=vip_params, vip=vip, iface=iface, netmask=netmask, vip_monitoring=vip_monitoring) # depends on [control=['if'], data=[]]
else:
# NOTE(jamespage):
# let heartbeat figure out which interface and
# netmask to configure, which works nicely
# when network interface naming is not
# consistent across units.
relation_data['resource_params'][vip_key] = 'params {ip}="{vip}" {vip_monitoring}'.format(ip=vip_params, vip=vip, vip_monitoring=vip_monitoring)
vip_group.append(vip_key) # depends on [control=['if'], data=['iface']] # depends on [control=['for'], data=['vip']]
if vips_to_delete:
try:
relation_data['delete_resources'].extend(vips_to_delete) # depends on [control=['try'], data=[]]
except KeyError:
relation_data['delete_resources'] = vips_to_delete # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if len(vip_group) >= 1:
key = VIP_GROUP_NAME.format(service=service)
try:
relation_data['groups'][key] = ' '.join(vip_group) # depends on [control=['try'], data=[]]
except KeyError:
relation_data['groups'] = {key: ' '.join(vip_group)} # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
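Hedged illustration of the hash-based resource naming above: shortening the VIP to 7 hex chars of its SHA-1 keeps the key stable even when interface/netmask detection varies between units. The inputs are made up.

import hashlib

def vip_resource_key(service, vip):
    digest = hashlib.sha1(vip.encode("UTF-8")).hexdigest()[:7]
    return "res_{}_{}_vip".format(service, digest)

print(vip_resource_key("keystone", "10.0.0.254"))
# -> res_keystone_<7 hex chars>_vip, identical on every run for the same VIP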
def shift_click(self, locator, params=None, timeout=None):
"""
Shift-click web element.
:param locator: locator tuple or WebElement instance
:param params: (optional) locator parameters
:param timeout: (optional) time to wait for element
:return: None
"""
self._click(locator, params, timeout, Keys.SHIFT) | def function[shift_click, parameter[self, locator, params, timeout]]:
constant[
Shift-click web element.
:param locator: locator tuple or WebElement instance
:param params: (optional) locator parameters
:param timeout: (optional) time to wait for element
:return: None
]
call[name[self]._click, parameter[name[locator], name[params], name[timeout], name[Keys].SHIFT]] | keyword[def] identifier[shift_click] ( identifier[self] , identifier[locator] , identifier[params] = keyword[None] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[self] . identifier[_click] ( identifier[locator] , identifier[params] , identifier[timeout] , identifier[Keys] . identifier[SHIFT] ) | def shift_click(self, locator, params=None, timeout=None):
"""
Shift-click web element.
:param locator: locator tuple or WebElement instance
:param params: (optional) locator parameters
:param timeout: (optional) time to wait for element
:return: None
"""
self._click(locator, params, timeout, Keys.SHIFT) |
def concat_t_vars_np(self, vars_idx=None):
"""
Concatenate `self.np_t` with `self.np_vars` and return a single matrix.
The first column corresponds to time, and the rest of the matrix is the variables.
Returns
-------
np.array : concatenated matrix
"""
selected_np_vars = self.np_vars
if vars_idx is not None:
selected_np_vars = self.np_vars[:, vars_idx]
return np.concatenate([self.np_t[:self.np_nrows].reshape((-1, 1)),
selected_np_vars[:self.np_nrows, :]], axis=1) | def function[concat_t_vars_np, parameter[self, vars_idx]]:
constant[
Concatenate `self.np_t` with `self.np_vars` and return a single matrix.
The first column corresponds to time, and the rest of the matrix is the variables.
Returns
-------
np.array : concatenated matrix
]
variable[selected_np_vars] assign[=] name[self].np_vars
if compare[name[vars_idx] is_not constant[None]] begin[:]
variable[selected_np_vars] assign[=] call[name[self].np_vars][tuple[[<ast.Slice object at 0x7da20cabd870>, <ast.Name object at 0x7da20cabc820>]]]
return[call[name[np].concatenate, parameter[list[[<ast.Call object at 0x7da20cabdc30>, <ast.Subscript object at 0x7da18eb56560>]]]]] | keyword[def] identifier[concat_t_vars_np] ( identifier[self] , identifier[vars_idx] = keyword[None] ):
literal[string]
identifier[selected_np_vars] = identifier[self] . identifier[np_vars]
keyword[if] identifier[vars_idx] keyword[is] keyword[not] keyword[None] :
identifier[selected_np_vars] = identifier[self] . identifier[np_vars] [:, identifier[vars_idx] ]
keyword[return] identifier[np] . identifier[concatenate] ([ identifier[self] . identifier[np_t] [: identifier[self] . identifier[np_nrows] ]. identifier[reshape] ((- literal[int] , literal[int] )),
identifier[selected_np_vars] [: identifier[self] . identifier[np_nrows] ,:]], identifier[axis] = literal[int] ) | def concat_t_vars_np(self, vars_idx=None):
"""
Concatenate `self.np_t` with `self.np_vars` and return a single matrix.
The first column corresponds to time, and the rest of the matrix is the variables.
Returns
-------
np.array : concatenated matrix
"""
selected_np_vars = self.np_vars
if vars_idx is not None:
selected_np_vars = self.np_vars[:, vars_idx] # depends on [control=['if'], data=['vars_idx']]
return np.concatenate([self.np_t[:self.np_nrows].reshape((-1, 1)), selected_np_vars[:self.np_nrows, :]], axis=1) |
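Hedged standalone rerun of the concatenation above, with made-up arrays in place of the object's np_t/np_vars/np_nrows attributes.

import numpy as np

np_t = np.array([0.0, 0.1, 0.2, 0.0])    # preallocated; only 3 rows valid
np_vars = np.arange(8.0).reshape(4, 2)
np_nrows = 3

out = np.concatenate([np_t[:np_nrows].reshape((-1, 1)),
                      np_vars[:np_nrows, :]], axis=1)
print(out.shape)  # -> (3, 3): column 0 is time, the rest are variables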
def add_user(self, team, params={}, **options):
"""The user making this call must be a member of the team in order to add others.
The user to add must exist in the same organization as the team in order to be added.
The user to add can be referenced by their globally unique user ID or their email address.
Returns the full user record for the added user.
Parameters
----------
team : {Id} Globally unique identifier for the team.
[data] : {Object} Data for the request
- user : {String} An identifier for the user. Can be one of an email address,
the globally unique identifier for the user, or the keyword `me`
to indicate the current user making the request.
"""
path = "/teams/%s/addUser" % (team)
return self.client.post(path, params, **options) | def function[add_user, parameter[self, team, params]]:
constant[The user making this call must be a member of the team in order to add others.
The user to add must exist in the same organization as the team in order to be added.
The user to add can be referenced by their globally unique user ID or their email address.
Returns the full user record for the added user.
Parameters
----------
team : {Id} Globally unique identifier for the team.
[data] : {Object} Data for the request
- user : {String} An identifier for the user. Can be one of an email address,
the globally unique identifier for the user, or the keyword `me`
to indicate the current user making the request.
]
variable[path] assign[=] binary_operation[constant[/teams/%s/addUser] <ast.Mod object at 0x7da2590d6920> name[team]]
return[call[name[self].client.post, parameter[name[path], name[params]]]] | keyword[def] identifier[add_user] ( identifier[self] , identifier[team] , identifier[params] ={},** identifier[options] ):
literal[string]
identifier[path] = literal[string] %( identifier[team] )
keyword[return] identifier[self] . identifier[client] . identifier[post] ( identifier[path] , identifier[params] ,** identifier[options] ) | def add_user(self, team, params={}, **options):
"""The user making this call must be a member of the team in order to add others.
The user to add must exist in the same organization as the team in order to be added.
The user to add can be referenced by their globally unique user ID or their email address.
Returns the full user record for the added user.
Parameters
----------
team : {Id} Globally unique identifier for the team.
[data] : {Object} Data for the request
- user : {String} An identifier for the user. Can be one of an email address,
the globally unique identifier for the user, or the keyword `me`
to indicate the current user making the request.
"""
path = '/teams/%s/addUser' % team
return self.client.post(path, params, **options) |
def update_house(self, complex: str, id: str, **kwargs):
"""
Update the existing house
"""
self.check_house(complex, id)
self.put('developers/{developer}/complexes/{complex}/houses/{id}'.format(
developer=self.developer,
complex=complex,
id=id,
), data=kwargs) | def function[update_house, parameter[self, complex, id]]:
constant[
Update the existing house
]
call[name[self].check_house, parameter[name[complex], name[id]]]
call[name[self].put, parameter[call[constant[developers/{developer}/complexes/{complex}/houses/{id}].format, parameter[]]]] | keyword[def] identifier[update_house] ( identifier[self] , identifier[complex] : identifier[str] , identifier[id] : identifier[str] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[check_house] ( identifier[complex] , identifier[id] )
identifier[self] . identifier[put] ( literal[string] . identifier[format] (
identifier[developer] = identifier[self] . identifier[developer] ,
identifier[complex] = identifier[complex] ,
identifier[id] = identifier[id] ,
), identifier[data] = identifier[kwargs] ) | def update_house(self, complex: str, id: str, **kwargs):
"""
Update the existing house
"""
self.check_house(complex, id)
self.put('developers/{developer}/complexes/{complex}/houses/{id}'.format(developer=self.developer, complex=complex, id=id), data=kwargs) |
def next_frame_header(socket):
"""
Returns the stream and size of the next frame of data waiting to be read
from socket, according to the protocol defined here:
https://docs.docker.com/engine/api/v1.24/#attach-to-a-container
"""
try:
data = read_exactly(socket, 8)
except SocketError:
return (-1, -1)
stream, actual = struct.unpack('>BxxxL', data)
return (stream, actual) | def function[next_frame_header, parameter[socket]]:
constant[
Returns the stream and size of the next frame of data waiting to be read
from socket, according to the protocol defined here:
https://docs.docker.com/engine/api/v1.24/#attach-to-a-container
]
<ast.Try object at 0x7da18ede7040>
<ast.Tuple object at 0x7da18ede5d80> assign[=] call[name[struct].unpack, parameter[constant[>BxxxL], name[data]]]
return[tuple[[<ast.Name object at 0x7da18ede4d00>, <ast.Name object at 0x7da18ede59f0>]]] | keyword[def] identifier[next_frame_header] ( identifier[socket] ):
literal[string]
keyword[try] :
identifier[data] = identifier[read_exactly] ( identifier[socket] , literal[int] )
keyword[except] identifier[SocketError] :
keyword[return] (- literal[int] ,- literal[int] )
identifier[stream] , identifier[actual] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[data] )
keyword[return] ( identifier[stream] , identifier[actual] ) | def next_frame_header(socket):
"""
Returns the stream and size of the next frame of data waiting to be read
from socket, according to the protocol defined here:
https://docs.docker.com/engine/api/v1.24/#attach-to-a-container
"""
try:
data = read_exactly(socket, 8) # depends on [control=['try'], data=[]]
except SocketError:
return (-1, -1) # depends on [control=['except'], data=[]]
(stream, actual) = struct.unpack('>BxxxL', data)
return (stream, actual) |
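Hedged demo of the '>BxxxL' header decoded above: one byte of stream id, three pad bytes, then a big-endian uint32 payload size. The packed bytes are fabricated, and read_exactly (the project's socket helper) is not reproduced.

import struct

header = struct.pack(">BxxxL", 1, 1024)   # stream 1 (stdout), 1024-byte frame
stream, size = struct.unpack(">BxxxL", header)
print(stream, size)  # -> 1 1024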
def add_router_interface(self, context, router_info):
"""Adds an interface to a router created on Arista HW router.
This deals with both IPv6 and IPv4 configurations.
"""
if router_info:
self._select_dicts(router_info['ip_version'])
cidr = router_info['cidr']
subnet_mask = cidr.split('/')[1]
router_name = self._arista_router_name(router_info['id'],
router_info['name'])
if self._mlag_configured:
# For MLAG, we send a specific IP address as opposed to cidr
# For now, we are using x.x.x.253 and x.x.x.254 as virtual IP
mlag_peer_failed = False
for i, server in enumerate(self._servers):
# Get appropriate virtual IP address for this router
router_ip = self._get_router_ip(cidr, i,
router_info['ip_version'])
try:
self.add_interface_to_router(router_info['seg_id'],
router_name,
router_info['gip'],
router_ip, subnet_mask,
server)
mlag_peer_failed = False
except Exception:
if not mlag_peer_failed:
mlag_peer_failed = True
else:
msg = (_('Failed to add interface to router '
'%s on EOS') % router_name)
LOG.exception(msg)
raise arista_exc.AristaServicePluginRpcError(
msg=msg)
else:
for s in self._servers:
self.add_interface_to_router(router_info['seg_id'],
router_name,
router_info['gip'],
None, subnet_mask, s) | def function[add_router_interface, parameter[self, context, router_info]]:
constant[Adds an interface to a router created on Arista HW router.
This deals with both IPv6 and IPv4 configurations.
]
if name[router_info] begin[:]
call[name[self]._select_dicts, parameter[call[name[router_info]][constant[ip_version]]]]
variable[cidr] assign[=] call[name[router_info]][constant[cidr]]
variable[subnet_mask] assign[=] call[call[name[cidr].split, parameter[constant[/]]]][constant[1]]
variable[router_name] assign[=] call[name[self]._arista_router_name, parameter[call[name[router_info]][constant[id]], call[name[router_info]][constant[name]]]]
if name[self]._mlag_configured begin[:]
variable[mlag_peer_failed] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b1964bb0>, <ast.Name object at 0x7da1b1964b20>]]] in starred[call[name[enumerate], parameter[name[self]._servers]]] begin[:]
variable[router_ip] assign[=] call[name[self]._get_router_ip, parameter[name[cidr], name[i], call[name[router_info]][constant[ip_version]]]]
<ast.Try object at 0x7da1b19663b0> | keyword[def] identifier[add_router_interface] ( identifier[self] , identifier[context] , identifier[router_info] ):
literal[string]
keyword[if] identifier[router_info] :
identifier[self] . identifier[_select_dicts] ( identifier[router_info] [ literal[string] ])
identifier[cidr] = identifier[router_info] [ literal[string] ]
identifier[subnet_mask] = identifier[cidr] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[router_name] = identifier[self] . identifier[_arista_router_name] ( identifier[router_info] [ literal[string] ],
identifier[router_info] [ literal[string] ])
keyword[if] identifier[self] . identifier[_mlag_configured] :
identifier[mlag_peer_failed] = keyword[False]
keyword[for] identifier[i] , identifier[server] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_servers] ):
identifier[router_ip] = identifier[self] . identifier[_get_router_ip] ( identifier[cidr] , identifier[i] ,
identifier[router_info] [ literal[string] ])
keyword[try] :
identifier[self] . identifier[add_interface_to_router] ( identifier[router_info] [ literal[string] ],
identifier[router_name] ,
identifier[router_info] [ literal[string] ],
identifier[router_ip] , identifier[subnet_mask] ,
identifier[server] )
identifier[mlag_peer_failed] = keyword[False]
keyword[except] identifier[Exception] :
keyword[if] keyword[not] identifier[mlag_peer_failed] :
identifier[mlag_peer_failed] = keyword[True]
keyword[else] :
identifier[msg] =( identifier[_] ( literal[string]
literal[string] )% identifier[router_name] )
identifier[LOG] . identifier[exception] ( identifier[msg] )
keyword[raise] identifier[arista_exc] . identifier[AristaServicePluginRpcError] (
identifier[msg] = identifier[msg] )
keyword[else] :
keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_servers] :
identifier[self] . identifier[add_interface_to_router] ( identifier[router_info] [ literal[string] ],
identifier[router_name] ,
identifier[router_info] [ literal[string] ],
keyword[None] , identifier[subnet_mask] , identifier[s] ) | def add_router_interface(self, context, router_info):
"""Adds an interface to a router created on Arista HW router.
This deals with both IPv6 and IPv4 configurations.
"""
if router_info:
self._select_dicts(router_info['ip_version'])
cidr = router_info['cidr']
subnet_mask = cidr.split('/')[1]
router_name = self._arista_router_name(router_info['id'], router_info['name'])
if self._mlag_configured:
# For MLAG, we send a specific IP address as opposed to cidr
# For now, we are using x.x.x.253 and x.x.x.254 as virtual IP
mlag_peer_failed = False
for (i, server) in enumerate(self._servers):
# Get appropriate virtual IP address for this router
router_ip = self._get_router_ip(cidr, i, router_info['ip_version'])
try:
self.add_interface_to_router(router_info['seg_id'], router_name, router_info['gip'], router_ip, subnet_mask, server)
mlag_peer_failed = False # depends on [control=['try'], data=[]]
except Exception:
if not mlag_peer_failed:
mlag_peer_failed = True # depends on [control=['if'], data=[]]
else:
msg = _('Failed to add interface to router %s on EOS') % router_name
LOG.exception(msg)
raise arista_exc.AristaServicePluginRpcError(msg=msg) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
for s in self._servers:
self.add_interface_to_router(router_info['seg_id'], router_name, router_info['gip'], None, subnet_mask, s) # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]] |
def nodes(self, frequency=None):
"""
Returns all nodes of the specified frequency that are related to
the given Session
Parameters
----------
frequency : str | None
The frequency of the nodes to return
Returns
-------
nodes : iterable[TreeNode]
All nodes related to the Session for the specified frequency
"""
if frequency is None:
return []
elif frequency == 'per_session':
return [self]
elif frequency in ('per_visit', 'per_subject'):
return [self.parent]
elif frequency == 'per_study':
return [self.parent.parent] | def function[nodes, parameter[self, frequency]]:
constant[
Returns all nodes of the specified frequency that are related to
the given Session
Parameters
----------
frequency : str | None
The frequency of the nodes to return
Returns
-------
nodes : iterable[TreeNode]
All nodes related to the Session for the specified frequency
]
if compare[name[frequency] is constant[None]] begin[:]
list[[]] | keyword[def] identifier[nodes] ( identifier[self] , identifier[frequency] = keyword[None] ):
literal[string]
keyword[if] identifier[frequency] keyword[is] keyword[None] :
[]
keyword[elif] identifier[frequency] == literal[string] :
keyword[return] [ identifier[self] ]
keyword[elif] identifier[frequency] keyword[in] ( literal[string] , literal[string] ):
keyword[return] [ identifier[self] . identifier[parent] ]
keyword[elif] identifier[frequency] == literal[string] :
keyword[return] [ identifier[self] . identifier[parent] . identifier[parent] ] | def nodes(self, frequency=None):
"""
Returns all nodes of the specified frequency that are related to
the given Session
Parameters
----------
frequency : str | None
The frequency of the nodes to return
Returns
-------
nodes : iterable[TreeNode]
All nodes related to the Session for the specified frequency
"""
if frequency is None:
return [] # depends on [control=['if'], data=[]]
elif frequency == 'per_session':
return [self] # depends on [control=['if'], data=[]]
elif frequency in ('per_visit', 'per_subject'):
return [self.parent] # depends on [control=['if'], data=[]]
elif frequency == 'per_study':
return [self.parent.parent] # depends on [control=['if'], data=[]] |
def get(self, key, default=None):
"""
Get the value for `key`.
Gives priority to command-line overrides.
Args:
key: str, the key to get the value for.
Returns:
object: The value for `key`
"""
if key in self.__cli:
return self.__cli[key]
if key in self.__config:
return self.__config.get(key)
if key in self.__defaults:
return self.__defaults.get(key)
return default | def function[get, parameter[self, key, default]]:
constant[
Get the value for `key`.
Gives priority to command-line overrides.
Args:
key: str, the key to get the value for.
Returns:
object: The value for `key`
]
if compare[name[key] in name[self].__cli] begin[:]
return[call[name[self].__cli][name[key]]]
if compare[name[key] in name[self].__config] begin[:]
return[call[name[self].__config.get, parameter[name[key]]]]
if compare[name[key] in name[self].__defaults] begin[:]
return[call[name[self].__defaults.get, parameter[name[key]]]]
return[name[default]] | keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[default] = keyword[None] ):
literal[string]
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__cli] :
keyword[return] identifier[self] . identifier[__cli] [ identifier[key] ]
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__config] :
keyword[return] identifier[self] . identifier[__config] . identifier[get] ( identifier[key] )
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[__defaults] :
keyword[return] identifier[self] . identifier[__defaults] . identifier[get] ( identifier[key] )
keyword[return] identifier[default] | def get(self, key, default=None):
"""
Get the value for `key`.
Gives priority to command-line overrides.
Args:
key: str, the key to get the value for.
Returns:
object: The value for `key`
"""
if key in self.__cli:
return self.__cli[key] # depends on [control=['if'], data=['key']]
if key in self.__config:
return self.__config.get(key) # depends on [control=['if'], data=['key']]
if key in self.__defaults:
return self.__defaults.get(key) # depends on [control=['if'], data=['key']]
return default |
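Hedged aside: the CLI > config > defaults precedence implemented above matches what collections.ChainMap gives for plain dicts; the dicts below are invented stand-ins for the private attributes.

from collections import ChainMap

cli = {"verbose": True}
config = {"verbose": False, "retries": 3}
defaults = {"retries": 1, "timeout": 30}

settings = ChainMap(cli, config, defaults)   # earliest mapping wins
print(settings.get("verbose"), settings.get("retries"), settings.get("missing"))
# -> True 3 None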
def has_response(self, beacon_config, request, client_address):
""" :meth:`.WBeaconMessengerBase.has_response` method implementation. This method compares request
header with internal one.
"""
try:
self._message_address_parse(request, invert_hello=self.__invert_hello)
return True
except ValueError:
pass
return False | def function[has_response, parameter[self, beacon_config, request, client_address]]:
constant[ :meth:`.WBeaconMessengerBase.has_response` method implementation. This method compares the request
header with the internal one.
]
<ast.Try object at 0x7da18bcc9de0>
return[constant[False]] | keyword[def] identifier[has_response] ( identifier[self] , identifier[beacon_config] , identifier[request] , identifier[client_address] ):
literal[string]
keyword[try] :
identifier[self] . identifier[_message_address_parse] ( identifier[request] , identifier[invert_hello] = identifier[self] . identifier[__invert_hello] )
keyword[return] keyword[True]
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] keyword[False] | def has_response(self, beacon_config, request, client_address):
""" :meth:`.WBeaconMessengerBase.has_response` method implementation. This method compares request
header with internal one.
"""
try:
self._message_address_parse(request, invert_hello=self.__invert_hello)
return True # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
return False |
def warn(warn_string, log=None):
"""Warning
This method creates custom warning messages.
Parameters
----------
warn_string : str
Warning message string
log : instance, optional
Logging structure instance
"""
if import_fail:
warn_txt = 'WARNING'
else:
warn_txt = colored('WARNING', 'yellow')
# Print warning to stderr.
sys.stderr.write(warn_txt + ': ' + warn_string + '\n')
# Check if a logging structure is provided.
if not isinstance(log, type(None)):
warnings.warn(warn_string) | def function[warn, parameter[warn_string, log]]:
constant[Warning
This method creates custom warning messages.
Parameters
----------
warn_string : str
Warning message string
log : instance, optional
Logging structure instance
]
if name[import_fail] begin[:]
variable[warn_txt] assign[=] constant[WARNING]
call[name[sys].stderr.write, parameter[binary_operation[binary_operation[binary_operation[name[warn_txt] + constant[: ]] + name[warn_string]] + constant[
]]]]
if <ast.UnaryOp object at 0x7da1b0dbf4c0> begin[:]
call[name[warnings].warn, parameter[name[warn_string]]] | keyword[def] identifier[warn] ( identifier[warn_string] , identifier[log] = keyword[None] ):
literal[string]
keyword[if] identifier[import_fail] :
identifier[warn_txt] = literal[string]
keyword[else] :
identifier[warn_txt] = identifier[colored] ( literal[string] , literal[string] )
identifier[sys] . identifier[stderr] . identifier[write] ( identifier[warn_txt] + literal[string] + identifier[warn_string] + literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[log] , identifier[type] ( keyword[None] )):
identifier[warnings] . identifier[warn] ( identifier[warn_string] ) | def warn(warn_string, log=None):
"""Warning
This method creates custom warning messages.
Parameters
----------
warn_string : str
Warning message string
log : instance, optional
Logging structure instance
"""
if import_fail:
warn_txt = 'WARNING' # depends on [control=['if'], data=[]]
else:
warn_txt = colored('WARNING', 'yellow')
# Print warning to stderr.
sys.stderr.write(warn_txt + ': ' + warn_string + '\n')
# Check if a logging structure is provided.
if not isinstance(log, type(None)):
warnings.warn(warn_string) # depends on [control=['if'], data=[]] |
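`warn` assumes a module-level `import_fail` flag recording whether a color library imported; judging by the `colored(text, color)` call it is likely `termcolor`, though that is an inference from the call site, not stated in the source. A sketch of the guard it depends on:

import sys
import warnings

# Probable import guard behind the import_fail flag used above (assumption: termcolor).
try:
    from termcolor import colored
    import_fail = False
except ImportError:
    import_fail = True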
def fill(self, text=""):
"Indent a piece of text, according to the current indentation level"
self.f.write(self.line_marker + " " * self._indent + text)
self.line_marker = "\n" | def function[fill, parameter[self, text]]:
constant[Indent a piece of text, according to the current indentation level]
call[name[self].f.write, parameter[binary_operation[binary_operation[name[self].line_marker + binary_operation[constant[ ] * name[self]._indent]] + name[text]]]]
name[self].line_marker assign[=] constant[
] | keyword[def] identifier[fill] ( identifier[self] , identifier[text] = literal[string] ):
literal[string]
identifier[self] . identifier[f] . identifier[write] ( identifier[self] . identifier[line_marker] + literal[string] * identifier[self] . identifier[_indent] + identifier[text] )
identifier[self] . identifier[line_marker] = literal[string] | def fill(self, text=''):
"""Indent a piece of text, according to the current indentation level"""
self.f.write(self.line_marker + ' ' * self._indent + text)
self.line_marker = '\n' |
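The trick in `fill` is the deferred newline: `line_marker` starts empty so the first line is not preceded by a blank line, and every later call prepends "\n". A hypothetical minimal writer exercising it (the original's surrounding class is not shown in this row):

import io

# Minimal host class for fill(); names other than fill() are invented for the demo.
class Writer:
    def __init__(self):
        self.f = io.StringIO()
        self._indent = 0
        self.line_marker = ""  # becomes "\n" after the first fill()

    def fill(self, text=""):
        self.f.write(self.line_marker + " " * self._indent + text)
        self.line_marker = "\n"

w = Writer()
w.fill("def f():")
w._indent = 4
w.fill("return 1")
print(w.f.getvalue())   # "def f():\n    return 1"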
def _extract_actions_unique_topics(self, movement_counts, max_movements, cluster_topology, max_movement_size):
"""Extract actions limiting to given max value such that
the resultant has the minimum possible number of duplicate topics.
Algorithm:
1. Group actions by by topic-name: {topic: action-list}
2. Iterate through the dictionary in circular fashion and keep
extracting actions with until max_partition_movements
are reached.
:param movement_counts: list of tuple ((topic, partition), movement count)
:param max_movements: max number of movements to extract
:param cluster_topology: cluster topology containing the new proposed assignment for the cluster
:param max_movement_size: maximum size of data to move at a time in extracted actions
:return: list of tuple (topic, partitions) to include in the reduced plan
"""
# Group actions by topic
topic_actions = defaultdict(list)
for t_p, replica_change_cnt in movement_counts:
topic_actions[t_p[0]].append((t_p, replica_change_cnt))
# Create reduced assignment minimizing duplication of topics
extracted_actions = []
curr_movements = 0
curr_size = 0
action_available = True
while curr_movements < max_movements and curr_size <= max_movement_size and action_available:
action_available = False
for topic, actions in six.iteritems(topic_actions):
for action in actions:
action_size = cluster_topology.partitions[action[0]].size
if curr_movements + action[1] > max_movements or curr_size + action_size > max_movement_size:
# Remove action since it won't be possible to use it
actions.remove(action)
else:
# Append (topic, partition) to the list of movements
action_available = True
extracted_actions.append(action[0])
curr_movements += action[1]
curr_size += action_size
actions.remove(action)
break
return extracted_actions | def function[_extract_actions_unique_topics, parameter[self, movement_counts, max_movements, cluster_topology, max_movement_size]]:
constant[Extract actions, limited to the given maximum, such that
the result has the minimum possible number of duplicate topics.
Algorithm:
1. Group actions by topic name: {topic: action-list}
2. Iterate through the dictionary in circular fashion and keep
extracting actions until max_partition_movements is reached.
:param movement_counts: list of tuple ((topic, partition), movement count)
:param max_movements: max number of movements to extract
:param cluster_topology: cluster topology containing the new proposed assignment for the cluster
:param max_movement_size: maximum size of data to move at a time in extracted actions
:return: list of tuple (topic, partition) to include in the reduced plan
]
variable[topic_actions] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[tuple[[<ast.Name object at 0x7da1b0788400>, <ast.Name object at 0x7da1b078a2f0>]]] in starred[name[movement_counts]] begin[:]
call[call[name[topic_actions]][call[name[t_p]][constant[0]]].append, parameter[tuple[[<ast.Name object at 0x7da1b07887f0>, <ast.Name object at 0x7da1b0788af0>]]]]
variable[extracted_actions] assign[=] list[[]]
variable[curr_movements] assign[=] constant[0]
variable[curr_size] assign[=] constant[0]
variable[action_available] assign[=] constant[True]
while <ast.BoolOp object at 0x7da1b0789ea0> begin[:]
variable[action_available] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b0788b80>, <ast.Name object at 0x7da1b0788c10>]]] in starred[call[name[six].iteritems, parameter[name[topic_actions]]]] begin[:]
for taget[name[action]] in starred[name[actions]] begin[:]
variable[action_size] assign[=] call[name[cluster_topology].partitions][call[name[action]][constant[0]]].size
if <ast.BoolOp object at 0x7da1b0788be0> begin[:]
call[name[actions].remove, parameter[name[action]]]
return[name[extracted_actions]] | keyword[def] identifier[_extract_actions_unique_topics] ( identifier[self] , identifier[movement_counts] , identifier[max_movements] , identifier[cluster_topology] , identifier[max_movement_size] ):
literal[string]
identifier[topic_actions] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[t_p] , identifier[replica_change_cnt] keyword[in] identifier[movement_counts] :
identifier[topic_actions] [ identifier[t_p] [ literal[int] ]]. identifier[append] (( identifier[t_p] , identifier[replica_change_cnt] ))
identifier[extracted_actions] =[]
identifier[curr_movements] = literal[int]
identifier[curr_size] = literal[int]
identifier[action_available] = keyword[True]
keyword[while] identifier[curr_movements] < identifier[max_movements] keyword[and] identifier[curr_size] <= identifier[max_movement_size] keyword[and] identifier[action_available] :
identifier[action_available] = keyword[False]
keyword[for] identifier[topic] , identifier[actions] keyword[in] identifier[six] . identifier[iteritems] ( identifier[topic_actions] ):
keyword[for] identifier[action] keyword[in] identifier[actions] :
identifier[action_size] = identifier[cluster_topology] . identifier[partitions] [ identifier[action] [ literal[int] ]]. identifier[size]
keyword[if] identifier[curr_movements] + identifier[action] [ literal[int] ]> identifier[max_movements] keyword[or] identifier[curr_size] + identifier[action_size] > identifier[max_movement_size] :
identifier[actions] . identifier[remove] ( identifier[action] )
keyword[else] :
identifier[action_available] = keyword[True]
identifier[extracted_actions] . identifier[append] ( identifier[action] [ literal[int] ])
identifier[curr_movements] += identifier[action] [ literal[int] ]
identifier[curr_size] += identifier[action_size]
identifier[actions] . identifier[remove] ( identifier[action] )
keyword[break]
keyword[return] identifier[extracted_actions] | def _extract_actions_unique_topics(self, movement_counts, max_movements, cluster_topology, max_movement_size):
"""Extract actions limiting to given max value such that
the resultant has the minimum possible number of duplicate topics.
Algorithm:
1. Group actions by by topic-name: {topic: action-list}
2. Iterate through the dictionary in circular fashion and keep
extracting actions with until max_partition_movements
are reached.
:param movement_counts: list of tuple ((topic, partition), movement count)
:param max_movements: max number of movements to extract
:param cluster_topology: cluster topology containing the new proposed assignment for the cluster
:param max_movement_size: maximum size of data to move at a time in extracted actions
:return: list of tuple (topic, partitions) to include in the reduced plan
"""
# Group actions by topic
topic_actions = defaultdict(list)
for (t_p, replica_change_cnt) in movement_counts:
topic_actions[t_p[0]].append((t_p, replica_change_cnt)) # depends on [control=['for'], data=[]]
# Create reduced assignment minimizing duplication of topics
extracted_actions = []
curr_movements = 0
curr_size = 0
action_available = True
while curr_movements < max_movements and curr_size <= max_movement_size and action_available:
action_available = False
for (topic, actions) in six.iteritems(topic_actions):
for action in actions:
action_size = cluster_topology.partitions[action[0]].size
if curr_movements + action[1] > max_movements or curr_size + action_size > max_movement_size:
# Remove action since it won't be possible to use it
actions.remove(action) # depends on [control=['if'], data=[]]
else:
# Append (topic, partition) to the list of movements
action_available = True
extracted_actions.append(action[0])
curr_movements += action[1]
curr_size += action_size
actions.remove(action)
break # depends on [control=['for'], data=['action']] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]]
return extracted_actions |
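The inner `break` is what minimizes topic duplication: each pass over `topic_actions` takes at most one action per topic before moving on. A reduced, self-contained sketch of that loop — partition sizes and limits below are invented for illustration, and `list(actions)` guards the removal during iteration:

from collections import defaultdict

movement_counts = [(('t1', 0), 2), (('t1', 1), 2), (('t2', 0), 2)]
sizes = {('t1', 0): 10, ('t1', 1): 10, ('t2', 0): 10}   # stand-in for partition.size
max_movements, max_movement_size = 4, 100

topic_actions = defaultdict(list)
for t_p, cnt in movement_counts:
    topic_actions[t_p[0]].append((t_p, cnt))

extracted, curr_movements, curr_size = [], 0, 0
action_available = True
while curr_movements < max_movements and curr_size <= max_movement_size and action_available:
    action_available = False
    for topic, actions in topic_actions.items():
        for action in list(actions):
            size = sizes[action[0]]
            if curr_movements + action[1] > max_movements or curr_size + size > max_movement_size:
                actions.remove(action)   # can never fit; discard
            else:
                action_available = True
                extracted.append(action[0])
                curr_movements += action[1]
                curr_size += size
                actions.remove(action)
                break   # at most one action per topic per pass

print(extracted)   # [('t1', 0), ('t2', 0)] -- alternates topics before repeating one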
def saveJSON(g, data, backup=False):
"""
Saves the current setup to disk.
g : hcam_drivers.globals.Container
Container with globals
data : dict
The current setup in JSON compatible dictionary format.
backup : bool
If we are saving a backup on close, don't prompt for filename
"""
if not backup:
fname = filedialog.asksaveasfilename(
defaultextension='.json',
filetypes=[('json files', '.json'), ],
initialdir=g.cpars['app_directory']
)
else:
fname = os.path.join(os.path.expanduser('~/.hdriver'), 'app.json')
if not fname:
g.clog.warn('Aborted save to disk')
return False
with open(fname, 'w') as of:
of.write(
json.dumps(data, sort_keys=True, indent=4,
separators=(',', ': '))
)
g.clog.info('Saved setup to ' + fname)
return True | def function[saveJSON, parameter[g, data, backup]]:
constant[
Saves the current setup to disk.
g : hcam_drivers.globals.Container
Container with globals
data : dict
The current setup in JSON compatible dictionary format.
backup : bool
If we are saving a backup on close, don't prompt for filename
]
if <ast.UnaryOp object at 0x7da207f98520> begin[:]
variable[fname] assign[=] call[name[filedialog].asksaveasfilename, parameter[]]
if <ast.UnaryOp object at 0x7da207f9a710> begin[:]
call[name[g].clog.warn, parameter[constant[Aborted save to disk]]]
return[constant[False]]
with call[name[open], parameter[name[fname], constant[w]]] begin[:]
call[name[of].write, parameter[call[name[json].dumps, parameter[name[data]]]]]
call[name[g].clog.info, parameter[binary_operation[constant[Saved setup to ] + name[fname]]]]
return[constant[True]] | keyword[def] identifier[saveJSON] ( identifier[g] , identifier[data] , identifier[backup] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[backup] :
identifier[fname] = identifier[filedialog] . identifier[asksaveasfilename] (
identifier[defaultextension] = literal[string] ,
identifier[filetypes] =[( literal[string] , literal[string] ),],
identifier[initialdir] = identifier[g] . identifier[cpars] [ literal[string] ]
)
keyword[else] :
identifier[fname] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ), literal[string] )
keyword[if] keyword[not] identifier[fname] :
identifier[g] . identifier[clog] . identifier[warn] ( literal[string] )
keyword[return] keyword[False]
keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[of] :
identifier[of] . identifier[write] (
identifier[json] . identifier[dumps] ( identifier[data] , identifier[sort_keys] = keyword[True] , identifier[indent] = literal[int] ,
identifier[separators] =( literal[string] , literal[string] ))
)
identifier[g] . identifier[clog] . identifier[info] ( literal[string] + identifier[fname] )
keyword[return] keyword[True] | def saveJSON(g, data, backup=False):
"""
Saves the current setup to disk.
g : hcam_drivers.globals.Container
Container with globals
data : dict
The current setup in JSON compatible dictionary format.
backup : bool
If we are saving a backup on close, don't prompt for filename
"""
if not backup:
fname = filedialog.asksaveasfilename(defaultextension='.json', filetypes=[('json files', '.json')], initialdir=g.cpars['app_directory']) # depends on [control=['if'], data=[]]
else:
fname = os.path.join(os.path.expanduser('~/.hdriver'), 'app.json')
if not fname:
g.clog.warn('Aborted save to disk')
return False # depends on [control=['if'], data=[]]
with open(fname, 'w') as of:
of.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))) # depends on [control=['with'], data=['of']]
g.clog.info('Saved setup to ' + fname)
return True |
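Stripped of the Tk dialog and the globals container, the write is a plain JSON dump. A sketch of the backup-path branch, with directory creation added so the snippet runs standalone (the `data` dict is illustrative):

import json
import os

data = {'appearance': {'theme': 'dark'}, 'window': [800, 600]}   # illustrative setup dict
fname = os.path.join(os.path.expanduser('~/.hdriver'), 'app.json')
os.makedirs(os.path.dirname(fname), exist_ok=True)
with open(fname, 'w') as of:
    of.write(json.dumps(data, sort_keys=True, indent=4, separators=(',', ': ')))

`json.dump(data, of, ...)` would stream straight to the file and skip the intermediate string; the string form above simply matches the original.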
def _ls2ar(inp, strinp):
r"""Convert float or linspace-input to arange/slice-input for brute."""
# Check if input is 1 or 3 elements (float, list, tuple, array)
if np.size(inp) == 1:
start = np.squeeze(inp)
stop = start+1
num = 1
elif np.size(inp) == 3:
start = inp[0]
stop = inp[1]
num = inp[2]
else:
print("* ERROR :: <"+strinp+"> must be a float or a tuple of 3 " +
"elements (start, stop, num); <"+strinp+" provided: " + str(inp))
raise ValueError(strinp)
# Re-arrange it to be compatible with np.arange/slice for brute
if num < 2 or start == stop:
stop = start
step = 1
else:
step = (stop-start)/(num-1)
return (start, stop+step/2, step) | def function[_ls2ar, parameter[inp, strinp]]:
constant[Convert float or linspace-input to arange/slice-input for brute.]
if compare[call[name[np].size, parameter[name[inp]]] equal[==] constant[1]] begin[:]
variable[start] assign[=] call[name[np].squeeze, parameter[name[inp]]]
variable[stop] assign[=] binary_operation[name[start] + constant[1]]
variable[num] assign[=] constant[1]
if <ast.BoolOp object at 0x7da18eb542e0> begin[:]
variable[stop] assign[=] name[start]
variable[step] assign[=] constant[1]
return[tuple[[<ast.Name object at 0x7da18eb56cb0>, <ast.BinOp object at 0x7da18eb558a0>, <ast.Name object at 0x7da18eb54f40>]]] | keyword[def] identifier[_ls2ar] ( identifier[inp] , identifier[strinp] ):
literal[string]
keyword[if] identifier[np] . identifier[size] ( identifier[inp] )== literal[int] :
identifier[start] = identifier[np] . identifier[squeeze] ( identifier[inp] )
identifier[stop] = identifier[start] + literal[int]
identifier[num] = literal[int]
keyword[elif] identifier[np] . identifier[size] ( identifier[inp] )== literal[int] :
identifier[start] = identifier[inp] [ literal[int] ]
identifier[stop] = identifier[inp] [ literal[int] ]
identifier[num] = identifier[inp] [ literal[int] ]
keyword[else] :
identifier[print] ( literal[string] + identifier[strinp] + literal[string] +
literal[string] + identifier[strinp] + literal[string] + identifier[str] ( identifier[inp] ))
keyword[raise] identifier[ValueError] ( identifier[strinp] )
keyword[if] identifier[num] < literal[int] keyword[or] identifier[start] == identifier[stop] :
identifier[stop] = identifier[start]
identifier[step] = literal[int]
keyword[else] :
identifier[step] =( identifier[stop] - identifier[start] )/( identifier[num] - literal[int] )
keyword[return] ( identifier[start] , identifier[stop] + identifier[step] / literal[int] , identifier[step] ) | def _ls2ar(inp, strinp):
"""Convert float or linspace-input to arange/slice-input for brute."""
# Check if input is 1 or 3 elements (float, list, tuple, array)
if np.size(inp) == 1:
start = np.squeeze(inp)
stop = start + 1
num = 1 # depends on [control=['if'], data=[]]
elif np.size(inp) == 3:
start = inp[0]
stop = inp[1]
num = inp[2] # depends on [control=['if'], data=[]]
else:
print('* ERROR :: <' + strinp + '> must be a float or a tuple of 3 ' + 'elements (start, stop, num); <' + strinp + '> provided: ' + str(inp))
raise ValueError(strinp)
# Re-arrange it to be compatible with np.arange/slice for brute
if num < 2 or start == stop:
stop = start
step = 1 # depends on [control=['if'], data=[]]
else:
step = (stop - start) / (num - 1)
return (start, stop + step / 2, step) |
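`scipy.optimize.brute` takes each range as a `(start, stop, step)` slice over a half-open interval, so `_ls2ar` pads `stop` by half a step to keep the linspace endpoint. A quick check of both branches, assuming the function above is importable ('depth' is just an illustrative parameter name):

import numpy as np

start, stop, step = _ls2ar((0.0, 1.0, 5), 'depth')   # linspace-style: 5 points on [0, 1]
print(np.arange(start, stop, step))                  # [0.   0.25 0.5  0.75 1.  ]

start, stop, step = _ls2ar(3.5, 'depth')             # single float -> one grid point
print(np.arange(start, stop, step))                  # [3.5]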