code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def smoothstep(edge0, edge1, x):
    """Smooth Hermite interpolation between 0 and 1 when edge0 < x < edge1.

    Returns 0 for x <= edge0, 1 for x >= edge1, and a smooth cubic
    transition in between (the GLSL ``smoothstep`` built-in). `x` may be
    a scalar or a NumPy array.
    """
    # Normalise x into [0, 1] relative to the two edges, clamping outside.
    t = np.clip((x - edge0) / (edge1 - edge0), 0.0, 1.0)
    # Cubic Hermite polynomial 3t^2 - 2t^3.
    return t * t * (3 - 2 * t)
constant[ performs smooth Hermite interpolation
between 0 and 1 when edge0 < x < edge1. ]
variable[x] assign[=] call[name[np].clip, parameter[binary_operation[binary_operation[name[x] - name[edge0]] / binary_operation[name[edge1] - name[edge0]]], constant[0.0], constant[1.0]]]
return[binary_operation[binary_operation[name[x] * name[x]] * binary_operation[constant[3] - binary_operation[constant[2] * name[x]]]]] | keyword[def] identifier[smoothstep] ( identifier[edge0] , identifier[edge1] , identifier[x] ):
literal[string]
identifier[x] = identifier[np] . identifier[clip] (( identifier[x] - identifier[edge0] )/( identifier[edge1] - identifier[edge0] ), literal[int] , literal[int] )
keyword[return] identifier[x] * identifier[x] *( literal[int] - literal[int] * identifier[x] ) | def smoothstep(edge0, edge1, x):
""" performs smooth Hermite interpolation
between 0 and 1 when edge0 < x < edge1. """
# Scale, bias and saturate x to 0..1 range
x = np.clip((x - edge0) / (edge1 - edge0), 0.0, 1.0)
# Evaluate polynomial
return x * x * (3 - 2 * x) |
def experiment_list(args):
    '''Print the information of experiments (all of them, or only the running ones).'''
    experiments = Experiments()
    experiment_dict = experiments.get_all_experiments()
    if not experiment_dict:
        print('There is no experiment running...')
        exit(1)
    update_experiment()
    if args.all and args.all == 'all':
        # 'all' view also includes experiments that have already stopped.
        experiment_id_list = list(experiment_dict.keys())
    else:
        # Default view: only experiments that are not stopped.
        experiment_id_list = [expid for expid in experiment_dict
                              if experiment_dict[expid]['status'] != 'STOPPED']
    if not experiment_id_list:
        print_warning('There is no experiment running...\nYou can use \'nnictl experiment list all\' to list all stopped experiments!')
    # Render one detail row per experiment, then wrap them in the table format.
    detail_rows = []
    for expid in experiment_id_list:
        detail = experiment_dict[expid]
        detail_rows.append(EXPERIMENT_DETAIL_FORMAT % (expid, detail['status'], detail['port'],
                                                       detail.get('platform'), detail['startTime'], detail['endTime']))
    print(EXPERIMENT_INFORMATION_FORMAT % ''.join(detail_rows))
constant[get the information of all experiments]
variable[experiment_config] assign[=] call[name[Experiments], parameter[]]
variable[experiment_dict] assign[=] call[name[experiment_config].get_all_experiments, parameter[]]
if <ast.UnaryOp object at 0x7da1b1f39fc0> begin[:]
call[name[print], parameter[constant[There is no experiment running...]]]
call[name[exit], parameter[constant[1]]]
call[name[update_experiment], parameter[]]
variable[experiment_id_list] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b1f3b700> begin[:]
for taget[name[key]] in starred[call[name[experiment_dict].keys, parameter[]]] begin[:]
call[name[experiment_id_list].append, parameter[name[key]]]
variable[experiment_information] assign[=] constant[]
for taget[name[key]] in starred[name[experiment_id_list]] begin[:]
<ast.AugAssign object at 0x7da20cabcdf0>
call[name[print], parameter[binary_operation[name[EXPERIMENT_INFORMATION_FORMAT] <ast.Mod object at 0x7da2590d6920> name[experiment_information]]]] | keyword[def] identifier[experiment_list] ( identifier[args] ):
literal[string]
identifier[experiment_config] = identifier[Experiments] ()
identifier[experiment_dict] = identifier[experiment_config] . identifier[get_all_experiments] ()
keyword[if] keyword[not] identifier[experiment_dict] :
identifier[print] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[update_experiment] ()
identifier[experiment_id_list] =[]
keyword[if] identifier[args] . identifier[all] keyword[and] identifier[args] . identifier[all] == literal[string] :
keyword[for] identifier[key] keyword[in] identifier[experiment_dict] . identifier[keys] ():
identifier[experiment_id_list] . identifier[append] ( identifier[key] )
keyword[else] :
keyword[for] identifier[key] keyword[in] identifier[experiment_dict] . identifier[keys] ():
keyword[if] identifier[experiment_dict] [ identifier[key] ][ literal[string] ]!= literal[string] :
identifier[experiment_id_list] . identifier[append] ( identifier[key] )
keyword[if] keyword[not] identifier[experiment_id_list] :
identifier[print_warning] ( literal[string] )
identifier[experiment_information] = literal[string]
keyword[for] identifier[key] keyword[in] identifier[experiment_id_list] :
identifier[experiment_information] +=( identifier[EXPERIMENT_DETAIL_FORMAT] %( identifier[key] , identifier[experiment_dict] [ identifier[key] ][ literal[string] ], identifier[experiment_dict] [ identifier[key] ][ literal[string] ], identifier[experiment_dict] [ identifier[key] ]. identifier[get] ( literal[string] ), identifier[experiment_dict] [ identifier[key] ][ literal[string] ], identifier[experiment_dict] [ identifier[key] ][ literal[string] ]))
identifier[print] ( identifier[EXPERIMENT_INFORMATION_FORMAT] % identifier[experiment_information] ) | def experiment_list(args):
"""get the information of all experiments"""
experiment_config = Experiments()
experiment_dict = experiment_config.get_all_experiments()
if not experiment_dict:
print('There is no experiment running...')
exit(1) # depends on [control=['if'], data=[]]
update_experiment()
experiment_id_list = []
if args.all and args.all == 'all':
for key in experiment_dict.keys():
experiment_id_list.append(key) # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
else:
for key in experiment_dict.keys():
if experiment_dict[key]['status'] != 'STOPPED':
experiment_id_list.append(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
if not experiment_id_list:
print_warning("There is no experiment running...\nYou can use 'nnictl experiment list all' to list all stopped experiments!") # depends on [control=['if'], data=[]]
experiment_information = ''
for key in experiment_id_list:
experiment_information += EXPERIMENT_DETAIL_FORMAT % (key, experiment_dict[key]['status'], experiment_dict[key]['port'], experiment_dict[key].get('platform'), experiment_dict[key]['startTime'], experiment_dict[key]['endTime']) # depends on [control=['for'], data=['key']]
print(EXPERIMENT_INFORMATION_FORMAT % experiment_information) |
def oauth_get_user(client_id, account_info=None, access_token=None):
    """Retrieve user object for the given request.

    Uses either the access token or extracted account information to retrieve
    the user object.

    :param client_id: The client id.
    :param account_info: The dictionary with the account info.
        (Default: ``None``)
    :param access_token: The access token. (Default: ``None``)
    :returns: A :class:`invenio_accounts.models.User` instance or ``None``.
    """
    # First preference: resolve the user through a stored remote token.
    if access_token:
        remote_token = RemoteToken.get_by_token(client_id, access_token)
        if remote_token:
            return remote_token.remote_account.user

    # Second preference: resolve through the account information, trying the
    # linked external identity before falling back to an e-mail lookup.
    if account_info:
        external_id = _get_external_id(account_info)
        if external_id:
            identity = UserIdentity.query.filter_by(
                id=external_id['id'],
                method=external_id['method'],
            ).first()
            if identity:
                return identity.user
        user_email = account_info.get('user', {}).get('email')
        if user_email:
            return User.query.filter_by(email=user_email).one_or_none()

    return None
constant[Retrieve user object for the given request.
Uses either the access token or extracted account information to retrieve
the user object.
:param client_id: The client id.
:param account_info: The dictionary with the account info.
(Default: ``None``)
:param access_token: The access token. (Default: ``None``)
:returns: A :class:`invenio_accounts.models.User` instance or ``None``.
]
if name[access_token] begin[:]
variable[token] assign[=] call[name[RemoteToken].get_by_token, parameter[name[client_id], name[access_token]]]
if name[token] begin[:]
return[name[token].remote_account.user]
if name[account_info] begin[:]
variable[external_id] assign[=] call[name[_get_external_id], parameter[name[account_info]]]
if name[external_id] begin[:]
variable[user_identity] assign[=] call[call[name[UserIdentity].query.filter_by, parameter[]].first, parameter[]]
if name[user_identity] begin[:]
return[name[user_identity].user]
variable[email] assign[=] call[call[name[account_info].get, parameter[constant[user], dictionary[[], []]]].get, parameter[constant[email]]]
if name[email] begin[:]
return[call[call[name[User].query.filter_by, parameter[]].one_or_none, parameter[]]]
return[constant[None]] | keyword[def] identifier[oauth_get_user] ( identifier[client_id] , identifier[account_info] = keyword[None] , identifier[access_token] = keyword[None] ):
literal[string]
keyword[if] identifier[access_token] :
identifier[token] = identifier[RemoteToken] . identifier[get_by_token] ( identifier[client_id] , identifier[access_token] )
keyword[if] identifier[token] :
keyword[return] identifier[token] . identifier[remote_account] . identifier[user]
keyword[if] identifier[account_info] :
identifier[external_id] = identifier[_get_external_id] ( identifier[account_info] )
keyword[if] identifier[external_id] :
identifier[user_identity] = identifier[UserIdentity] . identifier[query] . identifier[filter_by] (
identifier[id] = identifier[external_id] [ literal[string] ], identifier[method] = identifier[external_id] [ literal[string] ]). identifier[first] ()
keyword[if] identifier[user_identity] :
keyword[return] identifier[user_identity] . identifier[user]
identifier[email] = identifier[account_info] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
keyword[if] identifier[email] :
keyword[return] identifier[User] . identifier[query] . identifier[filter_by] ( identifier[email] = identifier[email] ). identifier[one_or_none] ()
keyword[return] keyword[None] | def oauth_get_user(client_id, account_info=None, access_token=None):
"""Retrieve user object for the given request.
Uses either the access token or extracted account information to retrieve
the user object.
:param client_id: The client id.
:param account_info: The dictionary with the account info.
(Default: ``None``)
:param access_token: The access token. (Default: ``None``)
:returns: A :class:`invenio_accounts.models.User` instance or ``None``.
"""
if access_token:
token = RemoteToken.get_by_token(client_id, access_token)
if token:
return token.remote_account.user # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if account_info:
external_id = _get_external_id(account_info)
if external_id:
user_identity = UserIdentity.query.filter_by(id=external_id['id'], method=external_id['method']).first()
if user_identity:
return user_identity.user # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
email = account_info.get('user', {}).get('email')
if email:
return User.query.filter_by(email=email).one_or_none() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return None |
def yn2Kn2Der(nu, y, n=0, tol=5e-4, nterms=1, nu_step=0.001):
    r"""Computes the function :math:`y^{\nu/2} K_{\nu}(y^{1/2})` and its derivatives.
    Care has been taken to handle the conditions at :math:`y=0`.
    For `n=0`, uses a direct evaluation of the expression, replacing points
    where `y=0` with the appropriate value. For `n>0`, uses a general sum
    expression to evaluate the expression, and handles the value at `y=0` using
    a power series expansion. Where it becomes infinite, the infinities will
    have the appropriate sign for a limit approaching zero from the right.
    Uses a power series expansion around :math:`y=0` to avoid numerical issues.
    Handles integer `nu` by performing a linear interpolation between values of
    `nu` slightly above and below the requested value.
    Parameters
    ----------
    nu : float
        The order of the modified Bessel function and the exponent of `y`.
    y : array of float
        The points to evaluate the function at. These are assumed to be
        nonegative.
    n : nonnegative int, optional
        The order of derivative to take. Set to zero (the default) to get the
        value.
    tol : float, optional
        The distance from zero for which the power series is used. Default is
        5e-4.
    nterms : int, optional
        The number of terms to include in the power series. Default is 1.
    nu_step : float, optional
        The amount to vary `nu` by when handling integer values of `nu`. Default
        is 0.001.
    Returns
    -------
    K : array of float
        The value (or `n`-th derivative) at each point in `y`.
    """
    # NOTE(review): scipy.asarray / scipy.sqrt / scipy.arange / scipy.zeros_like /
    # scipy.inf are deprecated NumPy aliases removed in modern SciPy — consider
    # switching to the numpy equivalents.
    n = int(n)
    # Ensure `y` is a float ndarray so the boolean-mask assignments below work.
    y = scipy.asarray(y, dtype=float)
    if n == 0:
        # Direct evaluation. kv(nu, 0) is not finite, so points at y == 0 are
        # overwritten with the y -> 0+ limit, Gamma(nu) * 2**(nu - 1).
        K = y**(nu / 2.0) * scipy.special.kv(nu, scipy.sqrt(y))
        K[y == 0.0] = scipy.special.gamma(nu) / 2.0**(1.0 - nu)
    else:
        # General sum expression for the n-th derivative: a binomial-weighted
        # combination of lower-order derivatives of K_nu (via Kn2Der) and
        # falling powers of y (via the Pochhammer symbol fixed_poch).
        K = scipy.zeros_like(y)
        for k in scipy.arange(0.0, n + 1.0, dtype=float):
            K += (
                scipy.special.binom(n, k) * fixed_poch(1.0 + nu / 2.0 - k, k) *
                y**(nu / 2.0 - k) * Kn2Der(nu, y, n=n-k)
            )
        # Do the extra work to handle y == 0 only if we need to:
        mask = (y == 0.0)
        if (mask).any():
            if int(nu) == nu:
                # Integer nu: the closed forms below are singular, so linearly
                # interpolate between nu -+ nu_step (recursive calls).
                K[mask] = 0.5 * (
                    yn2Kn2Der(nu - nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step) +
                    yn2Kn2Der(nu + nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step)
                )
            else:
                if n > nu:
                    # Divergent limit; the sign of the infinity comes from the
                    # Gamma/Pochhammer prefactor.
                    K[mask] = scipy.special.gamma(-nu) * fixed_poch(1 + nu - n, n) * scipy.inf
                else:
                    # Finite limit at y = 0 from the power series' leading term.
                    K[mask] = scipy.special.gamma(nu) * scipy.special.gamma(n + 1.0) / (
                        2.0**(1.0 - nu + 2.0 * n) * fixed_poch(1.0 - nu, n) *
                        scipy.special.factorial(n)
                    )
    if tol > 0.0:
        # Replace points within tol (absolute distance) of zero with the power
        # series approximation:
        mask = (y <= tol) & (y > 0.0)
        K[mask] = 0.0
        if int(nu) == nu:
            # Integer nu: same interpolation trick as above for the near-zero band.
            K[mask] = 0.5 * (
                yn2Kn2Der(nu - nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step) +
                yn2Kn2Der(nu + nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step)
            )
        else:
            # Two power-series branches: terms in y**(k - n) ...
            for k in scipy.arange(n, n + nterms, dtype=float):
                K[mask] += (
                    scipy.special.gamma(nu) * fixed_poch(1.0 + k - n, n) * y[mask]**(k - n) / (
                        2.0**(1.0 - nu + 2 * k) * fixed_poch(1.0 - nu, k) * scipy.special.factorial(k))
                )
            # ... plus terms in y**(nu + k - n).
            for k in scipy.arange(0, nterms, dtype=float):
                K[mask] += (
                    scipy.special.gamma(-nu) * fixed_poch(1.0 + nu + k - n, n) *
                    y[mask]**(nu + k - n) / (
                        2.0**(1.0 + nu + 2.0 * k) * fixed_poch(1.0 + nu, k) *
                        scipy.special.factorial(k)
                    )
                )
    return K
constant[Computes the function :math:`y^{\nu/2} K_{\nu}(y^{1/2})` and its derivatives.
Care has been taken to handle the conditions at :math:`y=0`.
For `n=0`, uses a direct evaluation of the expression, replacing points
where `y=0` with the appropriate value. For `n>0`, uses a general sum
expression to evaluate the expression, and handles the value at `y=0` using
a power series expansion. Where it becomes infinite, the infinities will
have the appropriate sign for a limit approaching zero from the right.
Uses a power series expansion around :math:`y=0` to avoid numerical issues.
Handles integer `nu` by performing a linear interpolation between values of
`nu` slightly above and below the requested value.
Parameters
----------
nu : float
The order of the modified Bessel function and the exponent of `y`.
y : array of float
The points to evaluate the function at. These are assumed to be
nonegative.
n : nonnegative int, optional
The order of derivative to take. Set to zero (the default) to get the
value.
tol : float, optional
The distance from zero for which the power series is used. Default is
5e-4.
nterms : int, optional
The number of terms to include in the power series. Default is 1.
nu_step : float, optional
The amount to vary `nu` by when handling integer values of `nu`. Default
is 0.001.
]
variable[n] assign[=] call[name[int], parameter[name[n]]]
variable[y] assign[=] call[name[scipy].asarray, parameter[name[y]]]
if compare[name[n] equal[==] constant[0]] begin[:]
variable[K] assign[=] binary_operation[binary_operation[name[y] ** binary_operation[name[nu] / constant[2.0]]] * call[name[scipy].special.kv, parameter[name[nu], call[name[scipy].sqrt, parameter[name[y]]]]]]
call[name[K]][compare[name[y] equal[==] constant[0.0]]] assign[=] binary_operation[call[name[scipy].special.gamma, parameter[name[nu]]] / binary_operation[constant[2.0] ** binary_operation[constant[1.0] - name[nu]]]]
if compare[name[tol] greater[>] constant[0.0]] begin[:]
variable[mask] assign[=] binary_operation[compare[name[y] less_or_equal[<=] name[tol]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[y] greater[>] constant[0.0]]]
call[name[K]][name[mask]] assign[=] constant[0.0]
if compare[call[name[int], parameter[name[nu]]] equal[==] name[nu]] begin[:]
call[name[K]][name[mask]] assign[=] binary_operation[constant[0.5] * binary_operation[call[name[yn2Kn2Der], parameter[binary_operation[name[nu] - name[nu_step]], call[name[y]][name[mask]]]] + call[name[yn2Kn2Der], parameter[binary_operation[name[nu] + name[nu_step]], call[name[y]][name[mask]]]]]]
return[name[K]] | keyword[def] identifier[yn2Kn2Der] ( identifier[nu] , identifier[y] , identifier[n] = literal[int] , identifier[tol] = literal[int] , identifier[nterms] = literal[int] , identifier[nu_step] = literal[int] ):
literal[string]
identifier[n] = identifier[int] ( identifier[n] )
identifier[y] = identifier[scipy] . identifier[asarray] ( identifier[y] , identifier[dtype] = identifier[float] )
keyword[if] identifier[n] == literal[int] :
identifier[K] = identifier[y] **( identifier[nu] / literal[int] )* identifier[scipy] . identifier[special] . identifier[kv] ( identifier[nu] , identifier[scipy] . identifier[sqrt] ( identifier[y] ))
identifier[K] [ identifier[y] == literal[int] ]= identifier[scipy] . identifier[special] . identifier[gamma] ( identifier[nu] )/ literal[int] **( literal[int] - identifier[nu] )
keyword[else] :
identifier[K] = identifier[scipy] . identifier[zeros_like] ( identifier[y] )
keyword[for] identifier[k] keyword[in] identifier[scipy] . identifier[arange] ( literal[int] , identifier[n] + literal[int] , identifier[dtype] = identifier[float] ):
identifier[K] +=(
identifier[scipy] . identifier[special] . identifier[binom] ( identifier[n] , identifier[k] )* identifier[fixed_poch] ( literal[int] + identifier[nu] / literal[int] - identifier[k] , identifier[k] )*
identifier[y] **( identifier[nu] / literal[int] - identifier[k] )* identifier[Kn2Der] ( identifier[nu] , identifier[y] , identifier[n] = identifier[n] - identifier[k] )
)
identifier[mask] =( identifier[y] == literal[int] )
keyword[if] ( identifier[mask] ). identifier[any] ():
keyword[if] identifier[int] ( identifier[nu] )== identifier[nu] :
identifier[K] [ identifier[mask] ]= literal[int] *(
identifier[yn2Kn2Der] ( identifier[nu] - identifier[nu_step] , identifier[y] [ identifier[mask] ], identifier[n] = identifier[n] , identifier[tol] = identifier[tol] , identifier[nterms] = identifier[nterms] , identifier[nu_step] = identifier[nu_step] )+
identifier[yn2Kn2Der] ( identifier[nu] + identifier[nu_step] , identifier[y] [ identifier[mask] ], identifier[n] = identifier[n] , identifier[tol] = identifier[tol] , identifier[nterms] = identifier[nterms] , identifier[nu_step] = identifier[nu_step] )
)
keyword[else] :
keyword[if] identifier[n] > identifier[nu] :
identifier[K] [ identifier[mask] ]= identifier[scipy] . identifier[special] . identifier[gamma] (- identifier[nu] )* identifier[fixed_poch] ( literal[int] + identifier[nu] - identifier[n] , identifier[n] )* identifier[scipy] . identifier[inf]
keyword[else] :
identifier[K] [ identifier[mask] ]= identifier[scipy] . identifier[special] . identifier[gamma] ( identifier[nu] )* identifier[scipy] . identifier[special] . identifier[gamma] ( identifier[n] + literal[int] )/(
literal[int] **( literal[int] - identifier[nu] + literal[int] * identifier[n] )* identifier[fixed_poch] ( literal[int] - identifier[nu] , identifier[n] )*
identifier[scipy] . identifier[special] . identifier[factorial] ( identifier[n] )
)
keyword[if] identifier[tol] > literal[int] :
identifier[mask] =( identifier[y] <= identifier[tol] )&( identifier[y] > literal[int] )
identifier[K] [ identifier[mask] ]= literal[int]
keyword[if] identifier[int] ( identifier[nu] )== identifier[nu] :
identifier[K] [ identifier[mask] ]= literal[int] *(
identifier[yn2Kn2Der] ( identifier[nu] - identifier[nu_step] , identifier[y] [ identifier[mask] ], identifier[n] = identifier[n] , identifier[tol] = identifier[tol] , identifier[nterms] = identifier[nterms] , identifier[nu_step] = identifier[nu_step] )+
identifier[yn2Kn2Der] ( identifier[nu] + identifier[nu_step] , identifier[y] [ identifier[mask] ], identifier[n] = identifier[n] , identifier[tol] = identifier[tol] , identifier[nterms] = identifier[nterms] , identifier[nu_step] = identifier[nu_step] )
)
keyword[else] :
keyword[for] identifier[k] keyword[in] identifier[scipy] . identifier[arange] ( identifier[n] , identifier[n] + identifier[nterms] , identifier[dtype] = identifier[float] ):
identifier[K] [ identifier[mask] ]+=(
identifier[scipy] . identifier[special] . identifier[gamma] ( identifier[nu] )* identifier[fixed_poch] ( literal[int] + identifier[k] - identifier[n] , identifier[n] )* identifier[y] [ identifier[mask] ]**( identifier[k] - identifier[n] )/(
literal[int] **( literal[int] - identifier[nu] + literal[int] * identifier[k] )* identifier[fixed_poch] ( literal[int] - identifier[nu] , identifier[k] )* identifier[scipy] . identifier[special] . identifier[factorial] ( identifier[k] ))
)
keyword[for] identifier[k] keyword[in] identifier[scipy] . identifier[arange] ( literal[int] , identifier[nterms] , identifier[dtype] = identifier[float] ):
identifier[K] [ identifier[mask] ]+=(
identifier[scipy] . identifier[special] . identifier[gamma] (- identifier[nu] )* identifier[fixed_poch] ( literal[int] + identifier[nu] + identifier[k] - identifier[n] , identifier[n] )*
identifier[y] [ identifier[mask] ]**( identifier[nu] + identifier[k] - identifier[n] )/(
literal[int] **( literal[int] + identifier[nu] + literal[int] * identifier[k] )* identifier[fixed_poch] ( literal[int] + identifier[nu] , identifier[k] )*
identifier[scipy] . identifier[special] . identifier[factorial] ( identifier[k] )
)
)
keyword[return] identifier[K] | def yn2Kn2Der(nu, y, n=0, tol=0.0005, nterms=1, nu_step=0.001):
"""Computes the function :math:`y^{\\nu/2} K_{\\nu}(y^{1/2})` and its derivatives.
Care has been taken to handle the conditions at :math:`y=0`.
For `n=0`, uses a direct evaluation of the expression, replacing points
where `y=0` with the appropriate value. For `n>0`, uses a general sum
expression to evaluate the expression, and handles the value at `y=0` using
a power series expansion. Where it becomes infinite, the infinities will
have the appropriate sign for a limit approaching zero from the right.
Uses a power series expansion around :math:`y=0` to avoid numerical issues.
Handles integer `nu` by performing a linear interpolation between values of
`nu` slightly above and below the requested value.
Parameters
----------
nu : float
The order of the modified Bessel function and the exponent of `y`.
y : array of float
The points to evaluate the function at. These are assumed to be
nonegative.
n : nonnegative int, optional
The order of derivative to take. Set to zero (the default) to get the
value.
tol : float, optional
The distance from zero for which the power series is used. Default is
5e-4.
nterms : int, optional
The number of terms to include in the power series. Default is 1.
nu_step : float, optional
The amount to vary `nu` by when handling integer values of `nu`. Default
is 0.001.
"""
n = int(n)
y = scipy.asarray(y, dtype=float)
if n == 0:
K = y ** (nu / 2.0) * scipy.special.kv(nu, scipy.sqrt(y))
K[y == 0.0] = scipy.special.gamma(nu) / 2.0 ** (1.0 - nu) # depends on [control=['if'], data=[]]
else:
K = scipy.zeros_like(y)
for k in scipy.arange(0.0, n + 1.0, dtype=float):
K += scipy.special.binom(n, k) * fixed_poch(1.0 + nu / 2.0 - k, k) * y ** (nu / 2.0 - k) * Kn2Der(nu, y, n=n - k) # depends on [control=['for'], data=['k']]
# Do the extra work to handle y == 0 only if we need to:
mask = y == 0.0
if mask.any():
if int(nu) == nu:
K[mask] = 0.5 * (yn2Kn2Der(nu - nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step) + yn2Kn2Der(nu + nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step)) # depends on [control=['if'], data=['nu']]
elif n > nu:
K[mask] = scipy.special.gamma(-nu) * fixed_poch(1 + nu - n, n) * scipy.inf # depends on [control=['if'], data=['n', 'nu']]
else:
K[mask] = scipy.special.gamma(nu) * scipy.special.gamma(n + 1.0) / (2.0 ** (1.0 - nu + 2.0 * n) * fixed_poch(1.0 - nu, n) * scipy.special.factorial(n)) # depends on [control=['if'], data=[]]
if tol > 0.0:
# Replace points within tol (absolute distance) of zero with the power
# series approximation:
mask = (y <= tol) & (y > 0.0)
K[mask] = 0.0
if int(nu) == nu:
K[mask] = 0.5 * (yn2Kn2Der(nu - nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step) + yn2Kn2Der(nu + nu_step, y[mask], n=n, tol=tol, nterms=nterms, nu_step=nu_step)) # depends on [control=['if'], data=['nu']]
else:
for k in scipy.arange(n, n + nterms, dtype=float):
K[mask] += scipy.special.gamma(nu) * fixed_poch(1.0 + k - n, n) * y[mask] ** (k - n) / (2.0 ** (1.0 - nu + 2 * k) * fixed_poch(1.0 - nu, k) * scipy.special.factorial(k)) # depends on [control=['for'], data=['k']]
for k in scipy.arange(0, nterms, dtype=float):
K[mask] += scipy.special.gamma(-nu) * fixed_poch(1.0 + nu + k - n, n) * y[mask] ** (nu + k - n) / (2.0 ** (1.0 + nu + 2.0 * k) * fixed_poch(1.0 + nu, k) * scipy.special.factorial(k)) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=['tol']]
return K |
def attachment_probability(G):
    """
    Calculates the observed attachment probability for each node at each
    time-step.
    Attachment probability is calculated based on the observed new edges in the
    next time-step. So if a node acquires new edges at time t, this will accrue
    to the node's attachment probability at time t-1. Thus at a given time,
    one can ask whether degree and attachment probability are related.
    Parameters
    ----------
    G : :class:`.GraphCollection`
        Must be sliced by 'date'. See :func:`.GraphCollection.slice`\.
    Returns
    -------
    probs : dict
        Keyed by index in G.graphs, and then by node.
    """
    warnings.warn("Removed in 0.8. Too domain-specific.")
    probs = {}
    G_ = None   # Graph from the previous time-step.
    k_ = None   # Key (index) of the previous time-step.
    for k, g in G.graphs.iteritems():
        new_edges = {}
        if G_ is not None:
            # Count how many new neighbors each surviving node gained since
            # the previous time-step; nodes absent from the previous graph
            # raise KeyError and are skipped.
            for n in g.nodes():
                try:
                    old_neighbors = set(G_[n].keys())
                    if len(old_neighbors) > 0:
                        new_neighbors = set(g[n].keys()) - old_neighbors
                        new_edges[n] = float(len(new_neighbors))
                    else:
                        new_edges[n] = 0.
                except KeyError:
                    pass
            N = sum(new_edges.values())
            probs[k_] = {n: 0. for n in G_.nodes()}
            if N > 0.:
                # Each node's attachment probability at k_ is its share of the
                # new edges observed at k. Iterating new_edges directly (its
                # keys are by construction nodes of G_) replaces the previous
                # loop over the whole collection guarded by except KeyError.
                for n, count in new_edges.iteritems():
                    probs[k_][n] = count / N
            networkx.set_node_attributes(G.graphs[k_],
                                         'attachment_probability',
                                         probs[k_])
        # BUG FIX: this previously assigned ``G_ = G`` (the whole
        # GraphCollection) instead of the current time-step's graph, so every
        # "previous graph" lookup after the first iteration was wrong.
        G_ = g
        k_ = k
    # Handle last graph (no values).
    key = G.graphs.keys()[-1]
    zprobs = {n: 0. for n in G.graphs[key].nodes()}
    networkx.set_node_attributes(G.graphs[key], 'attachment_probability', zprobs)
    return probs
constant[
Calculates the observed attachment probability for each node at each
time-step.
Attachment probability is calculated based on the observed new edges in the
next time-step. So if a node acquires new edges at time t, this will accrue
to the node's attachment probability at time t-1. Thus at a given time,
one can ask whether degree and attachment probability are related.
Parameters
----------
G : :class:`.GraphCollection`
Must be sliced by 'date'. See :func:`.GraphCollection.slice`\.
Returns
-------
probs : dict
Keyed by index in G.graphs, and then by node.
]
call[name[warnings].warn, parameter[constant[Removed in 0.8. Too domain-specific.]]]
variable[probs] assign[=] dictionary[[], []]
variable[G_] assign[=] constant[None]
variable[k_] assign[=] constant[None]
for taget[tuple[[<ast.Name object at 0x7da1b12d9600>, <ast.Name object at 0x7da1b12dbd00>]]] in starred[call[name[G].graphs.iteritems, parameter[]]] begin[:]
variable[new_edges] assign[=] dictionary[[], []]
if compare[name[G_] is_not constant[None]] begin[:]
for taget[name[n]] in starred[call[name[g].nodes, parameter[]]] begin[:]
<ast.Try object at 0x7da1b12d96f0>
variable[N] assign[=] call[name[sum], parameter[call[name[new_edges].values, parameter[]]]]
call[name[probs]][name[k_]] assign[=] <ast.DictComp object at 0x7da1b12a9f00>
if compare[name[N] greater[>] constant[0.0]] begin[:]
for taget[name[n]] in starred[call[name[G].nodes, parameter[]]] begin[:]
<ast.Try object at 0x7da1b12b4c10>
if compare[call[name[probs]][name[k_]] is_not constant[None]] begin[:]
call[name[networkx].set_node_attributes, parameter[call[name[G].graphs][name[k_]], constant[attachment_probability], call[name[probs]][name[k_]]]]
variable[G_] assign[=] name[G]
variable[k_] assign[=] name[k]
variable[key] assign[=] call[call[name[G].graphs.keys, parameter[]]][<ast.UnaryOp object at 0x7da1b12b4b20>]
variable[zprobs] assign[=] <ast.DictComp object at 0x7da1b12b5ba0>
call[name[networkx].set_node_attributes, parameter[call[name[G].graphs][name[key]], constant[attachment_probability], name[zprobs]]]
return[name[probs]] | keyword[def] identifier[attachment_probability] ( identifier[G] ):
literal[string]
identifier[warnings] . identifier[warn] ( literal[string] )
identifier[probs] ={}
identifier[G_] = keyword[None]
identifier[k_] = keyword[None]
keyword[for] identifier[k] , identifier[g] keyword[in] identifier[G] . identifier[graphs] . identifier[iteritems] ():
identifier[new_edges] ={}
keyword[if] identifier[G_] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[n] keyword[in] identifier[g] . identifier[nodes] ():
keyword[try] :
identifier[old_neighbors] = identifier[set] ( identifier[G_] [ identifier[n] ]. identifier[keys] ())
keyword[if] identifier[len] ( identifier[old_neighbors] )> literal[int] :
identifier[new_neighbors] = identifier[set] ( identifier[g] [ identifier[n] ]. identifier[keys] ())- identifier[old_neighbors]
identifier[new_edges] [ identifier[n] ]= identifier[float] ( identifier[len] ( identifier[new_neighbors] ))
keyword[else] :
identifier[new_edges] [ identifier[n] ]= literal[int]
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[N] = identifier[sum] ( identifier[new_edges] . identifier[values] ())
identifier[probs] [ identifier[k_] ]={ identifier[n] : literal[int] keyword[for] identifier[n] keyword[in] identifier[G_] . identifier[nodes] ()}
keyword[if] identifier[N] > literal[int] :
keyword[for] identifier[n] keyword[in] identifier[G] . identifier[nodes] ():
keyword[try] :
identifier[probs] [ identifier[k_] ][ identifier[n] ]= identifier[new_edges] [ identifier[n] ]/ identifier[N]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[if] identifier[probs] [ identifier[k_] ] keyword[is] keyword[not] keyword[None] :
identifier[networkx] . identifier[set_node_attributes] ( identifier[G] . identifier[graphs] [ identifier[k_] ],
literal[string] ,
identifier[probs] [ identifier[k_] ])
identifier[G_] = identifier[G]
identifier[k_] = identifier[k]
identifier[key] = identifier[G] . identifier[graphs] . identifier[keys] ()[- literal[int] ]
identifier[zprobs] ={ identifier[n] : literal[int] keyword[for] identifier[n] keyword[in] identifier[G] . identifier[graphs] [ identifier[key] ]. identifier[nodes] ()}
identifier[networkx] . identifier[set_node_attributes] ( identifier[G] . identifier[graphs] [ identifier[key] ], literal[string] , identifier[zprobs] )
keyword[return] identifier[probs] | def attachment_probability(G):
"""
Calculates the observed attachment probability for each node at each
time-step.
Attachment probability is calculated based on the observed new edges in the
next time-step. So if a node acquires new edges at time t, this will accrue
to the node's attachment probability at time t-1. Thus at a given time,
one can ask whether degree and attachment probability are related.
Parameters
----------
G : :class:`.GraphCollection`
Must be sliced by 'date'. See :func:`.GraphCollection.slice`\\.
Returns
-------
probs : dict
Keyed by index in G.graphs, and then by node.
"""
warnings.warn('Removed in 0.8. Too domain-specific.')
probs = {}
G_ = None
k_ = None
for (k, g) in G.graphs.iteritems():
new_edges = {}
if G_ is not None:
for n in g.nodes():
try:
old_neighbors = set(G_[n].keys())
if len(old_neighbors) > 0:
new_neighbors = set(g[n].keys()) - old_neighbors
new_edges[n] = float(len(new_neighbors)) # depends on [control=['if'], data=[]]
else:
new_edges[n] = 0.0 # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['n']]
N = sum(new_edges.values())
probs[k_] = {n: 0.0 for n in G_.nodes()}
if N > 0.0:
for n in G.nodes():
try:
probs[k_][n] = new_edges[n] / N # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=['N']]
if probs[k_] is not None:
networkx.set_node_attributes(G.graphs[k_], 'attachment_probability', probs[k_]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['G_']]
G_ = G
k_ = k # depends on [control=['for'], data=[]]
# Handle last graph (no values).
key = G.graphs.keys()[-1]
zprobs = {n: 0.0 for n in G.graphs[key].nodes()}
networkx.set_node_attributes(G.graphs[key], 'attachment_probability', zprobs)
return probs |
def create_port(self, name, network, device_id=None, admin_state_up=True):
    '''
    Creates a new port on the given network.

    name -- display name for the port
    network -- network name or id; resolved to an id before the API call
    device_id -- optional device to attach the port to
    admin_state_up -- administrative state of the new port
    '''
    port_spec = {
        'name': name,
        'network_id': self._find_network_id(network),
        'admin_state_up': admin_state_up,
    }
    # Only attach a device when one was actually supplied.
    if device_id:
        port_spec['device_id'] = device_id
    return self.network_conn.create_port(body={'port': port_spec})
constant[
Creates a new port
]
variable[net_id] assign[=] call[name[self]._find_network_id, parameter[name[network]]]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c464a0>, <ast.Constant object at 0x7da1b1c45690>, <ast.Constant object at 0x7da1b1c45870>], [<ast.Name object at 0x7da1b1c46e90>, <ast.Name object at 0x7da1b1c46470>, <ast.Name object at 0x7da1b1c45db0>]]
if name[device_id] begin[:]
call[name[body]][constant[device_id]] assign[=] name[device_id]
return[call[name[self].network_conn.create_port, parameter[]]] | keyword[def] identifier[create_port] ( identifier[self] , identifier[name] , identifier[network] , identifier[device_id] = keyword[None] , identifier[admin_state_up] = keyword[True] ):
literal[string]
identifier[net_id] = identifier[self] . identifier[_find_network_id] ( identifier[network] )
identifier[body] ={ literal[string] : identifier[admin_state_up] ,
literal[string] : identifier[name] ,
literal[string] : identifier[net_id] }
keyword[if] identifier[device_id] :
identifier[body] [ literal[string] ]= identifier[device_id]
keyword[return] identifier[self] . identifier[network_conn] . identifier[create_port] ( identifier[body] ={ literal[string] : identifier[body] }) | def create_port(self, name, network, device_id=None, admin_state_up=True):
"""
Creates a new port
"""
net_id = self._find_network_id(network)
body = {'admin_state_up': admin_state_up, 'name': name, 'network_id': net_id}
if device_id:
body['device_id'] = device_id # depends on [control=['if'], data=[]]
return self.network_conn.create_port(body={'port': body}) |
def seqfy(strs):
    ''' Serialize a string: prefix every non-empty line with a sequence
    number and return the result.

    Each non-empty line of ``strs`` becomes ``"<seq>.<line>\\n"``, where
    ``seq`` counts only non-empty lines starting from 1; empty lines are
    dropped from the output.

    Sample usage:
        strs = ["", None, u"First-line\\nSecond-line\\nThird-line", u"no newline"]
        for s in strs:
            print "---"
            result = seqfy(s)
            print result
            print unseqfy(result)

    Returns None when ``strs`` is falsy (None or the empty string).
    '''
    if not strs:
        return None
    # Collect the pieces and join once at the end; repeated string
    # concatenation inside the loop is quadratic.
    numbered = [
        "{0}.{1}\n".format(seq, line)
        for seq, line in enumerate((l for l in strs.split("\n") if l), 1)
    ]
    return "".join(numbered)
constant[ 序列化 字符串--->实际效果是,为字符串,添加行号,返回字符串
Sampe usage:
strs = ["", None, u"First-line
Second-line
Third-line", u"没有换行符"]
for s in strs:
print "---"
result = seqfy(s)
print result
print unseqfy(result)
]
if <ast.UnaryOp object at 0x7da1b10243a0> begin[:]
return[None]
variable[result] assign[=] constant[]
variable[seq] assign[=] constant[1]
variable[ss] assign[=] call[name[strs].split, parameter[constant[
]]]
for taget[name[i]] in starred[name[ss]] begin[:]
if name[i] begin[:]
variable[result] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b1024fd0>, <ast.Call object at 0x7da1b1025870>, <ast.Constant object at 0x7da1b1024250>, <ast.Name object at 0x7da1b1027e20>, <ast.Constant object at 0x7da1b10250c0>]]]]
variable[seq] assign[=] binary_operation[name[seq] + constant[1]]
return[name[result]] | keyword[def] identifier[seqfy] ( identifier[strs] ):
literal[string]
keyword[if] keyword[not] identifier[strs] :
keyword[return]
identifier[result] = literal[string]
identifier[seq] = literal[int]
identifier[ss] = identifier[strs] . identifier[split] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[ss] :
keyword[if] identifier[i] :
identifier[result] = literal[string] . identifier[join] ([ identifier[result] , identifier[str] ( identifier[seq] ), literal[string] , identifier[i] , literal[string] ])
identifier[seq] = identifier[seq] + literal[int]
keyword[return] identifier[result] | def seqfy(strs):
""" 序列化 字符串--->实际效果是,为字符串,添加行号,返回字符串
Sampe usage:
strs = ["", None, u"First-line
Second-line
Third-line", u"没有换行符"]
for s in strs:
print "---"
result = seqfy(s)
print result
print unseqfy(result)
"""
if not strs:
return # depends on [control=['if'], data=[]]
result = ''
seq = 1
ss = strs.split('\n')
for i in ss:
if i:
result = ''.join([result, str(seq), '.', i, '\n'])
seq = seq + 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return result |
def check_num_tasks(chain, task_count):
    """Make sure there are a specific number of specific task types.

    Currently we only check decision tasks.

    Args:
        chain (ChainOfTrust): the chain we're operating on
        task_count (dict): mapping task type to the number of links.

    Raises:
        CoTError: on failure.

    """
    # hardcode for now. If we need a different set of constraints, either
    # go by cot_product settings or by task_count['docker-image'] + 1
    min_decision_tasks = 1
    num_decision = task_count['decision']
    errors = []
    if num_decision < min_decision_tasks:
        errors.append(
            "{} decision tasks; we must have at least {}!".format(
                num_decision, min_decision_tasks
            )
        )
    raise_on_errors(errors)
constant[Make sure there are a specific number of specific task types.
Currently we only check decision tasks.
Args:
chain (ChainOfTrust): the chain we're operating on
task_count (dict): mapping task type to the number of links.
Raises:
CoTError: on failure.
]
variable[errors] assign[=] list[[]]
variable[min_decision_tasks] assign[=] constant[1]
if compare[call[name[task_count]][constant[decision]] less[<] name[min_decision_tasks]] begin[:]
call[name[errors].append, parameter[call[constant[{} decision tasks; we must have at least {}!].format, parameter[call[name[task_count]][constant[decision]], name[min_decision_tasks]]]]]
call[name[raise_on_errors], parameter[name[errors]]] | keyword[def] identifier[check_num_tasks] ( identifier[chain] , identifier[task_count] ):
literal[string]
identifier[errors] =[]
identifier[min_decision_tasks] = literal[int]
keyword[if] identifier[task_count] [ literal[string] ]< identifier[min_decision_tasks] :
identifier[errors] . identifier[append] ( literal[string] . identifier[format] (
identifier[task_count] [ literal[string] ], identifier[min_decision_tasks]
))
identifier[raise_on_errors] ( identifier[errors] ) | def check_num_tasks(chain, task_count):
"""Make sure there are a specific number of specific task types.
Currently we only check decision tasks.
Args:
chain (ChainOfTrust): the chain we're operating on
task_count (dict): mapping task type to the number of links.
Raises:
CoTError: on failure.
"""
errors = []
# hardcode for now. If we need a different set of constraints, either
# go by cot_product settings or by task_count['docker-image'] + 1
min_decision_tasks = 1
if task_count['decision'] < min_decision_tasks:
errors.append('{} decision tasks; we must have at least {}!'.format(task_count['decision'], min_decision_tasks)) # depends on [control=['if'], data=['min_decision_tasks']]
raise_on_errors(errors) |
def set_children(self, child_ids):
    """Sets the children.

    arg:    child_ids (osid.id.Id[]): the children``Ids``
    raise:  InvalidArgument - ``child_ids`` is invalid
    raise:  NoAccess - ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*

    """
    if not isinstance(child_ids, list):
        raise errors.InvalidArgument()
    if self.get_children_metadata().is_read_only():
        raise errors.NoAccess()
    # De-duplicate while preserving the order in which ids first appear.
    deduped = []
    for child_id in child_ids:
        if not self._is_valid_id(child_id):
            raise errors.InvalidArgument()
        idstr = str(child_id)
        if idstr not in deduped:
            deduped.append(idstr)
    self._my_map['childIds'] = deduped
constant[Sets the children.
arg: child_ids (osid.id.Id[]): the children``Ids``
raise: InvalidArgument - ``child_ids`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
]
if <ast.UnaryOp object at 0x7da18f58ee60> begin[:]
<ast.Raise object at 0x7da18f58f490>
if call[call[name[self].get_children_metadata, parameter[]].is_read_only, parameter[]] begin[:]
<ast.Raise object at 0x7da18f58f580>
variable[idstr_list] assign[=] list[[]]
for taget[name[object_id]] in starred[name[child_ids]] begin[:]
if <ast.UnaryOp object at 0x7da18f58ee30> begin[:]
<ast.Raise object at 0x7da18f58f190>
if compare[call[name[str], parameter[name[object_id]]] <ast.NotIn object at 0x7da2590d7190> name[idstr_list]] begin[:]
call[name[idstr_list].append, parameter[call[name[str], parameter[name[object_id]]]]]
call[name[self]._my_map][constant[childIds]] assign[=] name[idstr_list] | keyword[def] identifier[set_children] ( identifier[self] , identifier[child_ids] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[child_ids] , identifier[list] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ()
keyword[if] identifier[self] . identifier[get_children_metadata] (). identifier[is_read_only] ():
keyword[raise] identifier[errors] . identifier[NoAccess] ()
identifier[idstr_list] =[]
keyword[for] identifier[object_id] keyword[in] identifier[child_ids] :
keyword[if] keyword[not] identifier[self] . identifier[_is_valid_id] ( identifier[object_id] ):
keyword[raise] identifier[errors] . identifier[InvalidArgument] ()
keyword[if] identifier[str] ( identifier[object_id] ) keyword[not] keyword[in] identifier[idstr_list] :
identifier[idstr_list] . identifier[append] ( identifier[str] ( identifier[object_id] ))
identifier[self] . identifier[_my_map] [ literal[string] ]= identifier[idstr_list] | def set_children(self, child_ids):
"""Sets the children.
arg: child_ids (osid.id.Id[]): the children``Ids``
raise: InvalidArgument - ``child_ids`` is invalid
raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
if not isinstance(child_ids, list):
raise errors.InvalidArgument() # depends on [control=['if'], data=[]]
if self.get_children_metadata().is_read_only():
raise errors.NoAccess() # depends on [control=['if'], data=[]]
idstr_list = []
for object_id in child_ids:
if not self._is_valid_id(object_id):
raise errors.InvalidArgument() # depends on [control=['if'], data=[]]
if str(object_id) not in idstr_list:
idstr_list.append(str(object_id)) # depends on [control=['if'], data=['idstr_list']] # depends on [control=['for'], data=['object_id']]
self._my_map['childIds'] = idstr_list |
def write_tabular_otu_table(self, sample_names, read_taxonomies, combined_output_otu_table_io):
    '''A function that takes a hash of trusted placements, and compiles them
    into an OTU-esque table.'''
    sep = u'\t'
    # Header row: OTU id, one column per sample, then the lineage column.
    header_fields = ['#ID', sep.join(sample_names), 'ConsensusLineage']
    combined_output_otu_table_io.write(sep.join(header_fields))
    combined_output_otu_table_io.write(u"\n")
    for otu_id, tax, counts in self._iterate_otu_table_rows(read_taxonomies):
        row = (
            str(otu_id),
            sep.join(str(count) for count in counts),
            '; '.join(tax),
        )
        combined_output_otu_table_io.write(sep.join(row) + "\n")
constant[A function that takes a hash of trusted placements, and compiles them
into an OTU-esque table.]
variable[delim] assign[=] constant[ ]
call[name[combined_output_otu_table_io].write, parameter[call[name[delim].join, parameter[list[[<ast.Constant object at 0x7da2047ea620>, <ast.Call object at 0x7da2047ea740>, <ast.Constant object at 0x7da2047e8310>]]]]]]
call[name[combined_output_otu_table_io].write, parameter[constant[
]]]
for taget[tuple[[<ast.Name object at 0x7da2047e8b80>, <ast.Name object at 0x7da2047e8e80>, <ast.Name object at 0x7da2047ea8c0>]]] in starred[call[name[self]._iterate_otu_table_rows, parameter[name[read_taxonomies]]]] begin[:]
call[name[combined_output_otu_table_io].write, parameter[binary_operation[call[name[delim].join, parameter[tuple[[<ast.Call object at 0x7da2047ebbe0>, <ast.Call object at 0x7da2047e9510>, <ast.Call object at 0x7da2047e9e10>]]]] + constant[
]]]] | keyword[def] identifier[write_tabular_otu_table] ( identifier[self] , identifier[sample_names] , identifier[read_taxonomies] , identifier[combined_output_otu_table_io] ):
literal[string]
identifier[delim] = literal[string]
identifier[combined_output_otu_table_io] . identifier[write] ( identifier[delim] . identifier[join] ([ literal[string] ,
identifier[delim] . identifier[join] ( identifier[sample_names] ),
literal[string] ]))
identifier[combined_output_otu_table_io] . identifier[write] ( literal[string] )
keyword[for] identifier[otu_id] , identifier[tax] , identifier[counts] keyword[in] identifier[self] . identifier[_iterate_otu_table_rows] ( identifier[read_taxonomies] ):
identifier[combined_output_otu_table_io] . identifier[write] ( identifier[delim] . identifier[join] (( identifier[str] ( identifier[otu_id] ),
identifier[delim] . identifier[join] ([ identifier[str] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[counts] ]),
literal[string] . identifier[join] ( identifier[tax] )))+ literal[string] ) | def write_tabular_otu_table(self, sample_names, read_taxonomies, combined_output_otu_table_io):
"""A function that takes a hash of trusted placements, and compiles them
into an OTU-esque table."""
delim = u'\t'
combined_output_otu_table_io.write(delim.join(['#ID', delim.join(sample_names), 'ConsensusLineage']))
combined_output_otu_table_io.write(u'\n')
for (otu_id, tax, counts) in self._iterate_otu_table_rows(read_taxonomies):
combined_output_otu_table_io.write(delim.join((str(otu_id), delim.join([str(c) for c in counts]), '; '.join(tax))) + '\n') # depends on [control=['for'], data=[]] |
def url_for_hit(hit, default="#"):
"""Helper for building URLs from results."""
try:
object_type = hit["object_type"]
object_id = int(hit["id"])
return current_app.default_view.url_for(hit, object_type, object_id)
except KeyError:
return default
except Exception:
logger.error("Error building URL for search result", exc_info=True)
return default | def function[url_for_hit, parameter[hit, default]]:
constant[Helper for building URLs from results.]
<ast.Try object at 0x7da20c6c4d90> | keyword[def] identifier[url_for_hit] ( identifier[hit] , identifier[default] = literal[string] ):
literal[string]
keyword[try] :
identifier[object_type] = identifier[hit] [ literal[string] ]
identifier[object_id] = identifier[int] ( identifier[hit] [ literal[string] ])
keyword[return] identifier[current_app] . identifier[default_view] . identifier[url_for] ( identifier[hit] , identifier[object_type] , identifier[object_id] )
keyword[except] identifier[KeyError] :
keyword[return] identifier[default]
keyword[except] identifier[Exception] :
identifier[logger] . identifier[error] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[return] identifier[default] | def url_for_hit(hit, default='#'):
"""Helper for building URLs from results."""
try:
object_type = hit['object_type']
object_id = int(hit['id'])
return current_app.default_view.url_for(hit, object_type, object_id) # depends on [control=['try'], data=[]]
except KeyError:
return default # depends on [control=['except'], data=[]]
except Exception:
logger.error('Error building URL for search result', exc_info=True)
return default # depends on [control=['except'], data=[]] |
def _setup_google_spreadsheets(settings, path, prompt=True):
    """
    Set up a Google spreadsheet.

    Interactively configures Google-spreadsheet access:

    * optionally asks whether to use Google spreadsheets at all;
    * copies a ``client_secrets.json`` file into the directory of ``path``
      (backing up any existing file first);
    * optionally runs the OAuth flow to authenticate the new secrets file;
    * records a default Google account to share new spreadsheets with.

    Args:
        settings: existing settings mapping; read for the ``google_account``
            default.
        path: settings-file path; ``client_secrets.json`` is placed in the
            same directory.
        prompt: when True, ask the user first and abort if they decline.

    Returns:
        dict containing ``google_account`` when an account was entered, else
        an empty dict.  NOTE(review): when the user declines the initial
        prompt the *original ``settings`` object* is returned instead --
        confirm callers rely on this asymmetry before changing it.

    NOTE(review): uses ``raw_input`` -- presumably Python 2 only.
    """
    ret = {}
    if prompt:
        # Let the user opt out entirely; anything other than "y"/"" aborts.
        use = raw_input("\nWould you like to use Google spreadsheets [Y/n]? ")
        if use.lower() != "y" and use != "":
            return settings
    dirname = os.path.dirname(path)
    # From here on `path` points at the client_secrets.json location, not
    # the settings file it referenced on entry.
    path = os.path.join(dirname, "client_secrets.json")
    write_secrets = True
    if os.path.isfile(path):
        # Never clobber an existing secrets file without confirmation.
        write_secrets_input = raw_input("client_secrets.json already exists. Would you like to overwrite it? [y/N] ")
        if not write_secrets_input.lower().startswith('y'):
            write_secrets = False
    if write_secrets:
        # Walk the user through downloading credentials from the Google
        # developer console, then copy them next to the settings file.
        puts(("\nLogin in to Google and go to {0} to create an app and generate a "
            "\nclient_secrets authentication file. You should create credentials for an `installed app`. See "
            "\n{1} for more information."
            .format(colored.red("https://console.developers.google.com/project"),
                colored.red("http://tarbell.readthedocs.org/en/{0}/install.html#configure-google-spreadsheet-access-optional".format(LONG_VERSION))
            )
        ))
        secrets_path = raw_input(("\nWhere is your client secrets file? "
                                  "[~/Downloads/client_secrets.json] "
                                  ))
        if secrets_path == "":
            secrets_path = os.path.join("~", "Downloads/client_secrets.json")
        secrets_path = os.path.expanduser(secrets_path)
        puts("\nCopying {0} to {1}\n"
             .format(colored.green(secrets_path),
                     colored.green(dirname))
        )
        # Preserve any previous secrets file before overwriting it.
        _backup(dirname, "client_secrets.json")
        try:
            shutil.copy(secrets_path, os.path.join(dirname, 'client_secrets.json'))
        except shutil.Error as e:
            show_error(str(e))
    # Now, try and obtain the API for the first time
    get_api = raw_input("Would you like to authenticate your client_secrets.json? [Y/n] ")
    if get_api == '' or get_api.lower().startswith('y'):
        # reset_creds=True forces a fresh OAuth dance for the new secrets.
        get_drive_api_from_client_secrets(path, reset_creds=True)
    default_account = settings.get("google_account", "")
    account = raw_input(("What Google account(s) should have access to new spreadsheets? "
                         "(e.g. [email protected], leave blank to specify for each new "
                         "project, separate multiple addresses with commas) [{0}] "
                         .format(default_account)
                         ))
    # Blank answer keeps the previously configured account, if any.
    if default_account != "" and account == "":
        account = default_account
    if account != "":
        ret = { "google_account" : account }
    puts("\n- Done configuring Google spreadsheets.")
    return ret
constant[
Set up a Google spreadsheet.
]
variable[ret] assign[=] dictionary[[], []]
if name[prompt] begin[:]
variable[use] assign[=] call[name[raw_input], parameter[constant[
Would you like to use Google spreadsheets [Y/n]? ]]]
if <ast.BoolOp object at 0x7da1b1a8e200> begin[:]
return[name[settings]]
variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[path]]]
variable[path] assign[=] call[name[os].path.join, parameter[name[dirname], constant[client_secrets.json]]]
variable[write_secrets] assign[=] constant[True]
if call[name[os].path.isfile, parameter[name[path]]] begin[:]
variable[write_secrets_input] assign[=] call[name[raw_input], parameter[constant[client_secrets.json already exists. Would you like to overwrite it? [y/N] ]]]
if <ast.UnaryOp object at 0x7da1b1a8cd30> begin[:]
variable[write_secrets] assign[=] constant[False]
if name[write_secrets] begin[:]
call[name[puts], parameter[call[constant[
Login in to Google and go to {0} to create an app and generate a
client_secrets authentication file. You should create credentials for an `installed app`. See
{1} for more information.].format, parameter[call[name[colored].red, parameter[constant[https://console.developers.google.com/project]]], call[name[colored].red, parameter[call[constant[http://tarbell.readthedocs.org/en/{0}/install.html#configure-google-spreadsheet-access-optional].format, parameter[name[LONG_VERSION]]]]]]]]]
variable[secrets_path] assign[=] call[name[raw_input], parameter[constant[
Where is your client secrets file? [~/Downloads/client_secrets.json] ]]]
if compare[name[secrets_path] equal[==] constant[]] begin[:]
variable[secrets_path] assign[=] call[name[os].path.join, parameter[constant[~], constant[Downloads/client_secrets.json]]]
variable[secrets_path] assign[=] call[name[os].path.expanduser, parameter[name[secrets_path]]]
call[name[puts], parameter[call[constant[
Copying {0} to {1}
].format, parameter[call[name[colored].green, parameter[name[secrets_path]]], call[name[colored].green, parameter[name[dirname]]]]]]]
call[name[_backup], parameter[name[dirname], constant[client_secrets.json]]]
<ast.Try object at 0x7da1b1950d30>
variable[get_api] assign[=] call[name[raw_input], parameter[constant[Would you like to authenticate your client_secrets.json? [Y/n] ]]]
if <ast.BoolOp object at 0x7da1b1950f70> begin[:]
call[name[get_drive_api_from_client_secrets], parameter[name[path]]]
variable[default_account] assign[=] call[name[settings].get, parameter[constant[google_account], constant[]]]
variable[account] assign[=] call[name[raw_input], parameter[call[constant[What Google account(s) should have access to new spreadsheets? (e.g. [email protected], leave blank to specify for each new project, separate multiple addresses with commas) [{0}] ].format, parameter[name[default_account]]]]]
if <ast.BoolOp object at 0x7da1b1952d10> begin[:]
variable[account] assign[=] name[default_account]
if compare[name[account] not_equal[!=] constant[]] begin[:]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1917ac0>], [<ast.Name object at 0x7da1b1915b10>]]
call[name[puts], parameter[constant[
- Done configuring Google spreadsheets.]]]
return[name[ret]] | keyword[def] identifier[_setup_google_spreadsheets] ( identifier[settings] , identifier[path] , identifier[prompt] = keyword[True] ):
literal[string]
identifier[ret] ={}
keyword[if] identifier[prompt] :
identifier[use] = identifier[raw_input] ( literal[string] )
keyword[if] identifier[use] . identifier[lower] ()!= literal[string] keyword[and] identifier[use] != literal[string] :
keyword[return] identifier[settings]
identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] )
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , literal[string] )
identifier[write_secrets] = keyword[True]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
identifier[write_secrets_input] = identifier[raw_input] ( literal[string] )
keyword[if] keyword[not] identifier[write_secrets_input] . identifier[lower] (). identifier[startswith] ( literal[string] ):
identifier[write_secrets] = keyword[False]
keyword[if] identifier[write_secrets] :
identifier[puts] (( literal[string]
literal[string]
literal[string]
. identifier[format] ( identifier[colored] . identifier[red] ( literal[string] ),
identifier[colored] . identifier[red] ( literal[string] . identifier[format] ( identifier[LONG_VERSION] ))
)
))
identifier[secrets_path] = identifier[raw_input] (( literal[string]
literal[string]
))
keyword[if] identifier[secrets_path] == literal[string] :
identifier[secrets_path] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] )
identifier[secrets_path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[secrets_path] )
identifier[puts] ( literal[string]
. identifier[format] ( identifier[colored] . identifier[green] ( identifier[secrets_path] ),
identifier[colored] . identifier[green] ( identifier[dirname] ))
)
identifier[_backup] ( identifier[dirname] , literal[string] )
keyword[try] :
identifier[shutil] . identifier[copy] ( identifier[secrets_path] , identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , literal[string] ))
keyword[except] identifier[shutil] . identifier[Error] keyword[as] identifier[e] :
identifier[show_error] ( identifier[str] ( identifier[e] ))
identifier[get_api] = identifier[raw_input] ( literal[string] )
keyword[if] identifier[get_api] == literal[string] keyword[or] identifier[get_api] . identifier[lower] (). identifier[startswith] ( literal[string] ):
identifier[get_drive_api_from_client_secrets] ( identifier[path] , identifier[reset_creds] = keyword[True] )
identifier[default_account] = identifier[settings] . identifier[get] ( literal[string] , literal[string] )
identifier[account] = identifier[raw_input] (( literal[string]
literal[string]
literal[string]
. identifier[format] ( identifier[default_account] )
))
keyword[if] identifier[default_account] != literal[string] keyword[and] identifier[account] == literal[string] :
identifier[account] = identifier[default_account]
keyword[if] identifier[account] != literal[string] :
identifier[ret] ={ literal[string] : identifier[account] }
identifier[puts] ( literal[string] )
keyword[return] identifier[ret] | def _setup_google_spreadsheets(settings, path, prompt=True):
"""
Set up a Google spreadsheet.
"""
ret = {}
if prompt:
use = raw_input('\nWould you like to use Google spreadsheets [Y/n]? ')
if use.lower() != 'y' and use != '':
return settings # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
dirname = os.path.dirname(path)
path = os.path.join(dirname, 'client_secrets.json')
write_secrets = True
if os.path.isfile(path):
write_secrets_input = raw_input('client_secrets.json already exists. Would you like to overwrite it? [y/N] ')
if not write_secrets_input.lower().startswith('y'):
write_secrets = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if write_secrets:
puts('\nLogin in to Google and go to {0} to create an app and generate a \nclient_secrets authentication file. You should create credentials for an `installed app`. See \n{1} for more information.'.format(colored.red('https://console.developers.google.com/project'), colored.red('http://tarbell.readthedocs.org/en/{0}/install.html#configure-google-spreadsheet-access-optional'.format(LONG_VERSION))))
secrets_path = raw_input('\nWhere is your client secrets file? [~/Downloads/client_secrets.json] ')
if secrets_path == '':
secrets_path = os.path.join('~', 'Downloads/client_secrets.json') # depends on [control=['if'], data=['secrets_path']]
secrets_path = os.path.expanduser(secrets_path)
puts('\nCopying {0} to {1}\n'.format(colored.green(secrets_path), colored.green(dirname)))
_backup(dirname, 'client_secrets.json')
try:
shutil.copy(secrets_path, os.path.join(dirname, 'client_secrets.json')) # depends on [control=['try'], data=[]]
except shutil.Error as e:
show_error(str(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
# Now, try and obtain the API for the first time
get_api = raw_input('Would you like to authenticate your client_secrets.json? [Y/n] ')
if get_api == '' or get_api.lower().startswith('y'):
get_drive_api_from_client_secrets(path, reset_creds=True) # depends on [control=['if'], data=[]]
default_account = settings.get('google_account', '')
account = raw_input('What Google account(s) should have access to new spreadsheets? (e.g. [email protected], leave blank to specify for each new project, separate multiple addresses with commas) [{0}] '.format(default_account))
if default_account != '' and account == '':
account = default_account # depends on [control=['if'], data=[]]
if account != '':
ret = {'google_account': account} # depends on [control=['if'], data=['account']]
puts('\n- Done configuring Google spreadsheets.')
return ret |
def _to_dict(self):
    """Return a json dictionary representing this model."""
    output = {}
    voices = getattr(self, 'voices', None)
    if voices is not None:
        output['voices'] = [voice._to_dict() for voice in voices]
    return output
constant[Return a json dictionary representing this model.]
variable[_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da204623400> begin[:]
call[name[_dict]][constant[voices]] assign[=] <ast.ListComp object at 0x7da204621300>
return[name[_dict]] | keyword[def] identifier[_to_dict] ( identifier[self] ):
literal[string]
identifier[_dict] ={}
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[voices] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]=[ identifier[x] . identifier[_to_dict] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[voices] ]
keyword[return] identifier[_dict] | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'voices') and self.voices is not None:
_dict['voices'] = [x._to_dict() for x in self.voices] # depends on [control=['if'], data=[]]
return _dict |
def _create_delegate_handler(delegate):
    """Creates a handler function that creates a co-routine that can yield once with the given
    positional arguments to the delegate as a transition.

    Args:
        delegate (Coroutine): The co-routine to delegate to.

    Returns:
        A :class:`callable` handler that returns a co-routine that ignores the data it receives
        and sends with the arguments given to the handler as a :class:`Transition`.
    """
    @coroutine
    def handler(*args):
        # First yield: receive (and discard) whatever is sent in.
        # NOTE(review): presumably the @coroutine decorator primes the
        # generator past this point -- confirm against its definition.
        yield
        # Second yield: forward the handler's arguments to the delegate as a
        # Transition, yielding whatever the delegate produces in response.
        yield delegate.send(Transition(args, delegate))
    return handler
constant[Creates a handler function that creates a co-routine that can yield once with the given
positional arguments to the delegate as a transition.
Args:
delegate (Coroutine): The co-routine to delegate to.
Returns:
A :class:`callable` handler that returns a co-routine that ignores the data it receives
and sends with the arguments given to the handler as a :class:`Transition`.
]
def function[handler, parameter[]]:
<ast.Yield object at 0x7da1b15ace80>
<ast.Yield object at 0x7da1b15ac340>
return[name[handler]] | keyword[def] identifier[_create_delegate_handler] ( identifier[delegate] ):
literal[string]
@ identifier[coroutine]
keyword[def] identifier[handler] (* identifier[args] ):
keyword[yield]
keyword[yield] identifier[delegate] . identifier[send] ( identifier[Transition] ( identifier[args] , identifier[delegate] ))
keyword[return] identifier[handler] | def _create_delegate_handler(delegate):
"""Creates a handler function that creates a co-routine that can yield once with the given
positional arguments to the delegate as a transition.
Args:
delegate (Coroutine): The co-routine to delegate to.
Returns:
A :class:`callable` handler that returns a co-routine that ignores the data it receives
and sends with the arguments given to the handler as a :class:`Transition`.
"""
@coroutine
def handler(*args):
yield
yield delegate.send(Transition(args, delegate))
return handler |
def add_favicon_path(self, path: str) -> None:
    """Add path to serve favicon file.

    ``path`` should be a directory, which contains favicon file
    (``favicon.ico``) for your app.
    """
    favicon_spec = web.URLSpec(
        '/(favicon.ico)',
        StaticFileHandler,
        {'path': path},
    )
    # Need some check
    self.handlers[0][1].append(favicon_spec)
handlers.append(spec) | def function[add_favicon_path, parameter[self, path]]:
constant[Add path to serve favicon file.
``path`` should be a directory, which contains favicon file
(``favicon.ico``) for your app.
]
variable[spec] assign[=] call[name[web].URLSpec, parameter[constant[/(favicon.ico)], name[StaticFileHandler], call[name[dict], parameter[]]]]
variable[handlers] assign[=] call[call[name[self].handlers][constant[0]]][constant[1]]
call[name[handlers].append, parameter[name[spec]]] | keyword[def] identifier[add_favicon_path] ( identifier[self] , identifier[path] : identifier[str] )-> keyword[None] :
literal[string]
identifier[spec] = identifier[web] . identifier[URLSpec] (
literal[string] ,
identifier[StaticFileHandler] ,
identifier[dict] ( identifier[path] = identifier[path] )
)
identifier[handlers] = identifier[self] . identifier[handlers] [ literal[int] ][ literal[int] ]
identifier[handlers] . identifier[append] ( identifier[spec] ) | def add_favicon_path(self, path: str) -> None:
"""Add path to serve favicon file.
``path`` should be a directory, which contains favicon file
(``favicon.ico``) for your app.
"""
spec = web.URLSpec('/(favicon.ico)', StaticFileHandler, dict(path=path))
# Need some check
handlers = self.handlers[0][1]
handlers.append(spec) |
def get_pixels(self):
    """Read back the contents of the 8x8 LED matrix.

    Returns a list of 64 smaller lists of [R, G, B] values describing
    what is currently displayed, in row-major order after applying the
    rotation-specific pixel map.
    """
    pixels = []
    with open(self._fb_device, 'rb') as framebuffer:
        layout = self._pix_map[self._rotation]
        for offset in (layout[row][col] for row in range(8) for col in range(8)):
            # Each pixel occupies two bytes (16-bit RGB565) in fb memory.
            framebuffer.seek(offset * 2)
            pixels.append(self._unpack_bin(framebuffer.read(2)))
    return pixels
constant[
Returns a list containing 64 smaller lists of [R,G,B] pixels
representing what is currently displayed on the LED matrix
]
variable[pixel_list] assign[=] list[[]]
with call[name[open], parameter[name[self]._fb_device, constant[rb]]] begin[:]
variable[map] assign[=] call[name[self]._pix_map][name[self]._rotation]
for taget[name[row]] in starred[call[name[range], parameter[constant[8]]]] begin[:]
for taget[name[col]] in starred[call[name[range], parameter[constant[8]]]] begin[:]
call[name[f].seek, parameter[binary_operation[call[call[name[map]][name[row]]][name[col]] * constant[2]]]]
call[name[pixel_list].append, parameter[call[name[self]._unpack_bin, parameter[call[name[f].read, parameter[constant[2]]]]]]]
return[name[pixel_list]] | keyword[def] identifier[get_pixels] ( identifier[self] ):
literal[string]
identifier[pixel_list] =[]
keyword[with] identifier[open] ( identifier[self] . identifier[_fb_device] , literal[string] ) keyword[as] identifier[f] :
identifier[map] = identifier[self] . identifier[_pix_map] [ identifier[self] . identifier[_rotation] ]
keyword[for] identifier[row] keyword[in] identifier[range] ( literal[int] ):
keyword[for] identifier[col] keyword[in] identifier[range] ( literal[int] ):
identifier[f] . identifier[seek] ( identifier[map] [ identifier[row] ][ identifier[col] ]* literal[int] )
identifier[pixel_list] . identifier[append] ( identifier[self] . identifier[_unpack_bin] ( identifier[f] . identifier[read] ( literal[int] )))
keyword[return] identifier[pixel_list] | def get_pixels(self):
"""
Returns a list containing 64 smaller lists of [R,G,B] pixels
representing what is currently displayed on the LED matrix
"""
pixel_list = []
with open(self._fb_device, 'rb') as f:
map = self._pix_map[self._rotation]
for row in range(8):
for col in range(8):
# Two bytes per pixel in fb memory, 16 bit RGB565
f.seek(map[row][col] * 2) # row, column
pixel_list.append(self._unpack_bin(f.read(2))) # depends on [control=['for'], data=['col']] # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['f']]
return pixel_list |
def serve_forever(self, banner=None):
    """Interact with the user.

    Restores the readline history (when the readline module provides
    ``read_history_file``) and arranges for it to be saved at
    interpreter exit, then defers to the parent implementation.

    :param banner: (optional) the banner to print before the first
        interaction. Defaults to ``None``.
    """
    def _load_history():
        # A missing or unreadable history file is not an error on a
        # first run, so it is silently ignored.
        try:
            readline.read_history_file(self.histfile)
        except IOError:
            pass

    if hasattr(readline, "read_history_file"):
        _load_history()
        atexit.register(self._save_history)
    super(Shell, self).serve_forever(banner)
constant[Interact with the user.
:param banner: (optional) the banner to print before the first
interaction. Defaults to ``None``.
]
if call[name[hasattr], parameter[name[readline], constant[read_history_file]]] begin[:]
<ast.Try object at 0x7da1b24cbc10>
call[name[atexit].register, parameter[name[self]._save_history]]
call[call[name[super], parameter[name[Shell], name[self]]].serve_forever, parameter[name[banner]]] | keyword[def] identifier[serve_forever] ( identifier[self] , identifier[banner] = keyword[None] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[readline] , literal[string] ):
keyword[try] :
identifier[readline] . identifier[read_history_file] ( identifier[self] . identifier[histfile] )
keyword[except] identifier[IOError] :
keyword[pass]
identifier[atexit] . identifier[register] ( identifier[self] . identifier[_save_history] )
identifier[super] ( identifier[Shell] , identifier[self] ). identifier[serve_forever] ( identifier[banner] ) | def serve_forever(self, banner=None):
"""Interact with the user.
:param banner: (optional) the banner to print before the first
interaction. Defaults to ``None``.
"""
if hasattr(readline, 'read_history_file'):
try:
readline.read_history_file(self.histfile) # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]]
atexit.register(self._save_history) # depends on [control=['if'], data=[]]
super(Shell, self).serve_forever(banner) |
def can_edit_post(self, post, user):
    """Given a forum post, checks whether the user can edit the latter.

    Editing is allowed when the user is a superuser, when they authored
    the post, hold the "edit own posts" permission and the topic is not
    locked, or when they hold the moderator "edit posts" permission.
    """
    perm_checker = self._get_checker(user)
    authored_by_user = self._is_post_author(post, user)
    # Short-circuit order mirrors the checks above: superuser first,
    # then author rights, then moderator rights.
    return (
        user.is_superuser
        or (
            authored_by_user
            and perm_checker.has_perm('can_edit_own_posts', post.topic.forum)
            and not post.topic.is_locked
        )
        or perm_checker.has_perm('can_edit_posts', post.topic.forum)
    )
constant[ Given a forum post, checks whether the user can edit the latter. ]
variable[checker] assign[=] call[name[self]._get_checker, parameter[name[user]]]
variable[is_author] assign[=] call[name[self]._is_post_author, parameter[name[post], name[user]]]
variable[can_edit] assign[=] <ast.BoolOp object at 0x7da20c7956c0>
return[name[can_edit]] | keyword[def] identifier[can_edit_post] ( identifier[self] , identifier[post] , identifier[user] ):
literal[string]
identifier[checker] = identifier[self] . identifier[_get_checker] ( identifier[user] )
identifier[is_author] = identifier[self] . identifier[_is_post_author] ( identifier[post] , identifier[user] )
identifier[can_edit] =(
identifier[user] . identifier[is_superuser] keyword[or]
(
identifier[is_author] keyword[and] identifier[checker] . identifier[has_perm] ( literal[string] , identifier[post] . identifier[topic] . identifier[forum] ) keyword[and]
keyword[not] identifier[post] . identifier[topic] . identifier[is_locked]
) keyword[or]
identifier[checker] . identifier[has_perm] ( literal[string] , identifier[post] . identifier[topic] . identifier[forum] )
)
keyword[return] identifier[can_edit] | def can_edit_post(self, post, user):
""" Given a forum post, checks whether the user can edit the latter. """
checker = self._get_checker(user)
# A user can edit a post if...
# they are a superuser
# they are the original poster of the forum post
# they belong to the forum moderators
is_author = self._is_post_author(post, user)
can_edit = user.is_superuser or (is_author and checker.has_perm('can_edit_own_posts', post.topic.forum) and (not post.topic.is_locked)) or checker.has_perm('can_edit_posts', post.topic.forum)
return can_edit |
def run_simulation(projectname: str, xmlfile: str):
    """Run a complete HydPy workflow driven by the given XML
    configuration file found in the directory of the given project. ToDo

    |run_simulation| is a "script function"; see the main documentation
    of module |xmltools| for its usual usage.
    """
    log = commandtools.print_textandtime
    hydpy.pub.options.printprogress = False
    log(f'Start HydPy project `{projectname}`')
    project = hydpytools.HydPy(projectname)
    log(f'Read configuration file `{xmlfile}`')
    xml = XMLInterface(xmlfile)
    log('Interpret the defined options')
    xml.update_options()
    # Re-disable progress printing: interpreting the options may have
    # switched it back on.
    hydpy.pub.options.printprogress = False
    log('Interpret the defined period')
    xml.update_timegrids()
    log('Read all network files')
    project.prepare_network()
    log('Activate the selected network')
    project.update_devices(xml.fullselection)
    log('Read the required control files')
    project.init_models()
    log('Read the required condition files')
    xml.conditions_io.load_conditions()
    log('Read the required time series files')
    xml.series_io.prepare_series()
    xml.series_io.load_series()
    log('Perform the simulation run')
    project.doit()
    log('Write the desired condition files')
    xml.conditions_io.save_conditions()
    log('Write the desired time series files')
    xml.series_io.save_series()
constant[Perform a HydPy workflow in agreement with the given XML configuration
file available in the directory of the given project. ToDo
Function |run_simulation| is a "script function" and is normally used as
explained in the main documentation on module |xmltools|.
]
variable[write] assign[=] name[commandtools].print_textandtime
name[hydpy].pub.options.printprogress assign[=] constant[False]
call[name[write], parameter[<ast.JoinedStr object at 0x7da18bccb520>]]
variable[hp] assign[=] call[name[hydpytools].HydPy, parameter[name[projectname]]]
call[name[write], parameter[<ast.JoinedStr object at 0x7da18bcc8310>]]
variable[interface] assign[=] call[name[XMLInterface], parameter[name[xmlfile]]]
call[name[write], parameter[constant[Interpret the defined options]]]
call[name[interface].update_options, parameter[]]
name[hydpy].pub.options.printprogress assign[=] constant[False]
call[name[write], parameter[constant[Interpret the defined period]]]
call[name[interface].update_timegrids, parameter[]]
call[name[write], parameter[constant[Read all network files]]]
call[name[hp].prepare_network, parameter[]]
call[name[write], parameter[constant[Activate the selected network]]]
call[name[hp].update_devices, parameter[name[interface].fullselection]]
call[name[write], parameter[constant[Read the required control files]]]
call[name[hp].init_models, parameter[]]
call[name[write], parameter[constant[Read the required condition files]]]
call[name[interface].conditions_io.load_conditions, parameter[]]
call[name[write], parameter[constant[Read the required time series files]]]
call[name[interface].series_io.prepare_series, parameter[]]
call[name[interface].series_io.load_series, parameter[]]
call[name[write], parameter[constant[Perform the simulation run]]]
call[name[hp].doit, parameter[]]
call[name[write], parameter[constant[Write the desired condition files]]]
call[name[interface].conditions_io.save_conditions, parameter[]]
call[name[write], parameter[constant[Write the desired time series files]]]
call[name[interface].series_io.save_series, parameter[]] | keyword[def] identifier[run_simulation] ( identifier[projectname] : identifier[str] , identifier[xmlfile] : identifier[str] ):
literal[string]
identifier[write] = identifier[commandtools] . identifier[print_textandtime]
identifier[hydpy] . identifier[pub] . identifier[options] . identifier[printprogress] = keyword[False]
identifier[write] ( literal[string] )
identifier[hp] = identifier[hydpytools] . identifier[HydPy] ( identifier[projectname] )
identifier[write] ( literal[string] )
identifier[interface] = identifier[XMLInterface] ( identifier[xmlfile] )
identifier[write] ( literal[string] )
identifier[interface] . identifier[update_options] ()
identifier[hydpy] . identifier[pub] . identifier[options] . identifier[printprogress] = keyword[False]
identifier[write] ( literal[string] )
identifier[interface] . identifier[update_timegrids] ()
identifier[write] ( literal[string] )
identifier[hp] . identifier[prepare_network] ()
identifier[write] ( literal[string] )
identifier[hp] . identifier[update_devices] ( identifier[interface] . identifier[fullselection] )
identifier[write] ( literal[string] )
identifier[hp] . identifier[init_models] ()
identifier[write] ( literal[string] )
identifier[interface] . identifier[conditions_io] . identifier[load_conditions] ()
identifier[write] ( literal[string] )
identifier[interface] . identifier[series_io] . identifier[prepare_series] ()
identifier[interface] . identifier[series_io] . identifier[load_series] ()
identifier[write] ( literal[string] )
identifier[hp] . identifier[doit] ()
identifier[write] ( literal[string] )
identifier[interface] . identifier[conditions_io] . identifier[save_conditions] ()
identifier[write] ( literal[string] )
identifier[interface] . identifier[series_io] . identifier[save_series] () | def run_simulation(projectname: str, xmlfile: str):
"""Perform a HydPy workflow in agreement with the given XML configuration
file available in the directory of the given project. ToDo
Function |run_simulation| is a "script function" and is normally used as
explained in the main documentation on module |xmltools|.
"""
write = commandtools.print_textandtime
hydpy.pub.options.printprogress = False
write(f'Start HydPy project `{projectname}`')
hp = hydpytools.HydPy(projectname)
write(f'Read configuration file `{xmlfile}`')
interface = XMLInterface(xmlfile)
write('Interpret the defined options')
interface.update_options()
hydpy.pub.options.printprogress = False
write('Interpret the defined period')
interface.update_timegrids()
write('Read all network files')
hp.prepare_network()
write('Activate the selected network')
hp.update_devices(interface.fullselection)
write('Read the required control files')
hp.init_models()
write('Read the required condition files')
interface.conditions_io.load_conditions()
write('Read the required time series files')
interface.series_io.prepare_series()
interface.series_io.load_series()
write('Perform the simulation run')
hp.doit()
write('Write the desired condition files')
interface.conditions_io.save_conditions()
write('Write the desired time series files')
interface.series_io.save_series() |
def lint(filename):
    """Lint an INI file.

    :param filename: path of the INI file to check.
    :return: 0 on success, 1 for a configuration parse error,
        2 for any other error.

    Note: ``ConfigParser.read`` silently skips files that cannot be
    opened, so a missing file still yields 0.
    """
    config = ConfigParser.ConfigParser()
    try:
        config.read(filename)
        return 0
    except ConfigParser.Error as error:
        print('Error: %s' % error)
        return 1
    except Exception:
        # Bug fix: the original bare ``except:`` also trapped
        # SystemExit and KeyboardInterrupt; restrict the handler to
        # ``Exception`` so those propagate normally.
        print('Unexpected Error')
        return 2
constant[Lints an INI file, returning 0 in case of success.]
variable[config] assign[=] call[name[ConfigParser].ConfigParser, parameter[]]
<ast.Try object at 0x7da1b0b64580> | keyword[def] identifier[lint] ( identifier[filename] ):
literal[string]
identifier[config] = identifier[ConfigParser] . identifier[ConfigParser] ()
keyword[try] :
identifier[config] . identifier[read] ( identifier[filename] )
keyword[return] literal[int]
keyword[except] identifier[ConfigParser] . identifier[Error] keyword[as] identifier[error] :
identifier[print] ( literal[string] % identifier[error] )
keyword[return] literal[int]
keyword[except] :
identifier[print] ( literal[string] )
keyword[return] literal[int] | def lint(filename):
"""Lints an INI file, returning 0 in case of success."""
config = ConfigParser.ConfigParser()
try:
config.read(filename)
return 0 # depends on [control=['try'], data=[]]
except ConfigParser.Error as error:
print('Error: %s' % error)
return 1 # depends on [control=['except'], data=['error']]
except:
print('Unexpected Error')
return 2 # depends on [control=['except'], data=[]] |
def SYSCALL(cpu):
    """
    SYSCALL instruction (fast system call).

    Saves the current ``RIP`` into ``RCX`` and the current ``RFLAGS``
    into ``R11``, then raises :class:`Syscall` so the emulator's
    dispatcher can service the system call.  (Per the x86-64 SYSCALL
    semantics, RCX is expected to receive the address of the
    instruction following SYSCALL -- this assumes ``cpu.RIP`` already
    points past the instruction; TODO confirm against the decoder.)

    NOTE(review): the previous docstring described the INT n / INTO
    instructions and did not apply to SYSCALL.

    :param cpu: current CPU.
    """
    cpu.RCX = cpu.RIP
    cpu.R11 = cpu.RFLAGS
    raise Syscall()
constant[
Calls to interrupt procedure.
The INT n instruction generates a call to the interrupt or exception handler specified
with the destination operand. The INT n instruction is the general mnemonic for executing
a software-generated call to an interrupt handler. The INTO instruction is a special
mnemonic for calling overflow exception (#OF), interrupt vector number 4. The overflow
interrupt checks the OF flag in the EFLAGS register and calls the overflow interrupt handler
if the OF flag is set to 1.
:param cpu: current CPU.
]
name[cpu].RCX assign[=] name[cpu].RIP
name[cpu].R11 assign[=] name[cpu].RFLAGS
<ast.Raise object at 0x7da18f811a50> | keyword[def] identifier[SYSCALL] ( identifier[cpu] ):
literal[string]
identifier[cpu] . identifier[RCX] = identifier[cpu] . identifier[RIP]
identifier[cpu] . identifier[R11] = identifier[cpu] . identifier[RFLAGS]
keyword[raise] identifier[Syscall] () | def SYSCALL(cpu):
"""
Calls to interrupt procedure.
The INT n instruction generates a call to the interrupt or exception handler specified
with the destination operand. The INT n instruction is the general mnemonic for executing
a software-generated call to an interrupt handler. The INTO instruction is a special
mnemonic for calling overflow exception (#OF), interrupt vector number 4. The overflow
interrupt checks the OF flag in the EFLAGS register and calls the overflow interrupt handler
if the OF flag is set to 1.
:param cpu: current CPU.
"""
cpu.RCX = cpu.RIP
cpu.R11 = cpu.RFLAGS
raise Syscall() |
def _update(self, resource, update_dict=None, params=None, **kwargs):
    """Update the object via a PUT request and refresh local state.

    :param resource: name of the API resource used to build the URL
    :param update_dict: JSON-serialisable payload with the fields to set
    :param params: optional query parameters for the request
    :param kwargs: extra arguments forwarded to the URL builder
    :raises APIError: when the server does not answer with HTTP 200
    """
    endpoint = self._client._build_url(resource, **kwargs)
    reply = self._client._request('PUT', endpoint, json=update_dict, params=params)
    if reply.status_code == requests.codes.ok:
        # Reload local state so it reflects the server-side changes.
        self.refresh()
    else:  # pragma: no cover
        raise APIError("Could not update {} ({})".format(
            self.__class__.__name__, reply.json().get('results')))
constant[Update the object.]
variable[url] assign[=] call[name[self]._client._build_url, parameter[name[resource]]]
variable[response] assign[=] call[name[self]._client._request, parameter[constant[PUT], name[url]]]
if compare[name[response].status_code not_equal[!=] name[requests].codes.ok] begin[:]
<ast.Raise object at 0x7da1b24adb10> | keyword[def] identifier[_update] ( identifier[self] , identifier[resource] , identifier[update_dict] = keyword[None] , identifier[params] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[url] = identifier[self] . identifier[_client] . identifier[_build_url] ( identifier[resource] ,** identifier[kwargs] )
identifier[response] = identifier[self] . identifier[_client] . identifier[_request] ( literal[string] , identifier[url] , identifier[json] = identifier[update_dict] , identifier[params] = identifier[params] )
keyword[if] identifier[response] . identifier[status_code] != identifier[requests] . identifier[codes] . identifier[ok] :
keyword[raise] identifier[APIError] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[response] . identifier[json] (). identifier[get] ( literal[string] )))
keyword[else] :
identifier[self] . identifier[refresh] () | def _update(self, resource, update_dict=None, params=None, **kwargs):
"""Update the object."""
url = self._client._build_url(resource, **kwargs)
response = self._client._request('PUT', url, json=update_dict, params=params)
if response.status_code != requests.codes.ok: # pragma: no cover
raise APIError('Could not update {} ({})'.format(self.__class__.__name__, response.json().get('results'))) # depends on [control=['if'], data=[]]
else:
self.refresh() |
def send(self, to, subject=None, body=None, reply_to=None, template=None, **kwargs):
    """
    Send an email through the configured provider ("SES" or "SMTP").

    :param to: the recipient(s); a single address string or a list
    :param subject: the subject line
    :param body: the message body
    :param reply_to: the Reply-To address
    :param template: optional template name; when given, the template is
        rendered instead of using ``subject``/``body`` directly
    :param kwargs: extra context arguments for the provider / template
    :return: None (the original docstring claimed a bool, but the
        function returns no value)
    """
    sender = self.config.get("MAIL_SENDER")
    # Normalise the recipients to a list.
    recipients = [to] if not isinstance(to, list) else to
    kwargs.update({
        "subject": subject,
        "body": body,
        "reply_to": reply_to
    })
    # ``abort`` presumably raises, ending the call here -- TODO confirm.
    if not self.validated:
        abort("MailmanConfigurationError")
    if self.provider == "SES":
        kwargs["to"] = recipients
        if template:
            # SES renders the template on the provider side.
            self.mail.send_template(template=template, **kwargs)
        else:
            self.mail.send(**kwargs)
    elif self.provider == "SMTP":
        if template:
            # Render subject and body locally from the template context.
            data = self._template(template=template, **kwargs)
            kwargs["subject"] = data["subject"]
            kwargs["body"] = data["body"]
        kwargs["recipients"] = recipients
        kwargs["sender"] = sender
        # Remove invalid Messages keys
        _safe_keys = ["recipients", "subject", "body", "html", "alts",
                      "cc", "bcc", "attachments", "reply_to", "sender",
                      "date", "charset", "extra_headers", "mail_options",
                      "rcpt_options"]
        # Iterate over a copy since entries are deleted while iterating.
        for k in kwargs.copy():
            if k not in _safe_keys:
                del kwargs[k]
        message = flask_mail.Message(**kwargs)
        self.mail.send(message)
    else:
        abort("MailmanUnknownProviderError")
constant[
To send email
:param to: the recipients, list or string
:param subject: the subject
:param body: the body
:param reply_to: reply_to
:param template: template, will use the templates instead
:param kwargs: context args
:return: bool - True if everything is ok
]
variable[sender] assign[=] call[name[self].config.get, parameter[constant[MAIL_SENDER]]]
variable[recipients] assign[=] <ast.IfExp object at 0x7da18fe92e60>
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da18fe928c0>, <ast.Constant object at 0x7da18fe92c50>, <ast.Constant object at 0x7da18fe93f70>], [<ast.Name object at 0x7da18fe92a40>, <ast.Name object at 0x7da18fe93700>, <ast.Name object at 0x7da18fe932b0>]]]]
if <ast.UnaryOp object at 0x7da18fe90910> begin[:]
call[name[abort], parameter[constant[MailmanConfigurationError]]]
if compare[name[self].provider equal[==] constant[SES]] begin[:]
call[name[kwargs]][constant[to]] assign[=] name[recipients]
if name[template] begin[:]
call[name[self].mail.send_template, parameter[]] | keyword[def] identifier[send] ( identifier[self] , identifier[to] , identifier[subject] = keyword[None] , identifier[body] = keyword[None] , identifier[reply_to] = keyword[None] , identifier[template] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[sender] = identifier[self] . identifier[config] . identifier[get] ( literal[string] )
identifier[recipients] =[ identifier[to] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[to] , identifier[list] ) keyword[else] identifier[to]
identifier[kwargs] . identifier[update] ({
literal[string] : identifier[subject] ,
literal[string] : identifier[body] ,
literal[string] : identifier[reply_to]
})
keyword[if] keyword[not] identifier[self] . identifier[validated] :
identifier[abort] ( literal[string] )
keyword[if] identifier[self] . identifier[provider] == literal[string] :
identifier[kwargs] [ literal[string] ]= identifier[recipients]
keyword[if] identifier[template] :
identifier[self] . identifier[mail] . identifier[send_template] ( identifier[template] = identifier[template] ,** identifier[kwargs] )
keyword[else] :
identifier[self] . identifier[mail] . identifier[send] (** identifier[kwargs] )
keyword[elif] identifier[self] . identifier[provider] == literal[string] :
keyword[if] identifier[template] :
identifier[data] = identifier[self] . identifier[_template] ( identifier[template] = identifier[template] ,** identifier[kwargs] )
identifier[kwargs] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[kwargs] [ literal[string] ]= identifier[data] [ literal[string] ]
identifier[kwargs] [ literal[string] ]= identifier[recipients]
identifier[kwargs] [ literal[string] ]= identifier[sender]
identifier[_safe_keys] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] ]
keyword[for] identifier[k] keyword[in] identifier[kwargs] . identifier[copy] ():
keyword[if] identifier[k] keyword[not] keyword[in] identifier[_safe_keys] :
keyword[del] identifier[kwargs] [ identifier[k] ]
identifier[message] = identifier[flask_mail] . identifier[Message] (** identifier[kwargs] )
identifier[self] . identifier[mail] . identifier[send] ( identifier[message] )
keyword[else] :
identifier[abort] ( literal[string] ) | def send(self, to, subject=None, body=None, reply_to=None, template=None, **kwargs):
"""
To send email
:param to: the recipients, list or string
:param subject: the subject
:param body: the body
:param reply_to: reply_to
:param template: template, will use the templates instead
:param kwargs: context args
:return: bool - True if everything is ok
"""
sender = self.config.get('MAIL_SENDER')
recipients = [to] if not isinstance(to, list) else to
kwargs.update({'subject': subject, 'body': body, 'reply_to': reply_to})
if not self.validated:
abort('MailmanConfigurationError') # depends on [control=['if'], data=[]]
if self.provider == 'SES':
kwargs['to'] = recipients
if template:
self.mail.send_template(template=template, **kwargs) # depends on [control=['if'], data=[]]
else:
self.mail.send(**kwargs) # depends on [control=['if'], data=[]]
elif self.provider == 'SMTP':
if template:
data = self._template(template=template, **kwargs)
kwargs['subject'] = data['subject']
kwargs['body'] = data['body'] # depends on [control=['if'], data=[]]
kwargs['recipients'] = recipients
kwargs['sender'] = sender
# Remove invalid Messages keys
_safe_keys = ['recipients', 'subject', 'body', 'html', 'alts', 'cc', 'bcc', 'attachments', 'reply_to', 'sender', 'date', 'charset', 'extra_headers', 'mail_options', 'rcpt_options']
for k in kwargs.copy():
if k not in _safe_keys:
del kwargs[k] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']]
message = flask_mail.Message(**kwargs)
self.mail.send(message) # depends on [control=['if'], data=[]]
else:
abort('MailmanUnknownProviderError') |
def right_click_high_equalarea(self, event):
    """Toggle the high equal-area plot between zoom and pan modes.

    Invoked on right click; a click with the left button still held
    down is ignored.

    Parameters
    ----------
    event : wx.MouseEvent
        The mouse event that triggered this handler.

    Alters
    ------
    high_EA_setting, toolbar4 setting
    """
    if event.LeftIsDown():
        return
    current = self.high_EA_setting
    if current == "Zoom":
        self.high_EA_setting = "Pan"
        switch, switch_args = self.toolbar4.pan, ('off',)
    elif current == "Pan":
        self.high_EA_setting = "Zoom"
        switch, switch_args = self.toolbar4.zoom, ()
    else:
        return
    try:
        switch(*switch_args)
    except TypeError:
        # The toolbar call may reject this argument form; ignore it,
        # matching the original behaviour.
        pass
constant[
toggles between zoom and pan effects for the high equal area on
right click
Parameters
----------
event : the wx.MouseEvent that triggered the call of this function
Alters
------
high_EA_setting, toolbar4 setting
]
if call[name[event].LeftIsDown, parameter[]] begin[:]
return[None] | keyword[def] identifier[right_click_high_equalarea] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[event] . identifier[LeftIsDown] ():
keyword[return]
keyword[elif] identifier[self] . identifier[high_EA_setting] == literal[string] :
identifier[self] . identifier[high_EA_setting] = literal[string]
keyword[try] :
identifier[self] . identifier[toolbar4] . identifier[pan] ( literal[string] )
keyword[except] identifier[TypeError] :
keyword[pass]
keyword[elif] identifier[self] . identifier[high_EA_setting] == literal[string] :
identifier[self] . identifier[high_EA_setting] = literal[string]
keyword[try] :
identifier[self] . identifier[toolbar4] . identifier[zoom] ()
keyword[except] identifier[TypeError] :
keyword[pass] | def right_click_high_equalarea(self, event):
"""
toggles between zoom and pan effects for the high equal area on
right click
Parameters
----------
event : the wx.MouseEvent that triggered the call of this function
Alters
------
high_EA_setting, toolbar4 setting
"""
if event.LeftIsDown():
return # depends on [control=['if'], data=[]]
elif self.high_EA_setting == 'Zoom':
self.high_EA_setting = 'Pan'
try:
self.toolbar4.pan('off') # depends on [control=['try'], data=[]]
except TypeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif self.high_EA_setting == 'Pan':
self.high_EA_setting = 'Zoom'
try:
self.toolbar4.zoom() # depends on [control=['try'], data=[]]
except TypeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def apply_config(self, config):
    """Restore this hash's state from a configuration mapping.

    :param config: dict carrying the keys ``hash_name``, ``dim``,
        ``projection_count``, ``normals``, ``tree_root`` and
        ``minimum_result_size``.
    :raises KeyError: if any of those keys is missing.
    """
    for key in ('hash_name', 'dim', 'projection_count',
                'normals', 'tree_root', 'minimum_result_size'):
        setattr(self, key, config[key])
constant[
Applies config
]
name[self].hash_name assign[=] call[name[config]][constant[hash_name]]
name[self].dim assign[=] call[name[config]][constant[dim]]
name[self].projection_count assign[=] call[name[config]][constant[projection_count]]
name[self].normals assign[=] call[name[config]][constant[normals]]
name[self].tree_root assign[=] call[name[config]][constant[tree_root]]
name[self].minimum_result_size assign[=] call[name[config]][constant[minimum_result_size]] | keyword[def] identifier[apply_config] ( identifier[self] , identifier[config] ):
literal[string]
identifier[self] . identifier[hash_name] = identifier[config] [ literal[string] ]
identifier[self] . identifier[dim] = identifier[config] [ literal[string] ]
identifier[self] . identifier[projection_count] = identifier[config] [ literal[string] ]
identifier[self] . identifier[normals] = identifier[config] [ literal[string] ]
identifier[self] . identifier[tree_root] = identifier[config] [ literal[string] ]
identifier[self] . identifier[minimum_result_size] = identifier[config] [ literal[string] ] | def apply_config(self, config):
"""
Applies config
"""
self.hash_name = config['hash_name']
self.dim = config['dim']
self.projection_count = config['projection_count']
self.normals = config['normals']
self.tree_root = config['tree_root']
self.minimum_result_size = config['minimum_result_size'] |
def _reproject(wcs1, wcs2):
    """
    Perform the forward transformation of ``wcs1`` followed by the
    inverse transformation of ``wcs2``.

    Parameters
    ----------
    wcs1, wcs2 : `~astropy.wcs.WCS` or `~gwcs.wcs.WCS`
        The WCS objects.

    Returns
    -------
    result : func
        Function to compute the transformations.  It takes x, y
        positions in ``wcs1`` and returns x, y positions in
        ``wcs2``.  The input and output x, y positions are zero
        indexed.
    """
    import gwcs

    forward_origin = []
    if isinstance(wcs1, fitswcs.WCS):
        forward = wcs1.all_pix2world
        # FITS WCS transforms take an ``origin`` argument; 0 selects
        # zero-based pixel coordinates.
        forward_origin = [0]
    elif isinstance(wcs1, gwcs.wcs.WCS):
        # Bug fix: the original tested ``wcs2`` here, so a gwcs ``wcs1``
        # combined with a FITS ``wcs2`` raised a spurious ValueError.
        forward = wcs1.forward_transform
    else:
        raise ValueError('wcs1 must be an astropy.wcs.WCS or '
                         'gwcs.wcs.WCS object.')

    inverse_origin = []
    if isinstance(wcs2, fitswcs.WCS):
        inverse = wcs2.all_world2pix
        inverse_origin = [0]
    elif isinstance(wcs2, gwcs.wcs.WCS):
        inverse = wcs2.forward_transform.inverse
    else:
        raise ValueError('wcs2 must be an astropy.wcs.WCS or '
                         'gwcs.wcs.WCS object.')

    def _reproject_func(x, y):
        # Forward to world coordinates with wcs1, then back to pixel
        # coordinates with wcs2.
        forward_args = [x, y] + forward_origin
        sky = forward(*forward_args)
        inverse_args = sky + inverse_origin
        return inverse(*inverse_args)

    return _reproject_func
constant[
Perform the forward transformation of ``wcs1`` followed by the
inverse transformation of ``wcs2``.
Parameters
----------
wcs1, wcs2 : `~astropy.wcs.WCS` or `~gwcs.wcs.WCS`
The WCS objects.
Returns
-------
result : func
Function to compute the transformations. It takes x, y
positions in ``wcs1`` and returns x, y positions in
``wcs2``. The input and output x, y positions are zero
indexed.
]
import module[gwcs]
variable[forward_origin] assign[=] list[[]]
if call[name[isinstance], parameter[name[wcs1], name[fitswcs].WCS]] begin[:]
variable[forward] assign[=] name[wcs1].all_pix2world
variable[forward_origin] assign[=] list[[<ast.Constant object at 0x7da18ede7b50>]]
variable[inverse_origin] assign[=] list[[]]
if call[name[isinstance], parameter[name[wcs2], name[fitswcs].WCS]] begin[:]
variable[inverse] assign[=] name[wcs2].all_world2pix
variable[inverse_origin] assign[=] list[[<ast.Constant object at 0x7da18ede50f0>]]
def function[_reproject_func, parameter[x, y]]:
variable[forward_args] assign[=] binary_operation[list[[<ast.Name object at 0x7da18ede6590>, <ast.Name object at 0x7da18ede6cb0>]] + name[forward_origin]]
variable[sky] assign[=] call[name[forward], parameter[<ast.Starred object at 0x7da18ede48e0>]]
variable[inverse_args] assign[=] binary_operation[name[sky] + name[inverse_origin]]
return[call[name[inverse], parameter[<ast.Starred object at 0x7da18ede6500>]]]
return[name[_reproject_func]] | keyword[def] identifier[_reproject] ( identifier[wcs1] , identifier[wcs2] ):
literal[string]
keyword[import] identifier[gwcs]
identifier[forward_origin] =[]
keyword[if] identifier[isinstance] ( identifier[wcs1] , identifier[fitswcs] . identifier[WCS] ):
identifier[forward] = identifier[wcs1] . identifier[all_pix2world]
identifier[forward_origin] =[ literal[int] ]
keyword[elif] identifier[isinstance] ( identifier[wcs2] , identifier[gwcs] . identifier[wcs] . identifier[WCS] ):
identifier[forward] = identifier[wcs1] . identifier[forward_transform]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[inverse_origin] =[]
keyword[if] identifier[isinstance] ( identifier[wcs2] , identifier[fitswcs] . identifier[WCS] ):
identifier[inverse] = identifier[wcs2] . identifier[all_world2pix]
identifier[inverse_origin] =[ literal[int] ]
keyword[elif] identifier[isinstance] ( identifier[wcs2] , identifier[gwcs] . identifier[wcs] . identifier[WCS] ):
identifier[inverse] = identifier[wcs2] . identifier[forward_transform] . identifier[inverse]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[def] identifier[_reproject_func] ( identifier[x] , identifier[y] ):
identifier[forward_args] =[ identifier[x] , identifier[y] ]+ identifier[forward_origin]
identifier[sky] = identifier[forward] (* identifier[forward_args] )
identifier[inverse_args] = identifier[sky] + identifier[inverse_origin]
keyword[return] identifier[inverse] (* identifier[inverse_args] )
keyword[return] identifier[_reproject_func] | def _reproject(wcs1, wcs2):
"""
Perform the forward transformation of ``wcs1`` followed by the
inverse transformation of ``wcs2``.
Parameters
----------
wcs1, wcs2 : `~astropy.wcs.WCS` or `~gwcs.wcs.WCS`
The WCS objects.
Returns
-------
result : func
Function to compute the transformations. It takes x, y
positions in ``wcs1`` and returns x, y positions in
``wcs2``. The input and output x, y positions are zero
indexed.
"""
import gwcs
forward_origin = []
if isinstance(wcs1, fitswcs.WCS):
forward = wcs1.all_pix2world
forward_origin = [0] # depends on [control=['if'], data=[]]
elif isinstance(wcs2, gwcs.wcs.WCS):
forward = wcs1.forward_transform # depends on [control=['if'], data=[]]
else:
raise ValueError('wcs1 must be an astropy.wcs.WCS or gwcs.wcs.WCS object.')
inverse_origin = []
if isinstance(wcs2, fitswcs.WCS):
inverse = wcs2.all_world2pix
inverse_origin = [0] # depends on [control=['if'], data=[]]
elif isinstance(wcs2, gwcs.wcs.WCS):
inverse = wcs2.forward_transform.inverse # depends on [control=['if'], data=[]]
else:
raise ValueError('wcs2 must be an astropy.wcs.WCS or gwcs.wcs.WCS object.')
def _reproject_func(x, y):
forward_args = [x, y] + forward_origin
sky = forward(*forward_args)
inverse_args = sky + inverse_origin
return inverse(*inverse_args)
return _reproject_func |
def _save(self, stateName, path):
    """Persist the current session under ``stateName`` at pyz ``path``.

    Emits ``sigSave`` with the collected state (listeners may extend the
    dict before it is written), hands the actual write off to the
    background save thread, and moves ``path`` to the front of the
    recent-sessions list.

    Parameters
    ----------
    stateName : str
        Name the session is stored under.
    path : str
        Target pyz file path.
    """
    print('saving...')
    state = {'session': dict(self.opts),
             'dialogs': self.dialogs.saveState()}
    self.sigSave.emit(state)
    # The write happens in a worker thread so the UI stays responsive.
    self.saveThread.prepare(stateName, path, self.tmp_dir_session, state)
    self.saveThread.start()
    self.current_session = stateName
    r = self.opts['recent sessions']
    try:
        # if this session already exists: remove it
        # (remove() is the idiomatic equivalent of pop(index(path)) and
        # raises the same ValueError when path is absent)
        r.remove(path)
    except ValueError:
        pass
    # add this session at the beginning
    r.insert(0, path)
constant[save into 'stateName' to pyz-path]
call[name[print], parameter[constant[saving...]]]
variable[state] assign[=] dictionary[[<ast.Constant object at 0x7da1b158a3e0>, <ast.Constant object at 0x7da1b158ad70>], [<ast.Call object at 0x7da1b15884c0>, <ast.Call object at 0x7da1b1589840>]]
call[name[self].sigSave.emit, parameter[name[state]]]
call[name[self].saveThread.prepare, parameter[name[stateName], name[path], name[self].tmp_dir_session, name[state]]]
call[name[self].saveThread.start, parameter[]]
name[self].current_session assign[=] name[stateName]
variable[r] assign[=] call[name[self].opts][constant[recent sessions]]
<ast.Try object at 0x7da1b15be8f0>
call[name[r].insert, parameter[constant[0], name[path]]] | keyword[def] identifier[_save] ( identifier[self] , identifier[stateName] , identifier[path] ):
literal[string]
identifier[print] ( literal[string] )
identifier[state] ={ literal[string] : identifier[dict] ( identifier[self] . identifier[opts] ),
literal[string] : identifier[self] . identifier[dialogs] . identifier[saveState] ()}
identifier[self] . identifier[sigSave] . identifier[emit] ( identifier[state] )
identifier[self] . identifier[saveThread] . identifier[prepare] ( identifier[stateName] , identifier[path] , identifier[self] . identifier[tmp_dir_session] , identifier[state] )
identifier[self] . identifier[saveThread] . identifier[start] ()
identifier[self] . identifier[current_session] = identifier[stateName]
identifier[r] = identifier[self] . identifier[opts] [ literal[string] ]
keyword[try] :
identifier[r] . identifier[pop] ( identifier[r] . identifier[index] ( identifier[path] ))
keyword[except] identifier[ValueError] :
keyword[pass]
identifier[r] . identifier[insert] ( literal[int] , identifier[path] ) | def _save(self, stateName, path):
"""save into 'stateName' to pyz-path"""
print('saving...')
state = {'session': dict(self.opts), 'dialogs': self.dialogs.saveState()}
self.sigSave.emit(state)
self.saveThread.prepare(stateName, path, self.tmp_dir_session, state)
self.saveThread.start()
self.current_session = stateName
r = self.opts['recent sessions']
try: # is this session already exists: remove it
r.pop(r.index(path)) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # add this session at the beginning
r.insert(0, path) |
def _set_aspect(self, axes, aspect):
    """
    Apply the aspect setting (or an explicit data_aspect) to the axes.
    """
    explicit = (isinstance(aspect, util.basestring) and aspect != 'square')
    if explicit or self.data_aspect:
        # A named mode (e.g. 'equal') or an explicit data aspect is
        # forwarded to the axes unchanged.
        ratio = self.data_aspect or aspect
    else:
        x0, x1 = axes.get_xlim()
        y0, y1 = axes.get_ylim()
        if self.logx:
            xspan = np.log(x1) - np.log(x0)
        else:
            xspan = x1 - x0
        if self.logy:
            yspan = np.log(y1) - np.log(y0)
        else:
            yspan = y1 - y0
        # Guard against zero-extent ranges before dividing.
        xspan = max(abs(xspan), 1e-30)
        yspan = max(abs(yspan), 1e-30)
        ratio = 1./(yspan/xspan)
        if aspect != 'square':
            # Numeric aspect scales the data ratio.
            ratio = ratio/aspect
    axes.set_aspect(ratio)
constant[
Set the aspect on the axes based on the aspect setting.
]
if <ast.BoolOp object at 0x7da18dc05c00> begin[:]
variable[data_ratio] assign[=] <ast.BoolOp object at 0x7da18dc04e20>
call[name[axes].set_aspect, parameter[name[data_ratio]]] | keyword[def] identifier[_set_aspect] ( identifier[self] , identifier[axes] , identifier[aspect] ):
literal[string]
keyword[if] (( identifier[isinstance] ( identifier[aspect] , identifier[util] . identifier[basestring] ) keyword[and] identifier[aspect] != literal[string] ) keyword[or]
identifier[self] . identifier[data_aspect] ):
identifier[data_ratio] = identifier[self] . identifier[data_aspect] keyword[or] identifier[aspect]
keyword[else] :
( identifier[x0] , identifier[x1] ),( identifier[y0] , identifier[y1] )= identifier[axes] . identifier[get_xlim] (), identifier[axes] . identifier[get_ylim] ()
identifier[xsize] = identifier[np] . identifier[log] ( identifier[x1] )- identifier[np] . identifier[log] ( identifier[x0] ) keyword[if] identifier[self] . identifier[logx] keyword[else] identifier[x1] - identifier[x0]
identifier[ysize] = identifier[np] . identifier[log] ( identifier[y1] )- identifier[np] . identifier[log] ( identifier[y0] ) keyword[if] identifier[self] . identifier[logy] keyword[else] identifier[y1] - identifier[y0]
identifier[xsize] = identifier[max] ( identifier[abs] ( identifier[xsize] ), literal[int] )
identifier[ysize] = identifier[max] ( identifier[abs] ( identifier[ysize] ), literal[int] )
identifier[data_ratio] = literal[int] /( identifier[ysize] / identifier[xsize] )
keyword[if] identifier[aspect] != literal[string] :
identifier[data_ratio] = identifier[data_ratio] / identifier[aspect]
identifier[axes] . identifier[set_aspect] ( identifier[data_ratio] ) | def _set_aspect(self, axes, aspect):
"""
Set the aspect on the axes based on the aspect setting.
"""
if isinstance(aspect, util.basestring) and aspect != 'square' or self.data_aspect:
data_ratio = self.data_aspect or aspect # depends on [control=['if'], data=[]]
else:
((x0, x1), (y0, y1)) = (axes.get_xlim(), axes.get_ylim())
xsize = np.log(x1) - np.log(x0) if self.logx else x1 - x0
ysize = np.log(y1) - np.log(y0) if self.logy else y1 - y0
xsize = max(abs(xsize), 1e-30)
ysize = max(abs(ysize), 1e-30)
data_ratio = 1.0 / (ysize / xsize)
if aspect != 'square':
data_ratio = data_ratio / aspect # depends on [control=['if'], data=['aspect']]
axes.set_aspect(data_ratio) |
def pipeRecvConsole(self):
    '''watch for piped console data arriving from the save dialog'''
    try:
        while True:
            msg = self.parent_pipe_recv_console.recv()
            if msg is not None:
                self.console.writeln(msg)
            # poll at ~10 Hz to avoid busy-waiting on the pipe
            time.sleep(0.1)
    except EOFError:
        # the other end closed the pipe: exit the watcher quietly
        pass
constant[watch for piped data from save dialog]
<ast.Try object at 0x7da18f09f010> | keyword[def] identifier[pipeRecvConsole] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[while] keyword[True] :
identifier[console_msg] = identifier[self] . identifier[parent_pipe_recv_console] . identifier[recv] ()
keyword[if] identifier[console_msg] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[console] . identifier[writeln] ( identifier[console_msg] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[except] identifier[EOFError] :
keyword[pass] | def pipeRecvConsole(self):
"""watch for piped data from save dialog"""
try:
while True:
console_msg = self.parent_pipe_recv_console.recv()
if console_msg is not None:
self.console.writeln(console_msg) # depends on [control=['if'], data=['console_msg']]
time.sleep(0.1) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except EOFError:
pass # depends on [control=['except'], data=[]] |
def a_over_Rs(P,R2,M2,M1=1,R1=1,planet=True):
    """
    Returns a/Rs (semimajor axis scaled by the primary radius).

    Parameters
    ----------
    P : float
        Orbital period.
    R2 : float
        Radius of the secondary (Earth radii if ``planet``, else Solar).
        Converted for consistency but not used: a/Rs depends only on the
        masses and the primary radius.
    M2 : float
        Mass of the secondary (Earth masses if ``planet``, else Solar).
    M1 : float
        Mass of the primary [Solar masses].
    R1 : float
        Radius of the primary [Solar radii].
    planet : bool
        If True, ``M2``/``R2`` are given in Earth units and converted to
        Solar units here.
    """
    if planet:
        # BUGFIX: the conversion factors were swapped (mass was scaled by
        # the radius ratio and radius by the mass ratio). The mass must use
        # MEARTH/MSUN and the radius REARTH/RSUN; M2 feeds semimajor(), so
        # the previous version returned a wrong a/Rs for planet=True.
        M2 *= MEARTH/MSUN
        R2 *= REARTH/RSUN
    return semimajor(P,M1+M2)*AU/(R1*RSUN)
return semimajor(P,M1+M2)*AU/(R1*RSUN) | def function[a_over_Rs, parameter[P, R2, M2, M1, R1, planet]]:
constant[
Returns a/Rs for given parameters.
]
if name[planet] begin[:]
<ast.AugAssign object at 0x7da1b285b640>
<ast.AugAssign object at 0x7da1b285ab90>
return[binary_operation[binary_operation[call[name[semimajor], parameter[name[P], binary_operation[name[M1] + name[M2]]]] * name[AU]] / binary_operation[name[R1] * name[RSUN]]]] | keyword[def] identifier[a_over_Rs] ( identifier[P] , identifier[R2] , identifier[M2] , identifier[M1] = literal[int] , identifier[R1] = literal[int] , identifier[planet] = keyword[True] ):
literal[string]
keyword[if] identifier[planet] :
identifier[M2] *= identifier[REARTH] / identifier[RSUN]
identifier[R2] *= identifier[MEARTH] / identifier[MSUN]
keyword[return] identifier[semimajor] ( identifier[P] , identifier[M1] + identifier[M2] )* identifier[AU] /( identifier[R1] * identifier[RSUN] ) | def a_over_Rs(P, R2, M2, M1=1, R1=1, planet=True):
"""
Returns a/Rs for given parameters.
"""
if planet:
M2 *= REARTH / RSUN
R2 *= MEARTH / MSUN # depends on [control=['if'], data=[]]
return semimajor(P, M1 + M2) * AU / (R1 * RSUN) |
def upload_receipt(self, url, data):
    """Upload receipt data to the given url.

    :param url: destination url for the attachment
    :param data: raw receipt payload
    :return: result of the underlying attachment upload
    """
    mime = 'application/vnd.mcash.receipt.v1+json'
    return self.upload_attachment(url=url, data=data, mime_type=mime)
constant[Upload a receipt to the give url
:param url:
:param data:
:return:
]
return[call[name[self].upload_attachment, parameter[]]] | keyword[def] identifier[upload_receipt] ( identifier[self] , identifier[url] , identifier[data] ):
literal[string]
keyword[return] identifier[self] . identifier[upload_attachment] ( identifier[url] = identifier[url] , identifier[data] = identifier[data] , identifier[mime_type] = literal[string] ) | def upload_receipt(self, url, data):
"""Upload a receipt to the give url
:param url:
:param data:
:return:
"""
return self.upload_attachment(url=url, data=data, mime_type='application/vnd.mcash.receipt.v1+json') |
def nelect(self):
    """
    Gets the default number of electrons for a given structure.
    """
    # The structure may repeat the same element (unsorted POSCAR), so
    # track the symbols still awaiting a POTCAR match and count each
    # element's electrons exactly once.
    remaining = set(self.poscar.site_symbols)
    total = 0.
    for single in self.potcar:
        elem = single.element
        if elem in remaining:
            remaining.remove(elem)
            amount = self.structure.composition.element_composition[elem]
            total += amount * single.ZVAL
    # Optionally account for an explicit net charge on the structure.
    if self.use_structure_charge:
        return total - self.structure.charge
    return total
constant[
Gets the default number of electrons for a given structure.
]
variable[site_symbols] assign[=] call[name[list], parameter[call[name[set], parameter[name[self].poscar.site_symbols]]]]
variable[nelect] assign[=] constant[0.0]
for taget[name[ps]] in starred[name[self].potcar] begin[:]
if compare[name[ps].element in name[site_symbols]] begin[:]
call[name[site_symbols].remove, parameter[name[ps].element]]
<ast.AugAssign object at 0x7da20c76f970>
if name[self].use_structure_charge begin[:]
return[binary_operation[name[nelect] - name[self].structure.charge]] | keyword[def] identifier[nelect] ( identifier[self] ):
literal[string]
identifier[site_symbols] = identifier[list] ( identifier[set] ( identifier[self] . identifier[poscar] . identifier[site_symbols] ))
identifier[nelect] = literal[int]
keyword[for] identifier[ps] keyword[in] identifier[self] . identifier[potcar] :
keyword[if] identifier[ps] . identifier[element] keyword[in] identifier[site_symbols] :
identifier[site_symbols] . identifier[remove] ( identifier[ps] . identifier[element] )
identifier[nelect] += identifier[self] . identifier[structure] . identifier[composition] . identifier[element_composition] [
identifier[ps] . identifier[element] ]* identifier[ps] . identifier[ZVAL]
keyword[if] identifier[self] . identifier[use_structure_charge] :
keyword[return] identifier[nelect] - identifier[self] . identifier[structure] . identifier[charge]
keyword[else] :
keyword[return] identifier[nelect] | def nelect(self):
"""
Gets the default number of electrons for a given structure.
"""
# if structure is not sorted this can cause problems, so must take
# care to remove redundant symbols when counting electrons
site_symbols = list(set(self.poscar.site_symbols))
nelect = 0.0
for ps in self.potcar:
if ps.element in site_symbols:
site_symbols.remove(ps.element)
nelect += self.structure.composition.element_composition[ps.element] * ps.ZVAL # depends on [control=['if'], data=['site_symbols']] # depends on [control=['for'], data=['ps']]
if self.use_structure_charge:
return nelect - self.structure.charge # depends on [control=['if'], data=[]]
else:
return nelect |
def prep_files(paths, extensions):
    """Parses `paths` (which may consist of files and/or directories).
    Removes duplicates, sorts, and returns verified srt files."""
    from batchpath import GeneratePaths
    targets = GeneratePaths().files(paths, os.W_OK, extensions, 0, True)
    # Guard clause: bail out early when nothing usable was found.
    if not targets:
        LOGGER.error('No valid targets were specified')
        sys.exit(1)
    return targets
constant[Parses `paths` (which may consist of files and/or directories).
Removes duplicates, sorts, and returns verified srt files.]
from relative_module[batchpath] import module[GeneratePaths]
variable[filenames] assign[=] call[call[name[GeneratePaths], parameter[]].files, parameter[name[paths], name[os].W_OK, name[extensions], constant[0], constant[True]]]
if name[filenames] begin[:]
return[name[filenames]] | keyword[def] identifier[prep_files] ( identifier[paths] , identifier[extensions] ):
literal[string]
keyword[from] identifier[batchpath] keyword[import] identifier[GeneratePaths]
identifier[filenames] = identifier[GeneratePaths] (). identifier[files] ( identifier[paths] , identifier[os] . identifier[W_OK] , identifier[extensions] , literal[int] , keyword[True] )
keyword[if] identifier[filenames] :
keyword[return] identifier[filenames]
keyword[else] :
identifier[LOGGER] . identifier[error] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] ) | def prep_files(paths, extensions):
"""Parses `paths` (which may consist of files and/or directories).
Removes duplicates, sorts, and returns verified srt files."""
from batchpath import GeneratePaths
filenames = GeneratePaths().files(paths, os.W_OK, extensions, 0, True)
if filenames:
return filenames # depends on [control=['if'], data=[]]
else:
LOGGER.error('No valid targets were specified')
sys.exit(1) |
def pages(self):
    """Get pages, reloading the site if needed."""
    # The stored revision is bumped whenever the site content changes;
    # rebuild the cached pages when ours is stale.
    stored_rev = self.db.get('site:rev')
    if int(stored_rev) != self.revision:
        self.reload_site()
    return self._pages
constant[Get pages, reloading the site if needed.]
variable[rev] assign[=] call[name[self].db.get, parameter[constant[site:rev]]]
if compare[call[name[int], parameter[name[rev]]] not_equal[!=] name[self].revision] begin[:]
call[name[self].reload_site, parameter[]]
return[name[self]._pages] | keyword[def] identifier[pages] ( identifier[self] ):
literal[string]
identifier[rev] = identifier[self] . identifier[db] . identifier[get] ( literal[string] )
keyword[if] identifier[int] ( identifier[rev] )!= identifier[self] . identifier[revision] :
identifier[self] . identifier[reload_site] ()
keyword[return] identifier[self] . identifier[_pages] | def pages(self):
"""Get pages, reloading the site if needed."""
rev = self.db.get('site:rev')
if int(rev) != self.revision:
self.reload_site() # depends on [control=['if'], data=[]]
return self._pages |
def group(*args, **kwargs):
    """Decorator to define a command group.

    The arguments to this decorator are those of the
    `ArgumentParser <https://docs.python.org/3/library/argparse.html\
#argumentparser-objects>`_
    object constructor.

    The decorated function becomes the group entry point: it gains
    ``command`` and ``group`` attributes for registering sub-commands and
    nested sub-groups, and calling it parses the arguments, invokes the
    group function, then dispatches down the selected sub-command chain,
    threading each level's return value into the next as extra keyword
    arguments ("context").
    """
    def decorator(f):
        # Whether selecting a sub-command is mandatory. Popped here so it
        # is not forwarded to the ArgumentParser constructor below.
        f.required = kwargs.pop('required', True)
        if 'parents' in kwargs:
            # Inherit the argument-name lists of parent climax parsers so
            # their parsed values are routed to this group function, then
            # hand the raw argparse parsers to ArgumentParser.
            if not hasattr(f, '_argnames'):  # pragma: no cover
                f._argnames = []
            for p in kwargs['parents']:
                f._argnames += p._argnames if hasattr(p, '_argnames') else []
            kwargs['parents'] = [p.parser for p in kwargs['parents']]
        f.parser = argparse.ArgumentParser(*args, **kwargs)
        # Marker distinguishing climax-wrapped callables from plain
        # argparse sub-parsers during dispatch in wrapper() below.
        f.climax = True
        # Register arguments accumulated by prior @argument decorators.
        for arg in getattr(f, '_arguments', []):
            f.parser.add_argument(*arg[0], **arg[1])
        f._subparsers = f.parser.add_subparsers()
        f.command = partial(_subcommand, f)
        f.group = partial(_subgroup, f)
        @wraps(f)
        def wrapper(args=None):
            parsed_args = vars(f.parser.parse_args(args))
            # in Python 3.3+, sub-commands are optional by default
            # so required parsers need to be validated by hand here:
            # walk the '_func_<name>' chain to the deepest selected node
            # and error out if it still requires a sub-command.
            func = f
            while '_func_' + func.__name__ in parsed_args:
                func = parsed_args.get('_func_' + func.__name__)
            if getattr(func, 'required', False):
                f.parser.error('too few arguments')
            # call the group function with only the arguments it declared
            filtered_args = {arg: parsed_args[arg]
                             for arg in parsed_args.keys()
                             if arg in getattr(f, '_argnames', [])}
            parsed_args = {arg: parsed_args[arg] for arg in parsed_args.keys()
                           if arg not in filtered_args}
            ctx = f(**filtered_args)
            # call the sub-command function (or chain); each level pops
            # its own '_func_<name>' entry and consumes the arguments it
            # declared, leaving the rest for deeper levels
            func = f
            while '_func_' + func.__name__ in parsed_args:
                func = parsed_args.pop('_func_' + func.__name__)
                if getattr(func, 'climax', False):
                    filtered_args = {arg: parsed_args[arg]
                                     for arg in parsed_args.keys()
                                     if arg in getattr(func, '_argnames', [])}
                    parsed_args = {arg: parsed_args[arg]
                                   for arg in parsed_args.keys()
                                   if arg not in filtered_args}
                else:
                    # we don't have our metadata for this subparser, so we
                    # send all remaining args to it
                    filtered_args = parsed_args
                    parsed_args = {}
                # the parent's return value becomes context passed down
                filtered_args.update(ctx or {})
                ctx = func(**filtered_args)
            return ctx
        return wrapper
    return decorator
constant[Decorator to define a command group.
The arguments to this decorator are those of the
`ArgumentParser <https://docs.python.org/3/library/argparse.html#argumentparser-objects>`_
object constructor.
]
def function[decorator, parameter[f]]:
name[f].required assign[=] call[name[kwargs].pop, parameter[constant[required], constant[True]]]
if compare[constant[parents] in name[kwargs]] begin[:]
if <ast.UnaryOp object at 0x7da18eb55ed0> begin[:]
name[f]._argnames assign[=] list[[]]
for taget[name[p]] in starred[call[name[kwargs]][constant[parents]]] begin[:]
<ast.AugAssign object at 0x7da18eb56fb0>
call[name[kwargs]][constant[parents]] assign[=] <ast.ListComp object at 0x7da18eb54ca0>
name[f].parser assign[=] call[name[argparse].ArgumentParser, parameter[<ast.Starred object at 0x7da18eb55960>]]
name[f].climax assign[=] constant[True]
for taget[name[arg]] in starred[call[name[getattr], parameter[name[f], constant[_arguments], list[[]]]]] begin[:]
call[name[f].parser.add_argument, parameter[<ast.Starred object at 0x7da18eb549d0>]]
name[f]._subparsers assign[=] call[name[f].parser.add_subparsers, parameter[]]
name[f].command assign[=] call[name[partial], parameter[name[_subcommand], name[f]]]
name[f].group assign[=] call[name[partial], parameter[name[_subgroup], name[f]]]
def function[wrapper, parameter[args]]:
variable[parsed_args] assign[=] call[name[vars], parameter[call[name[f].parser.parse_args, parameter[name[args]]]]]
variable[func] assign[=] name[f]
while compare[binary_operation[constant[_func_] + name[func].__name__] in name[parsed_args]] begin[:]
variable[func] assign[=] call[name[parsed_args].get, parameter[binary_operation[constant[_func_] + name[func].__name__]]]
if call[name[getattr], parameter[name[func], constant[required], constant[False]]] begin[:]
call[name[f].parser.error, parameter[constant[too few arguments]]]
variable[filtered_args] assign[=] <ast.DictComp object at 0x7da18eb57fd0>
variable[parsed_args] assign[=] <ast.DictComp object at 0x7da18eb57dc0>
variable[ctx] assign[=] call[name[f], parameter[]]
variable[func] assign[=] name[f]
while compare[binary_operation[constant[_func_] + name[func].__name__] in name[parsed_args]] begin[:]
variable[func] assign[=] call[name[parsed_args].pop, parameter[binary_operation[constant[_func_] + name[func].__name__]]]
if call[name[getattr], parameter[name[func], constant[climax], constant[False]]] begin[:]
variable[filtered_args] assign[=] <ast.DictComp object at 0x7da18eb56350>
variable[parsed_args] assign[=] <ast.DictComp object at 0x7da18f58f370>
call[name[filtered_args].update, parameter[<ast.BoolOp object at 0x7da18f58ed40>]]
variable[ctx] assign[=] call[name[func], parameter[]]
return[name[ctx]]
return[name[wrapper]]
return[name[decorator]] | keyword[def] identifier[group] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[f] ):
identifier[f] . identifier[required] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[f] , literal[string] ):
identifier[f] . identifier[_argnames] =[]
keyword[for] identifier[p] keyword[in] identifier[kwargs] [ literal[string] ]:
identifier[f] . identifier[_argnames] += identifier[p] . identifier[_argnames] keyword[if] identifier[hasattr] ( identifier[p] , literal[string] ) keyword[else] []
identifier[kwargs] [ literal[string] ]=[ identifier[p] . identifier[parser] keyword[for] identifier[p] keyword[in] identifier[kwargs] [ literal[string] ]]
identifier[f] . identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (* identifier[args] ,** identifier[kwargs] )
identifier[f] . identifier[climax] = keyword[True]
keyword[for] identifier[arg] keyword[in] identifier[getattr] ( identifier[f] , literal[string] ,[]):
identifier[f] . identifier[parser] . identifier[add_argument] (* identifier[arg] [ literal[int] ],** identifier[arg] [ literal[int] ])
identifier[f] . identifier[_subparsers] = identifier[f] . identifier[parser] . identifier[add_subparsers] ()
identifier[f] . identifier[command] = identifier[partial] ( identifier[_subcommand] , identifier[f] )
identifier[f] . identifier[group] = identifier[partial] ( identifier[_subgroup] , identifier[f] )
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] ( identifier[args] = keyword[None] ):
identifier[parsed_args] = identifier[vars] ( identifier[f] . identifier[parser] . identifier[parse_args] ( identifier[args] ))
identifier[func] = identifier[f]
keyword[while] literal[string] + identifier[func] . identifier[__name__] keyword[in] identifier[parsed_args] :
identifier[func] = identifier[parsed_args] . identifier[get] ( literal[string] + identifier[func] . identifier[__name__] )
keyword[if] identifier[getattr] ( identifier[func] , literal[string] , keyword[False] ):
identifier[f] . identifier[parser] . identifier[error] ( literal[string] )
identifier[filtered_args] ={ identifier[arg] : identifier[parsed_args] [ identifier[arg] ]
keyword[for] identifier[arg] keyword[in] identifier[parsed_args] . identifier[keys] ()
keyword[if] identifier[arg] keyword[in] identifier[getattr] ( identifier[f] , literal[string] ,[])}
identifier[parsed_args] ={ identifier[arg] : identifier[parsed_args] [ identifier[arg] ] keyword[for] identifier[arg] keyword[in] identifier[parsed_args] . identifier[keys] ()
keyword[if] identifier[arg] keyword[not] keyword[in] identifier[filtered_args] }
identifier[ctx] = identifier[f] (** identifier[filtered_args] )
identifier[func] = identifier[f]
keyword[while] literal[string] + identifier[func] . identifier[__name__] keyword[in] identifier[parsed_args] :
identifier[func] = identifier[parsed_args] . identifier[pop] ( literal[string] + identifier[func] . identifier[__name__] )
keyword[if] identifier[getattr] ( identifier[func] , literal[string] , keyword[False] ):
identifier[filtered_args] ={ identifier[arg] : identifier[parsed_args] [ identifier[arg] ]
keyword[for] identifier[arg] keyword[in] identifier[parsed_args] . identifier[keys] ()
keyword[if] identifier[arg] keyword[in] identifier[getattr] ( identifier[func] , literal[string] ,[])}
identifier[parsed_args] ={ identifier[arg] : identifier[parsed_args] [ identifier[arg] ]
keyword[for] identifier[arg] keyword[in] identifier[parsed_args] . identifier[keys] ()
keyword[if] identifier[arg] keyword[not] keyword[in] identifier[filtered_args] }
keyword[else] :
identifier[filtered_args] = identifier[parsed_args]
identifier[parsed_args] ={}
identifier[filtered_args] . identifier[update] ( identifier[ctx] keyword[or] {})
identifier[ctx] = identifier[func] (** identifier[filtered_args] )
keyword[return] identifier[ctx]
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator] | def group(*args, **kwargs):
"""Decorator to define a command group.
The arguments to this decorator are those of the
`ArgumentParser <https://docs.python.org/3/library/argparse.html#argumentparser-objects>`_
object constructor.
"""
def decorator(f):
f.required = kwargs.pop('required', True)
if 'parents' in kwargs:
if not hasattr(f, '_argnames'): # pragma: no cover
f._argnames = [] # depends on [control=['if'], data=[]]
for p in kwargs['parents']:
f._argnames += p._argnames if hasattr(p, '_argnames') else [] # depends on [control=['for'], data=['p']]
kwargs['parents'] = [p.parser for p in kwargs['parents']] # depends on [control=['if'], data=['kwargs']]
f.parser = argparse.ArgumentParser(*args, **kwargs)
f.climax = True
for arg in getattr(f, '_arguments', []):
f.parser.add_argument(*arg[0], **arg[1]) # depends on [control=['for'], data=['arg']]
f._subparsers = f.parser.add_subparsers()
f.command = partial(_subcommand, f)
f.group = partial(_subgroup, f)
@wraps(f)
def wrapper(args=None):
parsed_args = vars(f.parser.parse_args(args))
# in Python 3.3+, sub-commands are optional by default
# so required parsers need to be validated by hand here
func = f
while '_func_' + func.__name__ in parsed_args:
func = parsed_args.get('_func_' + func.__name__) # depends on [control=['while'], data=['parsed_args']]
if getattr(func, 'required', False):
f.parser.error('too few arguments') # depends on [control=['if'], data=[]]
# call the group function
filtered_args = {arg: parsed_args[arg] for arg in parsed_args.keys() if arg in getattr(f, '_argnames', [])}
parsed_args = {arg: parsed_args[arg] for arg in parsed_args.keys() if arg not in filtered_args}
ctx = f(**filtered_args)
# call the sub-command function (or chain)
func = f
while '_func_' + func.__name__ in parsed_args:
func = parsed_args.pop('_func_' + func.__name__)
if getattr(func, 'climax', False):
filtered_args = {arg: parsed_args[arg] for arg in parsed_args.keys() if arg in getattr(func, '_argnames', [])}
parsed_args = {arg: parsed_args[arg] for arg in parsed_args.keys() if arg not in filtered_args} # depends on [control=['if'], data=[]]
else:
# we don't have our metadata for this subparser, so we
# send all remaining args to it
filtered_args = parsed_args
parsed_args = {}
filtered_args.update(ctx or {})
ctx = func(**filtered_args) # depends on [control=['while'], data=['parsed_args']]
return ctx
return wrapper
return decorator |
def run_fba(model, rxn_id, direction="max", single_value=True):
    """
    Return the solution of an FBA to a set objective function.
    Parameters
    ----------
    model : cobra.Model
        The metabolic model under investigation.
    rxn_id : string
        A string containing the reaction ID of the desired FBA objective.
    direction: string
        A string containing either "max" or "min" to specify the direction
        of the desired FBA objective function.
    single_value: boolean
        Indicates whether the results for all reactions are gathered from the
        solver, or only the result for the objective value.
    Returns
    -------
    cobra.solution
        The cobra solution object for the corresponding FBA problem.
    """
    model.objective = model.reactions.get_by_id(rxn_id)
    model.objective_direction = direction
    try:
        if single_value:
            # Only the objective value is needed: skip building the
            # full solution object.
            return model.slim_optimize()
        return model.optimize()
    except Infeasible:
        # Infeasible problems are reported as NaN rather than raised.
        return np.nan
constant[
Return the solution of an FBA to a set objective function.
Parameters
----------
model : cobra.Model
The metabolic model under investigation.
rxn_id : string
A string containing the reaction ID of the desired FBA objective.
direction: string
A string containing either "max" or "min" to specify the direction
of the desired FBA objective function.
single_value: boolean
Indicates whether the results for all reactions are gathered from the
solver, or only the result for the objective value.
Returns
-------
cobra.solution
The cobra solution object for the corresponding FBA problem.
]
name[model].objective assign[=] call[name[model].reactions.get_by_id, parameter[name[rxn_id]]]
name[model].objective_direction assign[=] name[direction]
if name[single_value] begin[:]
<ast.Try object at 0x7da1b057ab60> | keyword[def] identifier[run_fba] ( identifier[model] , identifier[rxn_id] , identifier[direction] = literal[string] , identifier[single_value] = keyword[True] ):
literal[string]
identifier[model] . identifier[objective] = identifier[model] . identifier[reactions] . identifier[get_by_id] ( identifier[rxn_id] )
identifier[model] . identifier[objective_direction] = identifier[direction]
keyword[if] identifier[single_value] :
keyword[try] :
keyword[return] identifier[model] . identifier[slim_optimize] ()
keyword[except] identifier[Infeasible] :
keyword[return] identifier[np] . identifier[nan]
keyword[else] :
keyword[try] :
identifier[solution] = identifier[model] . identifier[optimize] ()
keyword[return] identifier[solution]
keyword[except] identifier[Infeasible] :
keyword[return] identifier[np] . identifier[nan] | def run_fba(model, rxn_id, direction='max', single_value=True):
"""
Return the solution of an FBA to a set objective function.
Parameters
----------
model : cobra.Model
The metabolic model under investigation.
rxn_id : string
A string containing the reaction ID of the desired FBA objective.
direction: string
A string containing either "max" or "min" to specify the direction
of the desired FBA objective function.
single_value: boolean
Indicates whether the results for all reactions are gathered from the
solver, or only the result for the objective value.
Returns
-------
cobra.solution
The cobra solution object for the corresponding FBA problem.
"""
model.objective = model.reactions.get_by_id(rxn_id)
model.objective_direction = direction
if single_value:
try:
return model.slim_optimize() # depends on [control=['try'], data=[]]
except Infeasible:
return np.nan # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
try:
solution = model.optimize()
return solution # depends on [control=['try'], data=[]]
except Infeasible:
return np.nan # depends on [control=['except'], data=[]] |
def print_model(self, include_unsigned_edges=False):
"""Return a SIF string of the assembled model.
Parameters
----------
include_unsigned_edges : bool
If True, includes edges with an unknown activating/inactivating
relationship (e.g., most PTMs). Default is False.
"""
sif_str = ''
for edge in self.graph.edges(data=True):
n1 = edge[0]
n2 = edge[1]
data = edge[2]
polarity = data.get('polarity')
if polarity == 'negative':
rel = '-1'
elif polarity == 'positive':
rel = '1'
elif include_unsigned_edges:
rel = '0'
else:
continue
sif_str += '%s %s %s\n' % (n1, rel, n2)
return sif_str | def function[print_model, parameter[self, include_unsigned_edges]]:
constant[Return a SIF string of the assembled model.
Parameters
----------
include_unsigned_edges : bool
If True, includes edges with an unknown activating/inactivating
relationship (e.g., most PTMs). Default is False.
]
variable[sif_str] assign[=] constant[]
for taget[name[edge]] in starred[call[name[self].graph.edges, parameter[]]] begin[:]
variable[n1] assign[=] call[name[edge]][constant[0]]
variable[n2] assign[=] call[name[edge]][constant[1]]
variable[data] assign[=] call[name[edge]][constant[2]]
variable[polarity] assign[=] call[name[data].get, parameter[constant[polarity]]]
if compare[name[polarity] equal[==] constant[negative]] begin[:]
variable[rel] assign[=] constant[-1]
<ast.AugAssign object at 0x7da18ede4eb0>
return[name[sif_str]] | keyword[def] identifier[print_model] ( identifier[self] , identifier[include_unsigned_edges] = keyword[False] ):
literal[string]
identifier[sif_str] = literal[string]
keyword[for] identifier[edge] keyword[in] identifier[self] . identifier[graph] . identifier[edges] ( identifier[data] = keyword[True] ):
identifier[n1] = identifier[edge] [ literal[int] ]
identifier[n2] = identifier[edge] [ literal[int] ]
identifier[data] = identifier[edge] [ literal[int] ]
identifier[polarity] = identifier[data] . identifier[get] ( literal[string] )
keyword[if] identifier[polarity] == literal[string] :
identifier[rel] = literal[string]
keyword[elif] identifier[polarity] == literal[string] :
identifier[rel] = literal[string]
keyword[elif] identifier[include_unsigned_edges] :
identifier[rel] = literal[string]
keyword[else] :
keyword[continue]
identifier[sif_str] += literal[string] %( identifier[n1] , identifier[rel] , identifier[n2] )
keyword[return] identifier[sif_str] | def print_model(self, include_unsigned_edges=False):
"""Return a SIF string of the assembled model.
Parameters
----------
include_unsigned_edges : bool
If True, includes edges with an unknown activating/inactivating
relationship (e.g., most PTMs). Default is False.
"""
sif_str = ''
for edge in self.graph.edges(data=True):
n1 = edge[0]
n2 = edge[1]
data = edge[2]
polarity = data.get('polarity')
if polarity == 'negative':
rel = '-1' # depends on [control=['if'], data=[]]
elif polarity == 'positive':
rel = '1' # depends on [control=['if'], data=[]]
elif include_unsigned_edges:
rel = '0' # depends on [control=['if'], data=[]]
else:
continue
sif_str += '%s %s %s\n' % (n1, rel, n2) # depends on [control=['for'], data=['edge']]
return sif_str |
def pack_iterable(messages):
'''Pack an iterable of messages in the TCP protocol format'''
# [ 4-byte body size ]
# [ 4-byte num messages ]
# [ 4-byte message #1 size ][ N-byte binary data ]
# ... (repeated <num_messages> times)
return pack_string(
struct.pack('>l', len(messages)) +
''.join(map(pack_string, messages))) | def function[pack_iterable, parameter[messages]]:
constant[Pack an iterable of messages in the TCP protocol format]
return[call[name[pack_string], parameter[binary_operation[call[name[struct].pack, parameter[constant[>l], call[name[len], parameter[name[messages]]]]] + call[constant[].join, parameter[call[name[map], parameter[name[pack_string], name[messages]]]]]]]]] | keyword[def] identifier[pack_iterable] ( identifier[messages] ):
literal[string]
keyword[return] identifier[pack_string] (
identifier[struct] . identifier[pack] ( literal[string] , identifier[len] ( identifier[messages] ))+
literal[string] . identifier[join] ( identifier[map] ( identifier[pack_string] , identifier[messages] ))) | def pack_iterable(messages):
"""Pack an iterable of messages in the TCP protocol format"""
# [ 4-byte body size ]
# [ 4-byte num messages ]
# [ 4-byte message #1 size ][ N-byte binary data ]
# ... (repeated <num_messages> times)
return pack_string(struct.pack('>l', len(messages)) + ''.join(map(pack_string, messages))) |
def __setup_dfs_data(graph, adj):
"""Sets up the dfs_data object, for consistency."""
dfs_data = __get_dfs_data(graph, adj)
dfs_data['graph'] = graph
dfs_data['adj'] = adj
L1, L2 = __low_point_dfs(dfs_data)
dfs_data['lowpoint_1_lookup'] = L1
dfs_data['lowpoint_2_lookup'] = L2
edge_weights = __calculate_edge_weights(dfs_data)
dfs_data['edge_weights'] = edge_weights
return dfs_data | def function[__setup_dfs_data, parameter[graph, adj]]:
constant[Sets up the dfs_data object, for consistency.]
variable[dfs_data] assign[=] call[name[__get_dfs_data], parameter[name[graph], name[adj]]]
call[name[dfs_data]][constant[graph]] assign[=] name[graph]
call[name[dfs_data]][constant[adj]] assign[=] name[adj]
<ast.Tuple object at 0x7da1b287e200> assign[=] call[name[__low_point_dfs], parameter[name[dfs_data]]]
call[name[dfs_data]][constant[lowpoint_1_lookup]] assign[=] name[L1]
call[name[dfs_data]][constant[lowpoint_2_lookup]] assign[=] name[L2]
variable[edge_weights] assign[=] call[name[__calculate_edge_weights], parameter[name[dfs_data]]]
call[name[dfs_data]][constant[edge_weights]] assign[=] name[edge_weights]
return[name[dfs_data]] | keyword[def] identifier[__setup_dfs_data] ( identifier[graph] , identifier[adj] ):
literal[string]
identifier[dfs_data] = identifier[__get_dfs_data] ( identifier[graph] , identifier[adj] )
identifier[dfs_data] [ literal[string] ]= identifier[graph]
identifier[dfs_data] [ literal[string] ]= identifier[adj]
identifier[L1] , identifier[L2] = identifier[__low_point_dfs] ( identifier[dfs_data] )
identifier[dfs_data] [ literal[string] ]= identifier[L1]
identifier[dfs_data] [ literal[string] ]= identifier[L2]
identifier[edge_weights] = identifier[__calculate_edge_weights] ( identifier[dfs_data] )
identifier[dfs_data] [ literal[string] ]= identifier[edge_weights]
keyword[return] identifier[dfs_data] | def __setup_dfs_data(graph, adj):
"""Sets up the dfs_data object, for consistency."""
dfs_data = __get_dfs_data(graph, adj)
dfs_data['graph'] = graph
dfs_data['adj'] = adj
(L1, L2) = __low_point_dfs(dfs_data)
dfs_data['lowpoint_1_lookup'] = L1
dfs_data['lowpoint_2_lookup'] = L2
edge_weights = __calculate_edge_weights(dfs_data)
dfs_data['edge_weights'] = edge_weights
return dfs_data |
def DOM_setOuterHTML(self, nodeId, outerHTML):
"""
Function path: DOM.setOuterHTML
Domain: DOM
Method name: setOuterHTML
Parameters:
Required arguments:
'nodeId' (type: NodeId) -> Id of the node to set markup for.
'outerHTML' (type: string) -> Outer HTML markup to set.
No return value.
Description: Sets node HTML markup, returns new node id.
"""
assert isinstance(outerHTML, (str,)
), "Argument 'outerHTML' must be of type '['str']'. Received type: '%s'" % type(
outerHTML)
subdom_funcs = self.synchronous_command('DOM.setOuterHTML', nodeId=nodeId,
outerHTML=outerHTML)
return subdom_funcs | def function[DOM_setOuterHTML, parameter[self, nodeId, outerHTML]]:
constant[
Function path: DOM.setOuterHTML
Domain: DOM
Method name: setOuterHTML
Parameters:
Required arguments:
'nodeId' (type: NodeId) -> Id of the node to set markup for.
'outerHTML' (type: string) -> Outer HTML markup to set.
No return value.
Description: Sets node HTML markup, returns new node id.
]
assert[call[name[isinstance], parameter[name[outerHTML], tuple[[<ast.Name object at 0x7da1b11141f0>]]]]]
variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[DOM.setOuterHTML]]]
return[name[subdom_funcs]] | keyword[def] identifier[DOM_setOuterHTML] ( identifier[self] , identifier[nodeId] , identifier[outerHTML] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[outerHTML] ,( identifier[str] ,)
), literal[string] % identifier[type] (
identifier[outerHTML] )
identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] , identifier[nodeId] = identifier[nodeId] ,
identifier[outerHTML] = identifier[outerHTML] )
keyword[return] identifier[subdom_funcs] | def DOM_setOuterHTML(self, nodeId, outerHTML):
"""
Function path: DOM.setOuterHTML
Domain: DOM
Method name: setOuterHTML
Parameters:
Required arguments:
'nodeId' (type: NodeId) -> Id of the node to set markup for.
'outerHTML' (type: string) -> Outer HTML markup to set.
No return value.
Description: Sets node HTML markup, returns new node id.
"""
assert isinstance(outerHTML, (str,)), "Argument 'outerHTML' must be of type '['str']'. Received type: '%s'" % type(outerHTML)
subdom_funcs = self.synchronous_command('DOM.setOuterHTML', nodeId=nodeId, outerHTML=outerHTML)
return subdom_funcs |
def server_random(self):
"""
Retrieve the random value used with the server hello message.
:return: A string representing the state
"""
session = _lib.SSL_get_session(self._ssl)
if session == _ffi.NULL:
return None
length = _lib.SSL_get_server_random(self._ssl, _ffi.NULL, 0)
assert length > 0
outp = _no_zero_allocator("unsigned char[]", length)
_lib.SSL_get_server_random(self._ssl, outp, length)
return _ffi.buffer(outp, length)[:] | def function[server_random, parameter[self]]:
constant[
Retrieve the random value used with the server hello message.
:return: A string representing the state
]
variable[session] assign[=] call[name[_lib].SSL_get_session, parameter[name[self]._ssl]]
if compare[name[session] equal[==] name[_ffi].NULL] begin[:]
return[constant[None]]
variable[length] assign[=] call[name[_lib].SSL_get_server_random, parameter[name[self]._ssl, name[_ffi].NULL, constant[0]]]
assert[compare[name[length] greater[>] constant[0]]]
variable[outp] assign[=] call[name[_no_zero_allocator], parameter[constant[unsigned char[]], name[length]]]
call[name[_lib].SSL_get_server_random, parameter[name[self]._ssl, name[outp], name[length]]]
return[call[call[name[_ffi].buffer, parameter[name[outp], name[length]]]][<ast.Slice object at 0x7da1b0259450>]] | keyword[def] identifier[server_random] ( identifier[self] ):
literal[string]
identifier[session] = identifier[_lib] . identifier[SSL_get_session] ( identifier[self] . identifier[_ssl] )
keyword[if] identifier[session] == identifier[_ffi] . identifier[NULL] :
keyword[return] keyword[None]
identifier[length] = identifier[_lib] . identifier[SSL_get_server_random] ( identifier[self] . identifier[_ssl] , identifier[_ffi] . identifier[NULL] , literal[int] )
keyword[assert] identifier[length] > literal[int]
identifier[outp] = identifier[_no_zero_allocator] ( literal[string] , identifier[length] )
identifier[_lib] . identifier[SSL_get_server_random] ( identifier[self] . identifier[_ssl] , identifier[outp] , identifier[length] )
keyword[return] identifier[_ffi] . identifier[buffer] ( identifier[outp] , identifier[length] )[:] | def server_random(self):
"""
Retrieve the random value used with the server hello message.
:return: A string representing the state
"""
session = _lib.SSL_get_session(self._ssl)
if session == _ffi.NULL:
return None # depends on [control=['if'], data=[]]
length = _lib.SSL_get_server_random(self._ssl, _ffi.NULL, 0)
assert length > 0
outp = _no_zero_allocator('unsigned char[]', length)
_lib.SSL_get_server_random(self._ssl, outp, length)
return _ffi.buffer(outp, length)[:] |
def get_configurations(self, id, **kwargs):
"""
Gets the Configurations for the Specified Set
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_configurations(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Build Configuration Set id (required)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: BuildConfigurationPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_configurations_with_http_info(id, **kwargs)
else:
(data) = self.get_configurations_with_http_info(id, **kwargs)
return data | def function[get_configurations, parameter[self, id]]:
constant[
Gets the Configurations for the Specified Set
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_configurations(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Build Configuration Set id (required)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: BuildConfigurationPage
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[callback]]] begin[:]
return[call[name[self].get_configurations_with_http_info, parameter[name[id]]]] | keyword[def] identifier[get_configurations] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get_configurations_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[get_configurations_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def get_configurations(self, id, **kwargs):
"""
Gets the Configurations for the Specified Set
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_configurations(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Build Configuration Set id (required)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: BuildConfigurationPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_configurations_with_http_info(id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = self.get_configurations_with_http_info(id, **kwargs)
return data |
def google_app_engine_ndb_delete_expired_sessions(dormant_for=86400, limit=500):
"""
Deletes expired sessions
A session is expired if it expires date is set and has passed or
if it has not been accessed for a given period of time.
:param dormant_for: seconds since last access to delete sessions, defaults to 24 hours.
:type dormant_for: int
:param limit: amount to delete in one call of the method, the maximum and default for this is the NDB fetch limit of 500
:type limit: int
"""
from vishnu.backend.client.google_app_engine_ndb import VishnuSession
from google.appengine.ext import ndb
from datetime import datetime
from datetime import timedelta
now = datetime.utcnow()
last_accessed = now - timedelta(seconds=dormant_for)
query = VishnuSession.query(ndb.OR(
ndb.AND(VishnuSession.expires <= now, VishnuSession.expires != None),
VishnuSession.last_accessed <= last_accessed
))
results = query.fetch(keys_only=True, limit=limit)
ndb.delete_multi(results)
return len(results) < limit | def function[google_app_engine_ndb_delete_expired_sessions, parameter[dormant_for, limit]]:
constant[
Deletes expired sessions
A session is expired if it expires date is set and has passed or
if it has not been accessed for a given period of time.
:param dormant_for: seconds since last access to delete sessions, defaults to 24 hours.
:type dormant_for: int
:param limit: amount to delete in one call of the method, the maximum and default for this is the NDB fetch limit of 500
:type limit: int
]
from relative_module[vishnu.backend.client.google_app_engine_ndb] import module[VishnuSession]
from relative_module[google.appengine.ext] import module[ndb]
from relative_module[datetime] import module[datetime]
from relative_module[datetime] import module[timedelta]
variable[now] assign[=] call[name[datetime].utcnow, parameter[]]
variable[last_accessed] assign[=] binary_operation[name[now] - call[name[timedelta], parameter[]]]
variable[query] assign[=] call[name[VishnuSession].query, parameter[call[name[ndb].OR, parameter[call[name[ndb].AND, parameter[compare[name[VishnuSession].expires less_or_equal[<=] name[now]], compare[name[VishnuSession].expires not_equal[!=] constant[None]]]], compare[name[VishnuSession].last_accessed less_or_equal[<=] name[last_accessed]]]]]]
variable[results] assign[=] call[name[query].fetch, parameter[]]
call[name[ndb].delete_multi, parameter[name[results]]]
return[compare[call[name[len], parameter[name[results]]] less[<] name[limit]]] | keyword[def] identifier[google_app_engine_ndb_delete_expired_sessions] ( identifier[dormant_for] = literal[int] , identifier[limit] = literal[int] ):
literal[string]
keyword[from] identifier[vishnu] . identifier[backend] . identifier[client] . identifier[google_app_engine_ndb] keyword[import] identifier[VishnuSession]
keyword[from] identifier[google] . identifier[appengine] . identifier[ext] keyword[import] identifier[ndb]
keyword[from] identifier[datetime] keyword[import] identifier[datetime]
keyword[from] identifier[datetime] keyword[import] identifier[timedelta]
identifier[now] = identifier[datetime] . identifier[utcnow] ()
identifier[last_accessed] = identifier[now] - identifier[timedelta] ( identifier[seconds] = identifier[dormant_for] )
identifier[query] = identifier[VishnuSession] . identifier[query] ( identifier[ndb] . identifier[OR] (
identifier[ndb] . identifier[AND] ( identifier[VishnuSession] . identifier[expires] <= identifier[now] , identifier[VishnuSession] . identifier[expires] != keyword[None] ),
identifier[VishnuSession] . identifier[last_accessed] <= identifier[last_accessed]
))
identifier[results] = identifier[query] . identifier[fetch] ( identifier[keys_only] = keyword[True] , identifier[limit] = identifier[limit] )
identifier[ndb] . identifier[delete_multi] ( identifier[results] )
keyword[return] identifier[len] ( identifier[results] )< identifier[limit] | def google_app_engine_ndb_delete_expired_sessions(dormant_for=86400, limit=500):
"""
Deletes expired sessions
A session is expired if it expires date is set and has passed or
if it has not been accessed for a given period of time.
:param dormant_for: seconds since last access to delete sessions, defaults to 24 hours.
:type dormant_for: int
:param limit: amount to delete in one call of the method, the maximum and default for this is the NDB fetch limit of 500
:type limit: int
"""
from vishnu.backend.client.google_app_engine_ndb import VishnuSession
from google.appengine.ext import ndb
from datetime import datetime
from datetime import timedelta
now = datetime.utcnow()
last_accessed = now - timedelta(seconds=dormant_for)
query = VishnuSession.query(ndb.OR(ndb.AND(VishnuSession.expires <= now, VishnuSession.expires != None), VishnuSession.last_accessed <= last_accessed))
results = query.fetch(keys_only=True, limit=limit)
ndb.delete_multi(results)
return len(results) < limit |
def bulk_upsert(self, conflict_target: List, rows: List[Dict], index_predicate: str=None):
"""Creates a set of new records or updates the existing
ones with the specified data.
Arguments:
conflict_target:
Fields to pass into the ON CONFLICT clause.
rows:
Rows to upsert.
index_predicate:
The index predicate to satisfy an arbiter partial index (i.e. what partial index to use for checking
conflicts)
"""
if not rows or len(rows) <= 0:
return
self.on_conflict(conflict_target, ConflictAction.UPDATE, index_predicate)
return self.bulk_insert(rows) | def function[bulk_upsert, parameter[self, conflict_target, rows, index_predicate]]:
constant[Creates a set of new records or updates the existing
ones with the specified data.
Arguments:
conflict_target:
Fields to pass into the ON CONFLICT clause.
rows:
Rows to upsert.
index_predicate:
The index predicate to satisfy an arbiter partial index (i.e. what partial index to use for checking
conflicts)
]
if <ast.BoolOp object at 0x7da1b059e920> begin[:]
return[None]
call[name[self].on_conflict, parameter[name[conflict_target], name[ConflictAction].UPDATE, name[index_predicate]]]
return[call[name[self].bulk_insert, parameter[name[rows]]]] | keyword[def] identifier[bulk_upsert] ( identifier[self] , identifier[conflict_target] : identifier[List] , identifier[rows] : identifier[List] [ identifier[Dict] ], identifier[index_predicate] : identifier[str] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[rows] keyword[or] identifier[len] ( identifier[rows] )<= literal[int] :
keyword[return]
identifier[self] . identifier[on_conflict] ( identifier[conflict_target] , identifier[ConflictAction] . identifier[UPDATE] , identifier[index_predicate] )
keyword[return] identifier[self] . identifier[bulk_insert] ( identifier[rows] ) | def bulk_upsert(self, conflict_target: List, rows: List[Dict], index_predicate: str=None):
"""Creates a set of new records or updates the existing
ones with the specified data.
Arguments:
conflict_target:
Fields to pass into the ON CONFLICT clause.
rows:
Rows to upsert.
index_predicate:
The index predicate to satisfy an arbiter partial index (i.e. what partial index to use for checking
conflicts)
"""
if not rows or len(rows) <= 0:
return # depends on [control=['if'], data=[]]
self.on_conflict(conflict_target, ConflictAction.UPDATE, index_predicate)
return self.bulk_insert(rows) |
def set_default_reference(self, method, reference):
"""
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method))
self._default_references[method] = reference | def function[set_default_reference, parameter[self, method, reference]]:
constant[
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
]
if compare[name[method] <ast.NotIn object at 0x7da2590d7190> name[self]._available_methods] begin[:]
<ast.Raise object at 0x7da2041da5c0>
call[name[self]._default_references][name[method]] assign[=] name[reference] | keyword[def] identifier[set_default_reference] ( identifier[self] , identifier[method] , identifier[reference] ):
literal[string]
keyword[if] identifier[method] keyword[not] keyword[in] identifier[self] . identifier[_available_methods] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[method] ))
identifier[self] . identifier[_default_references] [ identifier[method] ]= identifier[reference] | def set_default_reference(self, method, reference):
"""
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method)) # depends on [control=['if'], data=['method']]
self._default_references[method] = reference |
def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assigment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for op, arg in _iter_code(code):
if op==LOAD_CONST:
const = code.co_consts[arg]
elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
return const
else:
const = default | def function[extract_constant, parameter[code, symbol, default]]:
constant[Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
]
if compare[name[symbol] <ast.NotIn object at 0x7da2590d7190> name[code].co_names] begin[:]
return[constant[None]]
variable[name_idx] assign[=] call[call[name[list], parameter[name[code].co_names]].index, parameter[name[symbol]]]
variable[STORE_NAME] assign[=] constant[90]
variable[STORE_GLOBAL] assign[=] constant[97]
variable[LOAD_CONST] assign[=] constant[100]
variable[const] assign[=] name[default]
for taget[tuple[[<ast.Name object at 0x7da18f811780>, <ast.Name object at 0x7da18f812440>]]] in starred[call[name[_iter_code], parameter[name[code]]]] begin[:]
if compare[name[op] equal[==] name[LOAD_CONST]] begin[:]
variable[const] assign[=] call[name[code].co_consts][name[arg]] | keyword[def] identifier[extract_constant] ( identifier[code] , identifier[symbol] , identifier[default] =- literal[int] ):
literal[string]
keyword[if] identifier[symbol] keyword[not] keyword[in] identifier[code] . identifier[co_names] :
keyword[return] keyword[None]
identifier[name_idx] = identifier[list] ( identifier[code] . identifier[co_names] ). identifier[index] ( identifier[symbol] )
identifier[STORE_NAME] = literal[int]
identifier[STORE_GLOBAL] = literal[int]
identifier[LOAD_CONST] = literal[int]
identifier[const] = identifier[default]
keyword[for] identifier[op] , identifier[arg] keyword[in] identifier[_iter_code] ( identifier[code] ):
keyword[if] identifier[op] == identifier[LOAD_CONST] :
identifier[const] = identifier[code] . identifier[co_consts] [ identifier[arg] ]
keyword[elif] identifier[arg] == identifier[name_idx] keyword[and] ( identifier[op] == identifier[STORE_NAME] keyword[or] identifier[op] == identifier[STORE_GLOBAL] ):
keyword[return] identifier[const]
keyword[else] :
identifier[const] = identifier[default] | def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assigment
return None # depends on [control=['if'], data=[]]
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for (op, arg) in _iter_code(code):
if op == LOAD_CONST:
const = code.co_consts[arg] # depends on [control=['if'], data=[]]
elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
return const # depends on [control=['if'], data=[]]
else:
const = default # depends on [control=['for'], data=[]] |
def attached(name, force=False):
'''
Ensure zone is attached
name : string
name of the zone
force : boolean
force attach the zone
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
zones = __salt__['zoneadm.list'](installed=True, configured=True)
if name in zones:
if zones[name]['state'] == 'configured':
if __opts__['test']:
res_attach = {'status': True}
else:
res_attach = __salt__['zoneadm.attach'](name, force)
ret['result'] = res_attach['status']
if ret['result']:
ret['changes'][name] = 'attached'
ret['comment'] = 'The zone {0} was attached.'.format(name)
else:
ret['comment'] = []
ret['comment'].append('Failed to attach zone {0}!'.format(name))
if 'message' in res_attach:
ret['comment'].append(res_attach['message'])
ret['comment'] = "\n".join(ret['comment'])
else:
ret['result'] = True
ret['comment'] = 'zone {0} already attached.'.format(name)
else:
ret['result'] = False
ret['comment'] = 'zone {0} is not configured!'.format(name)
return ret | def function[attached, parameter[name, force]]:
constant[
Ensure zone is attached
name : string
name of the zone
force : boolean
force attach the zone
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c610f0>, <ast.Constant object at 0x7da1b1c60820>, <ast.Constant object at 0x7da1b1c601f0>, <ast.Constant object at 0x7da1b1c61e10>], [<ast.Name object at 0x7da1b1c63310>, <ast.Dict object at 0x7da1b1c62530>, <ast.Constant object at 0x7da1b1c604c0>, <ast.Constant object at 0x7da1b1c61060>]]
variable[zones] assign[=] call[call[name[__salt__]][constant[zoneadm.list]], parameter[]]
if compare[name[name] in name[zones]] begin[:]
if compare[call[call[name[zones]][name[name]]][constant[state]] equal[==] constant[configured]] begin[:]
if call[name[__opts__]][constant[test]] begin[:]
variable[res_attach] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c629e0>], [<ast.Constant object at 0x7da1b1c63580>]]
call[name[ret]][constant[result]] assign[=] call[name[res_attach]][constant[status]]
if call[name[ret]][constant[result]] begin[:]
call[call[name[ret]][constant[changes]]][name[name]] assign[=] constant[attached]
call[name[ret]][constant[comment]] assign[=] call[constant[The zone {0} was attached.].format, parameter[name[name]]]
return[name[ret]] | keyword[def] identifier[attached] ( identifier[name] , identifier[force] = keyword[False] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[None] ,
literal[string] : literal[string] }
identifier[zones] = identifier[__salt__] [ literal[string] ]( identifier[installed] = keyword[True] , identifier[configured] = keyword[True] )
keyword[if] identifier[name] keyword[in] identifier[zones] :
keyword[if] identifier[zones] [ identifier[name] ][ literal[string] ]== literal[string] :
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[res_attach] ={ literal[string] : keyword[True] }
keyword[else] :
identifier[res_attach] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[force] )
identifier[ret] [ literal[string] ]= identifier[res_attach] [ literal[string] ]
keyword[if] identifier[ret] [ literal[string] ]:
identifier[ret] [ literal[string] ][ identifier[name] ]= literal[string]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]=[]
identifier[ret] [ literal[string] ]. identifier[append] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[if] literal[string] keyword[in] identifier[res_attach] :
identifier[ret] [ literal[string] ]. identifier[append] ( identifier[res_attach] [ literal[string] ])
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[ret] [ literal[string] ])
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret] | def attached(name, force=False):
"""
Ensure zone is attached
name : string
name of the zone
force : boolean
force attach the zone
"""
ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
zones = __salt__['zoneadm.list'](installed=True, configured=True)
if name in zones:
if zones[name]['state'] == 'configured':
if __opts__['test']:
res_attach = {'status': True} # depends on [control=['if'], data=[]]
else:
res_attach = __salt__['zoneadm.attach'](name, force)
ret['result'] = res_attach['status']
if ret['result']:
ret['changes'][name] = 'attached'
ret['comment'] = 'The zone {0} was attached.'.format(name) # depends on [control=['if'], data=[]]
else:
ret['comment'] = []
ret['comment'].append('Failed to attach zone {0}!'.format(name))
if 'message' in res_attach:
ret['comment'].append(res_attach['message']) # depends on [control=['if'], data=['res_attach']]
ret['comment'] = '\n'.join(ret['comment']) # depends on [control=['if'], data=[]]
else:
ret['result'] = True
ret['comment'] = 'zone {0} already attached.'.format(name) # depends on [control=['if'], data=['name', 'zones']]
else:
ret['result'] = False
ret['comment'] = 'zone {0} is not configured!'.format(name)
return ret |
def parse_delta(filename):
"""Returns (alignment length, similarity errors) tuple from passed .delta.
- filename - path to the input .delta file
Extracts the aligned length and number of similarity errors for each
aligned uniquely-matched region, and returns the cumulative total for
each as a tuple.
"""
aln_length, sim_errors = 0, 0
for line in [l.strip().split() for l in open(filename, "r").readlines()]:
if line[0] == "NUCMER" or line[0].startswith(">"): # Skip headers
continue
# We only process lines with seven columns:
if len(line) == 7:
aln_length += abs(int(line[1]) - int(line[0]))
sim_errors += int(line[4])
return aln_length, sim_errors | def function[parse_delta, parameter[filename]]:
constant[Returns (alignment length, similarity errors) tuple from passed .delta.
- filename - path to the input .delta file
Extracts the aligned length and number of similarity errors for each
aligned uniquely-matched region, and returns the cumulative total for
each as a tuple.
]
<ast.Tuple object at 0x7da1b0e2b610> assign[=] tuple[[<ast.Constant object at 0x7da1b0e2b220>, <ast.Constant object at 0x7da1b0e2bc40>]]
for taget[name[line]] in starred[<ast.ListComp object at 0x7da1b0e2b340>] begin[:]
if <ast.BoolOp object at 0x7da1b0d43a30> begin[:]
continue
if compare[call[name[len], parameter[name[line]]] equal[==] constant[7]] begin[:]
<ast.AugAssign object at 0x7da1b0d43280>
<ast.AugAssign object at 0x7da1b0d433d0>
return[tuple[[<ast.Name object at 0x7da1b0d43670>, <ast.Name object at 0x7da1b0d436a0>]]] | keyword[def] identifier[parse_delta] ( identifier[filename] ):
literal[string]
identifier[aln_length] , identifier[sim_errors] = literal[int] , literal[int]
keyword[for] identifier[line] keyword[in] [ identifier[l] . identifier[strip] (). identifier[split] () keyword[for] identifier[l] keyword[in] identifier[open] ( identifier[filename] , literal[string] ). identifier[readlines] ()]:
keyword[if] identifier[line] [ literal[int] ]== literal[string] keyword[or] identifier[line] [ literal[int] ]. identifier[startswith] ( literal[string] ):
keyword[continue]
keyword[if] identifier[len] ( identifier[line] )== literal[int] :
identifier[aln_length] += identifier[abs] ( identifier[int] ( identifier[line] [ literal[int] ])- identifier[int] ( identifier[line] [ literal[int] ]))
identifier[sim_errors] += identifier[int] ( identifier[line] [ literal[int] ])
keyword[return] identifier[aln_length] , identifier[sim_errors] | def parse_delta(filename):
"""Returns (alignment length, similarity errors) tuple from passed .delta.
- filename - path to the input .delta file
Extracts the aligned length and number of similarity errors for each
aligned uniquely-matched region, and returns the cumulative total for
each as a tuple.
"""
(aln_length, sim_errors) = (0, 0)
for line in [l.strip().split() for l in open(filename, 'r').readlines()]:
if line[0] == 'NUCMER' or line[0].startswith('>'): # Skip headers
continue # depends on [control=['if'], data=[]]
# We only process lines with seven columns:
if len(line) == 7:
aln_length += abs(int(line[1]) - int(line[0]))
sim_errors += int(line[4]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return (aln_length, sim_errors) |
def is_admin(self, roles):
"""determine from a list of roles if is ldapcherry administrator"""
for r in roles:
if r in self.admin_roles:
return True
return False | def function[is_admin, parameter[self, roles]]:
constant[determine from a list of roles if is ldapcherry administrator]
for taget[name[r]] in starred[name[roles]] begin[:]
if compare[name[r] in name[self].admin_roles] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_admin] ( identifier[self] , identifier[roles] ):
literal[string]
keyword[for] identifier[r] keyword[in] identifier[roles] :
keyword[if] identifier[r] keyword[in] identifier[self] . identifier[admin_roles] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_admin(self, roles):
"""determine from a list of roles if is ldapcherry administrator"""
for r in roles:
if r in self.admin_roles:
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
return False |
def alpha_balance_loss(labels, loss, alpha_weights):
"""
Calculate the alpha balanced cross_entropy.
This means for each sample the cross entropy is calculated and then weighted by the class specific weight.
:param labels: A float tensor of shape [batch_size, ..., num_classes] representing the label class probabilities.
:param loss: A float tensor of shape [batch_size, ...] representing the loss that should be focused.
:param alpha_weights: A float tensor of shape [1, ..., num_classes] (... is filled with ones to match number
of dimensions to labels tensor) representing the weights for each class.
:return: A tensor representing the weighted cross entropy.
"""
with tf.variable_scope("alpha_balance"):
# Broadcast multiply labels with alpha weights to select weights and then reduce them along last axis.
weights = tf.reduce_sum(labels * alpha_weights, axis=-1)
return weights * loss | def function[alpha_balance_loss, parameter[labels, loss, alpha_weights]]:
constant[
Calculate the alpha balanced cross_entropy.
This means for each sample the cross entropy is calculated and then weighted by the class specific weight.
:param labels: A float tensor of shape [batch_size, ..., num_classes] representing the label class probabilities.
:param loss: A float tensor of shape [batch_size, ...] representing the loss that should be focused.
:param alpha_weights: A float tensor of shape [1, ..., num_classes] (... is filled with ones to match number
of dimensions to labels tensor) representing the weights for each class.
:return: A tensor representing the weighted cross entropy.
]
with call[name[tf].variable_scope, parameter[constant[alpha_balance]]] begin[:]
variable[weights] assign[=] call[name[tf].reduce_sum, parameter[binary_operation[name[labels] * name[alpha_weights]]]]
return[binary_operation[name[weights] * name[loss]]] | keyword[def] identifier[alpha_balance_loss] ( identifier[labels] , identifier[loss] , identifier[alpha_weights] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( literal[string] ):
identifier[weights] = identifier[tf] . identifier[reduce_sum] ( identifier[labels] * identifier[alpha_weights] , identifier[axis] =- literal[int] )
keyword[return] identifier[weights] * identifier[loss] | def alpha_balance_loss(labels, loss, alpha_weights):
"""
Calculate the alpha balanced cross_entropy.
This means for each sample the cross entropy is calculated and then weighted by the class specific weight.
:param labels: A float tensor of shape [batch_size, ..., num_classes] representing the label class probabilities.
:param loss: A float tensor of shape [batch_size, ...] representing the loss that should be focused.
:param alpha_weights: A float tensor of shape [1, ..., num_classes] (... is filled with ones to match number
of dimensions to labels tensor) representing the weights for each class.
:return: A tensor representing the weighted cross entropy.
"""
with tf.variable_scope('alpha_balance'):
# Broadcast multiply labels with alpha weights to select weights and then reduce them along last axis.
weights = tf.reduce_sum(labels * alpha_weights, axis=-1)
return weights * loss # depends on [control=['with'], data=[]] |
def register_function_hooks(self, func):
"""Looks at an object method and registers it for relevent transitions."""
for hook_kind, hooks in func.xworkflows_hook.items():
for field_name, hook in hooks:
if field_name and field_name != self.state_field:
continue
for transition in self.workflow.transitions:
if hook.applies_to(transition):
implem = self.implementations[transition.name]
implem.add_hook(hook) | def function[register_function_hooks, parameter[self, func]]:
constant[Looks at an object method and registers it for relevent transitions.]
for taget[tuple[[<ast.Name object at 0x7da18f58d930>, <ast.Name object at 0x7da18f58f970>]]] in starred[call[name[func].xworkflows_hook.items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f58fa30>, <ast.Name object at 0x7da18f58c6d0>]]] in starred[name[hooks]] begin[:]
if <ast.BoolOp object at 0x7da1b0fad180> begin[:]
continue
for taget[name[transition]] in starred[name[self].workflow.transitions] begin[:]
if call[name[hook].applies_to, parameter[name[transition]]] begin[:]
variable[implem] assign[=] call[name[self].implementations][name[transition].name]
call[name[implem].add_hook, parameter[name[hook]]] | keyword[def] identifier[register_function_hooks] ( identifier[self] , identifier[func] ):
literal[string]
keyword[for] identifier[hook_kind] , identifier[hooks] keyword[in] identifier[func] . identifier[xworkflows_hook] . identifier[items] ():
keyword[for] identifier[field_name] , identifier[hook] keyword[in] identifier[hooks] :
keyword[if] identifier[field_name] keyword[and] identifier[field_name] != identifier[self] . identifier[state_field] :
keyword[continue]
keyword[for] identifier[transition] keyword[in] identifier[self] . identifier[workflow] . identifier[transitions] :
keyword[if] identifier[hook] . identifier[applies_to] ( identifier[transition] ):
identifier[implem] = identifier[self] . identifier[implementations] [ identifier[transition] . identifier[name] ]
identifier[implem] . identifier[add_hook] ( identifier[hook] ) | def register_function_hooks(self, func):
"""Looks at an object method and registers it for relevent transitions."""
for (hook_kind, hooks) in func.xworkflows_hook.items():
for (field_name, hook) in hooks:
if field_name and field_name != self.state_field:
continue # depends on [control=['if'], data=[]]
for transition in self.workflow.transitions:
if hook.applies_to(transition):
implem = self.implementations[transition.name]
implem.add_hook(hook) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['transition']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def verify_images(path:PathOrStr, delete:bool=True, max_workers:int=4, max_size:Union[int]=None, recurse:bool=False,
dest:PathOrStr='.', n_channels:int=3, interp=PIL.Image.BILINEAR, ext:str=None, img_format:str=None,
resume:bool=None, **kwargs):
"Check if the images in `path` aren't broken, maybe resize them and copy it in `dest`."
path = Path(path)
if resume is None and dest == '.': resume=False
dest = path/Path(dest)
os.makedirs(dest, exist_ok=True)
files = get_image_files(path, recurse=recurse)
func = partial(verify_image, delete=delete, max_size=max_size, dest=dest, n_channels=n_channels, interp=interp,
ext=ext, img_format=img_format, resume=resume, **kwargs)
parallel(func, files, max_workers=max_workers) | def function[verify_images, parameter[path, delete, max_workers, max_size, recurse, dest, n_channels, interp, ext, img_format, resume]]:
constant[Check if the images in `path` aren't broken, maybe resize them and copy it in `dest`.]
variable[path] assign[=] call[name[Path], parameter[name[path]]]
if <ast.BoolOp object at 0x7da1b1e77850> begin[:]
variable[resume] assign[=] constant[False]
variable[dest] assign[=] binary_operation[name[path] / call[name[Path], parameter[name[dest]]]]
call[name[os].makedirs, parameter[name[dest]]]
variable[files] assign[=] call[name[get_image_files], parameter[name[path]]]
variable[func] assign[=] call[name[partial], parameter[name[verify_image]]]
call[name[parallel], parameter[name[func], name[files]]] | keyword[def] identifier[verify_images] ( identifier[path] : identifier[PathOrStr] , identifier[delete] : identifier[bool] = keyword[True] , identifier[max_workers] : identifier[int] = literal[int] , identifier[max_size] : identifier[Union] [ identifier[int] ]= keyword[None] , identifier[recurse] : identifier[bool] = keyword[False] ,
identifier[dest] : identifier[PathOrStr] = literal[string] , identifier[n_channels] : identifier[int] = literal[int] , identifier[interp] = identifier[PIL] . identifier[Image] . identifier[BILINEAR] , identifier[ext] : identifier[str] = keyword[None] , identifier[img_format] : identifier[str] = keyword[None] ,
identifier[resume] : identifier[bool] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[path] = identifier[Path] ( identifier[path] )
keyword[if] identifier[resume] keyword[is] keyword[None] keyword[and] identifier[dest] == literal[string] : identifier[resume] = keyword[False]
identifier[dest] = identifier[path] / identifier[Path] ( identifier[dest] )
identifier[os] . identifier[makedirs] ( identifier[dest] , identifier[exist_ok] = keyword[True] )
identifier[files] = identifier[get_image_files] ( identifier[path] , identifier[recurse] = identifier[recurse] )
identifier[func] = identifier[partial] ( identifier[verify_image] , identifier[delete] = identifier[delete] , identifier[max_size] = identifier[max_size] , identifier[dest] = identifier[dest] , identifier[n_channels] = identifier[n_channels] , identifier[interp] = identifier[interp] ,
identifier[ext] = identifier[ext] , identifier[img_format] = identifier[img_format] , identifier[resume] = identifier[resume] ,** identifier[kwargs] )
identifier[parallel] ( identifier[func] , identifier[files] , identifier[max_workers] = identifier[max_workers] ) | def verify_images(path: PathOrStr, delete: bool=True, max_workers: int=4, max_size: Union[int]=None, recurse: bool=False, dest: PathOrStr='.', n_channels: int=3, interp=PIL.Image.BILINEAR, ext: str=None, img_format: str=None, resume: bool=None, **kwargs):
"""Check if the images in `path` aren't broken, maybe resize them and copy it in `dest`."""
path = Path(path)
if resume is None and dest == '.':
resume = False # depends on [control=['if'], data=[]]
dest = path / Path(dest)
os.makedirs(dest, exist_ok=True)
files = get_image_files(path, recurse=recurse)
func = partial(verify_image, delete=delete, max_size=max_size, dest=dest, n_channels=n_channels, interp=interp, ext=ext, img_format=img_format, resume=resume, **kwargs)
parallel(func, files, max_workers=max_workers) |
def walk(self, basedir):
"""Walk all the directories of basedir except hidden directories
:param basedir: string, the directory to walk
:returns: generator, same as os.walk
"""
system_d = SitePackagesDir()
filter_system_d = system_d and os.path.commonprefix([system_d, basedir]) != system_d
for root, dirs, files in os.walk(basedir, topdown=True):
# ignore dot directories and private directories (start with underscore)
dirs[:] = [d for d in dirs if d[0] != '.' and d[0] != "_"]
if filter_system_d:
dirs[:] = [d for d in dirs if not d.startswith(system_d)]
yield root, dirs, files | def function[walk, parameter[self, basedir]]:
constant[Walk all the directories of basedir except hidden directories
:param basedir: string, the directory to walk
:returns: generator, same as os.walk
]
variable[system_d] assign[=] call[name[SitePackagesDir], parameter[]]
variable[filter_system_d] assign[=] <ast.BoolOp object at 0x7da18f813ee0>
for taget[tuple[[<ast.Name object at 0x7da18f812890>, <ast.Name object at 0x7da18f811f90>, <ast.Name object at 0x7da18f810670>]]] in starred[call[name[os].walk, parameter[name[basedir]]]] begin[:]
call[name[dirs]][<ast.Slice object at 0x7da18f810940>] assign[=] <ast.ListComp object at 0x7da18f8127d0>
if name[filter_system_d] begin[:]
call[name[dirs]][<ast.Slice object at 0x7da18f812650>] assign[=] <ast.ListComp object at 0x7da18f813f70>
<ast.Yield object at 0x7da18f813d90> | keyword[def] identifier[walk] ( identifier[self] , identifier[basedir] ):
literal[string]
identifier[system_d] = identifier[SitePackagesDir] ()
identifier[filter_system_d] = identifier[system_d] keyword[and] identifier[os] . identifier[path] . identifier[commonprefix] ([ identifier[system_d] , identifier[basedir] ])!= identifier[system_d]
keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[basedir] , identifier[topdown] = keyword[True] ):
identifier[dirs] [:]=[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[dirs] keyword[if] identifier[d] [ literal[int] ]!= literal[string] keyword[and] identifier[d] [ literal[int] ]!= literal[string] ]
keyword[if] identifier[filter_system_d] :
identifier[dirs] [:]=[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[dirs] keyword[if] keyword[not] identifier[d] . identifier[startswith] ( identifier[system_d] )]
keyword[yield] identifier[root] , identifier[dirs] , identifier[files] | def walk(self, basedir):
"""Walk all the directories of basedir except hidden directories
:param basedir: string, the directory to walk
:returns: generator, same as os.walk
"""
system_d = SitePackagesDir()
filter_system_d = system_d and os.path.commonprefix([system_d, basedir]) != system_d
for (root, dirs, files) in os.walk(basedir, topdown=True):
# ignore dot directories and private directories (start with underscore)
dirs[:] = [d for d in dirs if d[0] != '.' and d[0] != '_']
if filter_system_d:
dirs[:] = [d for d in dirs if not d.startswith(system_d)] # depends on [control=['if'], data=[]]
yield (root, dirs, files) # depends on [control=['for'], data=[]] |
def notebook_to_md(notebook):
"""Convert a notebook to its Markdown representation, using Pandoc"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(ipynb_writes(notebook).encode('utf-8'))
tmp_file.close()
pandoc(u'--from ipynb --to markdown -s --atx-headers --wrap=preserve --preserve-tabs', tmp_file.name, tmp_file.name)
with open(tmp_file.name, encoding='utf-8') as opened_file:
text = opened_file.read()
os.unlink(tmp_file.name)
return '\n'.join(text.splitlines()) | def function[notebook_to_md, parameter[notebook]]:
constant[Convert a notebook to its Markdown representation, using Pandoc]
variable[tmp_file] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]]
call[name[tmp_file].write, parameter[call[call[name[ipynb_writes], parameter[name[notebook]]].encode, parameter[constant[utf-8]]]]]
call[name[tmp_file].close, parameter[]]
call[name[pandoc], parameter[constant[--from ipynb --to markdown -s --atx-headers --wrap=preserve --preserve-tabs], name[tmp_file].name, name[tmp_file].name]]
with call[name[open], parameter[name[tmp_file].name]] begin[:]
variable[text] assign[=] call[name[opened_file].read, parameter[]]
call[name[os].unlink, parameter[name[tmp_file].name]]
return[call[constant[
].join, parameter[call[name[text].splitlines, parameter[]]]]] | keyword[def] identifier[notebook_to_md] ( identifier[notebook] ):
literal[string]
identifier[tmp_file] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[False] )
identifier[tmp_file] . identifier[write] ( identifier[ipynb_writes] ( identifier[notebook] ). identifier[encode] ( literal[string] ))
identifier[tmp_file] . identifier[close] ()
identifier[pandoc] ( literal[string] , identifier[tmp_file] . identifier[name] , identifier[tmp_file] . identifier[name] )
keyword[with] identifier[open] ( identifier[tmp_file] . identifier[name] , identifier[encoding] = literal[string] ) keyword[as] identifier[opened_file] :
identifier[text] = identifier[opened_file] . identifier[read] ()
identifier[os] . identifier[unlink] ( identifier[tmp_file] . identifier[name] )
keyword[return] literal[string] . identifier[join] ( identifier[text] . identifier[splitlines] ()) | def notebook_to_md(notebook):
"""Convert a notebook to its Markdown representation, using Pandoc"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
tmp_file.write(ipynb_writes(notebook).encode('utf-8'))
tmp_file.close()
pandoc(u'--from ipynb --to markdown -s --atx-headers --wrap=preserve --preserve-tabs', tmp_file.name, tmp_file.name)
with open(tmp_file.name, encoding='utf-8') as opened_file:
text = opened_file.read() # depends on [control=['with'], data=['opened_file']]
os.unlink(tmp_file.name)
return '\n'.join(text.splitlines()) |
def allocate_ids(self, project_id, keys):
"""Perform an ``allocateIds`` request.
:type project_id: str
:param project_id: The project to connect to. This is
usually your project name in the cloud console.
:type keys: List[.entity_pb2.Key]
:param keys: The keys for which the backend should allocate IDs.
:rtype: :class:`.datastore_pb2.AllocateIdsResponse`
:returns: The returned protobuf response object.
"""
request_pb = _datastore_pb2.AllocateIdsRequest(keys=keys)
return _rpc(
self.client._http,
project_id,
"allocateIds",
self.client._base_url,
request_pb,
_datastore_pb2.AllocateIdsResponse,
) | def function[allocate_ids, parameter[self, project_id, keys]]:
constant[Perform an ``allocateIds`` request.
:type project_id: str
:param project_id: The project to connect to. This is
usually your project name in the cloud console.
:type keys: List[.entity_pb2.Key]
:param keys: The keys for which the backend should allocate IDs.
:rtype: :class:`.datastore_pb2.AllocateIdsResponse`
:returns: The returned protobuf response object.
]
variable[request_pb] assign[=] call[name[_datastore_pb2].AllocateIdsRequest, parameter[]]
return[call[name[_rpc], parameter[name[self].client._http, name[project_id], constant[allocateIds], name[self].client._base_url, name[request_pb], name[_datastore_pb2].AllocateIdsResponse]]] | keyword[def] identifier[allocate_ids] ( identifier[self] , identifier[project_id] , identifier[keys] ):
literal[string]
identifier[request_pb] = identifier[_datastore_pb2] . identifier[AllocateIdsRequest] ( identifier[keys] = identifier[keys] )
keyword[return] identifier[_rpc] (
identifier[self] . identifier[client] . identifier[_http] ,
identifier[project_id] ,
literal[string] ,
identifier[self] . identifier[client] . identifier[_base_url] ,
identifier[request_pb] ,
identifier[_datastore_pb2] . identifier[AllocateIdsResponse] ,
) | def allocate_ids(self, project_id, keys):
"""Perform an ``allocateIds`` request.
:type project_id: str
:param project_id: The project to connect to. This is
usually your project name in the cloud console.
:type keys: List[.entity_pb2.Key]
:param keys: The keys for which the backend should allocate IDs.
:rtype: :class:`.datastore_pb2.AllocateIdsResponse`
:returns: The returned protobuf response object.
"""
request_pb = _datastore_pb2.AllocateIdsRequest(keys=keys)
return _rpc(self.client._http, project_id, 'allocateIds', self.client._base_url, request_pb, _datastore_pb2.AllocateIdsResponse) |
def publish(self, body, routing_key, exchange='amq.default',
virtual_host='/', properties=None, payload_encoding='string'):
"""Publish a Message.
:param bytes|str|unicode body: Message payload
:param str routing_key: Message routing key
:param str exchange: The exchange to publish the message to
:param str virtual_host: Virtual host name
:param dict properties: Message properties
:param str payload_encoding: Payload encoding.
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: dict
"""
exchange = quote(exchange, '')
properties = properties or {}
body = json.dumps(
{
'routing_key': routing_key,
'payload': body,
'payload_encoding': payload_encoding,
'properties': properties,
'vhost': virtual_host
}
)
virtual_host = quote(virtual_host, '')
return self.http_client.post(API_BASIC_PUBLISH %
(
virtual_host,
exchange),
payload=body) | def function[publish, parameter[self, body, routing_key, exchange, virtual_host, properties, payload_encoding]]:
constant[Publish a Message.
:param bytes|str|unicode body: Message payload
:param str routing_key: Message routing key
:param str exchange: The exchange to publish the message to
:param str virtual_host: Virtual host name
:param dict properties: Message properties
:param str payload_encoding: Payload encoding.
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: dict
]
variable[exchange] assign[=] call[name[quote], parameter[name[exchange], constant[]]]
variable[properties] assign[=] <ast.BoolOp object at 0x7da18f721e70>
variable[body] assign[=] call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da18f722b30>, <ast.Constant object at 0x7da18f721a50>, <ast.Constant object at 0x7da18f720b80>, <ast.Constant object at 0x7da18f7203a0>, <ast.Constant object at 0x7da18f723b80>], [<ast.Name object at 0x7da18f720b20>, <ast.Name object at 0x7da18f722620>, <ast.Name object at 0x7da18f723070>, <ast.Name object at 0x7da18f720160>, <ast.Name object at 0x7da18f723970>]]]]
variable[virtual_host] assign[=] call[name[quote], parameter[name[virtual_host], constant[]]]
return[call[name[self].http_client.post, parameter[binary_operation[name[API_BASIC_PUBLISH] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f720070>, <ast.Name object at 0x7da18f723a90>]]]]]] | keyword[def] identifier[publish] ( identifier[self] , identifier[body] , identifier[routing_key] , identifier[exchange] = literal[string] ,
identifier[virtual_host] = literal[string] , identifier[properties] = keyword[None] , identifier[payload_encoding] = literal[string] ):
literal[string]
identifier[exchange] = identifier[quote] ( identifier[exchange] , literal[string] )
identifier[properties] = identifier[properties] keyword[or] {}
identifier[body] = identifier[json] . identifier[dumps] (
{
literal[string] : identifier[routing_key] ,
literal[string] : identifier[body] ,
literal[string] : identifier[payload_encoding] ,
literal[string] : identifier[properties] ,
literal[string] : identifier[virtual_host]
}
)
identifier[virtual_host] = identifier[quote] ( identifier[virtual_host] , literal[string] )
keyword[return] identifier[self] . identifier[http_client] . identifier[post] ( identifier[API_BASIC_PUBLISH] %
(
identifier[virtual_host] ,
identifier[exchange] ),
identifier[payload] = identifier[body] ) | def publish(self, body, routing_key, exchange='amq.default', virtual_host='/', properties=None, payload_encoding='string'):
"""Publish a Message.
:param bytes|str|unicode body: Message payload
:param str routing_key: Message routing key
:param str exchange: The exchange to publish the message to
:param str virtual_host: Virtual host name
:param dict properties: Message properties
:param str payload_encoding: Payload encoding.
:raises ApiError: Raises if the remote server encountered an error.
:raises ApiConnectionError: Raises if there was a connectivity issue.
:rtype: dict
"""
exchange = quote(exchange, '')
properties = properties or {}
body = json.dumps({'routing_key': routing_key, 'payload': body, 'payload_encoding': payload_encoding, 'properties': properties, 'vhost': virtual_host})
virtual_host = quote(virtual_host, '')
return self.http_client.post(API_BASIC_PUBLISH % (virtual_host, exchange), payload=body) |
def get_cnts_levels_depths_recs(recs):
"""Collect counts of levels and depths in a Group of GO Terms."""
cnts = cx.defaultdict(lambda: cx.defaultdict(cx.Counter))
for rec in recs:
if rec is not None and not rec.is_obsolete:
cnts['level'][rec.level][rec.namespace] += 1
cnts['depth'][rec.depth][rec.namespace] += 1
return cnts | def function[get_cnts_levels_depths_recs, parameter[recs]]:
constant[Collect counts of levels and depths in a Group of GO Terms.]
variable[cnts] assign[=] call[name[cx].defaultdict, parameter[<ast.Lambda object at 0x7da2044c38b0>]]
for taget[name[rec]] in starred[name[recs]] begin[:]
if <ast.BoolOp object at 0x7da18c4cfdf0> begin[:]
<ast.AugAssign object at 0x7da18c4cef20>
<ast.AugAssign object at 0x7da18c4cfc10>
return[name[cnts]] | keyword[def] identifier[get_cnts_levels_depths_recs] ( identifier[recs] ):
literal[string]
identifier[cnts] = identifier[cx] . identifier[defaultdict] ( keyword[lambda] : identifier[cx] . identifier[defaultdict] ( identifier[cx] . identifier[Counter] ))
keyword[for] identifier[rec] keyword[in] identifier[recs] :
keyword[if] identifier[rec] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[rec] . identifier[is_obsolete] :
identifier[cnts] [ literal[string] ][ identifier[rec] . identifier[level] ][ identifier[rec] . identifier[namespace] ]+= literal[int]
identifier[cnts] [ literal[string] ][ identifier[rec] . identifier[depth] ][ identifier[rec] . identifier[namespace] ]+= literal[int]
keyword[return] identifier[cnts] | def get_cnts_levels_depths_recs(recs):
"""Collect counts of levels and depths in a Group of GO Terms."""
cnts = cx.defaultdict(lambda : cx.defaultdict(cx.Counter))
for rec in recs:
if rec is not None and (not rec.is_obsolete):
cnts['level'][rec.level][rec.namespace] += 1
cnts['depth'][rec.depth][rec.namespace] += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']]
return cnts |
def Parse(self, cmd, args, stdout, stderr, return_val, time_taken,
knowledge_base):
"""Parse the sysctl output."""
_ = stderr, time_taken, args, knowledge_base # Unused.
self.CheckReturn(cmd, return_val)
result = rdf_protodict.AttributedDict()
# The KeyValueParser generates an ordered dict by default. The sysctl vals
# aren't ordering dependent, but there's no need to un-order it.
for k, v in iteritems(self.lexer.ParseToOrderedDict(stdout)):
key = k.replace(".", "_")
if len(v) == 1:
v = v[0]
result[key] = v
return [result] | def function[Parse, parameter[self, cmd, args, stdout, stderr, return_val, time_taken, knowledge_base]]:
constant[Parse the sysctl output.]
variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b1b465c0>, <ast.Name object at 0x7da1b1b463e0>, <ast.Name object at 0x7da1b1b452a0>, <ast.Name object at 0x7da1b1b47e20>]]
call[name[self].CheckReturn, parameter[name[cmd], name[return_val]]]
variable[result] assign[=] call[name[rdf_protodict].AttributedDict, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b1b479a0>, <ast.Name object at 0x7da1b1b6c6d0>]]] in starred[call[name[iteritems], parameter[call[name[self].lexer.ParseToOrderedDict, parameter[name[stdout]]]]]] begin[:]
variable[key] assign[=] call[name[k].replace, parameter[constant[.], constant[_]]]
if compare[call[name[len], parameter[name[v]]] equal[==] constant[1]] begin[:]
variable[v] assign[=] call[name[v]][constant[0]]
call[name[result]][name[key]] assign[=] name[v]
return[list[[<ast.Name object at 0x7da1b1b6e770>]]] | keyword[def] identifier[Parse] ( identifier[self] , identifier[cmd] , identifier[args] , identifier[stdout] , identifier[stderr] , identifier[return_val] , identifier[time_taken] ,
identifier[knowledge_base] ):
literal[string]
identifier[_] = identifier[stderr] , identifier[time_taken] , identifier[args] , identifier[knowledge_base]
identifier[self] . identifier[CheckReturn] ( identifier[cmd] , identifier[return_val] )
identifier[result] = identifier[rdf_protodict] . identifier[AttributedDict] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[self] . identifier[lexer] . identifier[ParseToOrderedDict] ( identifier[stdout] )):
identifier[key] = identifier[k] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[v] )== literal[int] :
identifier[v] = identifier[v] [ literal[int] ]
identifier[result] [ identifier[key] ]= identifier[v]
keyword[return] [ identifier[result] ] | def Parse(self, cmd, args, stdout, stderr, return_val, time_taken, knowledge_base):
"""Parse the sysctl output."""
_ = (stderr, time_taken, args, knowledge_base) # Unused.
self.CheckReturn(cmd, return_val)
result = rdf_protodict.AttributedDict()
# The KeyValueParser generates an ordered dict by default. The sysctl vals
# aren't ordering dependent, but there's no need to un-order it.
for (k, v) in iteritems(self.lexer.ParseToOrderedDict(stdout)):
key = k.replace('.', '_')
if len(v) == 1:
v = v[0] # depends on [control=['if'], data=[]]
result[key] = v # depends on [control=['for'], data=[]]
return [result] |
def guess_payload_class(self, payload):
    """
    Select the ISOTP frame class from the frame-type nibble.

    The high nibble of the first payload byte encodes the frame type:
    0 -> single frame, 1 -> first frame, 2 -> consecutive frame, and
    any other value is treated as a flow-control frame.
    """
    frame_type = (orb(payload[0]) & 0xf0) >> 4
    dispatch = {0: ISOTP_SF, 1: ISOTP_FF, 2: ISOTP_CF}
    return dispatch.get(frame_type, ISOTP_FC)
constant[
ISOTP encodes the frame type in the first nibble of a frame.
]
variable[t] assign[=] binary_operation[binary_operation[call[name[orb], parameter[call[name[payload]][constant[0]]]] <ast.BitAnd object at 0x7da2590d6b60> constant[240]] <ast.RShift object at 0x7da2590d6a40> constant[4]]
if compare[name[t] equal[==] constant[0]] begin[:]
return[name[ISOTP_SF]] | keyword[def] identifier[guess_payload_class] ( identifier[self] , identifier[payload] ):
literal[string]
identifier[t] =( identifier[orb] ( identifier[payload] [ literal[int] ])& literal[int] )>> literal[int]
keyword[if] identifier[t] == literal[int] :
keyword[return] identifier[ISOTP_SF]
keyword[elif] identifier[t] == literal[int] :
keyword[return] identifier[ISOTP_FF]
keyword[elif] identifier[t] == literal[int] :
keyword[return] identifier[ISOTP_CF]
keyword[else] :
keyword[return] identifier[ISOTP_FC] | def guess_payload_class(self, payload):
"""
ISOTP encodes the frame type in the first nibble of a frame.
"""
t = (orb(payload[0]) & 240) >> 4
if t == 0:
return ISOTP_SF # depends on [control=['if'], data=[]]
elif t == 1:
return ISOTP_FF # depends on [control=['if'], data=[]]
elif t == 2:
return ISOTP_CF # depends on [control=['if'], data=[]]
else:
return ISOTP_FC |
def gaus_pdf(x, mean, std):
    '''Probability density function of the normal distribution.

    Evaluates ``exp(-z**2 / 2) / (sqrt(2*pi) * std)`` with
    ``z = (x - mean) / std``.  See, e.g.
    `Wikipedia <https://en.wikipedia.org/wiki/Normal_distribution>`_.

    :param x: point(s) on the x-axis
    :type x: float or numpy.ndarray
    :param float mean: mean or expectation
    :param float std: standard deviation
    :returns: pdf value(s) at **x**
    :rtype: float or numpy.ndarray
    '''
    z = (x - mean) / std
    return exp(-z ** 2 / 2) / sqrt(2 * pi) / std
constant[Gaussian distribution's probability density function.
See, e.g. `Wikipedia <https://en.wikipedia.org/wiki/Normal_distribution>`_.
:param x: point in x-axis
:type x: float or numpy.ndarray
:param float mean: mean or expectation
:param float str: standard deviation
:returns: pdf(s) in point **x**
:rtype: float or numpy.ndarray
]
return[binary_operation[binary_operation[call[name[exp], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b24d7ee0> / constant[2]]]] / call[name[sqrt], parameter[binary_operation[constant[2] * name[pi]]]]] / name[std]]] | keyword[def] identifier[gaus_pdf] ( identifier[x] , identifier[mean] , identifier[std] ):
literal[string]
keyword[return] identifier[exp] (-(( identifier[x] - identifier[mean] )/ identifier[std] )** literal[int] / literal[int] )/ identifier[sqrt] ( literal[int] * identifier[pi] )/ identifier[std] | def gaus_pdf(x, mean, std):
"""Gaussian distribution's probability density function.
See, e.g. `Wikipedia <https://en.wikipedia.org/wiki/Normal_distribution>`_.
:param x: point in x-axis
:type x: float or numpy.ndarray
:param float mean: mean or expectation
:param float str: standard deviation
:returns: pdf(s) in point **x**
:rtype: float or numpy.ndarray
"""
return exp(-((x - mean) / std) ** 2 / 2) / sqrt(2 * pi) / std |
def build_recursive_localize_env(destination, inputs):
  """Build shell export statements for recursive input parameters.

  Arguments:
    destination: Folder where the data will be put.
        For example /mnt/data
    inputs: a list of InputFileParam

  Returns:
    A newline-joined string of ``export NAME=<destination>/<docker_path>``
    lines, one per input that is recursive and has a docker_path set.
  """
  # Trailing slashes are stripped so the joined path has exactly one '/'.
  dest_root = destination.rstrip('/')
  export_lines = []
  for param in inputs:
    if param.recursive and param.docker_path:
      export_lines.append('export {0}={1}/{2}'.format(
          param.name, dest_root, param.docker_path.rstrip('/')))
  return '\n'.join(export_lines)
constant[Return a multi-line string with export statements for the variables.
Arguments:
destination: Folder where the data will be put.
For example /mnt/data
inputs: a list of InputFileParam
Returns:
a multi-line string with a shell script that sets environment variables
corresponding to the inputs.
]
variable[export_input_dirs] assign[=] call[constant[
].join, parameter[<ast.ListComp object at 0x7da1b009c4c0>]]
return[name[export_input_dirs]] | keyword[def] identifier[build_recursive_localize_env] ( identifier[destination] , identifier[inputs] ):
literal[string]
identifier[export_input_dirs] = literal[string] . identifier[join] ([
literal[string] . identifier[format] ( identifier[var] . identifier[name] , identifier[destination] . identifier[rstrip] ( literal[string] ),
identifier[var] . identifier[docker_path] . identifier[rstrip] ( literal[string] ))
keyword[for] identifier[var] keyword[in] identifier[inputs]
keyword[if] identifier[var] . identifier[recursive] keyword[and] identifier[var] . identifier[docker_path]
])
keyword[return] identifier[export_input_dirs] | def build_recursive_localize_env(destination, inputs):
"""Return a multi-line string with export statements for the variables.
Arguments:
destination: Folder where the data will be put.
For example /mnt/data
inputs: a list of InputFileParam
Returns:
a multi-line string with a shell script that sets environment variables
corresponding to the inputs.
"""
export_input_dirs = '\n'.join(['export {0}={1}/{2}'.format(var.name, destination.rstrip('/'), var.docker_path.rstrip('/')) for var in inputs if var.recursive and var.docker_path])
return export_input_dirs |
def parse_tree_from_dict(node, locs):
    """Processes key locations.

    Parameters
    ----------
    node: xml.etree.ElementTree.ElementTree.element
        Current node.
    locs: dict
        A dictionary mapping key to a tuple. The tuple can either be 2 or 3
        elements long. The first element maps to the location in the
        current node. The second element given a processing hint. Possible
        values are:

            * 'text': assumes the text element of the path is wanted.
            * 'child': assumes that the child of the given path is wanted.
            * str: Any other string will be treated as an attribute lookup
                of the path.

        If 'child' is given, then a third element needs to be given
        indicating the type of processing. Possible values are:

            * 'text': assumes the text element of the path is wanted.
            * 'tag': assumes the class tag of the path is wanted.
            * str: Any other string will be treated as an attribute lookup
                of the child.

    Returns
    -------
    dict
        The extracted values keyed like ``locs``; keys whose path is
        missing in ``node`` are silently omitted (best-effort lookup).

    Raises
    ------
    AmbiguousElementException
        If a 'child' lookup matches more than one child element.
        (Previously this raise was swallowed by a bare ``except``.)
    """
    d = dict()
    for n, l in locs.items():
        try:
            element = node.find(l[0])
            if l[1] == 'text':
                d[n] = element.text
            elif l[1] == 'child':
                # list(element) replaces getchildren(), which was removed
                # from ElementTree in Python 3.9.
                children = list(element)
                if len(children) > 1:
                    raise AmbiguousElementException(
                        'There are too many elements')
                if l[2] == 'text':
                    d[n] = children[0].text
                elif l[2] == 'tag':
                    d[n] = children[0].tag
                else:
                    # Attribute lookup on the single child, as documented
                    # (this branch was missing in the original code).
                    d[n] = children[0].get(l[2])
            else:
                d[n] = element.get(l[1])
        except (AttributeError, IndexError):
            # Missing paths (find() -> None) or missing children are
            # skipped, preserving the best-effort behaviour; other
            # exceptions now propagate instead of being swallowed.
            pass
    return d
constant[Processes key locations.
Parameters
----------
node: xml.etree.ElementTree.ElementTree.element
Current node.
locs: dict
A dictionary mapping key to a tuple. The tuple can either be 2 or 3
elements long. The first element maps to the location in the
current node. The second element given a processing hint. Possible
values are:
* 'text': assumes the text element of the path is wanted.
* 'child': assumes that the child of the given path is wanted.
* str: Any other string will be treated as an attribute lookup
of the path.
If 'child' is given, then a third element needs to be given
indicating the type of processing. Possible values are:
* 'text': assumes the text element of the path is wanted.
* 'tag': assumes the class tag of the path is wanted.
* str: Any other string will be treated as an attribute lookup
of the path.
]
variable[d] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2054a5780>, <ast.Name object at 0x7da2054a5720>]]] in starred[call[name[locs].items, parameter[]]] begin[:]
<ast.Try object at 0x7da2054a6bc0>
return[name[d]] | keyword[def] identifier[parse_tree_from_dict] ( identifier[node] , identifier[locs] ):
literal[string]
identifier[d] = identifier[dict] ()
keyword[for] identifier[n] , identifier[l] keyword[in] identifier[locs] . identifier[items] ():
keyword[try] :
keyword[if] identifier[l] [ literal[int] ]== literal[string] :
identifier[d] [ identifier[n] ]= identifier[node] . identifier[find] ( identifier[l] [ literal[int] ]). identifier[text]
keyword[elif] identifier[l] [ literal[int] ]== literal[string] :
identifier[child] = identifier[node] . identifier[find] ( identifier[l] [ literal[int] ]). identifier[getchildren] ()
keyword[if] identifier[len] ( identifier[child] )> literal[int] :
keyword[raise] identifier[AmbiguousElementException] (
literal[string] )
keyword[elif] identifier[l] [ literal[int] ]== literal[string] :
identifier[d] [ identifier[n] ]= identifier[child] [ literal[int] ]. identifier[text]
keyword[elif] identifier[l] [ literal[int] ]== literal[string] :
identifier[d] [ identifier[n] ]= identifier[child] [ literal[int] ]. identifier[tag]
keyword[else] :
identifier[d] [ identifier[n] ]= identifier[node] . identifier[find] ( identifier[l] [ literal[int] ]). identifier[get] ( identifier[l] [ literal[int] ])
keyword[except] :
keyword[pass]
keyword[return] identifier[d] | def parse_tree_from_dict(node, locs):
"""Processes key locations.
Parameters
----------
node: xml.etree.ElementTree.ElementTree.element
Current node.
locs: dict
A dictionary mapping key to a tuple. The tuple can either be 2 or 3
elements long. The first element maps to the location in the
current node. The second element given a processing hint. Possible
values are:
* 'text': assumes the text element of the path is wanted.
* 'child': assumes that the child of the given path is wanted.
* str: Any other string will be treated as an attribute lookup
of the path.
If 'child' is given, then a third element needs to be given
indicating the type of processing. Possible values are:
* 'text': assumes the text element of the path is wanted.
* 'tag': assumes the class tag of the path is wanted.
* str: Any other string will be treated as an attribute lookup
of the path.
"""
d = dict()
for (n, l) in locs.items():
try:
if l[1] == 'text':
d[n] = node.find(l[0]).text # depends on [control=['if'], data=[]]
elif l[1] == 'child':
child = node.find(l[0]).getchildren()
if len(child) > 1:
raise AmbiguousElementException('There are too many elements') # depends on [control=['if'], data=[]]
elif l[2] == 'text':
d[n] = child[0].text # depends on [control=['if'], data=[]]
elif l[2] == 'tag':
d[n] = child[0].tag # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
d[n] = node.find(l[0]).get(l[1]) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
return d |
def enable_performance_data(self):
    """Enable performance data processing (globally)

    Format of the line that triggers function call::

        ENABLE_PERFORMANCE_DATA

    Flags the change in ``modified_attributes``, re-explodes the global
    configuration when the setting actually flips, and always pushes an
    updated program status to the daemon.

    :return: None
    """
    conf = self.my_conf
    if not conf.process_performance_data:
        modattr = DICT_MODATTR["MODATTR_PERFORMANCE_DATA_ENABLED"]
        conf.modified_attributes |= modattr.value
        conf.process_performance_data = True
        conf.explode_global_conf()
    self.daemon.update_program_status()
constant[Enable performance data processing (globally)
Format of the line that triggers function call::
ENABLE_PERFORMANCE_DATA
:return: None
]
if <ast.UnaryOp object at 0x7da18f58e020> begin[:]
<ast.AugAssign object at 0x7da18f58dff0>
name[self].my_conf.process_performance_data assign[=] constant[True]
call[name[self].my_conf.explode_global_conf, parameter[]]
call[name[self].daemon.update_program_status, parameter[]] | keyword[def] identifier[enable_performance_data] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[my_conf] . identifier[process_performance_data] :
identifier[self] . identifier[my_conf] . identifier[modified_attributes] |= identifier[DICT_MODATTR] [ literal[string] ]. identifier[value]
identifier[self] . identifier[my_conf] . identifier[process_performance_data] = keyword[True]
identifier[self] . identifier[my_conf] . identifier[explode_global_conf] ()
identifier[self] . identifier[daemon] . identifier[update_program_status] () | def enable_performance_data(self):
"""Enable performance data processing (globally)
Format of the line that triggers function call::
ENABLE_PERFORMANCE_DATA
:return: None
"""
if not self.my_conf.process_performance_data:
self.my_conf.modified_attributes |= DICT_MODATTR['MODATTR_PERFORMANCE_DATA_ENABLED'].value
self.my_conf.process_performance_data = True
self.my_conf.explode_global_conf()
self.daemon.update_program_status() # depends on [control=['if'], data=[]] |
def cmd(send, msg, _):
    """Gets a slogan.

    Syntax: {command} [text]

    When no text is given, a random word is used as the slogan subject.
    """
    topic = msg or textutils.gen_word()
    send(textutils.gen_slogan(topic))
constant[Gets a slogan.
Syntax: {command} [text]
]
if <ast.UnaryOp object at 0x7da1b20f89a0> begin[:]
variable[msg] assign[=] call[name[textutils].gen_word, parameter[]]
call[name[send], parameter[call[name[textutils].gen_slogan, parameter[name[msg]]]]] | keyword[def] identifier[cmd] ( identifier[send] , identifier[msg] , identifier[_] ):
literal[string]
keyword[if] keyword[not] identifier[msg] :
identifier[msg] = identifier[textutils] . identifier[gen_word] ()
identifier[send] ( identifier[textutils] . identifier[gen_slogan] ( identifier[msg] )) | def cmd(send, msg, _):
"""Gets a slogan.
Syntax: {command} [text]
"""
if not msg:
msg = textutils.gen_word() # depends on [control=['if'], data=[]]
send(textutils.gen_slogan(msg)) |
def _AddReprMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __repr__(self):
return text_format.MessageToString(self)
cls.__repr__ = __repr__ | def function[_AddReprMethod, parameter[message_descriptor, cls]]:
constant[Helper for _AddMessageMethods().]
def function[__repr__, parameter[self]]:
return[call[name[text_format].MessageToString, parameter[name[self]]]]
name[cls].__repr__ assign[=] name[__repr__] | keyword[def] identifier[_AddReprMethod] ( identifier[message_descriptor] , identifier[cls] ):
literal[string]
keyword[def] identifier[__repr__] ( identifier[self] ):
keyword[return] identifier[text_format] . identifier[MessageToString] ( identifier[self] )
identifier[cls] . identifier[__repr__] = identifier[__repr__] | def _AddReprMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __repr__(self):
return text_format.MessageToString(self)
cls.__repr__ = __repr__ |
def print_violations(self, violations):
    """ Print a given set of violations to the standard error output

    Each violation is emitted at three verbosity levels: line+rule,
    line+rule+message, and (when content is present) the offending
    content quoted at the most verbose level.
    """
    for violation in violations:
        line_nr = violation.line_nr if violation.line_nr else "-"
        base = u"{0}: {1}".format(line_nr, violation.rule_id)
        detailed = u"{0} {1}".format(base, violation.message)
        self.display.e(base, exact=True)
        self.display.ee(detailed, exact=True)
        if violation.content:
            self.display.eee(u"{0}: \"{1}\"".format(detailed, violation.content),
                             exact=True)
        else:
            self.display.eee(detailed, exact=True)
constant[ Print a given set of violations to the standard error output ]
for taget[name[v]] in starred[name[violations]] begin[:]
variable[line_nr] assign[=] <ast.IfExp object at 0x7da20c7cb160>
call[name[self].display.e, parameter[call[constant[{0}: {1}].format, parameter[name[line_nr], name[v].rule_id]]]]
call[name[self].display.ee, parameter[call[constant[{0}: {1} {2}].format, parameter[name[line_nr], name[v].rule_id, name[v].message]]]]
if name[v].content begin[:]
call[name[self].display.eee, parameter[call[constant[{0}: {1} {2}: "{3}"].format, parameter[name[line_nr], name[v].rule_id, name[v].message, name[v].content]]]] | keyword[def] identifier[print_violations] ( identifier[self] , identifier[violations] ):
literal[string]
keyword[for] identifier[v] keyword[in] identifier[violations] :
identifier[line_nr] = identifier[v] . identifier[line_nr] keyword[if] identifier[v] . identifier[line_nr] keyword[else] literal[string]
identifier[self] . identifier[display] . identifier[e] ( literal[string] . identifier[format] ( identifier[line_nr] , identifier[v] . identifier[rule_id] ), identifier[exact] = keyword[True] )
identifier[self] . identifier[display] . identifier[ee] ( literal[string] . identifier[format] ( identifier[line_nr] , identifier[v] . identifier[rule_id] , identifier[v] . identifier[message] ), identifier[exact] = keyword[True] )
keyword[if] identifier[v] . identifier[content] :
identifier[self] . identifier[display] . identifier[eee] ( literal[string] . identifier[format] ( identifier[line_nr] , identifier[v] . identifier[rule_id] , identifier[v] . identifier[message] , identifier[v] . identifier[content] ),
identifier[exact] = keyword[True] )
keyword[else] :
identifier[self] . identifier[display] . identifier[eee] ( literal[string] . identifier[format] ( identifier[line_nr] , identifier[v] . identifier[rule_id] , identifier[v] . identifier[message] ), identifier[exact] = keyword[True] ) | def print_violations(self, violations):
""" Print a given set of violations to the standard error output """
for v in violations:
line_nr = v.line_nr if v.line_nr else '-'
self.display.e(u'{0}: {1}'.format(line_nr, v.rule_id), exact=True)
self.display.ee(u'{0}: {1} {2}'.format(line_nr, v.rule_id, v.message), exact=True)
if v.content:
self.display.eee(u'{0}: {1} {2}: "{3}"'.format(line_nr, v.rule_id, v.message, v.content), exact=True) # depends on [control=['if'], data=[]]
else:
self.display.eee(u'{0}: {1} {2}'.format(line_nr, v.rule_id, v.message), exact=True) # depends on [control=['for'], data=['v']] |
def remove_prefix(self, auth, spec, recursive = False):
    """ Remove prefix matching `spec`.

        * `auth` [BaseAuth]
            AAA options.
        * `spec` [prefix_spec]
            Specifies the prefix to remove; must contain either 'id' or
            'prefix'.
        * `recursive` [bool]
            When set to True, also remove child prefixes.

        Raises NipapMissingInputError when `spec` contains neither
        'prefix' nor 'id'.  Every removed prefix is recorded in the
        ip_net_log audit table, plus an extra entry when the prefix
        belonged to a pool.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.remove_prefix` for full
        understanding.
    """
    # NOTE(review): ``unicode`` is Python 2 only.
    self._logger.debug("remove_prefix called; spec: %s" % unicode(spec))

    # sanity check - do we have all attributes?
    if 'id' in spec:
        # recursive requires a prefix, so translate id to prefix
        p = self.list_prefix(auth, spec)[0]
        del spec['id']
        spec['prefix'] = p['prefix']
        spec['vrf_id'] = p['vrf_id']
    elif 'prefix' in spec:
        pass
    else:
        raise NipapMissingInputError('missing prefix or id of prefix')

    # Snapshot the matching prefixes *before* deletion so the audit
    # entries below can record what was removed.
    prefixes = self.list_prefix(auth, spec)
    if recursive:
        # Recursive removal is done in two passes: first the 'host'
        # entries, then the remaining prefixes.
        spec['type'] = 'host'
        self._db_remove_prefix(spec, recursive)
        del spec['type']
        self._db_remove_prefix(spec, recursive)
    else:
        self._db_remove_prefix(spec)

    # write to audit table
    # Common fields are set once; per-prefix fields are filled in (and
    # overwritten) on each loop iteration below.
    audit_params = {
        'username': auth.username,
        'authenticated_as': auth.authenticated_as,
        'full_name': auth.full_name,
        'authoritative_source': auth.authoritative_source
    }
    for p in prefixes:
        audit_params['prefix_id'] = p['id']
        audit_params['prefix_prefix'] = p['prefix']
        audit_params['description'] = 'Removed prefix %s' % p['prefix']
        audit_params['vrf_id'] = p['vrf_id']
        audit_params['vrf_rt'] = p['vrf_rt']
        audit_params['vrf_name'] = p['vrf_name']
        sql, params = self._sql_expand_insert(audit_params)
        self._execute('INSERT INTO ip_net_log %s' % sql, params)

        if p['pool_id'] is not None:
            # The prefix was a member of a pool; log its removal from
            # the pool as a separate audit entry.
            pool = self._get_pool(auth, { 'id': p['pool_id'] })
            audit_params2 = {
                'pool_id': pool['id'],
                'pool_name': pool['name'],
                'prefix_id': p['id'],
                'prefix_prefix': p['prefix'],
                'description': 'Prefix %s removed from pool %s' % (p['prefix'], pool['name']),
                'username': auth.username,
                'authenticated_as': auth.authenticated_as,
                'full_name': auth.full_name,
                'authoritative_source': auth.authoritative_source
            }
            sql, params = self._sql_expand_insert(audit_params2)
            self._execute('INSERT INTO ip_net_log %s' % sql, params)
constant[ Remove prefix matching `spec`.
* `auth` [BaseAuth]
AAA options.
* `spec` [prefix_spec]
Specifies prefixe to remove.
* `recursive` [bool]
When set to True, also remove child prefixes.
This is the documentation of the internal backend function. It's
exposed over XML-RPC, please also see the XML-RPC documentation for
:py:func:`nipap.xmlrpc.NipapXMLRPC.remove_prefix` for full
understanding.
]
call[name[self]._logger.debug, parameter[binary_operation[constant[remove_prefix called; spec: %s] <ast.Mod object at 0x7da2590d6920> call[name[unicode], parameter[name[spec]]]]]]
if compare[constant[id] in name[spec]] begin[:]
variable[p] assign[=] call[call[name[self].list_prefix, parameter[name[auth], name[spec]]]][constant[0]]
<ast.Delete object at 0x7da18dc07ac0>
call[name[spec]][constant[prefix]] assign[=] call[name[p]][constant[prefix]]
call[name[spec]][constant[vrf_id]] assign[=] call[name[p]][constant[vrf_id]]
variable[prefixes] assign[=] call[name[self].list_prefix, parameter[name[auth], name[spec]]]
if name[recursive] begin[:]
call[name[spec]][constant[type]] assign[=] constant[host]
call[name[self]._db_remove_prefix, parameter[name[spec], name[recursive]]]
<ast.Delete object at 0x7da18dc07730>
call[name[self]._db_remove_prefix, parameter[name[spec], name[recursive]]]
variable[audit_params] assign[=] dictionary[[<ast.Constant object at 0x7da18bcca1d0>, <ast.Constant object at 0x7da18bcc9e40>, <ast.Constant object at 0x7da18bcc8f40>, <ast.Constant object at 0x7da18bccae00>], [<ast.Attribute object at 0x7da18bccacb0>, <ast.Attribute object at 0x7da18eb56950>, <ast.Attribute object at 0x7da18eb57160>, <ast.Attribute object at 0x7da18eb54280>]]
for taget[name[p]] in starred[name[prefixes]] begin[:]
call[name[audit_params]][constant[prefix_id]] assign[=] call[name[p]][constant[id]]
call[name[audit_params]][constant[prefix_prefix]] assign[=] call[name[p]][constant[prefix]]
call[name[audit_params]][constant[description]] assign[=] binary_operation[constant[Removed prefix %s] <ast.Mod object at 0x7da2590d6920> call[name[p]][constant[prefix]]]
call[name[audit_params]][constant[vrf_id]] assign[=] call[name[p]][constant[vrf_id]]
call[name[audit_params]][constant[vrf_rt]] assign[=] call[name[p]][constant[vrf_rt]]
call[name[audit_params]][constant[vrf_name]] assign[=] call[name[p]][constant[vrf_name]]
<ast.Tuple object at 0x7da18eb56fb0> assign[=] call[name[self]._sql_expand_insert, parameter[name[audit_params]]]
call[name[self]._execute, parameter[binary_operation[constant[INSERT INTO ip_net_log %s] <ast.Mod object at 0x7da2590d6920> name[sql]], name[params]]]
if compare[call[name[p]][constant[pool_id]] is_not constant[None]] begin[:]
variable[pool] assign[=] call[name[self]._get_pool, parameter[name[auth], dictionary[[<ast.Constant object at 0x7da18eb569e0>], [<ast.Subscript object at 0x7da18eb54040>]]]]
variable[audit_params2] assign[=] dictionary[[<ast.Constant object at 0x7da18eb55180>, <ast.Constant object at 0x7da18eb54520>, <ast.Constant object at 0x7da18eb568c0>, <ast.Constant object at 0x7da18eb54190>, <ast.Constant object at 0x7da18eb57310>, <ast.Constant object at 0x7da18eb57d60>, <ast.Constant object at 0x7da18eb56290>, <ast.Constant object at 0x7da18eb57a30>, <ast.Constant object at 0x7da18eb57070>], [<ast.Subscript object at 0x7da18eb54940>, <ast.Subscript object at 0x7da18eb56680>, <ast.Subscript object at 0x7da18eb549a0>, <ast.Subscript object at 0x7da18eb548b0>, <ast.BinOp object at 0x7da18eb57130>, <ast.Attribute object at 0x7da18eb545e0>, <ast.Attribute object at 0x7da18eb57af0>, <ast.Attribute object at 0x7da18eb55ae0>, <ast.Attribute object at 0x7da18eb55000>]]
<ast.Tuple object at 0x7da18eb54a60> assign[=] call[name[self]._sql_expand_insert, parameter[name[audit_params2]]]
call[name[self]._execute, parameter[binary_operation[constant[INSERT INTO ip_net_log %s] <ast.Mod object at 0x7da2590d6920> name[sql]], name[params]]] | keyword[def] identifier[remove_prefix] ( identifier[self] , identifier[auth] , identifier[spec] , identifier[recursive] = keyword[False] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[unicode] ( identifier[spec] ))
keyword[if] literal[string] keyword[in] identifier[spec] :
identifier[p] = identifier[self] . identifier[list_prefix] ( identifier[auth] , identifier[spec] )[ literal[int] ]
keyword[del] identifier[spec] [ literal[string] ]
identifier[spec] [ literal[string] ]= identifier[p] [ literal[string] ]
identifier[spec] [ literal[string] ]= identifier[p] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[spec] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[NipapMissingInputError] ( literal[string] )
identifier[prefixes] = identifier[self] . identifier[list_prefix] ( identifier[auth] , identifier[spec] )
keyword[if] identifier[recursive] :
identifier[spec] [ literal[string] ]= literal[string]
identifier[self] . identifier[_db_remove_prefix] ( identifier[spec] , identifier[recursive] )
keyword[del] identifier[spec] [ literal[string] ]
identifier[self] . identifier[_db_remove_prefix] ( identifier[spec] , identifier[recursive] )
keyword[else] :
identifier[self] . identifier[_db_remove_prefix] ( identifier[spec] )
identifier[audit_params] ={
literal[string] : identifier[auth] . identifier[username] ,
literal[string] : identifier[auth] . identifier[authenticated_as] ,
literal[string] : identifier[auth] . identifier[full_name] ,
literal[string] : identifier[auth] . identifier[authoritative_source]
}
keyword[for] identifier[p] keyword[in] identifier[prefixes] :
identifier[audit_params] [ literal[string] ]= identifier[p] [ literal[string] ]
identifier[audit_params] [ literal[string] ]= identifier[p] [ literal[string] ]
identifier[audit_params] [ literal[string] ]= literal[string] % identifier[p] [ literal[string] ]
identifier[audit_params] [ literal[string] ]= identifier[p] [ literal[string] ]
identifier[audit_params] [ literal[string] ]= identifier[p] [ literal[string] ]
identifier[audit_params] [ literal[string] ]= identifier[p] [ literal[string] ]
identifier[sql] , identifier[params] = identifier[self] . identifier[_sql_expand_insert] ( identifier[audit_params] )
identifier[self] . identifier[_execute] ( literal[string] % identifier[sql] , identifier[params] )
keyword[if] identifier[p] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[pool] = identifier[self] . identifier[_get_pool] ( identifier[auth] ,{ literal[string] : identifier[p] [ literal[string] ]})
identifier[audit_params2] ={
literal[string] : identifier[pool] [ literal[string] ],
literal[string] : identifier[pool] [ literal[string] ],
literal[string] : identifier[p] [ literal[string] ],
literal[string] : identifier[p] [ literal[string] ],
literal[string] : literal[string] %( identifier[p] [ literal[string] ], identifier[pool] [ literal[string] ]),
literal[string] : identifier[auth] . identifier[username] ,
literal[string] : identifier[auth] . identifier[authenticated_as] ,
literal[string] : identifier[auth] . identifier[full_name] ,
literal[string] : identifier[auth] . identifier[authoritative_source]
}
identifier[sql] , identifier[params] = identifier[self] . identifier[_sql_expand_insert] ( identifier[audit_params2] )
identifier[self] . identifier[_execute] ( literal[string] % identifier[sql] , identifier[params] ) | def remove_prefix(self, auth, spec, recursive=False):
""" Remove prefix matching `spec`.
* `auth` [BaseAuth]
AAA options.
* `spec` [prefix_spec]
Specifies prefixe to remove.
* `recursive` [bool]
When set to True, also remove child prefixes.
This is the documentation of the internal backend function. It's
exposed over XML-RPC, please also see the XML-RPC documentation for
:py:func:`nipap.xmlrpc.NipapXMLRPC.remove_prefix` for full
understanding.
"""
self._logger.debug('remove_prefix called; spec: %s' % unicode(spec))
# sanity check - do we have all attributes?
if 'id' in spec:
# recursive requires a prefix, so translate id to prefix
p = self.list_prefix(auth, spec)[0]
del spec['id']
spec['prefix'] = p['prefix']
spec['vrf_id'] = p['vrf_id'] # depends on [control=['if'], data=['spec']]
elif 'prefix' in spec:
pass # depends on [control=['if'], data=[]]
else:
raise NipapMissingInputError('missing prefix or id of prefix')
prefixes = self.list_prefix(auth, spec)
if recursive:
spec['type'] = 'host'
self._db_remove_prefix(spec, recursive)
del spec['type']
self._db_remove_prefix(spec, recursive) # depends on [control=['if'], data=[]]
else:
self._db_remove_prefix(spec)
# write to audit table
audit_params = {'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source}
for p in prefixes:
audit_params['prefix_id'] = p['id']
audit_params['prefix_prefix'] = p['prefix']
audit_params['description'] = 'Removed prefix %s' % p['prefix']
audit_params['vrf_id'] = p['vrf_id']
audit_params['vrf_rt'] = p['vrf_rt']
audit_params['vrf_name'] = p['vrf_name']
(sql, params) = self._sql_expand_insert(audit_params)
self._execute('INSERT INTO ip_net_log %s' % sql, params)
if p['pool_id'] is not None:
pool = self._get_pool(auth, {'id': p['pool_id']})
audit_params2 = {'pool_id': pool['id'], 'pool_name': pool['name'], 'prefix_id': p['id'], 'prefix_prefix': p['prefix'], 'description': 'Prefix %s removed from pool %s' % (p['prefix'], pool['name']), 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source}
(sql, params) = self._sql_expand_insert(audit_params2)
self._execute('INSERT INTO ip_net_log %s' % sql, params) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] |
def commit(self):
    """
    Commit the pending transaction on the underlying connection.

    Returns silently when no database connection is available, and
    ignores :py:exc:`sqlite3.ProgrammingError` from the commit itself
    (e.g. when the connection was closed in the meantime — TODO confirm).

    .. seealso:: :py:meth:`sqlite3.Connection.commit`
    """
    try:
        self.check_connection()
    except NullDatabaseConnectionError:
        # No connection -> nothing to commit.
        return

    logger.debug("commit: path='{}'".format(self.database_path))

    try:
        self.connection.commit()
    except sqlite3.ProgrammingError:
        # Best-effort commit: programming errors are deliberately ignored.
        pass
constant[
.. seealso:: :py:meth:`sqlite3.Connection.commit`
]
<ast.Try object at 0x7da1b04eff40>
call[name[logger].debug, parameter[call[constant[commit: path='{}'].format, parameter[name[self].database_path]]]]
<ast.Try object at 0x7da1b04ec490> | keyword[def] identifier[commit] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[check_connection] ()
keyword[except] identifier[NullDatabaseConnectionError] :
keyword[return]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[self] . identifier[database_path] ))
keyword[try] :
identifier[self] . identifier[connection] . identifier[commit] ()
keyword[except] identifier[sqlite3] . identifier[ProgrammingError] :
keyword[pass] | def commit(self):
"""
.. seealso:: :py:meth:`sqlite3.Connection.commit`
"""
try:
self.check_connection() # depends on [control=['try'], data=[]]
except NullDatabaseConnectionError:
return # depends on [control=['except'], data=[]]
logger.debug("commit: path='{}'".format(self.database_path))
try:
self.connection.commit() # depends on [control=['try'], data=[]]
except sqlite3.ProgrammingError:
pass # depends on [control=['except'], data=[]] |
def main():
    """A simple command-line interface for :py:func:`run_simple`."""
    # in contrast to argparse, this works at least under Python < 2.7
    import optparse

    from .utils import import_string

    usage = "Usage: %prog [options] app_module:app_object"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option(
        "-b", "--bind", dest="address",
        help="The hostname:port the app should listen on.",
    )
    parser.add_option(
        "-d", "--debug", dest="use_debugger",
        action="store_true", default=False,
        help="Use Werkzeug's debugger.",
    )
    parser.add_option(
        "-r", "--reload", dest="use_reloader",
        action="store_true", default=False,
        help="Reload Python process if modules change.",
    )
    options, args = parser.parse_args()

    # Split an optional "hostname:port" bind address.
    hostname = port = None
    if options.address:
        pieces = options.address.split(":")
        hostname = pieces[0]
        if len(pieces) > 1:
            port = pieces[1]

    if len(args) != 1:
        sys.stdout.write("No application supplied, or too much. See --help\n")
        sys.exit(1)
    app = import_string(args[0])

    run_simple(
        hostname=hostname or "127.0.0.1",
        port=int(port or 5000),
        application=app,
        use_reloader=options.use_reloader,
        use_debugger=options.use_debugger,
    )
constant[A simple command-line interface for :py:func:`run_simple`.]
import module[optparse]
from relative_module[utils] import module[import_string]
variable[parser] assign[=] call[name[optparse].OptionParser, parameter[]]
call[name[parser].add_option, parameter[constant[-b], constant[--bind]]]
call[name[parser].add_option, parameter[constant[-d], constant[--debug]]]
call[name[parser].add_option, parameter[constant[-r], constant[--reload]]]
<ast.Tuple object at 0x7da20c794490> assign[=] call[name[parser].parse_args, parameter[]]
<ast.Tuple object at 0x7da20c7955a0> assign[=] tuple[[<ast.Constant object at 0x7da20c795060>, <ast.Constant object at 0x7da20c795b10>]]
if name[options].address begin[:]
variable[address] assign[=] call[name[options].address.split, parameter[constant[:]]]
variable[hostname] assign[=] call[name[address]][constant[0]]
if compare[call[name[len], parameter[name[address]]] greater[>] constant[1]] begin[:]
variable[port] assign[=] call[name[address]][constant[1]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].stdout.write, parameter[constant[No application supplied, or too much. See --help
]]]
call[name[sys].exit, parameter[constant[1]]]
variable[app] assign[=] call[name[import_string], parameter[call[name[args]][constant[0]]]]
call[name[run_simple], parameter[]] | keyword[def] identifier[main] ():
literal[string]
keyword[import] identifier[optparse]
keyword[from] . identifier[utils] keyword[import] identifier[import_string]
identifier[parser] = identifier[optparse] . identifier[OptionParser] ( identifier[usage] = literal[string] )
identifier[parser] . identifier[add_option] (
literal[string] ,
literal[string] ,
identifier[dest] = literal[string] ,
identifier[help] = literal[string] ,
)
identifier[parser] . identifier[add_option] (
literal[string] ,
literal[string] ,
identifier[dest] = literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string] ,
)
identifier[parser] . identifier[add_option] (
literal[string] ,
literal[string] ,
identifier[dest] = literal[string] ,
identifier[action] = literal[string] ,
identifier[default] = keyword[False] ,
identifier[help] = literal[string] ,
)
identifier[options] , identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[hostname] , identifier[port] = keyword[None] , keyword[None]
keyword[if] identifier[options] . identifier[address] :
identifier[address] = identifier[options] . identifier[address] . identifier[split] ( literal[string] )
identifier[hostname] = identifier[address] [ literal[int] ]
keyword[if] identifier[len] ( identifier[address] )> literal[int] :
identifier[port] = identifier[address] [ literal[int] ]
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[stdout] . identifier[write] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
identifier[app] = identifier[import_string] ( identifier[args] [ literal[int] ])
identifier[run_simple] (
identifier[hostname] =( identifier[hostname] keyword[or] literal[string] ),
identifier[port] = identifier[int] ( identifier[port] keyword[or] literal[int] ),
identifier[application] = identifier[app] ,
identifier[use_reloader] = identifier[options] . identifier[use_reloader] ,
identifier[use_debugger] = identifier[options] . identifier[use_debugger] ,
) | def main():
"""A simple command-line interface for :py:func:`run_simple`."""
# in contrast to argparse, this works at least under Python < 2.7
import optparse
from .utils import import_string
parser = optparse.OptionParser(usage='Usage: %prog [options] app_module:app_object')
parser.add_option('-b', '--bind', dest='address', help='The hostname:port the app should listen on.')
parser.add_option('-d', '--debug', dest='use_debugger', action='store_true', default=False, help="Use Werkzeug's debugger.")
parser.add_option('-r', '--reload', dest='use_reloader', action='store_true', default=False, help='Reload Python process if modules change.')
(options, args) = parser.parse_args()
(hostname, port) = (None, None)
if options.address:
address = options.address.split(':')
hostname = address[0]
if len(address) > 1:
port = address[1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if len(args) != 1:
sys.stdout.write('No application supplied, or too much. See --help\n')
sys.exit(1) # depends on [control=['if'], data=[]]
app = import_string(args[0])
run_simple(hostname=hostname or '127.0.0.1', port=int(port or 5000), application=app, use_reloader=options.use_reloader, use_debugger=options.use_debugger) |
def quote_single_identifier(self, string):
        """
        Quotes a single identifier (no dot chain separation).

        The quote character itself is escaped by doubling it, per the
        usual SQL convention.

        :param string: The identifier name to be quoted.
        :type string: str

        :return: The quoted identifier string.
        :rtype: str
        """
        quote = self.get_identifier_quote_character()
        escaped = string.replace(quote, quote * 2)
        return "".join((quote, escaped, quote))
constant[
Quotes a single identifier (no dot chain separation).
:param string: The identifier name to be quoted.
:type string: str
:return: The quoted identifier string.
:rtype: str
]
variable[c] assign[=] call[name[self].get_identifier_quote_character, parameter[]]
return[binary_operation[constant[%s%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e9b1300>, <ast.Call object at 0x7da20e9b2d40>, <ast.Name object at 0x7da20e9b3c10>]]]] | keyword[def] identifier[quote_single_identifier] ( identifier[self] , identifier[string] ):
literal[string]
identifier[c] = identifier[self] . identifier[get_identifier_quote_character] ()
keyword[return] literal[string] %( identifier[c] , identifier[string] . identifier[replace] ( identifier[c] , identifier[c] + identifier[c] ), identifier[c] ) | def quote_single_identifier(self, string):
"""
Quotes a single identifier (no dot chain separation).
:param string: The identifier name to be quoted.
:type string: str
:return: The quoted identifier string.
:rtype: str
"""
c = self.get_identifier_quote_character()
return '%s%s%s' % (c, string.replace(c, c + c), c) |
def gx_coords(node):
    """
    Given a KML DOM node, grab its <gx:coord> and <gx:timestamp><when>subnodes, and convert them into a dictionary with the keys and values

    - ``'coordinates'``: list of lists of float coordinates
    - ``'times'``: list of timestamps corresponding to the coordinates

    """
    # Each <gx:coord> element carries one coordinate tuple; gx_coords1
    # parses the element text into a list of floats.
    els = get(node, 'gx:coord')
    coordinates = [gx_coords1(val(el)) for el in els]
    # <when> elements carry the timestamps corresponding to the coordinates.
    time_els = get(node, 'when')
    times = [val(t) for t in time_els]
    return {
        'coordinates': coordinates,
        'times': times,
    }
constant[
Given a KML DOM node, grab its <gx:coord> and <gx:timestamp><when>subnodes, and convert them into a dictionary with the keys and values
- ``'coordinates'``: list of lists of float coordinates
- ``'times'``: list of timestamps corresponding to the coordinates
]
variable[els] assign[=] call[name[get], parameter[name[node], constant[gx:coord]]]
variable[coordinates] assign[=] list[[]]
variable[times] assign[=] list[[]]
variable[coordinates] assign[=] <ast.ListComp object at 0x7da20e9b0910>
variable[time_els] assign[=] call[name[get], parameter[name[node], constant[when]]]
variable[times] assign[=] <ast.ListComp object at 0x7da20e9b2ad0>
return[dictionary[[<ast.Constant object at 0x7da20e9b3b80>, <ast.Constant object at 0x7da204567af0>], [<ast.Name object at 0x7da204566290>, <ast.Name object at 0x7da204565cf0>]]] | keyword[def] identifier[gx_coords] ( identifier[node] ):
literal[string]
identifier[els] = identifier[get] ( identifier[node] , literal[string] )
identifier[coordinates] =[]
identifier[times] =[]
identifier[coordinates] =[ identifier[gx_coords1] ( identifier[val] ( identifier[el] )) keyword[for] identifier[el] keyword[in] identifier[els] ]
identifier[time_els] = identifier[get] ( identifier[node] , literal[string] )
identifier[times] =[ identifier[val] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[time_els] ]
keyword[return] {
literal[string] : identifier[coordinates] ,
literal[string] : identifier[times] ,
} | def gx_coords(node):
"""
Given a KML DOM node, grab its <gx:coord> and <gx:timestamp><when>subnodes, and convert them into a dictionary with the keys and values
- ``'coordinates'``: list of lists of float coordinates
- ``'times'``: list of timestamps corresponding to the coordinates
"""
els = get(node, 'gx:coord')
coordinates = []
times = []
coordinates = [gx_coords1(val(el)) for el in els]
time_els = get(node, 'when')
times = [val(t) for t in time_els]
return {'coordinates': coordinates, 'times': times} |
def get_overlay_gateway(self):
        """
        Get overlay-gateway name on the switch

        Args:
            callback (function): A function executed upon completion of the
                method.

        Returns:
            Dictionary containing details of VXLAN Overlay Gateway.

        Raises:
            None
        """
        urn = "urn:brocade.com:mgmt:brocade-tunnels"

        def qname(tag):
            # Expand a local tag name into its namespace-qualified form.
            return '{%s}%s' % (urn, tag)

        # Build the <config><overlay-gateway/></config> request and fetch
        # the running configuration from the device.
        request = ET.Element("config")
        ET.SubElement(request, "overlay-gateway", xmlns=urn)
        reply = self._callback(request, handler='get_config')

        result = {}
        root = ET.fromstring(str(reply))
        for gateway in root.iter(qname('overlay-gateway')):
            result['name'] = gateway.find(qname('name')).text
            # An <activate> element is present only when the gateway is active.
            result['activate'] = gateway.find(qname('activate')) is not None
            gw_type = gateway.find(qname('gw-type'))
            result['gwtype'] = gw_type.text if gw_type is not None else None
            attach = gateway.find(qname('attach'))
            if attach is not None:
                rbridge = attach.find(qname('rbridge-id'))
                if rbridge is None:
                    result['attached-rbridgeId'] = None
                else:
                    result['attached-rbridgeId'] = rbridge.find(qname('rb-add')).text
                result['attached-vlan'] = [
                    vlan.find(qname('vid')).text
                    for vlan in attach.iter(qname('vlan'))
                ]
        return result
constant[
Get overlay-gateway name on the switch
Args:
callback (function): A function executed upon completion of the
method.
Returns:
Dictionary containing details of VXLAN Overlay Gateway.
Raises:
None
]
variable[urn] assign[=] constant[urn:brocade.com:mgmt:brocade-tunnels]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
call[name[ET].SubElement, parameter[name[config], constant[overlay-gateway]]]
variable[output] assign[=] call[name[self]._callback, parameter[name[config]]]
variable[result] assign[=] dictionary[[], []]
variable[element] assign[=] call[name[ET].fromstring, parameter[call[name[str], parameter[name[output]]]]]
for taget[name[overlayGw]] in starred[call[name[element].iter, parameter[binary_operation[constant[{%s}overlay-gateway] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
call[name[result]][constant[name]] assign[=] call[name[overlayGw].find, parameter[binary_operation[constant[{%s}name] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text
variable[isactivate] assign[=] call[name[overlayGw].find, parameter[binary_operation[constant[{%s}activate] <ast.Mod object at 0x7da2590d6920> name[urn]]]]
if compare[name[isactivate] is constant[None]] begin[:]
call[name[result]][constant[activate]] assign[=] constant[False]
variable[gwtype] assign[=] call[name[overlayGw].find, parameter[binary_operation[constant[{%s}gw-type] <ast.Mod object at 0x7da2590d6920> name[urn]]]]
if compare[name[gwtype] is constant[None]] begin[:]
call[name[result]][constant[gwtype]] assign[=] constant[None]
variable[attach] assign[=] call[name[overlayGw].find, parameter[binary_operation[constant[{%s}attach] <ast.Mod object at 0x7da2590d6920> name[urn]]]]
if compare[name[attach] is_not constant[None]] begin[:]
variable[rbridgeId] assign[=] call[name[attach].find, parameter[binary_operation[constant[{%s}rbridge-id] <ast.Mod object at 0x7da2590d6920> name[urn]]]]
if compare[name[rbridgeId] is constant[None]] begin[:]
call[name[result]][constant[attached-rbridgeId]] assign[=] constant[None]
call[name[result]][constant[attached-vlan]] assign[=] constant[None]
variable[vlans] assign[=] list[[]]
for taget[name[vlan]] in starred[call[name[attach].iter, parameter[binary_operation[constant[{%s}vlan] <ast.Mod object at 0x7da2590d6920> name[urn]]]]] begin[:]
call[name[vlans].append, parameter[call[name[vlan].find, parameter[binary_operation[constant[{%s}vid] <ast.Mod object at 0x7da2590d6920> name[urn]]]].text]]
call[name[result]][constant[attached-vlan]] assign[=] name[vlans]
return[name[result]] | keyword[def] identifier[get_overlay_gateway] ( identifier[self] ):
literal[string]
identifier[urn] = literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = identifier[urn] )
identifier[output] = identifier[self] . identifier[_callback] ( identifier[config] , identifier[handler] = literal[string] )
identifier[result] ={}
identifier[element] = identifier[ET] . identifier[fromstring] ( identifier[str] ( identifier[output] ))
keyword[for] identifier[overlayGw] keyword[in] identifier[element] . identifier[iter] ( literal[string] % identifier[urn] ):
identifier[result] [ literal[string] ]= identifier[overlayGw] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
identifier[isactivate] = identifier[overlayGw] . identifier[find] ( literal[string] % identifier[urn] )
keyword[if] identifier[isactivate] keyword[is] keyword[None] :
identifier[result] [ literal[string] ]= keyword[False]
keyword[else] :
identifier[result] [ literal[string] ]= keyword[True]
identifier[gwtype] = identifier[overlayGw] . identifier[find] ( literal[string] % identifier[urn] )
keyword[if] identifier[gwtype] keyword[is] keyword[None] :
identifier[result] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[result] [ literal[string] ]= identifier[gwtype] . identifier[text]
identifier[attach] = identifier[overlayGw] . identifier[find] ( literal[string] % identifier[urn] )
keyword[if] identifier[attach] keyword[is] keyword[not] keyword[None] :
identifier[rbridgeId] = identifier[attach] . identifier[find] ( literal[string] % identifier[urn] )
keyword[if] identifier[rbridgeId] keyword[is] keyword[None] :
identifier[result] [ literal[string] ]= keyword[None]
keyword[else] :
identifier[result] [ literal[string] ]= identifier[rbridgeId] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text]
identifier[result] [ literal[string] ]= keyword[None]
identifier[vlans] =[]
keyword[for] identifier[vlan] keyword[in] identifier[attach] . identifier[iter] ( literal[string] % identifier[urn] ):
identifier[vlans] . identifier[append] ( identifier[vlan] . identifier[find] ( literal[string] % identifier[urn] ). identifier[text] )
identifier[result] [ literal[string] ]= identifier[vlans]
keyword[return] identifier[result] | def get_overlay_gateway(self):
"""
Get overlay-gateway name on the switch
Args:
callback (function): A function executed upon completion of the
method.
Returns:
Dictionary containing details of VXLAN Overlay Gateway.
Raises:
None
"""
urn = 'urn:brocade.com:mgmt:brocade-tunnels'
config = ET.Element('config')
ET.SubElement(config, 'overlay-gateway', xmlns=urn)
output = self._callback(config, handler='get_config')
result = {}
element = ET.fromstring(str(output))
for overlayGw in element.iter('{%s}overlay-gateway' % urn):
result['name'] = overlayGw.find('{%s}name' % urn).text
isactivate = overlayGw.find('{%s}activate' % urn)
if isactivate is None:
result['activate'] = False # depends on [control=['if'], data=[]]
else:
result['activate'] = True
gwtype = overlayGw.find('{%s}gw-type' % urn)
if gwtype is None:
result['gwtype'] = None # depends on [control=['if'], data=[]]
else:
result['gwtype'] = gwtype.text
attach = overlayGw.find('{%s}attach' % urn)
if attach is not None:
rbridgeId = attach.find('{%s}rbridge-id' % urn)
if rbridgeId is None:
result['attached-rbridgeId'] = None # depends on [control=['if'], data=[]]
else:
result['attached-rbridgeId'] = rbridgeId.find('{%s}rb-add' % urn).text
result['attached-vlan'] = None
vlans = []
for vlan in attach.iter('{%s}vlan' % urn):
vlans.append(vlan.find('{%s}vid' % urn).text) # depends on [control=['for'], data=['vlan']]
result['attached-vlan'] = vlans # depends on [control=['if'], data=['attach']] # depends on [control=['for'], data=['overlayGw']]
return result |
def __ipv4_netmask(value):
    '''
    Validate an IPv4 netmask given either as an integer CIDR prefix
    length (0-32) or as a dotted quad (e.g. 255.255.255.0).

    Returns a ``(valid, value, errmsg)`` tuple where *valid* is the
    validation result, *value* is the (possibly int-converted) input and
    *errmsg* describes the expected format.
    '''
    errmsg = 'dotted quad or integer CIDR (0->32)'
    # Try the integer CIDR form first; __int converts the value when it can.
    valid, value, _ = __int(value)
    if not (valid and 0 <= value <= 32):
        # Not a valid CIDR prefix; fall back to dotted-quad validation.
        valid = salt.utils.validate.net.netmask(value)
    return (valid, value, errmsg)
constant[validate an IPv4 dotted quad or integer CIDR netmask]
<ast.Tuple object at 0x7da1b1f9ab00> assign[=] tuple[[<ast.Constant object at 0x7da1b1f9bd60>, <ast.Constant object at 0x7da1b1f9b9d0>]]
<ast.Tuple object at 0x7da1b1f9b6a0> assign[=] call[name[__int], parameter[name[value]]]
if <ast.UnaryOp object at 0x7da1b1f9a9e0> begin[:]
variable[valid] assign[=] call[name[salt].utils.validate.net.netmask, parameter[name[value]]]
return[tuple[[<ast.Name object at 0x7da1b2184df0>, <ast.Name object at 0x7da1b2187d90>, <ast.Name object at 0x7da1b2186350>]]] | keyword[def] identifier[__ipv4_netmask] ( identifier[value] ):
literal[string]
identifier[valid] , identifier[errmsg] = keyword[False] , literal[string]
identifier[valid] , identifier[value] , identifier[_] = identifier[__int] ( identifier[value] )
keyword[if] keyword[not] ( identifier[valid] keyword[and] literal[int] <= identifier[value] <= literal[int] ):
identifier[valid] = identifier[salt] . identifier[utils] . identifier[validate] . identifier[net] . identifier[netmask] ( identifier[value] )
keyword[return] ( identifier[valid] , identifier[value] , identifier[errmsg] ) | def __ipv4_netmask(value):
"""validate an IPv4 dotted quad or integer CIDR netmask"""
(valid, errmsg) = (False, 'dotted quad or integer CIDR (0->32)')
(valid, value, _) = __int(value)
if not (valid and 0 <= value <= 32):
valid = salt.utils.validate.net.netmask(value) # depends on [control=['if'], data=[]]
return (valid, value, errmsg) |
def receive(self, ikpdb):
        """Waits for a message from the debugger and returns it as a dict.

        A message on the wire has the shape::

            length=<json-length><MAGIC_CODE><json-payload>

        Socket data is accumulated into ``self._received_data`` until one
        complete message is available, which is then decoded and returned.
        Once ``ikpdb.status`` is ``'terminated'``, socket timeouts/errors
        are turned into a synthetic ``_InternalQuit`` command dict so the
        caller can shut down cleanly.

        :param ikpdb: debugger instance; only its ``status`` attribute is
            read here, to detect termination while blocked on the socket.
        :return: the decoded message as a dict.
        """
        # with self._connection_lock:
        while self._network_loop:
            _logger.n_debug("Enter socket.recv(%s) with self._received_data = %s",
                            self.SOCKET_BUFFER_SIZE,
                            self._received_data)
            try:
                # We may land here with a full packet already in self.received_data
                # In that case we must not enter recv()
                # (SOCKET_BUFFER_SIZE is set to 0 at the bottom of the loop
                # when leftover bytes of the next message are already buffered).
                if self.SOCKET_BUFFER_SIZE:
                    data = self._connection.recv(self.SOCKET_BUFFER_SIZE)
                else:
                    data = b''
                _logger.n_debug("Socket.recv(%s) => %s", self.SOCKET_BUFFER_SIZE, data)
            except socket.timeout:
                _logger.n_debug("socket.timeout witk ikpdb.status=%s", ikpdb.status)
                if ikpdb.status == 'terminated':
                    _logger.n_debug("breaking IKPdbConnectionHandler.receive() "
                                    "network loop as ikpdb state is 'terminated'.")
                    return {
                        'command': '_InternalQuit',
                        'args':{}
                    }
                # Not terminated yet: keep waiting for data.
                continue
            except socket.error as socket_err:
                if ikpdb.status == 'terminated':
                    return {'command': '_InternalQuit',
                            'args':{'socket_error_number': socket_err.errno,
                                    'socket_error_str': socket_err.strerror}}
                continue
            except Exception as exc:
                # Unexpected failure: log full context and ask the caller
                # to quit.  NOTE(review): exc.message is Python 2 only.
                _logger.g_error("Unexecpected Error: '%s' in IKPdbConnectionHandler"
                                ".command_loop.", exc)
                _logger.g_error(traceback.format_exc())
                print("".join(traceback.format_stack()))
                return {
                    'command': '_InternalQuit',
                    'args':{
                        "error": exc.__class__.__name__,
                        "message": exc.message
                    }
                }
            # received data is utf8 encoded
            self._received_data += data.decode('utf-8')
            # have we received a MAGIC_CODE
            try:
                magic_code_idx = self._received_data.index(self.MAGIC_CODE)
            except ValueError:
                continue
            # Have we received a 'length='
            try:
                length_idx = self._received_data.index(u'length=')
            except ValueError:
                continue
            # extract length content from received data
            # (7 == len('length='); the digits run up to the MAGIC_CODE)
            json_length = int(self._received_data[length_idx + 7:magic_code_idx])
            message_length = magic_code_idx + len(self.MAGIC_CODE) + json_length
            if message_length <= len(self._received_data):
                # A full message is buffered: split it off and keep any
                # trailing bytes (start of the next message) for later.
                full_message = self._received_data[:message_length]
                self._received_data = self._received_data[message_length:]
                if len(self._received_data) > 0:
                    # Leftover bytes buffered: skip recv() on next call.
                    self.SOCKET_BUFFER_SIZE = 0
                else:
                    self.SOCKET_BUFFER_SIZE = 4096
                break
            else:
                # Message incomplete: read exactly the missing byte count.
                self.SOCKET_BUFFER_SIZE = message_length - len(self._received_data)
        self.log_received(full_message)
        obj = self.decode(full_message)
        return obj
constant[Waits for a message from the debugger and returns it as a dict.
]
while name[self]._network_loop begin[:]
call[name[_logger].n_debug, parameter[constant[Enter socket.recv(%s) with self._received_data = %s], name[self].SOCKET_BUFFER_SIZE, name[self]._received_data]]
<ast.Try object at 0x7da20e954100>
<ast.AugAssign object at 0x7da18fe92d70>
<ast.Try object at 0x7da18fe90b80>
<ast.Try object at 0x7da18fe918d0>
variable[json_length] assign[=] call[name[int], parameter[call[name[self]._received_data][<ast.Slice object at 0x7da1b1be9f00>]]]
variable[message_length] assign[=] binary_operation[binary_operation[name[magic_code_idx] + call[name[len], parameter[name[self].MAGIC_CODE]]] + name[json_length]]
if compare[name[message_length] less_or_equal[<=] call[name[len], parameter[name[self]._received_data]]] begin[:]
variable[full_message] assign[=] call[name[self]._received_data][<ast.Slice object at 0x7da20cabd750>]
name[self]._received_data assign[=] call[name[self]._received_data][<ast.Slice object at 0x7da20cabd7b0>]
if compare[call[name[len], parameter[name[self]._received_data]] greater[>] constant[0]] begin[:]
name[self].SOCKET_BUFFER_SIZE assign[=] constant[0]
break
call[name[self].log_received, parameter[name[full_message]]]
variable[obj] assign[=] call[name[self].decode, parameter[name[full_message]]]
return[name[obj]] | keyword[def] identifier[receive] ( identifier[self] , identifier[ikpdb] ):
literal[string]
keyword[while] identifier[self] . identifier[_network_loop] :
identifier[_logger] . identifier[n_debug] ( literal[string] ,
identifier[self] . identifier[SOCKET_BUFFER_SIZE] ,
identifier[self] . identifier[_received_data] )
keyword[try] :
keyword[if] identifier[self] . identifier[SOCKET_BUFFER_SIZE] :
identifier[data] = identifier[self] . identifier[_connection] . identifier[recv] ( identifier[self] . identifier[SOCKET_BUFFER_SIZE] )
keyword[else] :
identifier[data] = literal[string]
identifier[_logger] . identifier[n_debug] ( literal[string] , identifier[self] . identifier[SOCKET_BUFFER_SIZE] , identifier[data] )
keyword[except] identifier[socket] . identifier[timeout] :
identifier[_logger] . identifier[n_debug] ( literal[string] , identifier[ikpdb] . identifier[status] )
keyword[if] identifier[ikpdb] . identifier[status] == literal[string] :
identifier[_logger] . identifier[n_debug] ( literal[string]
literal[string] )
keyword[return] {
literal[string] : literal[string] ,
literal[string] :{}
}
keyword[continue]
keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[socket_err] :
keyword[if] identifier[ikpdb] . identifier[status] == literal[string] :
keyword[return] { literal[string] : literal[string] ,
literal[string] :{ literal[string] : identifier[socket_err] . identifier[errno] ,
literal[string] : identifier[socket_err] . identifier[strerror] }}
keyword[continue]
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[_logger] . identifier[g_error] ( literal[string]
literal[string] , identifier[exc] )
identifier[_logger] . identifier[g_error] ( identifier[traceback] . identifier[format_exc] ())
identifier[print] ( literal[string] . identifier[join] ( identifier[traceback] . identifier[format_stack] ()))
keyword[return] {
literal[string] : literal[string] ,
literal[string] :{
literal[string] : identifier[exc] . identifier[__class__] . identifier[__name__] ,
literal[string] : identifier[exc] . identifier[message]
}
}
identifier[self] . identifier[_received_data] += identifier[data] . identifier[decode] ( literal[string] )
keyword[try] :
identifier[magic_code_idx] = identifier[self] . identifier[_received_data] . identifier[index] ( identifier[self] . identifier[MAGIC_CODE] )
keyword[except] identifier[ValueError] :
keyword[continue]
keyword[try] :
identifier[length_idx] = identifier[self] . identifier[_received_data] . identifier[index] ( literal[string] )
keyword[except] identifier[ValueError] :
keyword[continue]
identifier[json_length] = identifier[int] ( identifier[self] . identifier[_received_data] [ identifier[length_idx] + literal[int] : identifier[magic_code_idx] ])
identifier[message_length] = identifier[magic_code_idx] + identifier[len] ( identifier[self] . identifier[MAGIC_CODE] )+ identifier[json_length]
keyword[if] identifier[message_length] <= identifier[len] ( identifier[self] . identifier[_received_data] ):
identifier[full_message] = identifier[self] . identifier[_received_data] [: identifier[message_length] ]
identifier[self] . identifier[_received_data] = identifier[self] . identifier[_received_data] [ identifier[message_length] :]
keyword[if] identifier[len] ( identifier[self] . identifier[_received_data] )> literal[int] :
identifier[self] . identifier[SOCKET_BUFFER_SIZE] = literal[int]
keyword[else] :
identifier[self] . identifier[SOCKET_BUFFER_SIZE] = literal[int]
keyword[break]
keyword[else] :
identifier[self] . identifier[SOCKET_BUFFER_SIZE] = identifier[message_length] - identifier[len] ( identifier[self] . identifier[_received_data] )
identifier[self] . identifier[log_received] ( identifier[full_message] )
identifier[obj] = identifier[self] . identifier[decode] ( identifier[full_message] )
keyword[return] identifier[obj] | def receive(self, ikpdb):
"""Waits for a message from the debugger and returns it as a dict.
"""
# with self._connection_lock:
while self._network_loop:
_logger.n_debug('Enter socket.recv(%s) with self._received_data = %s', self.SOCKET_BUFFER_SIZE, self._received_data)
try:
# We may land here with a full packet already in self.received_data
# In that case we must not enter recv()
if self.SOCKET_BUFFER_SIZE:
data = self._connection.recv(self.SOCKET_BUFFER_SIZE) # depends on [control=['if'], data=[]]
else:
data = b''
_logger.n_debug('Socket.recv(%s) => %s', self.SOCKET_BUFFER_SIZE, data) # depends on [control=['try'], data=[]]
except socket.timeout:
_logger.n_debug('socket.timeout witk ikpdb.status=%s', ikpdb.status)
if ikpdb.status == 'terminated':
_logger.n_debug("breaking IKPdbConnectionHandler.receive() network loop as ikpdb state is 'terminated'.")
return {'command': '_InternalQuit', 'args': {}} # depends on [control=['if'], data=[]]
continue # depends on [control=['except'], data=[]]
except socket.error as socket_err:
if ikpdb.status == 'terminated':
return {'command': '_InternalQuit', 'args': {'socket_error_number': socket_err.errno, 'socket_error_str': socket_err.strerror}} # depends on [control=['if'], data=[]]
continue # depends on [control=['except'], data=['socket_err']]
except Exception as exc:
_logger.g_error("Unexecpected Error: '%s' in IKPdbConnectionHandler.command_loop.", exc)
_logger.g_error(traceback.format_exc())
print(''.join(traceback.format_stack()))
return {'command': '_InternalQuit', 'args': {'error': exc.__class__.__name__, 'message': exc.message}} # depends on [control=['except'], data=['exc']]
# received data is utf8 encoded
self._received_data += data.decode('utf-8')
# have we received a MAGIC_CODE
try:
magic_code_idx = self._received_data.index(self.MAGIC_CODE) # depends on [control=['try'], data=[]]
except ValueError:
continue # depends on [control=['except'], data=[]]
# Have we received a 'length='
try:
length_idx = self._received_data.index(u'length=') # depends on [control=['try'], data=[]]
except ValueError:
continue # depends on [control=['except'], data=[]]
# extract length content from received data
json_length = int(self._received_data[length_idx + 7:magic_code_idx])
message_length = magic_code_idx + len(self.MAGIC_CODE) + json_length
if message_length <= len(self._received_data):
full_message = self._received_data[:message_length]
self._received_data = self._received_data[message_length:]
if len(self._received_data) > 0:
self.SOCKET_BUFFER_SIZE = 0 # depends on [control=['if'], data=[]]
else:
self.SOCKET_BUFFER_SIZE = 4096
break # depends on [control=['if'], data=['message_length']]
else:
self.SOCKET_BUFFER_SIZE = message_length - len(self._received_data) # depends on [control=['while'], data=[]]
self.log_received(full_message)
obj = self.decode(full_message)
return obj |
def authenticate(self):
        """
        Indicate to the client that it needs to authenticate via a 401.
        """
        # If the client already supplied credentials they must have been
        # invalid, so include the error in the WWW-Authenticate challenge;
        # otherwise just request credentials.
        supplied = (request.headers.get('Authorization')
                    or request.args.get('access_token'))
        if supplied:
            realm = 'Bearer realm="%s", error="invalid_token"' % __package__
        else:
            realm = 'Bearer realm="%s"' % __package__
        resp = Response(None, 401, {'WWW-Authenticate': realm})
        abort(401, description='Please provide proper credentials', response=resp)
constant[
Indicate to the client that it needs to authenticate via a 401.
]
if <ast.BoolOp object at 0x7da1b27edfc0> begin[:]
variable[realm] assign[=] binary_operation[constant[Bearer realm="%s", error="invalid_token"] <ast.Mod object at 0x7da2590d6920> name[__package__]]
variable[resp] assign[=] call[name[Response], parameter[constant[None], constant[401], dictionary[[<ast.Constant object at 0x7da2054a6590>], [<ast.Name object at 0x7da2054a7580>]]]]
call[name[abort], parameter[constant[401]]] | keyword[def] identifier[authenticate] ( identifier[self] ):
literal[string]
keyword[if] identifier[request] . identifier[headers] . identifier[get] ( literal[string] ) keyword[or] identifier[request] . identifier[args] . identifier[get] ( literal[string] ):
identifier[realm] = literal[string] % identifier[__package__]
keyword[else] :
identifier[realm] = literal[string] % identifier[__package__]
identifier[resp] = identifier[Response] ( keyword[None] , literal[int] ,{ literal[string] : identifier[realm] })
identifier[abort] ( literal[int] , identifier[description] = literal[string] , identifier[response] = identifier[resp] ) | def authenticate(self):
"""
Indicate to the client that it needs to authenticate via a 401.
"""
if request.headers.get('Authorization') or request.args.get('access_token'):
realm = 'Bearer realm="%s", error="invalid_token"' % __package__ # depends on [control=['if'], data=[]]
else:
realm = 'Bearer realm="%s"' % __package__
resp = Response(None, 401, {'WWW-Authenticate': realm})
abort(401, description='Please provide proper credentials', response=resp) |
def _get_branch_opts(branch, local_branch, all_local_branches,
desired_upstream, git_ver=None):
'''
DRY helper to build list of opts for git.branch, for the purposes of
setting upstream tracking branch
'''
if branch is not None and branch not in all_local_branches:
# We won't be setting upstream because the act of checking out a new
# branch will set upstream for us
return None
if git_ver is None:
git_ver = _LooseVersion(__salt__['git.version'](versioninfo=False))
ret = []
if git_ver >= _LooseVersion('1.8.0'):
ret.extend(['--set-upstream-to', desired_upstream])
else:
ret.append('--set-upstream')
# --set-upstream does not assume the current branch, so we have to
# tell it which branch we'll be using
ret.append(local_branch if branch is None else branch)
ret.append(desired_upstream)
return ret | def function[_get_branch_opts, parameter[branch, local_branch, all_local_branches, desired_upstream, git_ver]]:
constant[
DRY helper to build list of opts for git.branch, for the purposes of
setting upstream tracking branch
]
if <ast.BoolOp object at 0x7da1b1fa27a0> begin[:]
return[constant[None]]
if compare[name[git_ver] is constant[None]] begin[:]
variable[git_ver] assign[=] call[name[_LooseVersion], parameter[call[call[name[__salt__]][constant[git.version]], parameter[]]]]
variable[ret] assign[=] list[[]]
if compare[name[git_ver] greater_or_equal[>=] call[name[_LooseVersion], parameter[constant[1.8.0]]]] begin[:]
call[name[ret].extend, parameter[list[[<ast.Constant object at 0x7da1b2187160>, <ast.Name object at 0x7da1b21876a0>]]]]
return[name[ret]] | keyword[def] identifier[_get_branch_opts] ( identifier[branch] , identifier[local_branch] , identifier[all_local_branches] ,
identifier[desired_upstream] , identifier[git_ver] = keyword[None] ):
literal[string]
keyword[if] identifier[branch] keyword[is] keyword[not] keyword[None] keyword[and] identifier[branch] keyword[not] keyword[in] identifier[all_local_branches] :
keyword[return] keyword[None]
keyword[if] identifier[git_ver] keyword[is] keyword[None] :
identifier[git_ver] = identifier[_LooseVersion] ( identifier[__salt__] [ literal[string] ]( identifier[versioninfo] = keyword[False] ))
identifier[ret] =[]
keyword[if] identifier[git_ver] >= identifier[_LooseVersion] ( literal[string] ):
identifier[ret] . identifier[extend] ([ literal[string] , identifier[desired_upstream] ])
keyword[else] :
identifier[ret] . identifier[append] ( literal[string] )
identifier[ret] . identifier[append] ( identifier[local_branch] keyword[if] identifier[branch] keyword[is] keyword[None] keyword[else] identifier[branch] )
identifier[ret] . identifier[append] ( identifier[desired_upstream] )
keyword[return] identifier[ret] | def _get_branch_opts(branch, local_branch, all_local_branches, desired_upstream, git_ver=None):
"""
DRY helper to build list of opts for git.branch, for the purposes of
setting upstream tracking branch
"""
if branch is not None and branch not in all_local_branches:
# We won't be setting upstream because the act of checking out a new
# branch will set upstream for us
return None # depends on [control=['if'], data=[]]
if git_ver is None:
git_ver = _LooseVersion(__salt__['git.version'](versioninfo=False)) # depends on [control=['if'], data=['git_ver']]
ret = []
if git_ver >= _LooseVersion('1.8.0'):
ret.extend(['--set-upstream-to', desired_upstream]) # depends on [control=['if'], data=[]]
else:
ret.append('--set-upstream')
# --set-upstream does not assume the current branch, so we have to
# tell it which branch we'll be using
ret.append(local_branch if branch is None else branch)
ret.append(desired_upstream)
return ret |
def fcoe_fcoe_map_fcoe_map_name(self, **kwargs):
    """Auto Generated Code

    Build the XML config subtree selecting an FCoE map by name and hand
    the resulting ``config`` element to the callback (defaults to
    ``self._callback``).
    """
    map_name = kwargs.pop('fcoe_map_name')
    callback = kwargs.pop('callback', self._callback)

    config = ET.Element("config")
    fcoe = ET.SubElement(config, "fcoe",
                         xmlns="urn:brocade.com:mgmt:brocade-fcoe")
    fcoe_map = ET.SubElement(fcoe, "fcoe-map")
    name_el = ET.SubElement(fcoe_map, "fcoe-map-name")
    name_el.text = map_name
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[fcoe] assign[=] call[name[ET].SubElement, parameter[name[config], constant[fcoe]]]
variable[fcoe_map] assign[=] call[name[ET].SubElement, parameter[name[fcoe], constant[fcoe-map]]]
variable[fcoe_map_name] assign[=] call[name[ET].SubElement, parameter[name[fcoe_map], constant[fcoe-map-name]]]
name[fcoe_map_name].text assign[=] call[name[kwargs].pop, parameter[constant[fcoe_map_name]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[fcoe_fcoe_map_fcoe_map_name] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[fcoe] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[fcoe_map] = identifier[ET] . identifier[SubElement] ( identifier[fcoe] , literal[string] )
identifier[fcoe_map_name] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_map] , literal[string] )
identifier[fcoe_map_name] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def fcoe_fcoe_map_fcoe_map_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
fcoe = ET.SubElement(config, 'fcoe', xmlns='urn:brocade.com:mgmt:brocade-fcoe')
fcoe_map = ET.SubElement(fcoe, 'fcoe-map')
fcoe_map_name = ET.SubElement(fcoe_map, 'fcoe-map-name')
fcoe_map_name.text = kwargs.pop('fcoe_map_name')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def url(self):
    """
    The URL at which this preview can be accessed.
    """
    route_kwargs = {
        'module': self.module,
        'preview': type(self).__name__,
    }
    return reverse('{0}:detail'.format(URL_NAMESPACE), kwargs=route_kwargs)
constant[
The URL to access this preview.
]
return[call[name[reverse], parameter[binary_operation[constant[%s:detail] <ast.Mod object at 0x7da2590d6920> name[URL_NAMESPACE]]]]] | keyword[def] identifier[url] ( identifier[self] ):
literal[string]
keyword[return] identifier[reverse] ( literal[string] % identifier[URL_NAMESPACE] , identifier[kwargs] ={
literal[string] : identifier[self] . identifier[module] ,
literal[string] : identifier[type] ( identifier[self] ). identifier[__name__] ,
}) | def url(self):
"""
The URL to access this preview.
"""
return reverse('%s:detail' % URL_NAMESPACE, kwargs={'module': self.module, 'preview': type(self).__name__}) |
def pause_transfer_operation(self, operation_name):
    """
    Pauses an transfer operation in Google Storage Transfer Service.
    :param operation_name: (Required) Name of the transfer operation.
    :type operation_name: str
    :rtype: None
    """
    conn = self.get_conn()
    request = conn.transferOperations().pause(name=operation_name)
    # num_retries governs how often the API call is retried on failure.
    request.execute(num_retries=self.num_retries)
constant[
Pauses an transfer operation in Google Storage Transfer Service.
:param operation_name: (Required) Name of the transfer operation.
:type operation_name: str
:rtype: None
]
call[call[call[call[name[self].get_conn, parameter[]].transferOperations, parameter[]].pause, parameter[]].execute, parameter[]] | keyword[def] identifier[pause_transfer_operation] ( identifier[self] , identifier[operation_name] ):
literal[string]
identifier[self] . identifier[get_conn] (). identifier[transferOperations] (). identifier[pause] ( identifier[name] = identifier[operation_name] ). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] ) | def pause_transfer_operation(self, operation_name):
"""
Pauses an transfer operation in Google Storage Transfer Service.
:param operation_name: (Required) Name of the transfer operation.
:type operation_name: str
:rtype: None
"""
self.get_conn().transferOperations().pause(name=operation_name).execute(num_retries=self.num_retries) |
def post(self, request, format=None):
    """Authenticate the user from the posted credentials.

    On success the session expiry is set (three weeks when ``remember``
    is truthy, end of browser session otherwise) and the logged-in
    user's profile is returned; on failure the serializer errors are
    returned with HTTP 400.
    """
    serializer = self.serializer_class(data=request.data)
    if not serializer.is_valid():
        return Response(serializer.errors, status=400)

    login(request, serializer.instance)
    if request.data.get('remember'):
        # TODO: remember configurable
        three_weeks = 60 * 60 * 24 * 7 * 3
        request.session.set_expiry(three_weeks)
    else:
        # 0 == expire when the browser session ends.
        request.session.set_expiry(0)

    profile = ProfileOwnSerializer(serializer.instance,
                                   context={'request': request})
    return Response({
        'detail': _(u'Logged in successfully'),
        'user': profile.data,
    })
constant[ authenticate ]
variable[serializer] assign[=] call[name[self].serializer_class, parameter[]]
if call[name[serializer].is_valid, parameter[]] begin[:]
call[name[login], parameter[name[request], name[serializer].instance]]
if call[name[request].data.get, parameter[constant[remember]]] begin[:]
call[name[request].session.set_expiry, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[60] * constant[60]] * constant[24]] * constant[7]] * constant[3]]]]
return[call[name[Response], parameter[dictionary[[<ast.Constant object at 0x7da20c6e5c90>, <ast.Constant object at 0x7da20c6e7c40>], [<ast.Call object at 0x7da20c6e6e60>, <ast.Attribute object at 0x7da20c6e45e0>]]]]]
return[call[name[Response], parameter[name[serializer].errors]]] | keyword[def] identifier[post] ( identifier[self] , identifier[request] , identifier[format] = keyword[None] ):
literal[string]
identifier[serializer] = identifier[self] . identifier[serializer_class] ( identifier[data] = identifier[request] . identifier[data] )
keyword[if] identifier[serializer] . identifier[is_valid] ():
identifier[login] ( identifier[request] , identifier[serializer] . identifier[instance] )
keyword[if] identifier[request] . identifier[data] . identifier[get] ( literal[string] ):
identifier[request] . identifier[session] . identifier[set_expiry] ( literal[int] * literal[int] * literal[int] * literal[int] * literal[int] )
keyword[else] :
identifier[request] . identifier[session] . identifier[set_expiry] ( literal[int] )
keyword[return] identifier[Response] ({
literal[string] : identifier[_] ( literal[string] ),
literal[string] : identifier[ProfileOwnSerializer] ( identifier[serializer] . identifier[instance] ,
identifier[context] ={ literal[string] : identifier[request] }). identifier[data]
})
keyword[return] identifier[Response] ( identifier[serializer] . identifier[errors] , identifier[status] = literal[int] ) | def post(self, request, format=None):
""" authenticate """
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
login(request, serializer.instance)
if request.data.get('remember'):
# TODO: remember configurable
request.session.set_expiry(60 * 60 * 24 * 7 * 3) # depends on [control=['if'], data=[]]
else:
request.session.set_expiry(0)
return Response({'detail': _(u'Logged in successfully'), 'user': ProfileOwnSerializer(serializer.instance, context={'request': request}).data}) # depends on [control=['if'], data=[]]
return Response(serializer.errors, status=400) |
def create_app_factory(app_name, config_loader=None,
                       extension_entry_points=None, extensions=None,
                       blueprint_entry_points=None, blueprints=None,
                       converter_entry_points=None, converters=None,
                       wsgi_factory=None, **app_kwargs):
    """Build and return a Flask application factory.

    The returned factory creates the base application, loads its
    configuration, registers Werkzeug URL-map converters, initializes
    Flask extensions and registers blueprints -- both from the supplied
    entry points (entry-point load order is not guaranteed) and from the
    objects passed directly in the arguments.

    :param app_name: Name of the Flask application.
    :param config_loader: Callable invoked as ``config_loader(app,
        **kwargs)`` right after application creation to populate the
        Flask configuration.
    :param extension_entry_points: Entry points naming Flask extensions
        that are initialized by being called with the application object.
    :param extensions: Flask extensions initialized by being called with
        the application object.
    :param blueprint_entry_points: Entry points naming blueprints to be
        registered on the application.
    :param blueprints: Blueprints to be registered on the application.
    :param converter_entry_points: Entry points naming Werkzeug URL map
        converters added to ``app.url_map.converters``.
    :param converters: Mapping of Werkzeug URL map converter classes
        added to ``app.url_map.converters``.
    :param wsgi_factory: Callable receiving the application object; its
        return value replaces the default WSGI application (e.g. to
        install ``DispatcherMiddleware``).
    :param app_kwargs: Extra keyword arguments forwarded to
        :py:meth:`base_app`.
    :returns: A Flask application factory.

    Example of a configuration loader:

    .. code-block:: python

        def my_config_loader(app, **kwargs):
            app.config.from_module('mysite.config')
            app.config.update(**kwargs)

    Example of a WSGI factory:

    .. code-block:: python

        def my_wsgi_factory(app):
            return DispatcherMiddleware(app.wsgi_app, {'/api': api_app})

    .. versionadded: 1.0.0
    """
    def _create_app(**kwargs):
        app = base_app(app_name, **app_kwargs)
        app_created.send(_create_app, app=app)

        # An explicit debug flag passed to the factory takes precedence.
        debug = kwargs.get('debug')
        if debug is not None:
            app.debug = debug

        # Configuration is loaded first so the loaders below can see it.
        if config_loader:
            config_loader(app, **kwargs)

        # URL converters, then extensions, then blueprints -- in that order.
        converter_loader(app,
                         entry_points=converter_entry_points,
                         modules=converters)
        app_loader(app,
                   entry_points=extension_entry_points,
                   modules=extensions)
        blueprint_loader(app,
                         entry_points=blueprint_entry_points,
                         modules=blueprints)
        app_loaded.send(_create_app, app=app)

        # Optionally swap in a custom WSGI callable (e.g. middleware).
        if wsgi_factory:
            app.wsgi_app = wsgi_factory(app, **kwargs)

        return app

    return _create_app
constant[Create a Flask application factory.
The application factory will load Flask extensions and blueprints specified
using both entry points and directly in the arguments. Loading order of
entry points are not guaranteed and can happen in any order.
:param app_name: Flask application name.
:param config_loader: Callable which will be invoked on application
creation in order to load the Flask configuration. See example below.
:param extension_entry_points: List of entry points, which specifies Flask
extensions that will be initialized only by passing in the Flask
application object
:param extensions: List of Flask extensions that can be initialized only by
passing in the Flask application object.
:param blueprint_entry_points: List of entry points, which specifies
Blueprints that will be registered on the Flask application.
:param blueprints: List of Blueprints that will be registered on the
Flask application.
:param converter_entry_points: List of entry points, which specifies
Werkzeug URL map converters that will be added to
``app.url_map.converters``.
:param converters: Map of Werkzeug URL map converter classes that will
be added to ``app.url_map.converters``.
:param wsgi_factory: A callable that will be passed the Flask application
object in order to overwrite the default WSGI application (e.g. to
install ``DispatcherMiddleware``).
:param app_kwargs: Keyword arguments passed to :py:meth:`base_app`.
:returns: Flask application factory.
Example of a configuration loader:
.. code-block:: python
def my_config_loader(app, **kwargs):
app.config.from_module('mysite.config')
app.config.update(**kwargs)
.. note::
`Invenio-Config <https://pythonhosted.org/invenio-config>`_ provides a
factory creating default configuration loader (see
:func:`invenio_config.utils.create_config_loader`) which is sufficient
for most cases.
Example of a WSGI factory:
.. code-block:: python
def my_wsgi_factory(app):
return DispatcherMiddleware(app.wsgi_app, {'/api': api_app})
.. versionadded: 1.0.0
]
def function[_create_app, parameter[]]:
variable[app] assign[=] call[name[base_app], parameter[name[app_name]]]
call[name[app_created].send, parameter[name[_create_app]]]
variable[debug] assign[=] call[name[kwargs].get, parameter[constant[debug]]]
if compare[name[debug] is_not constant[None]] begin[:]
name[app].debug assign[=] name[debug]
if name[config_loader] begin[:]
call[name[config_loader], parameter[name[app]]]
call[name[converter_loader], parameter[name[app]]]
call[name[app_loader], parameter[name[app]]]
call[name[blueprint_loader], parameter[name[app]]]
call[name[app_loaded].send, parameter[name[_create_app]]]
if name[wsgi_factory] begin[:]
name[app].wsgi_app assign[=] call[name[wsgi_factory], parameter[name[app]]]
return[name[app]]
return[name[_create_app]] | keyword[def] identifier[create_app_factory] ( identifier[app_name] , identifier[config_loader] = keyword[None] ,
identifier[extension_entry_points] = keyword[None] , identifier[extensions] = keyword[None] ,
identifier[blueprint_entry_points] = keyword[None] , identifier[blueprints] = keyword[None] ,
identifier[converter_entry_points] = keyword[None] , identifier[converters] = keyword[None] ,
identifier[wsgi_factory] = keyword[None] ,** identifier[app_kwargs] ):
literal[string]
keyword[def] identifier[_create_app] (** identifier[kwargs] ):
identifier[app] = identifier[base_app] ( identifier[app_name] ,** identifier[app_kwargs] )
identifier[app_created] . identifier[send] ( identifier[_create_app] , identifier[app] = identifier[app] )
identifier[debug] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] identifier[debug] keyword[is] keyword[not] keyword[None] :
identifier[app] . identifier[debug] = identifier[debug]
keyword[if] identifier[config_loader] :
identifier[config_loader] ( identifier[app] ,** identifier[kwargs] )
identifier[converter_loader] (
identifier[app] ,
identifier[entry_points] = identifier[converter_entry_points] ,
identifier[modules] = identifier[converters] ,
)
identifier[app_loader] (
identifier[app] ,
identifier[entry_points] = identifier[extension_entry_points] ,
identifier[modules] = identifier[extensions] ,
)
identifier[blueprint_loader] (
identifier[app] ,
identifier[entry_points] = identifier[blueprint_entry_points] ,
identifier[modules] = identifier[blueprints] ,
)
identifier[app_loaded] . identifier[send] ( identifier[_create_app] , identifier[app] = identifier[app] )
keyword[if] identifier[wsgi_factory] :
identifier[app] . identifier[wsgi_app] = identifier[wsgi_factory] ( identifier[app] ,** identifier[kwargs] )
keyword[return] identifier[app]
keyword[return] identifier[_create_app] | def create_app_factory(app_name, config_loader=None, extension_entry_points=None, extensions=None, blueprint_entry_points=None, blueprints=None, converter_entry_points=None, converters=None, wsgi_factory=None, **app_kwargs):
"""Create a Flask application factory.
The application factory will load Flask extensions and blueprints specified
using both entry points and directly in the arguments. Loading order of
entry points are not guaranteed and can happen in any order.
:param app_name: Flask application name.
:param config_loader: Callable which will be invoked on application
creation in order to load the Flask configuration. See example below.
:param extension_entry_points: List of entry points, which specifies Flask
extensions that will be initialized only by passing in the Flask
application object
:param extensions: List of Flask extensions that can be initialized only by
passing in the Flask application object.
:param blueprint_entry_points: List of entry points, which specifies
Blueprints that will be registered on the Flask application.
:param blueprints: List of Blueprints that will be registered on the
Flask application.
:param converter_entry_points: List of entry points, which specifies
Werkzeug URL map converters that will be added to
``app.url_map.converters``.
:param converters: Map of Werkzeug URL map converter classes that will
be added to ``app.url_map.converters``.
:param wsgi_factory: A callable that will be passed the Flask application
object in order to overwrite the default WSGI application (e.g. to
install ``DispatcherMiddleware``).
:param app_kwargs: Keyword arguments passed to :py:meth:`base_app`.
:returns: Flask application factory.
Example of a configuration loader:
.. code-block:: python
def my_config_loader(app, **kwargs):
app.config.from_module('mysite.config')
app.config.update(**kwargs)
.. note::
`Invenio-Config <https://pythonhosted.org/invenio-config>`_ provides a
factory creating default configuration loader (see
:func:`invenio_config.utils.create_config_loader`) which is sufficient
for most cases.
Example of a WSGI factory:
.. code-block:: python
def my_wsgi_factory(app):
return DispatcherMiddleware(app.wsgi_app, {'/api': api_app})
.. versionadded: 1.0.0
"""
def _create_app(**kwargs):
app = base_app(app_name, **app_kwargs)
app_created.send(_create_app, app=app)
debug = kwargs.get('debug')
if debug is not None:
app.debug = debug # depends on [control=['if'], data=['debug']]
# Load configuration
if config_loader:
config_loader(app, **kwargs) # depends on [control=['if'], data=[]]
# Load URL converters.
converter_loader(app, entry_points=converter_entry_points, modules=converters)
# Load application based on entrypoints.
app_loader(app, entry_points=extension_entry_points, modules=extensions)
# Load blueprints
blueprint_loader(app, entry_points=blueprint_entry_points, modules=blueprints)
app_loaded.send(_create_app, app=app)
# Replace WSGI application using factory if provided (e.g. to install
# WSGI middleware).
if wsgi_factory:
app.wsgi_app = wsgi_factory(app, **kwargs) # depends on [control=['if'], data=[]]
return app
return _create_app |
def finalize(self, block):
    """
    Final CBC-MAC step: encrypt the block first, then xor it into the
    running MAC value.
    """
    encrypted = self.mac_aes.encrypt(block)
    self.mac = _xor_block(self.mac, encrypted)
constant[
The final step of CBC-MAC encrypts before xor.
]
variable[t1] assign[=] call[name[self].mac_aes.encrypt, parameter[name[block]]]
variable[t2] assign[=] call[name[_xor_block], parameter[name[self].mac, name[t1]]]
name[self].mac assign[=] name[t2] | keyword[def] identifier[finalize] ( identifier[self] , identifier[block] ):
literal[string]
identifier[t1] = identifier[self] . identifier[mac_aes] . identifier[encrypt] ( identifier[block] )
identifier[t2] = identifier[_xor_block] ( identifier[self] . identifier[mac] , identifier[t1] )
identifier[self] . identifier[mac] = identifier[t2] | def finalize(self, block):
"""
The final step of CBC-MAC encrypts before xor.
"""
t1 = self.mac_aes.encrypt(block)
t2 = _xor_block(self.mac, t1)
self.mac = t2 |
def _apply_search_backrefs(pattern, flags=0):
    """Apply the search backrefs to the search pattern.

    ``pattern`` may be a raw ``str``/``bytes`` pattern (parsed here,
    honoring the ``VERBOSE`` and ``V0``/``V1`` bits in ``flags``), an
    already compiled :class:`Bregex` object, or a compiled ``regex``
    pattern.  For compiled patterns no ``flags`` may be supplied.

    :raises ValueError: if ``flags`` are given alongside a compiled pattern.
    :raises TypeError: if ``pattern`` is neither a string nor a compiled
        pattern.
    """
    if isinstance(pattern, (str, bytes)):
        re_verbose = VERBOSE & flags
        if flags & V0:
            re_version = V0
        elif flags & V1:
            re_version = V1
        else:
            re_version = 0
        if not (flags & DEBUG):
            # Normal path: go through the compile cache.
            pattern = _cached_search_compile(pattern, re_verbose, re_version, type(pattern))
        else:  # pragma: no cover
            # DEBUG bypasses the cache so parse output can be inspected.
            pattern = _bregex_parse._SearchParser(pattern, re_verbose, re_version).parse()
    elif isinstance(pattern, Bregex):
        if flags:
            raise ValueError("Cannot process flags argument with a compiled pattern")
        pattern = pattern._pattern
    elif isinstance(pattern, _REGEX_TYPE):
        if flags:
            # Message kept identical to the Bregex branch above (the
            # original text inconsistently ended with '!').
            raise ValueError("Cannot process flags argument with a compiled pattern")
    else:
        raise TypeError("Not a string or compiled pattern!")
    return pattern
constant[Apply the search backrefs to the search pattern.]
if call[name[isinstance], parameter[name[pattern], tuple[[<ast.Name object at 0x7da1b04b11b0>, <ast.Name object at 0x7da1b04b2380>]]]] begin[:]
variable[re_verbose] assign[=] binary_operation[name[VERBOSE] <ast.BitAnd object at 0x7da2590d6b60> name[flags]]
if binary_operation[name[flags] <ast.BitAnd object at 0x7da2590d6b60> name[V0]] begin[:]
variable[re_version] assign[=] name[V0]
if <ast.UnaryOp object at 0x7da18eb54940> begin[:]
variable[pattern] assign[=] call[name[_cached_search_compile], parameter[name[pattern], name[re_verbose], name[re_version], call[name[type], parameter[name[pattern]]]]]
return[name[pattern]] | keyword[def] identifier[_apply_search_backrefs] ( identifier[pattern] , identifier[flags] = literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[pattern] ,( identifier[str] , identifier[bytes] )):
identifier[re_verbose] = identifier[VERBOSE] & identifier[flags]
keyword[if] identifier[flags] & identifier[V0] :
identifier[re_version] = identifier[V0]
keyword[elif] identifier[flags] & identifier[V1] :
identifier[re_version] = identifier[V1]
keyword[else] :
identifier[re_version] = literal[int]
keyword[if] keyword[not] ( identifier[flags] & identifier[DEBUG] ):
identifier[pattern] = identifier[_cached_search_compile] ( identifier[pattern] , identifier[re_verbose] , identifier[re_version] , identifier[type] ( identifier[pattern] ))
keyword[else] :
identifier[pattern] = identifier[_bregex_parse] . identifier[_SearchParser] ( identifier[pattern] , identifier[re_verbose] , identifier[re_version] ). identifier[parse] ()
keyword[elif] identifier[isinstance] ( identifier[pattern] , identifier[Bregex] ):
keyword[if] identifier[flags] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[pattern] = identifier[pattern] . identifier[_pattern]
keyword[elif] identifier[isinstance] ( identifier[pattern] , identifier[_REGEX_TYPE] ):
keyword[if] identifier[flags] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[return] identifier[pattern] | def _apply_search_backrefs(pattern, flags=0):
"""Apply the search backrefs to the search pattern."""
if isinstance(pattern, (str, bytes)):
re_verbose = VERBOSE & flags
if flags & V0:
re_version = V0 # depends on [control=['if'], data=[]]
elif flags & V1:
re_version = V1 # depends on [control=['if'], data=[]]
else:
re_version = 0
if not flags & DEBUG:
pattern = _cached_search_compile(pattern, re_verbose, re_version, type(pattern)) # depends on [control=['if'], data=[]]
else: # pragma: no cover
pattern = _bregex_parse._SearchParser(pattern, re_verbose, re_version).parse() # depends on [control=['if'], data=[]]
elif isinstance(pattern, Bregex):
if flags:
raise ValueError('Cannot process flags argument with a compiled pattern') # depends on [control=['if'], data=[]]
pattern = pattern._pattern # depends on [control=['if'], data=[]]
elif isinstance(pattern, _REGEX_TYPE):
if flags:
raise ValueError('Cannot process flags argument with a compiled pattern!') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise TypeError('Not a string or compiled pattern!')
return pattern |
def nodes(xmrs):
    """Return the list of Nodes for *xmrs*."""
    result = []
    props = xmrs.properties
    split = sort_vid_split
    for ep in xmrs.eps():
        sortinfo = None
        iv = ep.intrinsic_variable
        if iv is not None:
            # Record the variable's properties, plus its sort (e.g. 'e',
            # 'x') under the CVARSORT key.
            sort, _ = split(iv)
            sortinfo = props(iv)
            sortinfo[CVARSORT] = sort
        result.append(Node(ep.nodeid, ep.pred, sortinfo, ep.lnk,
                           ep.surface, ep.base, ep.carg))
    return result
constant[Return the list of Nodes for *xmrs*.]
variable[nodes] assign[=] list[[]]
variable[_props] assign[=] name[xmrs].properties
variable[varsplit] assign[=] name[sort_vid_split]
for taget[name[p]] in starred[call[name[xmrs].eps, parameter[]]] begin[:]
variable[sortinfo] assign[=] constant[None]
variable[iv] assign[=] name[p].intrinsic_variable
if compare[name[iv] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b03492d0> assign[=] call[name[varsplit], parameter[name[iv]]]
variable[sortinfo] assign[=] call[name[_props], parameter[name[iv]]]
call[name[sortinfo]][name[CVARSORT]] assign[=] name[sort]
call[name[nodes].append, parameter[call[name[Node], parameter[name[p].nodeid, name[p].pred, name[sortinfo], name[p].lnk, name[p].surface, name[p].base, name[p].carg]]]]
return[name[nodes]] | keyword[def] identifier[nodes] ( identifier[xmrs] ):
literal[string]
identifier[nodes] =[]
identifier[_props] = identifier[xmrs] . identifier[properties]
identifier[varsplit] = identifier[sort_vid_split]
keyword[for] identifier[p] keyword[in] identifier[xmrs] . identifier[eps] ():
identifier[sortinfo] = keyword[None]
identifier[iv] = identifier[p] . identifier[intrinsic_variable]
keyword[if] identifier[iv] keyword[is] keyword[not] keyword[None] :
identifier[sort] , identifier[_] = identifier[varsplit] ( identifier[iv] )
identifier[sortinfo] = identifier[_props] ( identifier[iv] )
identifier[sortinfo] [ identifier[CVARSORT] ]= identifier[sort]
identifier[nodes] . identifier[append] (
identifier[Node] ( identifier[p] . identifier[nodeid] , identifier[p] . identifier[pred] , identifier[sortinfo] , identifier[p] . identifier[lnk] , identifier[p] . identifier[surface] , identifier[p] . identifier[base] , identifier[p] . identifier[carg] )
)
keyword[return] identifier[nodes] | def nodes(xmrs):
"""Return the list of Nodes for *xmrs*."""
nodes = []
_props = xmrs.properties
varsplit = sort_vid_split
for p in xmrs.eps():
sortinfo = None
iv = p.intrinsic_variable
if iv is not None:
(sort, _) = varsplit(iv)
sortinfo = _props(iv)
sortinfo[CVARSORT] = sort # depends on [control=['if'], data=['iv']]
nodes.append(Node(p.nodeid, p.pred, sortinfo, p.lnk, p.surface, p.base, p.carg)) # depends on [control=['for'], data=['p']]
return nodes |
def countries(instance):
    """Ensure that the `country` property of `location` objects is a valid
    ISO 3166-1 ALPHA-2 Code.
    """
    if instance['type'] != 'location' or 'country' not in instance:
        return None
    if instance['country'].upper() not in enums.COUNTRY_CODES:
        return JSONError("Location `country` should be a valid ISO 3166-1 "
                         "ALPHA-2 Code.",
                         instance['id'], 'marking-definition-type')
constant[Ensure that the `country` property of `location` objects is a valid
ISO 3166-1 ALPHA-2 Code.
]
if <ast.BoolOp object at 0x7da1b0fd4730> begin[:]
return[call[name[JSONError], parameter[constant[Location `country` should be a valid ISO 3166-1 ALPHA-2 Code.], call[name[instance]][constant[id]], constant[marking-definition-type]]]] | keyword[def] identifier[countries] ( identifier[instance] ):
literal[string]
keyword[if] ( identifier[instance] [ literal[string] ]== literal[string] keyword[and] literal[string] keyword[in] identifier[instance] keyword[and] keyword[not]
identifier[instance] [ literal[string] ]. identifier[upper] () keyword[in] identifier[enums] . identifier[COUNTRY_CODES] ):
keyword[return] identifier[JSONError] ( literal[string]
literal[string] ,
identifier[instance] [ literal[string] ], literal[string] ) | def countries(instance):
"""Ensure that the `country` property of `location` objects is a valid
ISO 3166-1 ALPHA-2 Code.
"""
if instance['type'] == 'location' and 'country' in instance and (not instance['country'].upper() in enums.COUNTRY_CODES):
return JSONError('Location `country` should be a valid ISO 3166-1 ALPHA-2 Code.', instance['id'], 'marking-definition-type') # depends on [control=['if'], data=[]] |
def scan_download(self, scan_id, format='v2'):
    """scan_download scan_id [format]
    Download an individual scan result and return its contents.

    The API returns the result as a single-entry zip archive; this extracts
    and returns that entry's contents (bytes).

    :param scan_id: identifier of the scan result to download
    :param format: download format requested from the API (default 'v2')
    """
    # Local import keeps this fix self-contained; BytesIO is required because
    # zip archives are binary and ZipFile cannot read from a text StringIO.
    from io import BytesIO

    payload = {
        'downloadType': format,
        'scanResultID': scan_id,
    }
    data = self.raw_query('scanResult', 'download', data=payload, dejson=False)
    if not isinstance(data, bytes):
        # presumably raw_query returned decoded text -- re-encode without
        # altering the underlying byte values (latin-1 is byte-transparent).
        data = data.encode('latin-1')
    with ZipFile(BytesIO(data)) as zfile:
        # The archive always contains exactly one member: the scan report.
        return zfile.read(zfile.namelist()[0])
constant[scan_download scan_id [format]
Will download an individual scan and return a string with the results.
]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b28d6bc0>, <ast.Constant object at 0x7da1b28d64d0>], [<ast.Name object at 0x7da1b28d6e90>, <ast.Name object at 0x7da1b28d6410>]]
variable[data] assign[=] call[name[self].raw_query, parameter[constant[scanResult], constant[download]]]
variable[bobj] assign[=] call[name[StringIO], parameter[]]
call[name[bobj].write, parameter[name[data]]]
variable[zfile] assign[=] call[name[ZipFile], parameter[name[bobj]]]
return[call[name[zfile].read, parameter[call[call[name[zfile].namelist, parameter[]]][constant[0]]]]] | keyword[def] identifier[scan_download] ( identifier[self] , identifier[scan_id] , identifier[format] = literal[string] ):
literal[string]
identifier[payload] ={
literal[string] : identifier[format] ,
literal[string] : identifier[scan_id] ,
}
identifier[data] = identifier[self] . identifier[raw_query] ( literal[string] , literal[string] , identifier[data] = identifier[payload] , identifier[dejson] = keyword[False] )
identifier[bobj] = identifier[StringIO] ()
identifier[bobj] . identifier[write] ( identifier[data] )
identifier[zfile] = identifier[ZipFile] ( identifier[bobj] )
keyword[return] identifier[zfile] . identifier[read] ( identifier[zfile] . identifier[namelist] ()[ literal[int] ]) | def scan_download(self, scan_id, format='v2'):
"""scan_download scan_id [format]
Will download an individual scan and return a string with the results.
"""
payload = {'downloadType': format, 'scanResultID': scan_id}
data = self.raw_query('scanResult', 'download', data=payload, dejson=False)
bobj = StringIO()
bobj.write(data)
zfile = ZipFile(bobj)
return zfile.read(zfile.namelist()[0]) |
def get_stp_mst_detail_output_msti_port_edge_delay(self, **kwargs):
    """Build the ``get-stp-mst-detail`` request selecting the ``edge-delay``
    leaf of an MSTI port, then dispatch it through the callback.

    Expected kwargs:
        instance_id: MSTI instance identifier (list key).
        edge_delay: edge-delay value for the port element.
        callback: optional dispatcher; defaults to ``self._callback``.

    Returns whatever the callback returns for the assembled element tree.
    """
    # Fixed: the original created an unused ET.Element("config") and
    # immediately rebound the name; the request root is the RPC element.
    get_stp_mst_detail = ET.Element("get_stp_mst_detail")
    config = get_stp_mst_detail
    output = ET.SubElement(get_stp_mst_detail, "output")
    msti = ET.SubElement(output, "msti")
    instance_id_key = ET.SubElement(msti, "instance-id")
    instance_id_key.text = kwargs.pop('instance_id')
    port = ET.SubElement(msti, "port")
    edge_delay = ET.SubElement(port, "edge-delay")
    edge_delay.text = kwargs.pop('edge_delay')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_stp_mst_detail] assign[=] call[name[ET].Element, parameter[constant[get_stp_mst_detail]]]
variable[config] assign[=] name[get_stp_mst_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_stp_mst_detail], constant[output]]]
variable[msti] assign[=] call[name[ET].SubElement, parameter[name[output], constant[msti]]]
variable[instance_id_key] assign[=] call[name[ET].SubElement, parameter[name[msti], constant[instance-id]]]
name[instance_id_key].text assign[=] call[name[kwargs].pop, parameter[constant[instance_id]]]
variable[port] assign[=] call[name[ET].SubElement, parameter[name[msti], constant[port]]]
variable[edge_delay] assign[=] call[name[ET].SubElement, parameter[name[port], constant[edge-delay]]]
name[edge_delay].text assign[=] call[name[kwargs].pop, parameter[constant[edge_delay]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_stp_mst_detail_output_msti_port_edge_delay] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_stp_mst_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_stp_mst_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_stp_mst_detail] , literal[string] )
identifier[msti] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[instance_id_key] = identifier[ET] . identifier[SubElement] ( identifier[msti] , literal[string] )
identifier[instance_id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[port] = identifier[ET] . identifier[SubElement] ( identifier[msti] , literal[string] )
identifier[edge_delay] = identifier[ET] . identifier[SubElement] ( identifier[port] , literal[string] )
identifier[edge_delay] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_stp_mst_detail_output_msti_port_edge_delay(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_stp_mst_detail = ET.Element('get_stp_mst_detail')
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, 'output')
msti = ET.SubElement(output, 'msti')
instance_id_key = ET.SubElement(msti, 'instance-id')
instance_id_key.text = kwargs.pop('instance_id')
port = ET.SubElement(msti, 'port')
edge_delay = ET.SubElement(port, 'edge-delay')
edge_delay.text = kwargs.pop('edge_delay')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def connect_to_images(region=None, public=True):
    """Create and return a client for the Images service endpoint."""
    client_opts = {"ep_name": "image", "region": region, "public": public}
    return _create_client(**client_opts)
constant[Creates a client for working with Images.]
return[call[name[_create_client], parameter[]]] | keyword[def] identifier[connect_to_images] ( identifier[region] = keyword[None] , identifier[public] = keyword[True] ):
literal[string]
keyword[return] identifier[_create_client] ( identifier[ep_name] = literal[string] , identifier[region] = identifier[region] , identifier[public] = identifier[public] ) | def connect_to_images(region=None, public=True):
"""Creates a client for working with Images."""
return _create_client(ep_name='image', region=region, public=public) |
def _encrypt_xor(a, b, aes):
""" Returns encrypt(a ^ b). """
a = unhexlify("%0.32x" % (int((a), 16) ^ int(hexlify(b), 16)))
return aes.encrypt(a) | def function[_encrypt_xor, parameter[a, b, aes]]:
constant[ Returns encrypt(a ^ b). ]
variable[a] assign[=] call[name[unhexlify], parameter[binary_operation[constant[%0.32x] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[int], parameter[name[a], constant[16]]] <ast.BitXor object at 0x7da2590d6b00> call[name[int], parameter[call[name[hexlify], parameter[name[b]]], constant[16]]]]]]]
return[call[name[aes].encrypt, parameter[name[a]]]] | keyword[def] identifier[_encrypt_xor] ( identifier[a] , identifier[b] , identifier[aes] ):
literal[string]
identifier[a] = identifier[unhexlify] ( literal[string] %( identifier[int] (( identifier[a] ), literal[int] )^ identifier[int] ( identifier[hexlify] ( identifier[b] ), literal[int] )))
keyword[return] identifier[aes] . identifier[encrypt] ( identifier[a] ) | def _encrypt_xor(a, b, aes):
""" Returns encrypt(a ^ b). """
a = unhexlify('%0.32x' % (int(a, 16) ^ int(hexlify(b), 16)))
return aes.encrypt(a) |
def sign_metadata(metadata, key, cert, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1, digest_algorithm=OneLogin_Saml2_Constants.SHA1):
    """Sign SAML metadata with the supplied key/cert pair.

    :param metadata: SAML Metadata XML
    :type metadata: string
    :param key: x509 key
    :type key: string
    :param cert: x509 cert
    :type cert: string
    :param sign_algorithm: Signature algorithm method
    :type sign_algorithm: string
    :param digest_algorithm: Digest algorithm method
    :type digest_algorithm: string
    :returns: Signed Metadata
    :rtype: string
    """
    signed = OneLogin_Saml2_Utils.add_sign(
        metadata, key, cert, False, sign_algorithm, digest_algorithm)
    return signed
constant[
Signs the metadata with the key/cert provided
:param metadata: SAML Metadata XML
:type metadata: string
:param key: x509 key
:type key: string
:param cert: x509 cert
:type cert: string
:returns: Signed Metadata
:rtype: string
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
]
return[call[name[OneLogin_Saml2_Utils].add_sign, parameter[name[metadata], name[key], name[cert], constant[False], name[sign_algorithm], name[digest_algorithm]]]] | keyword[def] identifier[sign_metadata] ( identifier[metadata] , identifier[key] , identifier[cert] , identifier[sign_algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[RSA_SHA1] , identifier[digest_algorithm] = identifier[OneLogin_Saml2_Constants] . identifier[SHA1] ):
literal[string]
keyword[return] identifier[OneLogin_Saml2_Utils] . identifier[add_sign] ( identifier[metadata] , identifier[key] , identifier[cert] , keyword[False] , identifier[sign_algorithm] , identifier[digest_algorithm] ) | def sign_metadata(metadata, key, cert, sign_algorithm=OneLogin_Saml2_Constants.RSA_SHA1, digest_algorithm=OneLogin_Saml2_Constants.SHA1):
"""
Signs the metadata with the key/cert provided
:param metadata: SAML Metadata XML
:type metadata: string
:param key: x509 key
:type key: string
:param cert: x509 cert
:type cert: string
:returns: Signed Metadata
:rtype: string
:param sign_algorithm: Signature algorithm method
:type sign_algorithm: string
:param digest_algorithm: Digest algorithm method
:type digest_algorithm: string
"""
return OneLogin_Saml2_Utils.add_sign(metadata, key, cert, False, sign_algorithm, digest_algorithm) |
def create_dep(self, projects):
    """Prompt the user with a creation dialog and return the new dep.

    :param projects: the projects for the dep
    :type projects: :class:`jukeboxcore.djadapter.models.Project`
    :returns: The created dep or None
    :rtype: None | :class:`jukeboxcore.djadapter.models.Dep`
    :raises: None
    """
    creator = DepCreatorDialog(projects=projects, parent=self)
    creator.exec_()
    return creator.dep
constant[Create and return a new dep
:param projects: the projects for the dep
:type projects: :class:`jukeboxcore.djadapter.models.Project`
:returns: The created dep or None
:rtype: None | :class:`jukeboxcore.djadapter.models.Dep`
:raises: None
]
variable[dialog] assign[=] call[name[DepCreatorDialog], parameter[]]
call[name[dialog].exec_, parameter[]]
variable[dep] assign[=] name[dialog].dep
return[name[dep]] | keyword[def] identifier[create_dep] ( identifier[self] , identifier[projects] ):
literal[string]
identifier[dialog] = identifier[DepCreatorDialog] ( identifier[projects] = identifier[projects] , identifier[parent] = identifier[self] )
identifier[dialog] . identifier[exec_] ()
identifier[dep] = identifier[dialog] . identifier[dep]
keyword[return] identifier[dep] | def create_dep(self, projects):
"""Create and return a new dep
:param projects: the projects for the dep
:type projects: :class:`jukeboxcore.djadapter.models.Project`
:returns: The created dep or None
:rtype: None | :class:`jukeboxcore.djadapter.models.Dep`
:raises: None
"""
dialog = DepCreatorDialog(projects=projects, parent=self)
dialog.exec_()
dep = dialog.dep
return dep |
def render(obj, backend=None, **kwargs):
    """
    Renders the HoloViews object to the corresponding object in the
    specified backend, e.g. a Matplotlib or Bokeh figure.

    The backend defaults to the currently declared default
    backend. The resulting object can then be used with other objects
    in the specified backend. For instance, if you want to make a
    multi-part Bokeh figure using a plot type only available in
    HoloViews, you can use this function to return a Bokeh figure that
    you can use like any hand-constructed Bokeh figure in a Bokeh
    layout.

    Arguments
    ---------
    obj: HoloViews object
        The HoloViews object to render
    backend: string
        A valid HoloViews rendering backend
    **kwargs: dict
        Additional keyword arguments passed to the renderer,
        e.g. fps for animations

    Returns
    -------
    renderered:
        The rendered representation of the HoloViews object, e.g.
        if backend='matplotlib' a matplotlib Figure or FuncAnimation
    """
    backend = backend or Store.current_backend
    renderer_obj = renderer(backend)
    if kwargs:
        renderer_obj = renderer_obj.instance(**kwargs)
    plot = renderer_obj.get_plot(obj)
    if backend == 'matplotlib' and len(plot) > 1:
        return plot.anim(fps=renderer_obj.fps)
    # Fixed: reuse the plot computed above instead of rebuilding it with a
    # second (potentially expensive) get_plot call.
    return plot.state
constant[
Renders the HoloViews object to the corresponding object in the
specified backend, e.g. a Matplotlib or Bokeh figure.
The backend defaults to the currently declared default
backend. The resulting object can then be used with other objects
in the specified backend. For instance, if you want to make a
multi-part Bokeh figure using a plot type only available in
HoloViews, you can use this function to return a Bokeh figure that
you can use like any hand-constructed Bokeh figure in a Bokeh
layout.
Arguments
---------
obj: HoloViews object
The HoloViews object to render
backend: string
A valid HoloViews rendering backend
**kwargs: dict
Additional keyword arguments passed to the renderer,
e.g. fps for animations
Returns
-------
renderered:
The rendered representation of the HoloViews object, e.g.
if backend='matplotlib' a matplotlib Figure or FuncAnimation
]
variable[backend] assign[=] <ast.BoolOp object at 0x7da204622b00>
variable[renderer_obj] assign[=] call[name[renderer], parameter[name[backend]]]
if name[kwargs] begin[:]
variable[renderer_obj] assign[=] call[name[renderer_obj].instance, parameter[]]
variable[plot] assign[=] call[name[renderer_obj].get_plot, parameter[name[obj]]]
if <ast.BoolOp object at 0x7da204620ca0> begin[:]
return[call[name[plot].anim, parameter[]]]
return[call[name[renderer_obj].get_plot, parameter[name[obj]]].state] | keyword[def] identifier[render] ( identifier[obj] , identifier[backend] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[backend] = identifier[backend] keyword[or] identifier[Store] . identifier[current_backend]
identifier[renderer_obj] = identifier[renderer] ( identifier[backend] )
keyword[if] identifier[kwargs] :
identifier[renderer_obj] = identifier[renderer_obj] . identifier[instance] (** identifier[kwargs] )
identifier[plot] = identifier[renderer_obj] . identifier[get_plot] ( identifier[obj] )
keyword[if] identifier[backend] == literal[string] keyword[and] identifier[len] ( identifier[plot] )> literal[int] :
keyword[return] identifier[plot] . identifier[anim] ( identifier[fps] = identifier[renderer_obj] . identifier[fps] )
keyword[return] identifier[renderer_obj] . identifier[get_plot] ( identifier[obj] ). identifier[state] | def render(obj, backend=None, **kwargs):
"""
Renders the HoloViews object to the corresponding object in the
specified backend, e.g. a Matplotlib or Bokeh figure.
The backend defaults to the currently declared default
backend. The resulting object can then be used with other objects
in the specified backend. For instance, if you want to make a
multi-part Bokeh figure using a plot type only available in
HoloViews, you can use this function to return a Bokeh figure that
you can use like any hand-constructed Bokeh figure in a Bokeh
layout.
Arguments
---------
obj: HoloViews object
The HoloViews object to render
backend: string
A valid HoloViews rendering backend
**kwargs: dict
Additional keyword arguments passed to the renderer,
e.g. fps for animations
Returns
-------
renderered:
The rendered representation of the HoloViews object, e.g.
if backend='matplotlib' a matplotlib Figure or FuncAnimation
"""
backend = backend or Store.current_backend
renderer_obj = renderer(backend)
if kwargs:
renderer_obj = renderer_obj.instance(**kwargs) # depends on [control=['if'], data=[]]
plot = renderer_obj.get_plot(obj)
if backend == 'matplotlib' and len(plot) > 1:
return plot.anim(fps=renderer_obj.fps) # depends on [control=['if'], data=[]]
return renderer_obj.get_plot(obj).state |
def openioc_embedding_pred(self, parent, child, ns_mapping):
    """
    Predicate for recognizing inlined content in an XML; to
    be used for DINGO's xml-import hook 'embedded_predicate'.

    Decides whether *child* should be extracted into a separate object.
    Possible return values:

    - False: the child is not to be extracted
    - True: the child is extracted but nothing can be inferred
      about the kind of object extracted
    - a string indicating the object type (here: the 'document'
      attribute of the IndicatorItem's Context element)

    Note: 'parent' and 'child' are XMLNodes as defined by the Python
    libxml2 bindings; see http://mikekneller.com/kb/python/libxml2python/part1
    and the django-dingos core.xml_utils module.
    """
    # Only IndicatorItem elements carrying an 'id' correspond to
    # observables and are extracted.
    item_attrs = extract_attributes(child, prefix_key_char='')
    if 'id' not in item_attrs or child.name != 'IndicatorItem':
        return False

    # An IndicatorItem looks like::
    #
    #   <IndicatorItem id="..." condition="contains">
    #     <Context document="FileItem" search="..." type="mir"/>
    #     <Content type="string">.stub</Content>
    #   </IndicatorItem>
    #
    # The 'document' attribute of the first Context element that carries
    # one serves as the type of the embedded object.
    node = child.children
    while node is not None:
        if node.name == 'Context':
            ctx_attrs = extract_attributes(node, prefix_key_char='')
            if 'document' in ctx_attrs:
                doc_type = ctx_attrs['document']
                # Mirror the original break-then-check: an empty
                # 'document' value still stops the scan and yields True.
                return doc_type if doc_type else True
        node = node.next
    return True
constant[
Predicate for recognizing inlined content in an XML; to
be used for DINGO's xml-import hook 'embedded_predicate'.
The question this predicate must answer is whether
the child should be extracted into a separate object.
The function returns either
- False (the child is not to be extracted)
- True (the child is extracted but nothing can be inferred
about what kind of object is extracted)
- a string giving some indication about the object type
(if nothing else is known: the name of the element, often the
namespace of the embedded object)
- a dictionary, of the following form::
{'id_and_revision_info' : { 'id': something/None,
'ts': something/None,
... other information you want to
record for this object for later usage,
},
'embedded_ns': False/True/some indication about object type as string}
Note: the 'parent' and 'child' arguments are XMLNodes as defined
by the Python libxml2 bindings. If you have never worked with these, have a look at
- Mike Kneller's brief intro: http://mikekneller.com/kb/python/libxml2python/part1
- the functions in django-dingos core.xml_utils module
]
variable[child_attributes] assign[=] call[name[extract_attributes], parameter[name[child]]]
if <ast.BoolOp object at 0x7da18dc05cc0> begin[:]
variable[grandchild] assign[=] name[child].children
variable[type_info] assign[=] constant[None]
while compare[name[grandchild] is_not constant[None]] begin[:]
if compare[name[grandchild].name equal[==] constant[Context]] begin[:]
variable[context_attributes] assign[=] call[name[extract_attributes], parameter[name[grandchild]]]
if compare[constant[document] in name[context_attributes]] begin[:]
variable[type_info] assign[=] call[name[context_attributes]][constant[document]]
break
variable[grandchild] assign[=] name[grandchild].next
if name[type_info] begin[:]
return[name[type_info]] | keyword[def] identifier[openioc_embedding_pred] ( identifier[self] , identifier[parent] , identifier[child] , identifier[ns_mapping] ):
literal[string]
identifier[child_attributes] = identifier[extract_attributes] ( identifier[child] , identifier[prefix_key_char] = literal[string] )
keyword[if] ( literal[string] keyword[in] identifier[child_attributes] keyword[and] identifier[child] . identifier[name] == literal[string] ):
identifier[grandchild] = identifier[child] . identifier[children]
identifier[type_info] = keyword[None]
keyword[while] identifier[grandchild] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[grandchild] . identifier[name] == literal[string] :
identifier[context_attributes] = identifier[extract_attributes] ( identifier[grandchild] , identifier[prefix_key_char] = literal[string] )
keyword[if] literal[string] keyword[in] identifier[context_attributes] :
identifier[type_info] = identifier[context_attributes] [ literal[string] ]
keyword[break]
identifier[grandchild] = identifier[grandchild] . identifier[next]
keyword[if] identifier[type_info] :
keyword[return] identifier[type_info]
keyword[else] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def openioc_embedding_pred(self, parent, child, ns_mapping):
"""
Predicate for recognizing inlined content in an XML; to
be used for DINGO's xml-import hook 'embedded_predicate'.
The question this predicate must answer is whether
the child should be extracted into a separate object.
The function returns either
- False (the child is not to be extracted)
- True (the child is extracted but nothing can be inferred
about what kind of object is extracted)
- a string giving some indication about the object type
(if nothing else is known: the name of the element, often the
namespace of the embedded object)
- a dictionary, of the following form::
{'id_and_revision_info' : { 'id': something/None,
'ts': something/None,
... other information you want to
record for this object for later usage,
},
'embedded_ns': False/True/some indication about object type as string}
Note: the 'parent' and 'child' arguments are XMLNodes as defined
by the Python libxml2 bindings. If you have never worked with these, have a look at
- Mike Kneller's brief intro: http://mikekneller.com/kb/python/libxml2python/part1
- the functions in django-dingos core.xml_utils module
"""
# For openIOC, we extract the Indicator-Item elements,
# since those correspond to observables.
child_attributes = extract_attributes(child, prefix_key_char='')
if 'id' in child_attributes and child.name == 'IndicatorItem':
# The embedding predicate is supposed to not only return
# 'True' or 'False', but in case there is an embedding,
# it should also contain information regarding the type of
# object that is embedded. This is used, for example, to
# create the DataType information for the embedding element
# (it is a reference to an object of type X).
# In OpenIOC, The IndicatorItems have the following form::
#
# <IndicatorItem id="b9ef2559-cc59-4463-81d9-52800545e16e" condition="contains">
# <Context document="FileItem" search="FileItem/PEInfo/Sections/Section/Name" type="mir"/>
# <Content type="string">.stub</Content>
# </IndicatorItem>
#
# We take the 'document' attribute of the 'Context' element as object type
# of the embedded object (as we shall see below, upon import, we rewrite
# the IndicatorItem such that it corresponds to the 'fact_term = value' structure
# used for STIX/CybOX data.
grandchild = child.children
type_info = None
while grandchild is not None:
if grandchild.name == 'Context':
context_attributes = extract_attributes(grandchild, prefix_key_char='')
if 'document' in context_attributes:
type_info = context_attributes['document'] # depends on [control=['if'], data=['context_attributes']]
break # depends on [control=['if'], data=[]]
grandchild = grandchild.next # depends on [control=['while'], data=['grandchild']]
if type_info:
return type_info # depends on [control=['if'], data=[]]
else:
return True # depends on [control=['if'], data=[]]
else:
return False |
def __respond_with_dict(self, data):
    """
    Builds a python dictionary from a json object
    :param data: the json object
    :returns: a nested dictionary
    """
    # Lists are normalized to dicts keyed by their index.
    if isinstance(data, list):
        data = {idx: val for idx, val in enumerate(data)}
    # Drop the protocol sequence counter if present.
    data.pop('seq', None)
    result = {}
    for label, entry in data.items():
        if isinstance(entry, (list, dict)):
            entry = self.__respond_with_dict(entry)
        # Collapse single-entry dicts down to their lone value.
        if isinstance(entry, dict) and len(entry) == 1:
            (_, entry), = entry.items()
        result[label] = entry
    return result
constant[
Builds a python dictionary from a json object
:param data: the json object
:returns: a nested dictionary
]
variable[response] assign[=] dictionary[[], []]
if call[name[isinstance], parameter[name[data], name[list]]] begin[:]
<ast.Tuple object at 0x7da18ede7430> assign[=] tuple[[<ast.Name object at 0x7da18ede4bb0>, <ast.Dict object at 0x7da18ede5720>]]
for taget[tuple[[<ast.Name object at 0x7da18ede4910>, <ast.Name object at 0x7da18ede7640>]]] in starred[call[name[enumerate], parameter[name[temp_data]]]] begin[:]
call[name[data]][name[key]] assign[=] name[value]
call[name[data].pop, parameter[constant[seq], constant[None]]]
for taget[tuple[[<ast.Name object at 0x7da20c6e5990>, <ast.Name object at 0x7da20c6e52a0>]]] in starred[call[name[data].items, parameter[]]] begin[:]
variable[values] assign[=] name[item]
if <ast.BoolOp object at 0x7da20c6e6ce0> begin[:]
variable[values] assign[=] call[name[self].__respond_with_dict, parameter[name[item]]]
if <ast.BoolOp object at 0x7da20c7cb340> begin[:]
<ast.Tuple object at 0x7da20c7ca1a0> assign[=] call[name[values].items, parameter[]]
call[name[response]][name[index]] assign[=] name[values]
return[name[response]] | keyword[def] identifier[__respond_with_dict] ( identifier[self] , identifier[data] ):
literal[string]
identifier[response] ={}
keyword[if] identifier[isinstance] ( identifier[data] , identifier[list] ):
identifier[temp_data] , identifier[data] = identifier[data] ,{}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[enumerate] ( identifier[temp_data] ):
identifier[data] [ identifier[key] ]= identifier[value]
identifier[data] . identifier[pop] ( literal[string] , keyword[None] )
keyword[for] identifier[index] , identifier[item] keyword[in] identifier[data] . identifier[items] ():
identifier[values] = identifier[item]
keyword[if] identifier[isinstance] ( identifier[item] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[item] , identifier[dict] ):
identifier[values] = identifier[self] . identifier[__respond_with_dict] ( identifier[item] )
keyword[if] identifier[isinstance] ( identifier[values] , identifier[dict] ) keyword[and] identifier[len] ( identifier[values] )== literal[int] :
( identifier[key] , identifier[values] ),= identifier[values] . identifier[items] ()
identifier[response] [ identifier[index] ]= identifier[values]
keyword[return] identifier[response] | def __respond_with_dict(self, data):
"""
Builds a python dictionary from a json object
:param data: the json object
:returns: a nested dictionary
"""
response = {}
if isinstance(data, list):
(temp_data, data) = (data, {})
for (key, value) in enumerate(temp_data):
data[key] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
data.pop('seq', None)
for (index, item) in data.items():
values = item
if isinstance(item, list) or isinstance(item, dict):
values = self.__respond_with_dict(item) # depends on [control=['if'], data=[]]
if isinstance(values, dict) and len(values) == 1:
((key, values),) = values.items() # depends on [control=['if'], data=[]]
response[index] = values # depends on [control=['for'], data=[]]
return response |
def show_zoning_enabled_configuration_input_request_type_get_request_zone_name_pattern(self, **kwargs):
    """Build the ``show-zoning-enabled-configuration`` request carrying a
    zone-name pattern, then dispatch it through the callback.

    Expected kwargs:
        zone_name_pattern: glob-style pattern for the zone names to fetch.
        callback: optional dispatcher; defaults to ``self._callback``.

    Returns whatever the callback returns for the assembled element tree.
    """
    # Fixed: the original created an unused ET.Element("config") and
    # immediately rebound the name; the request root is the RPC element.
    show_zoning_enabled_configuration = ET.Element("show_zoning_enabled_configuration")
    config = show_zoning_enabled_configuration
    input = ET.SubElement(show_zoning_enabled_configuration, "input")
    request_type = ET.SubElement(input, "request-type")
    get_request = ET.SubElement(request_type, "get-request")
    zone_name_pattern = ET.SubElement(get_request, "zone-name-pattern")
    zone_name_pattern.text = kwargs.pop('zone_name_pattern')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[show_zoning_enabled_configuration] assign[=] call[name[ET].Element, parameter[constant[show_zoning_enabled_configuration]]]
variable[config] assign[=] name[show_zoning_enabled_configuration]
variable[input] assign[=] call[name[ET].SubElement, parameter[name[show_zoning_enabled_configuration], constant[input]]]
variable[request_type] assign[=] call[name[ET].SubElement, parameter[name[input], constant[request-type]]]
variable[get_request] assign[=] call[name[ET].SubElement, parameter[name[request_type], constant[get-request]]]
variable[zone_name_pattern] assign[=] call[name[ET].SubElement, parameter[name[get_request], constant[zone-name-pattern]]]
name[zone_name_pattern].text assign[=] call[name[kwargs].pop, parameter[constant[zone_name_pattern]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[show_zoning_enabled_configuration_input_request_type_get_request_zone_name_pattern] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[show_zoning_enabled_configuration] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[show_zoning_enabled_configuration]
identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[show_zoning_enabled_configuration] , literal[string] )
identifier[request_type] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] )
identifier[get_request] = identifier[ET] . identifier[SubElement] ( identifier[request_type] , literal[string] )
identifier[zone_name_pattern] = identifier[ET] . identifier[SubElement] ( identifier[get_request] , literal[string] )
identifier[zone_name_pattern] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def show_zoning_enabled_configuration_input_request_type_get_request_zone_name_pattern(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
show_zoning_enabled_configuration = ET.Element('show_zoning_enabled_configuration')
config = show_zoning_enabled_configuration
input = ET.SubElement(show_zoning_enabled_configuration, 'input')
request_type = ET.SubElement(input, 'request-type')
get_request = ET.SubElement(request_type, 'get-request')
zone_name_pattern = ET.SubElement(get_request, 'zone-name-pattern')
zone_name_pattern.text = kwargs.pop('zone_name_pattern')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def merge(self, other, successor=None):
"""
Merge two abstract states.
For any node A whose dominance frontier that the current node (at the current program location) belongs to, we
create a phi variable V' for each variable V that is defined in A, and then replace all existence of V with V'
in the merged abstract state.
:param VariableRecoveryState other: The other abstract state to merge.
:return: The merged abstract state.
:rtype: VariableRecoveryState
"""
replacements = {}
if successor in self.dominance_frontiers:
replacements = self._make_phi_variables(successor, self, other)
merged_stack_region = self.stack_region.copy().replace(replacements).merge(other.stack_region,
replacements=replacements)
merged_register_region = self.register_region.copy().replace(replacements).merge(other.register_region,
replacements=replacements)
state = VariableRecoveryFastState(
successor,
self._analysis,
self.arch,
self.function,
stack_region=merged_stack_region,
register_region=merged_register_region,
processor_state=self.processor_state.copy().merge(other.processor_state),
)
return state | def function[merge, parameter[self, other, successor]]:
constant[
Merge two abstract states.
For any node A whose dominance frontier that the current node (at the current program location) belongs to, we
create a phi variable V' for each variable V that is defined in A, and then replace all existence of V with V'
in the merged abstract state.
:param VariableRecoveryState other: The other abstract state to merge.
:return: The merged abstract state.
:rtype: VariableRecoveryState
]
variable[replacements] assign[=] dictionary[[], []]
if compare[name[successor] in name[self].dominance_frontiers] begin[:]
variable[replacements] assign[=] call[name[self]._make_phi_variables, parameter[name[successor], name[self], name[other]]]
variable[merged_stack_region] assign[=] call[call[call[name[self].stack_region.copy, parameter[]].replace, parameter[name[replacements]]].merge, parameter[name[other].stack_region]]
variable[merged_register_region] assign[=] call[call[call[name[self].register_region.copy, parameter[]].replace, parameter[name[replacements]]].merge, parameter[name[other].register_region]]
variable[state] assign[=] call[name[VariableRecoveryFastState], parameter[name[successor], name[self]._analysis, name[self].arch, name[self].function]]
return[name[state]] | keyword[def] identifier[merge] ( identifier[self] , identifier[other] , identifier[successor] = keyword[None] ):
literal[string]
identifier[replacements] ={}
keyword[if] identifier[successor] keyword[in] identifier[self] . identifier[dominance_frontiers] :
identifier[replacements] = identifier[self] . identifier[_make_phi_variables] ( identifier[successor] , identifier[self] , identifier[other] )
identifier[merged_stack_region] = identifier[self] . identifier[stack_region] . identifier[copy] (). identifier[replace] ( identifier[replacements] ). identifier[merge] ( identifier[other] . identifier[stack_region] ,
identifier[replacements] = identifier[replacements] )
identifier[merged_register_region] = identifier[self] . identifier[register_region] . identifier[copy] (). identifier[replace] ( identifier[replacements] ). identifier[merge] ( identifier[other] . identifier[register_region] ,
identifier[replacements] = identifier[replacements] )
identifier[state] = identifier[VariableRecoveryFastState] (
identifier[successor] ,
identifier[self] . identifier[_analysis] ,
identifier[self] . identifier[arch] ,
identifier[self] . identifier[function] ,
identifier[stack_region] = identifier[merged_stack_region] ,
identifier[register_region] = identifier[merged_register_region] ,
identifier[processor_state] = identifier[self] . identifier[processor_state] . identifier[copy] (). identifier[merge] ( identifier[other] . identifier[processor_state] ),
)
keyword[return] identifier[state] | def merge(self, other, successor=None):
"""
Merge two abstract states.
For any node A whose dominance frontier that the current node (at the current program location) belongs to, we
create a phi variable V' for each variable V that is defined in A, and then replace all existence of V with V'
in the merged abstract state.
:param VariableRecoveryState other: The other abstract state to merge.
:return: The merged abstract state.
:rtype: VariableRecoveryState
"""
replacements = {}
if successor in self.dominance_frontiers:
replacements = self._make_phi_variables(successor, self, other) # depends on [control=['if'], data=['successor']]
merged_stack_region = self.stack_region.copy().replace(replacements).merge(other.stack_region, replacements=replacements)
merged_register_region = self.register_region.copy().replace(replacements).merge(other.register_region, replacements=replacements)
state = VariableRecoveryFastState(successor, self._analysis, self.arch, self.function, stack_region=merged_stack_region, register_region=merged_register_region, processor_state=self.processor_state.copy().merge(other.processor_state))
return state |
def make_article_info_dates(self):
"""
Makes the section containing important dates for the article: typically
Received, Accepted, and Published.
"""
dates_div = etree.Element('div', {'id': 'article-dates'})
d = './front/article-meta/history/date'
received = self.article.root.xpath(d + "[@date-type='received']")
accepted = self.article.root.xpath(d + "[@date-type='accepted']")
if received:
b = etree.SubElement(dates_div, 'b')
b.text = 'Received: '
dt = self.date_tuple_from_date(received[0], 'Received')
formatted_date_string = self.format_date_string(dt)
append_new_text(dates_div, formatted_date_string + '; ')
if accepted:
b = etree.SubElement(dates_div, 'b')
b.text = 'Accepted: '
dt = self.date_tuple_from_date(accepted[0], 'Accepted')
formatted_date_string = self.format_date_string(dt)
append_new_text(dates_div, formatted_date_string + '; ')
#Published date is required
pub_date = self.article.root.xpath("./front/article-meta/pub-date[@pub-type='epub']")[0]
b = etree.SubElement(dates_div, 'b')
b.text = 'Published: '
dt = self.date_tuple_from_date(pub_date, 'Published')
formatted_date_string = self.format_date_string(dt)
append_new_text(dates_div, formatted_date_string)
return dates_div | def function[make_article_info_dates, parameter[self]]:
constant[
Makes the section containing important dates for the article: typically
Received, Accepted, and Published.
]
variable[dates_div] assign[=] call[name[etree].Element, parameter[constant[div], dictionary[[<ast.Constant object at 0x7da18dc9a7a0>], [<ast.Constant object at 0x7da18dc9a860>]]]]
variable[d] assign[=] constant[./front/article-meta/history/date]
variable[received] assign[=] call[name[self].article.root.xpath, parameter[binary_operation[name[d] + constant[[@date-type='received']]]]]
variable[accepted] assign[=] call[name[self].article.root.xpath, parameter[binary_operation[name[d] + constant[[@date-type='accepted']]]]]
if name[received] begin[:]
variable[b] assign[=] call[name[etree].SubElement, parameter[name[dates_div], constant[b]]]
name[b].text assign[=] constant[Received: ]
variable[dt] assign[=] call[name[self].date_tuple_from_date, parameter[call[name[received]][constant[0]], constant[Received]]]
variable[formatted_date_string] assign[=] call[name[self].format_date_string, parameter[name[dt]]]
call[name[append_new_text], parameter[name[dates_div], binary_operation[name[formatted_date_string] + constant[; ]]]]
if name[accepted] begin[:]
variable[b] assign[=] call[name[etree].SubElement, parameter[name[dates_div], constant[b]]]
name[b].text assign[=] constant[Accepted: ]
variable[dt] assign[=] call[name[self].date_tuple_from_date, parameter[call[name[accepted]][constant[0]], constant[Accepted]]]
variable[formatted_date_string] assign[=] call[name[self].format_date_string, parameter[name[dt]]]
call[name[append_new_text], parameter[name[dates_div], binary_operation[name[formatted_date_string] + constant[; ]]]]
variable[pub_date] assign[=] call[call[name[self].article.root.xpath, parameter[constant[./front/article-meta/pub-date[@pub-type='epub']]]]][constant[0]]
variable[b] assign[=] call[name[etree].SubElement, parameter[name[dates_div], constant[b]]]
name[b].text assign[=] constant[Published: ]
variable[dt] assign[=] call[name[self].date_tuple_from_date, parameter[name[pub_date], constant[Published]]]
variable[formatted_date_string] assign[=] call[name[self].format_date_string, parameter[name[dt]]]
call[name[append_new_text], parameter[name[dates_div], name[formatted_date_string]]]
return[name[dates_div]] | keyword[def] identifier[make_article_info_dates] ( identifier[self] ):
literal[string]
identifier[dates_div] = identifier[etree] . identifier[Element] ( literal[string] ,{ literal[string] : literal[string] })
identifier[d] = literal[string]
identifier[received] = identifier[self] . identifier[article] . identifier[root] . identifier[xpath] ( identifier[d] + literal[string] )
identifier[accepted] = identifier[self] . identifier[article] . identifier[root] . identifier[xpath] ( identifier[d] + literal[string] )
keyword[if] identifier[received] :
identifier[b] = identifier[etree] . identifier[SubElement] ( identifier[dates_div] , literal[string] )
identifier[b] . identifier[text] = literal[string]
identifier[dt] = identifier[self] . identifier[date_tuple_from_date] ( identifier[received] [ literal[int] ], literal[string] )
identifier[formatted_date_string] = identifier[self] . identifier[format_date_string] ( identifier[dt] )
identifier[append_new_text] ( identifier[dates_div] , identifier[formatted_date_string] + literal[string] )
keyword[if] identifier[accepted] :
identifier[b] = identifier[etree] . identifier[SubElement] ( identifier[dates_div] , literal[string] )
identifier[b] . identifier[text] = literal[string]
identifier[dt] = identifier[self] . identifier[date_tuple_from_date] ( identifier[accepted] [ literal[int] ], literal[string] )
identifier[formatted_date_string] = identifier[self] . identifier[format_date_string] ( identifier[dt] )
identifier[append_new_text] ( identifier[dates_div] , identifier[formatted_date_string] + literal[string] )
identifier[pub_date] = identifier[self] . identifier[article] . identifier[root] . identifier[xpath] ( literal[string] )[ literal[int] ]
identifier[b] = identifier[etree] . identifier[SubElement] ( identifier[dates_div] , literal[string] )
identifier[b] . identifier[text] = literal[string]
identifier[dt] = identifier[self] . identifier[date_tuple_from_date] ( identifier[pub_date] , literal[string] )
identifier[formatted_date_string] = identifier[self] . identifier[format_date_string] ( identifier[dt] )
identifier[append_new_text] ( identifier[dates_div] , identifier[formatted_date_string] )
keyword[return] identifier[dates_div] | def make_article_info_dates(self):
"""
Makes the section containing important dates for the article: typically
Received, Accepted, and Published.
"""
dates_div = etree.Element('div', {'id': 'article-dates'})
d = './front/article-meta/history/date'
received = self.article.root.xpath(d + "[@date-type='received']")
accepted = self.article.root.xpath(d + "[@date-type='accepted']")
if received:
b = etree.SubElement(dates_div, 'b')
b.text = 'Received: '
dt = self.date_tuple_from_date(received[0], 'Received')
formatted_date_string = self.format_date_string(dt)
append_new_text(dates_div, formatted_date_string + '; ') # depends on [control=['if'], data=[]]
if accepted:
b = etree.SubElement(dates_div, 'b')
b.text = 'Accepted: '
dt = self.date_tuple_from_date(accepted[0], 'Accepted')
formatted_date_string = self.format_date_string(dt)
append_new_text(dates_div, formatted_date_string + '; ') # depends on [control=['if'], data=[]]
#Published date is required
pub_date = self.article.root.xpath("./front/article-meta/pub-date[@pub-type='epub']")[0]
b = etree.SubElement(dates_div, 'b')
b.text = 'Published: '
dt = self.date_tuple_from_date(pub_date, 'Published')
formatted_date_string = self.format_date_string(dt)
append_new_text(dates_div, formatted_date_string)
return dates_div |
def process(self, context, internal_response):
"""
Manage consent and attribute filtering
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response
"""
consent_state = context.state[STATE_KEY]
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_state["filter"])
id_hash = self._get_consent_id(internal_response.requester, internal_response.subject_id,
internal_response.attributes)
try:
# Check if consent is already given
consent_attributes = self._verify_consent(id_hash)
except requests.exceptions.ConnectionError as e:
satosa_logging(logger, logging.ERROR,
"Consent service is not reachable, no consent given.", context.state)
# Send an internal_response without any attributes
internal_response.attributes = {}
return self._end_consent(context, internal_response)
# Previous consent was given
if consent_attributes is not None:
satosa_logging(logger, logging.DEBUG, "Previous consent was given", context.state)
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_attributes)
return self._end_consent(context, internal_response)
# No previous consent, request consent by user
return self._approve_new_consent(context, internal_response, id_hash) | def function[process, parameter[self, context, internal_response]]:
constant[
Manage consent and attribute filtering
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response
]
variable[consent_state] assign[=] call[name[context].state][name[STATE_KEY]]
name[internal_response].attributes assign[=] call[name[self]._filter_attributes, parameter[name[internal_response].attributes, call[name[consent_state]][constant[filter]]]]
variable[id_hash] assign[=] call[name[self]._get_consent_id, parameter[name[internal_response].requester, name[internal_response].subject_id, name[internal_response].attributes]]
<ast.Try object at 0x7da1b153e860>
if compare[name[consent_attributes] is_not constant[None]] begin[:]
call[name[satosa_logging], parameter[name[logger], name[logging].DEBUG, constant[Previous consent was given], name[context].state]]
name[internal_response].attributes assign[=] call[name[self]._filter_attributes, parameter[name[internal_response].attributes, name[consent_attributes]]]
return[call[name[self]._end_consent, parameter[name[context], name[internal_response]]]]
return[call[name[self]._approve_new_consent, parameter[name[context], name[internal_response], name[id_hash]]]] | keyword[def] identifier[process] ( identifier[self] , identifier[context] , identifier[internal_response] ):
literal[string]
identifier[consent_state] = identifier[context] . identifier[state] [ identifier[STATE_KEY] ]
identifier[internal_response] . identifier[attributes] = identifier[self] . identifier[_filter_attributes] ( identifier[internal_response] . identifier[attributes] , identifier[consent_state] [ literal[string] ])
identifier[id_hash] = identifier[self] . identifier[_get_consent_id] ( identifier[internal_response] . identifier[requester] , identifier[internal_response] . identifier[subject_id] ,
identifier[internal_response] . identifier[attributes] )
keyword[try] :
identifier[consent_attributes] = identifier[self] . identifier[_verify_consent] ( identifier[id_hash] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[ERROR] ,
literal[string] , identifier[context] . identifier[state] )
identifier[internal_response] . identifier[attributes] ={}
keyword[return] identifier[self] . identifier[_end_consent] ( identifier[context] , identifier[internal_response] )
keyword[if] identifier[consent_attributes] keyword[is] keyword[not] keyword[None] :
identifier[satosa_logging] ( identifier[logger] , identifier[logging] . identifier[DEBUG] , literal[string] , identifier[context] . identifier[state] )
identifier[internal_response] . identifier[attributes] = identifier[self] . identifier[_filter_attributes] ( identifier[internal_response] . identifier[attributes] , identifier[consent_attributes] )
keyword[return] identifier[self] . identifier[_end_consent] ( identifier[context] , identifier[internal_response] )
keyword[return] identifier[self] . identifier[_approve_new_consent] ( identifier[context] , identifier[internal_response] , identifier[id_hash] ) | def process(self, context, internal_response):
"""
Manage consent and attribute filtering
:type context: satosa.context.Context
:type internal_response: satosa.internal.InternalData
:rtype: satosa.response.Response
:param context: response context
:param internal_response: the response
:return: response
"""
consent_state = context.state[STATE_KEY]
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_state['filter'])
id_hash = self._get_consent_id(internal_response.requester, internal_response.subject_id, internal_response.attributes)
try:
# Check if consent is already given
consent_attributes = self._verify_consent(id_hash) # depends on [control=['try'], data=[]]
except requests.exceptions.ConnectionError as e:
satosa_logging(logger, logging.ERROR, 'Consent service is not reachable, no consent given.', context.state)
# Send an internal_response without any attributes
internal_response.attributes = {}
return self._end_consent(context, internal_response) # depends on [control=['except'], data=[]]
# Previous consent was given
if consent_attributes is not None:
satosa_logging(logger, logging.DEBUG, 'Previous consent was given', context.state)
internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_attributes)
return self._end_consent(context, internal_response) # depends on [control=['if'], data=['consent_attributes']]
# No previous consent, request consent by user
return self._approve_new_consent(context, internal_response, id_hash) |
def intr_write(self, dev_handle, ep, intf, data, timeout):
r"""Perform an interrupt write.
dev_handle is the value returned by the open_device() method.
The ep parameter is the bEndpointAddress field whose endpoint
the data will be sent to. intf is the bInterfaceNumber field
of the interface containing the endpoint. The data parameter
is the data to be sent. It must be an instance of the array.array
class. The timeout parameter specifies a time limit to the operation
in miliseconds.
The method returns the number of bytes written.
"""
_not_implemented(self.intr_write) | def function[intr_write, parameter[self, dev_handle, ep, intf, data, timeout]]:
constant[Perform an interrupt write.
dev_handle is the value returned by the open_device() method.
The ep parameter is the bEndpointAddress field whose endpoint
the data will be sent to. intf is the bInterfaceNumber field
of the interface containing the endpoint. The data parameter
is the data to be sent. It must be an instance of the array.array
class. The timeout parameter specifies a time limit to the operation
in miliseconds.
The method returns the number of bytes written.
]
call[name[_not_implemented], parameter[name[self].intr_write]] | keyword[def] identifier[intr_write] ( identifier[self] , identifier[dev_handle] , identifier[ep] , identifier[intf] , identifier[data] , identifier[timeout] ):
literal[string]
identifier[_not_implemented] ( identifier[self] . identifier[intr_write] ) | def intr_write(self, dev_handle, ep, intf, data, timeout):
"""Perform an interrupt write.
dev_handle is the value returned by the open_device() method.
The ep parameter is the bEndpointAddress field whose endpoint
the data will be sent to. intf is the bInterfaceNumber field
of the interface containing the endpoint. The data parameter
is the data to be sent. It must be an instance of the array.array
class. The timeout parameter specifies a time limit to the operation
in miliseconds.
The method returns the number of bytes written.
"""
_not_implemented(self.intr_write) |
def schema(self):
"""List[google.cloud.bigquery.schema.SchemaField]: Schema of the
destination table.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema
"""
schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"])
if schema is None:
return
return [SchemaField.from_api_repr(field) for field in schema] | def function[schema, parameter[self]]:
constant[List[google.cloud.bigquery.schema.SchemaField]: Schema of the
destination table.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema
]
variable[schema] assign[=] call[name[_helpers]._get_sub_prop, parameter[name[self]._properties, list[[<ast.Constant object at 0x7da20c6e52d0>, <ast.Constant object at 0x7da20c6e6020>, <ast.Constant object at 0x7da20c6e4340>]]]]
if compare[name[schema] is constant[None]] begin[:]
return[None]
return[<ast.ListComp object at 0x7da20c6e6fb0>] | keyword[def] identifier[schema] ( identifier[self] ):
literal[string]
identifier[schema] = identifier[_helpers] . identifier[_get_sub_prop] ( identifier[self] . identifier[_properties] ,[ literal[string] , literal[string] , literal[string] ])
keyword[if] identifier[schema] keyword[is] keyword[None] :
keyword[return]
keyword[return] [ identifier[SchemaField] . identifier[from_api_repr] ( identifier[field] ) keyword[for] identifier[field] keyword[in] identifier[schema] ] | def schema(self):
"""List[google.cloud.bigquery.schema.SchemaField]: Schema of the
destination table.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema
"""
schema = _helpers._get_sub_prop(self._properties, ['load', 'schema', 'fields'])
if schema is None:
return # depends on [control=['if'], data=[]]
return [SchemaField.from_api_repr(field) for field in schema] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.