code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def minimize(self, time, variables, **kwargs):
    """
    Performs an optimization step.

    Args:
        time: Time tensor.
        variables: List of variables to optimize.
        **kwargs: Additional optimizer-specific arguments. The following arguments are used
            by some optimizers:
            - arguments: Dict of arguments for callables, like fn_loss.
            - fn_loss: A callable returning the loss of the current model.
            - fn_reference: A callable returning the reference values, in case of a comparative
                loss.
            - fn_kl_divergence: A callable returning the KL-divergence relative to the
                current model.
            - sampled_loss: A sampled loss (integer).
            - return_estimated_improvement: Returns the estimated improvement resulting from
                the natural gradient calculation if true.
            - source_variables: List of source variables to synchronize with.
            - global_variables: List of global variables to apply the proposed optimization
                step to.

    Returns:
        The optimization operation.
    """
    # NOTE(review): disabled gradient-summary instrumentation below, kept
    # commented out for reference; remove once summaries are reimplemented.
    # # Add training variable gradient histograms/scalars to summary output
    # # if 'gradients' in self.summary_labels:
    # if any(k in self.summary_labels for k in ['gradients', 'gradients_histogram', 'gradients_scalar']):
    #     valid = True
    #     if isinstance(self, tensorforce.core.optimizers.TFOptimizer):
    #         gradients = self.optimizer.compute_gradients(kwargs['fn_loss']())
    #     elif isinstance(self.optimizer, tensorforce.core.optimizers.TFOptimizer):
    #         # This section handles "Multi_step" and may handle others
    #         # if failure is found, add another elif to handle that case
    #         gradients = self.optimizer.optimizer.compute_gradients(kwargs['fn_loss']())
    #     else:
    #         # Didn't find proper gradient information
    #         valid = False
    #     # Valid gradient data found, create summary data items
    #     if valid:
    #         for grad, var in gradients:
    #             if grad is not None:
    #                 if any(k in self.summary_labels for k in ('gradients', 'gradients_scalar')):
    #                     axes = list(range(len(grad.shape)))
    #                     mean, var = tf.nn.moments(grad, axes)
    #                     tf.contrib.summary.scalar(name='gradients/' + var.name + "/mean", tensor=mean)
    #                     tf.contrib.summary.scalar(name='gradients/' + var.name + "/variance", tensor=var)
    #                 if any(k in self.summary_labels for k in ('gradients', 'gradients_histogram')):
    #                     tf.contrib.summary.histogram(name='gradients/' + var.name, tensor=grad)
    # Delegate computation of the per-variable update ops to the
    # optimizer-specific step() implementation.
    deltas = self.step(time=time, variables=variables, **kwargs)
    # Gate the returned op on all delta ops so the caller receives a single
    # operation that completes only after every update has been applied.
    with tf.control_dependencies(control_inputs=deltas):
return tf.no_op() | def function[minimize, parameter[self, time, variables]]:
constant[
Performs an optimization step.
Args:
time: Time tensor.
variables: List of variables to optimize.
**kwargs: Additional optimizer-specific arguments. The following arguments are used
by some optimizers:
- arguments: Dict of arguments for callables, like fn_loss.
- fn_loss: A callable returning the loss of the current model.
- fn_reference: A callable returning the reference values, in case of a comparative
loss.
- fn_kl_divergence: A callable returning the KL-divergence relative to the
current model.
- sampled_loss: A sampled loss (integer).
- return_estimated_improvement: Returns the estimated improvement resulting from
the natural gradient calculation if true.
- source_variables: List of source variables to synchronize with.
- global_variables: List of global variables to apply the proposed optimization
step to.
Returns:
The optimization operation.
]
variable[deltas] assign[=] call[name[self].step, parameter[]]
with call[name[tf].control_dependencies, parameter[]] begin[:]
return[call[name[tf].no_op, parameter[]]] | keyword[def] identifier[minimize] ( identifier[self] , identifier[time] , identifier[variables] ,** identifier[kwargs] ):
literal[string]
identifier[deltas] = identifier[self] . identifier[step] ( identifier[time] = identifier[time] , identifier[variables] = identifier[variables] ,** identifier[kwargs] )
keyword[with] identifier[tf] . identifier[control_dependencies] ( identifier[control_inputs] = identifier[deltas] ):
keyword[return] identifier[tf] . identifier[no_op] () | def minimize(self, time, variables, **kwargs):
"""
Performs an optimization step.
Args:
time: Time tensor.
variables: List of variables to optimize.
**kwargs: Additional optimizer-specific arguments. The following arguments are used
by some optimizers:
- arguments: Dict of arguments for callables, like fn_loss.
- fn_loss: A callable returning the loss of the current model.
- fn_reference: A callable returning the reference values, in case of a comparative
loss.
- fn_kl_divergence: A callable returning the KL-divergence relative to the
current model.
- sampled_loss: A sampled loss (integer).
- return_estimated_improvement: Returns the estimated improvement resulting from
the natural gradient calculation if true.
- source_variables: List of source variables to synchronize with.
- global_variables: List of global variables to apply the proposed optimization
step to.
Returns:
The optimization operation.
"""
# # Add training variable gradient histograms/scalars to summary output
# # if 'gradients' in self.summary_labels:
# if any(k in self.summary_labels for k in ['gradients', 'gradients_histogram', 'gradients_scalar']):
# valid = True
# if isinstance(self, tensorforce.core.optimizers.TFOptimizer):
# gradients = self.optimizer.compute_gradients(kwargs['fn_loss']())
# elif isinstance(self.optimizer, tensorforce.core.optimizers.TFOptimizer):
# # This section handles "Multi_step" and may handle others
# # if failure is found, add another elif to handle that case
# gradients = self.optimizer.optimizer.compute_gradients(kwargs['fn_loss']())
# else:
# # Didn't find proper gradient information
# valid = False
# # Valid gradient data found, create summary data items
# if valid:
# for grad, var in gradients:
# if grad is not None:
# if any(k in self.summary_labels for k in ('gradients', 'gradients_scalar')):
# axes = list(range(len(grad.shape)))
# mean, var = tf.nn.moments(grad, axes)
# tf.contrib.summary.scalar(name='gradients/' + var.name + "/mean", tensor=mean)
# tf.contrib.summary.scalar(name='gradients/' + var.name + "/variance", tensor=var)
# if any(k in self.summary_labels for k in ('gradients', 'gradients_histogram')):
# tf.contrib.summary.histogram(name='gradients/' + var.name, tensor=grad)
deltas = self.step(time=time, variables=variables, **kwargs)
with tf.control_dependencies(control_inputs=deltas):
return tf.no_op() # depends on [control=['with'], data=[]] |
def perform(self):
    """Runs through all of the steps in the chain and runs each of them in sequence.

    :return: The value from the last "do" step performed
    """
    last_value = None  # result of the most recent "do" step
    last_step = None   # message of the most recent "do" step, used in error text
    while self.items.qsize():
        item = self.items.get()
        if item.flag == self.do:
            # Execute the step and remember its result for later expectations.
            last_value = item.item(*item.args, **item.kwargs)
            last_step = item.message
        elif item.flag == self.expect:
            message = item.message
            local = {'value': last_value, 'expectation': item.item}
            expression = 'value {operator} expectation'.format(operator=item.operator)
            # NOTE(review): eval() of a dynamically built expression; item.operator
            # must come from trusted chain configuration only -- verify.
            result = eval(expression, local)
            # Format the error message
            format_vars = {
                'actual': last_value,
                'expected': item.item,
                'step': last_step,
                'operator': item.operator
            }
            # NOTE(review): dict.iteritems() is Python 2 only; this module
            # appears to target Python 2 -- confirm before porting.
            for var, val in format_vars.iteritems():
                # Substitute {actual}/{expected}/{step}/{operator} placeholders.
                message = message.replace('{' + str(var) + '}', str(val))
            assert result, message
return last_value | def function[perform, parameter[self]]:
constant[Runs through all of the steps in the chain and runs each of them in sequence.
:return: The value from the lat "do" step performed
]
variable[last_value] assign[=] constant[None]
variable[last_step] assign[=] constant[None]
while call[name[self].items.qsize, parameter[]] begin[:]
variable[item] assign[=] call[name[self].items.get, parameter[]]
if compare[name[item].flag equal[==] name[self].do] begin[:]
variable[last_value] assign[=] call[name[item].item, parameter[<ast.Starred object at 0x7da1b27eb2e0>]]
variable[last_step] assign[=] name[item].message
return[name[last_value]] | keyword[def] identifier[perform] ( identifier[self] ):
literal[string]
identifier[last_value] = keyword[None]
identifier[last_step] = keyword[None]
keyword[while] identifier[self] . identifier[items] . identifier[qsize] ():
identifier[item] = identifier[self] . identifier[items] . identifier[get] ()
keyword[if] identifier[item] . identifier[flag] == identifier[self] . identifier[do] :
identifier[last_value] = identifier[item] . identifier[item] (* identifier[item] . identifier[args] ,** identifier[item] . identifier[kwargs] )
identifier[last_step] = identifier[item] . identifier[message]
keyword[elif] identifier[item] . identifier[flag] == identifier[self] . identifier[expect] :
identifier[message] = identifier[item] . identifier[message]
identifier[local] ={ literal[string] : identifier[last_value] , literal[string] : identifier[item] . identifier[item] }
identifier[expression] = literal[string] . identifier[format] ( identifier[operator] = identifier[item] . identifier[operator] )
identifier[result] = identifier[eval] ( identifier[expression] , identifier[local] )
identifier[format_vars] ={
literal[string] : identifier[last_value] ,
literal[string] : identifier[item] . identifier[item] ,
literal[string] : identifier[last_step] ,
literal[string] : identifier[item] . identifier[operator]
}
keyword[for] identifier[var] , identifier[val] keyword[in] identifier[format_vars] . identifier[iteritems] ():
identifier[message] = identifier[message] . identifier[replace] ( literal[string] + identifier[str] ( identifier[var] )+ literal[string] , identifier[str] ( identifier[val] ))
keyword[assert] identifier[result] , identifier[message]
keyword[return] identifier[last_value] | def perform(self):
"""Runs through all of the steps in the chain and runs each of them in sequence.
:return: The value from the lat "do" step performed
"""
last_value = None
last_step = None
while self.items.qsize():
item = self.items.get()
if item.flag == self.do:
last_value = item.item(*item.args, **item.kwargs)
last_step = item.message # depends on [control=['if'], data=[]]
elif item.flag == self.expect:
message = item.message
local = {'value': last_value, 'expectation': item.item}
expression = 'value {operator} expectation'.format(operator=item.operator)
result = eval(expression, local)
# Format the error message
format_vars = {'actual': last_value, 'expected': item.item, 'step': last_step, 'operator': item.operator}
for (var, val) in format_vars.iteritems():
message = message.replace('{' + str(var) + '}', str(val)) # depends on [control=['for'], data=[]]
assert result, message # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return last_value |
def json(
    self,
    *,
    include: 'SetStr' = None,
    exclude: 'SetStr' = None,
    by_alias: bool = False,
    skip_defaults: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    **dumps_kwargs: Any,
) -> str:
    """
    Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.

    `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
    """
    # Fall back to the model's configured JSON encoder when none was supplied;
    # cast() is a no-op at runtime and only narrows the type for checkers.
    encoder = cast(Callable[[Any], Any], encoder or self._json_encoder)
    # Serialize the filtered dict() representation, letting `encoder` handle
    # any values json.dumps cannot natively encode.
    return json.dumps(
        self.dict(include=include, exclude=exclude, by_alias=by_alias, skip_defaults=skip_defaults),
        default=encoder,
        **dumps_kwargs,
) | def function[json, parameter[self]]:
constant[
Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.
`encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
]
variable[encoder] assign[=] call[name[cast], parameter[call[name[Callable]][tuple[[<ast.List object at 0x7da18eb540a0>, <ast.Name object at 0x7da18eb545e0>]]], <ast.BoolOp object at 0x7da18eb557b0>]]
return[call[name[json].dumps, parameter[call[name[self].dict, parameter[]]]]] | keyword[def] identifier[json] (
identifier[self] ,
*,
identifier[include] : literal[string] = keyword[None] ,
identifier[exclude] : literal[string] = keyword[None] ,
identifier[by_alias] : identifier[bool] = keyword[False] ,
identifier[skip_defaults] : identifier[bool] = keyword[False] ,
identifier[encoder] : identifier[Optional] [ identifier[Callable] [[ identifier[Any] ], identifier[Any] ]]= keyword[None] ,
** identifier[dumps_kwargs] : identifier[Any] ,
)-> identifier[str] :
literal[string]
identifier[encoder] = identifier[cast] ( identifier[Callable] [[ identifier[Any] ], identifier[Any] ], identifier[encoder] keyword[or] identifier[self] . identifier[_json_encoder] )
keyword[return] identifier[json] . identifier[dumps] (
identifier[self] . identifier[dict] ( identifier[include] = identifier[include] , identifier[exclude] = identifier[exclude] , identifier[by_alias] = identifier[by_alias] , identifier[skip_defaults] = identifier[skip_defaults] ),
identifier[default] = identifier[encoder] ,
** identifier[dumps_kwargs] ,
) | def json(self, *, include: 'SetStr'=None, exclude: 'SetStr'=None, by_alias: bool=False, skip_defaults: bool=False, encoder: Optional[Callable[[Any], Any]]=None, **dumps_kwargs: Any) -> str:
"""
Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.
`encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
"""
encoder = cast(Callable[[Any], Any], encoder or self._json_encoder)
return json.dumps(self.dict(include=include, exclude=exclude, by_alias=by_alias, skip_defaults=skip_defaults), default=encoder, **dumps_kwargs) |
def total_statements(self, filename=None):
    """
    Return the total number of statements for the file
    `filename`. If `filename` is not given, return the total
    number of statements for all files.
    """
    if filename is not None:
        # Single-file case: count only that file's recorded statement lines.
        statements = self._get_lines_by_filename(filename)
        return len(statements)
    # Aggregate case: sum the statement counts over every known file.
    total = 0
    for filename in self.files():
        statements = self._get_lines_by_filename(filename)
        total += len(statements)
return total | def function[total_statements, parameter[self, filename]]:
constant[
Return the total number of statements for the file
`filename`. If `filename` is not given, return the total
number of statements for all files.
]
if compare[name[filename] is_not constant[None]] begin[:]
variable[statements] assign[=] call[name[self]._get_lines_by_filename, parameter[name[filename]]]
return[call[name[len], parameter[name[statements]]]]
variable[total] assign[=] constant[0]
for taget[name[filename]] in starred[call[name[self].files, parameter[]]] begin[:]
variable[statements] assign[=] call[name[self]._get_lines_by_filename, parameter[name[filename]]]
<ast.AugAssign object at 0x7da1b0fc56c0>
return[name[total]] | keyword[def] identifier[total_statements] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
keyword[if] identifier[filename] keyword[is] keyword[not] keyword[None] :
identifier[statements] = identifier[self] . identifier[_get_lines_by_filename] ( identifier[filename] )
keyword[return] identifier[len] ( identifier[statements] )
identifier[total] = literal[int]
keyword[for] identifier[filename] keyword[in] identifier[self] . identifier[files] ():
identifier[statements] = identifier[self] . identifier[_get_lines_by_filename] ( identifier[filename] )
identifier[total] += identifier[len] ( identifier[statements] )
keyword[return] identifier[total] | def total_statements(self, filename=None):
"""
Return the total number of statements for the file
`filename`. If `filename` is not given, return the total
number of statements for all files.
"""
if filename is not None:
statements = self._get_lines_by_filename(filename)
return len(statements) # depends on [control=['if'], data=['filename']]
total = 0
for filename in self.files():
statements = self._get_lines_by_filename(filename)
total += len(statements) # depends on [control=['for'], data=['filename']]
return total |
def hide_routemap_holder_route_map_content_set_extcommunity_rt_ASN_NN_rt(self, **kwargs):
    """Auto Generated Code.

    Builds the XML configuration request for the extended-community
    route-target (ASN-NN-rt) "set" clause of a route-map and dispatches it
    via the configured callback.

    Required kwargs: ``name``, ``action_rm``, ``instance``, ``ASN_NN_rt``.
    Optional kwargs: ``callback`` (defaults to ``self._callback``).
    """
    config = ET.Element("config")
    hide_routemap_holder = ET.SubElement(config, "hide-routemap-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
    route_map = ET.SubElement(hide_routemap_holder, "route-map")
    # Key elements identifying the target route-map entry.
    name_key = ET.SubElement(route_map, "name")
    name_key.text = kwargs.pop('name')
    action_rm_key = ET.SubElement(route_map, "action-rm")
    action_rm_key.text = kwargs.pop('action_rm')
    instance_key = ET.SubElement(route_map, "instance")
    instance_key.text = kwargs.pop('instance')
    # Payload path: content/set/extcommunity/rt/ASN-NN-rt
    content = ET.SubElement(route_map, "content")
    set = ET.SubElement(content, "set")
    extcommunity = ET.SubElement(set, "extcommunity")
    rt = ET.SubElement(extcommunity, "rt")
    ASN_NN_rt = ET.SubElement(rt, "ASN-NN-rt")
    ASN_NN_rt.text = kwargs.pop('ASN_NN_rt')
    callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[hide_routemap_holder_route_map_content_set_extcommunity_rt_ASN_NN_rt, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[hide_routemap_holder] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hide-routemap-holder]]]
variable[route_map] assign[=] call[name[ET].SubElement, parameter[name[hide_routemap_holder], constant[route-map]]]
variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[name]]]
name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]]
variable[action_rm_key] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[action-rm]]]
name[action_rm_key].text assign[=] call[name[kwargs].pop, parameter[constant[action_rm]]]
variable[instance_key] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[instance]]]
name[instance_key].text assign[=] call[name[kwargs].pop, parameter[constant[instance]]]
variable[content] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[content]]]
variable[set] assign[=] call[name[ET].SubElement, parameter[name[content], constant[set]]]
variable[extcommunity] assign[=] call[name[ET].SubElement, parameter[name[set], constant[extcommunity]]]
variable[rt] assign[=] call[name[ET].SubElement, parameter[name[extcommunity], constant[rt]]]
variable[ASN_NN_rt] assign[=] call[name[ET].SubElement, parameter[name[rt], constant[ASN-NN-rt]]]
name[ASN_NN_rt].text assign[=] call[name[kwargs].pop, parameter[constant[ASN_NN_rt]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[hide_routemap_holder_route_map_content_set_extcommunity_rt_ASN_NN_rt] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[hide_routemap_holder] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[route_map] = identifier[ET] . identifier[SubElement] ( identifier[hide_routemap_holder] , literal[string] )
identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] )
identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[action_rm_key] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] )
identifier[action_rm_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[instance_key] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] )
identifier[instance_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[content] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] )
identifier[set] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] )
identifier[extcommunity] = identifier[ET] . identifier[SubElement] ( identifier[set] , literal[string] )
identifier[rt] = identifier[ET] . identifier[SubElement] ( identifier[extcommunity] , literal[string] )
identifier[ASN_NN_rt] = identifier[ET] . identifier[SubElement] ( identifier[rt] , literal[string] )
identifier[ASN_NN_rt] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def hide_routemap_holder_route_map_content_set_extcommunity_rt_ASN_NN_rt(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
hide_routemap_holder = ET.SubElement(config, 'hide-routemap-holder', xmlns='urn:brocade.com:mgmt:brocade-ip-policy')
route_map = ET.SubElement(hide_routemap_holder, 'route-map')
name_key = ET.SubElement(route_map, 'name')
name_key.text = kwargs.pop('name')
action_rm_key = ET.SubElement(route_map, 'action-rm')
action_rm_key.text = kwargs.pop('action_rm')
instance_key = ET.SubElement(route_map, 'instance')
instance_key.text = kwargs.pop('instance')
content = ET.SubElement(route_map, 'content')
set = ET.SubElement(content, 'set')
extcommunity = ET.SubElement(set, 'extcommunity')
rt = ET.SubElement(extcommunity, 'rt')
ASN_NN_rt = ET.SubElement(rt, 'ASN-NN-rt')
ASN_NN_rt.text = kwargs.pop('ASN_NN_rt')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def error(self, s, pos):
    """Show text and a caret under that. For example:
    x = 2y + z
       ^
    """
    print("Lexical error:")
    # Show the input up to and a little past the error position.
    print("%s" % s[:pos+10])  # + 10 for trailing context
    # NOTE(review): caret is drawn at column pos-1, one left of pos -- confirm
    # whether pos is 1-based here.
    print("%s^" % (" "*(pos-1)))
    # Dump the tokens recognized so far, to aid debugging.
    for t in self.rv: print(t)
raise SystemExit | def function[error, parameter[self, s, pos]]:
constant[Show text and a caret under that. For example:
x = 2y + z
^
]
call[name[print], parameter[constant[Lexical error:]]]
call[name[print], parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> call[name[s]][<ast.Slice object at 0x7da18f811a80>]]]]
call[name[print], parameter[binary_operation[constant[%s^] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[ ] * binary_operation[name[pos] - constant[1]]]]]]
for taget[name[t]] in starred[name[self].rv] begin[:]
call[name[print], parameter[name[t]]]
<ast.Raise object at 0x7da18f812b00> | keyword[def] identifier[error] ( identifier[self] , identifier[s] , identifier[pos] ):
literal[string]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[s] [: identifier[pos] + literal[int] ])
identifier[print] ( literal[string] %( literal[string] *( identifier[pos] - literal[int] )))
keyword[for] identifier[t] keyword[in] identifier[self] . identifier[rv] : identifier[print] ( identifier[t] )
keyword[raise] identifier[SystemExit] | def error(self, s, pos):
"""Show text and a caret under that. For example:
x = 2y + z
^
"""
print('Lexical error:')
print('%s' % s[:pos + 10]) # + 10 for trailing context
print('%s^' % (' ' * (pos - 1)))
for t in self.rv:
print(t) # depends on [control=['for'], data=['t']]
raise SystemExit |
def as_dict(self):
    """
    Makes XcFunc obey the general json interface used in pymatgen for easier serialization.
    """
    # Standard pymatgen MSONable header identifying the class for round-tripping.
    d = {"@module": self.__class__.__module__,
         "@class": self.__class__.__name__}
    # print("in as_dict", type(self.x), type(self.c), type(self.xc))
    # Only serialize the functional components that are actually set.
    if self.x is not None: d["x"] = self.x.as_dict()
    if self.c is not None: d["c"] = self.c.as_dict()
    if self.xc is not None: d["xc"] = self.xc.as_dict()
return d | def function[as_dict, parameter[self]]:
constant[
Makes XcFunc obey the general json interface used in pymatgen for easier serialization.
]
variable[d] assign[=] dictionary[[<ast.Constant object at 0x7da20c992080>, <ast.Constant object at 0x7da20c990d30>], [<ast.Attribute object at 0x7da20c9917e0>, <ast.Attribute object at 0x7da20c990e50>]]
if compare[name[self].x is_not constant[None]] begin[:]
call[name[d]][constant[x]] assign[=] call[name[self].x.as_dict, parameter[]]
if compare[name[self].c is_not constant[None]] begin[:]
call[name[d]][constant[c]] assign[=] call[name[self].c.as_dict, parameter[]]
if compare[name[self].xc is_not constant[None]] begin[:]
call[name[d]][constant[xc]] assign[=] call[name[self].xc.as_dict, parameter[]]
return[name[d]] | keyword[def] identifier[as_dict] ( identifier[self] ):
literal[string]
identifier[d] ={ literal[string] : identifier[self] . identifier[__class__] . identifier[__module__] ,
literal[string] : identifier[self] . identifier[__class__] . identifier[__name__] }
keyword[if] identifier[self] . identifier[x] keyword[is] keyword[not] keyword[None] : identifier[d] [ literal[string] ]= identifier[self] . identifier[x] . identifier[as_dict] ()
keyword[if] identifier[self] . identifier[c] keyword[is] keyword[not] keyword[None] : identifier[d] [ literal[string] ]= identifier[self] . identifier[c] . identifier[as_dict] ()
keyword[if] identifier[self] . identifier[xc] keyword[is] keyword[not] keyword[None] : identifier[d] [ literal[string] ]= identifier[self] . identifier[xc] . identifier[as_dict] ()
keyword[return] identifier[d] | def as_dict(self):
"""
Makes XcFunc obey the general json interface used in pymatgen for easier serialization.
"""
d = {'@module': self.__class__.__module__, '@class': self.__class__.__name__}
# print("in as_dict", type(self.x), type(self.c), type(self.xc))
if self.x is not None:
d['x'] = self.x.as_dict() # depends on [control=['if'], data=[]]
if self.c is not None:
d['c'] = self.c.as_dict() # depends on [control=['if'], data=[]]
if self.xc is not None:
d['xc'] = self.xc.as_dict() # depends on [control=['if'], data=[]]
return d |
def clear_input_score_start_range(self):
    """Clears the input score start.

    raise: NoAccess - ``Metadata.isRequired()`` or
        ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.grading.GradeSystemForm.clear_lowest_numeric_score
    # Clearing is forbidden when the field's metadata marks it read-only or required.
    if (self.get_input_score_start_range_metadata().is_read_only() or
            self.get_input_score_start_range_metadata().is_required()):
        raise errors.NoAccess()
self._my_map['inputScoreStartRange'] = self._input_score_start_range_default | def function[clear_input_score_start_range, parameter[self]]:
constant[Clears the input score start.
raise: NoAccess - ``Metadata.isRequired()`` or
``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
]
if <ast.BoolOp object at 0x7da1b26ac940> begin[:]
<ast.Raise object at 0x7da1b26ad960>
call[name[self]._my_map][constant[inputScoreStartRange]] assign[=] name[self]._input_score_start_range_default | keyword[def] identifier[clear_input_score_start_range] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[get_input_score_start_range_metadata] (). identifier[is_read_only] () keyword[or]
identifier[self] . identifier[get_input_score_start_range_metadata] (). identifier[is_required] ()):
keyword[raise] identifier[errors] . identifier[NoAccess] ()
identifier[self] . identifier[_my_map] [ literal[string] ]= identifier[self] . identifier[_input_score_start_range_default] | def clear_input_score_start_range(self):
"""Clears the input score start.
raise: NoAccess - ``Metadata.isRequired()`` or
``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.grading.GradeSystemForm.clear_lowest_numeric_score
if self.get_input_score_start_range_metadata().is_read_only() or self.get_input_score_start_range_metadata().is_required():
raise errors.NoAccess() # depends on [control=['if'], data=[]]
self._my_map['inputScoreStartRange'] = self._input_score_start_range_default |
def parse_bookmark_data (data):
    """Return iterator for bookmarks of the form (url, name, line number).
    Bookmarks are not sorted.
    """
    name = None  # most recently seen NAME= value, pending a matching URL= line
    lineno = 0
    for line in data.splitlines():
        lineno += 1
        line = line.strip()
        if line.startswith("NAME="):
            name = line[5:]
        elif line.startswith("URL="):
            url = line[4:]
            # Emit only complete NAME/URL pairs with a non-empty URL.
            if url and name is not None:
                yield (url, name, lineno)
name = None | def function[parse_bookmark_data, parameter[data]]:
constant[Return iterator for bookmarks of the form (url, name, line number).
Bookmarks are not sorted.
]
variable[name] assign[=] constant[None]
variable[lineno] assign[=] constant[0]
for taget[name[line]] in starred[call[name[data].splitlines, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b2345570>
variable[line] assign[=] call[name[line].strip, parameter[]]
if call[name[line].startswith, parameter[constant[NAME=]]] begin[:]
variable[name] assign[=] call[name[line]][<ast.Slice object at 0x7da1b2346bc0>] | keyword[def] identifier[parse_bookmark_data] ( identifier[data] ):
literal[string]
identifier[name] = keyword[None]
identifier[lineno] = literal[int]
keyword[for] identifier[line] keyword[in] identifier[data] . identifier[splitlines] ():
identifier[lineno] += literal[int]
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[name] = identifier[line] [ literal[int] :]
keyword[elif] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[url] = identifier[line] [ literal[int] :]
keyword[if] identifier[url] keyword[and] identifier[name] keyword[is] keyword[not] keyword[None] :
keyword[yield] ( identifier[url] , identifier[name] , identifier[lineno] )
keyword[else] :
identifier[name] = keyword[None] | def parse_bookmark_data(data):
"""Return iterator for bookmarks of the form (url, name, line number).
Bookmarks are not sorted.
"""
name = None
lineno = 0
for line in data.splitlines():
lineno += 1
line = line.strip()
if line.startswith('NAME='):
name = line[5:] # depends on [control=['if'], data=[]]
elif line.startswith('URL='):
url = line[4:]
if url and name is not None:
yield (url, name, lineno) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
name = None # depends on [control=['for'], data=['line']] |
def project_update(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /project-xxxx/update API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate

    NOTE(review): mutable default argument ``input_params={}`` is shared
    across calls; harmless only as long as it is never mutated here.
    """
return DXHTTPRequest('/%s/update' % object_id, input_params, always_retry=always_retry, **kwargs) | def function[project_update, parameter[object_id, input_params, always_retry]]:
constant[
Invokes the /project-xxxx/update API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate
]
return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/update] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]] | keyword[def] identifier[project_update] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] ) | def project_update(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /project-xxxx/update API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Projects#API-method%3A-%2Fproject-xxxx%2Fupdate
"""
return DXHTTPRequest('/%s/update' % object_id, input_params, always_retry=always_retry, **kwargs) |
def javascript_tag(self, *args):
"""
Convenience tag to output 1 or more javascript tags.
:param args: 1 or more javascript file names
:return: Script tag(s) containing the asset
"""
tags = []
for arg in args:
asset_path = self.asset_url_for('{0}.js'.format(arg))
if asset_path:
tags.append('<script src="{0}"></script>'.format(asset_path))
return '\n'.join(tags) | def function[javascript_tag, parameter[self]]:
constant[
Convenience tag to output 1 or more javascript tags.
:param args: 1 or more javascript file names
:return: Script tag(s) containing the asset
]
variable[tags] assign[=] list[[]]
for taget[name[arg]] in starred[name[args]] begin[:]
variable[asset_path] assign[=] call[name[self].asset_url_for, parameter[call[constant[{0}.js].format, parameter[name[arg]]]]]
if name[asset_path] begin[:]
call[name[tags].append, parameter[call[constant[<script src="{0}"></script>].format, parameter[name[asset_path]]]]]
return[call[constant[
].join, parameter[name[tags]]]] | keyword[def] identifier[javascript_tag] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[tags] =[]
keyword[for] identifier[arg] keyword[in] identifier[args] :
identifier[asset_path] = identifier[self] . identifier[asset_url_for] ( literal[string] . identifier[format] ( identifier[arg] ))
keyword[if] identifier[asset_path] :
identifier[tags] . identifier[append] ( literal[string] . identifier[format] ( identifier[asset_path] ))
keyword[return] literal[string] . identifier[join] ( identifier[tags] ) | def javascript_tag(self, *args):
"""
Convenience tag to output 1 or more javascript tags.
:param args: 1 or more javascript file names
:return: Script tag(s) containing the asset
"""
tags = []
for arg in args:
asset_path = self.asset_url_for('{0}.js'.format(arg))
if asset_path:
tags.append('<script src="{0}"></script>'.format(asset_path)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
return '\n'.join(tags) |
def forward_until(self, condition):
"""Forward until one of the provided matches is found.
The returned string contains all characters found *before the condition
was met. In other words, the condition will be true for the remainder
of the buffer.
:param condition: set of valid strings
"""
c = TokenWithPosition('', self.peek().position)
while self.hasNext() and not condition(self.peek()):
c += self.forward(1)
return c | def function[forward_until, parameter[self, condition]]:
constant[Forward until one of the provided matches is found.
The returned string contains all characters found *before the condition
was met. In other words, the condition will be true for the remainder
of the buffer.
:param condition: set of valid strings
]
variable[c] assign[=] call[name[TokenWithPosition], parameter[constant[], call[name[self].peek, parameter[]].position]]
while <ast.BoolOp object at 0x7da1b063d510> begin[:]
<ast.AugAssign object at 0x7da1b063d600>
return[name[c]] | keyword[def] identifier[forward_until] ( identifier[self] , identifier[condition] ):
literal[string]
identifier[c] = identifier[TokenWithPosition] ( literal[string] , identifier[self] . identifier[peek] (). identifier[position] )
keyword[while] identifier[self] . identifier[hasNext] () keyword[and] keyword[not] identifier[condition] ( identifier[self] . identifier[peek] ()):
identifier[c] += identifier[self] . identifier[forward] ( literal[int] )
keyword[return] identifier[c] | def forward_until(self, condition):
"""Forward until one of the provided matches is found.
The returned string contains all characters found *before the condition
was met. In other words, the condition will be true for the remainder
of the buffer.
:param condition: set of valid strings
"""
c = TokenWithPosition('', self.peek().position)
while self.hasNext() and (not condition(self.peek())):
c += self.forward(1) # depends on [control=['while'], data=[]]
return c |
def to_dict(self):
"""Return a dict all all data about the release"""
data = model_to_dict(self, exclude=['id'])
data['title'] = unicode(self)
data['slug'] = self.slug
data['release_date'] = self.release_date.date().isoformat()
data['created'] = self.created.isoformat()
data['modified'] = self.modified.isoformat()
new_features, known_issues = self.notes(public_only=False)
for note in known_issues:
note.tag = 'Known'
data['notes'] = [n.to_dict(self) for n in chain(new_features, known_issues)]
return data | def function[to_dict, parameter[self]]:
constant[Return a dict all all data about the release]
variable[data] assign[=] call[name[model_to_dict], parameter[name[self]]]
call[name[data]][constant[title]] assign[=] call[name[unicode], parameter[name[self]]]
call[name[data]][constant[slug]] assign[=] name[self].slug
call[name[data]][constant[release_date]] assign[=] call[call[name[self].release_date.date, parameter[]].isoformat, parameter[]]
call[name[data]][constant[created]] assign[=] call[name[self].created.isoformat, parameter[]]
call[name[data]][constant[modified]] assign[=] call[name[self].modified.isoformat, parameter[]]
<ast.Tuple object at 0x7da1b28fe140> assign[=] call[name[self].notes, parameter[]]
for taget[name[note]] in starred[name[known_issues]] begin[:]
name[note].tag assign[=] constant[Known]
call[name[data]][constant[notes]] assign[=] <ast.ListComp object at 0x7da1b2775300>
return[name[data]] | keyword[def] identifier[to_dict] ( identifier[self] ):
literal[string]
identifier[data] = identifier[model_to_dict] ( identifier[self] , identifier[exclude] =[ literal[string] ])
identifier[data] [ literal[string] ]= identifier[unicode] ( identifier[self] )
identifier[data] [ literal[string] ]= identifier[self] . identifier[slug]
identifier[data] [ literal[string] ]= identifier[self] . identifier[release_date] . identifier[date] (). identifier[isoformat] ()
identifier[data] [ literal[string] ]= identifier[self] . identifier[created] . identifier[isoformat] ()
identifier[data] [ literal[string] ]= identifier[self] . identifier[modified] . identifier[isoformat] ()
identifier[new_features] , identifier[known_issues] = identifier[self] . identifier[notes] ( identifier[public_only] = keyword[False] )
keyword[for] identifier[note] keyword[in] identifier[known_issues] :
identifier[note] . identifier[tag] = literal[string]
identifier[data] [ literal[string] ]=[ identifier[n] . identifier[to_dict] ( identifier[self] ) keyword[for] identifier[n] keyword[in] identifier[chain] ( identifier[new_features] , identifier[known_issues] )]
keyword[return] identifier[data] | def to_dict(self):
"""Return a dict all all data about the release"""
data = model_to_dict(self, exclude=['id'])
data['title'] = unicode(self)
data['slug'] = self.slug
data['release_date'] = self.release_date.date().isoformat()
data['created'] = self.created.isoformat()
data['modified'] = self.modified.isoformat()
(new_features, known_issues) = self.notes(public_only=False)
for note in known_issues:
note.tag = 'Known' # depends on [control=['for'], data=['note']]
data['notes'] = [n.to_dict(self) for n in chain(new_features, known_issues)]
return data |
def _on_error(self, websock, e):
'''
Raises BrowsingException in the thread that created this instance.
'''
if isinstance(e, (
websocket.WebSocketConnectionClosedException,
ConnectionResetError)):
self.logger.error('websocket closed, did chrome die?')
else:
self.logger.error(
'exception from websocket receiver thread',
exc_info=1)
brozzler.thread_raise(self.calling_thread, BrowsingException) | def function[_on_error, parameter[self, websock, e]]:
constant[
Raises BrowsingException in the thread that created this instance.
]
if call[name[isinstance], parameter[name[e], tuple[[<ast.Attribute object at 0x7da1b1e92710>, <ast.Name object at 0x7da1b1e90be0>]]]] begin[:]
call[name[self].logger.error, parameter[constant[websocket closed, did chrome die?]]]
call[name[brozzler].thread_raise, parameter[name[self].calling_thread, name[BrowsingException]]] | keyword[def] identifier[_on_error] ( identifier[self] , identifier[websock] , identifier[e] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[e] ,(
identifier[websocket] . identifier[WebSocketConnectionClosedException] ,
identifier[ConnectionResetError] )):
identifier[self] . identifier[logger] . identifier[error] ( literal[string] )
keyword[else] :
identifier[self] . identifier[logger] . identifier[error] (
literal[string] ,
identifier[exc_info] = literal[int] )
identifier[brozzler] . identifier[thread_raise] ( identifier[self] . identifier[calling_thread] , identifier[BrowsingException] ) | def _on_error(self, websock, e):
"""
Raises BrowsingException in the thread that created this instance.
"""
if isinstance(e, (websocket.WebSocketConnectionClosedException, ConnectionResetError)):
self.logger.error('websocket closed, did chrome die?') # depends on [control=['if'], data=[]]
else:
self.logger.error('exception from websocket receiver thread', exc_info=1)
brozzler.thread_raise(self.calling_thread, BrowsingException) |
def do_disable_commands(self, _):
"""Disable the Application Management commands"""
message_to_print = "{} is not available while {} commands are disabled".format(COMMAND_NAME,
self.CMD_CAT_APP_MGMT)
self.disable_category(self.CMD_CAT_APP_MGMT, message_to_print)
self.poutput("The Application Management commands have been disabled") | def function[do_disable_commands, parameter[self, _]]:
constant[Disable the Application Management commands]
variable[message_to_print] assign[=] call[constant[{} is not available while {} commands are disabled].format, parameter[name[COMMAND_NAME], name[self].CMD_CAT_APP_MGMT]]
call[name[self].disable_category, parameter[name[self].CMD_CAT_APP_MGMT, name[message_to_print]]]
call[name[self].poutput, parameter[constant[The Application Management commands have been disabled]]] | keyword[def] identifier[do_disable_commands] ( identifier[self] , identifier[_] ):
literal[string]
identifier[message_to_print] = literal[string] . identifier[format] ( identifier[COMMAND_NAME] ,
identifier[self] . identifier[CMD_CAT_APP_MGMT] )
identifier[self] . identifier[disable_category] ( identifier[self] . identifier[CMD_CAT_APP_MGMT] , identifier[message_to_print] )
identifier[self] . identifier[poutput] ( literal[string] ) | def do_disable_commands(self, _):
"""Disable the Application Management commands"""
message_to_print = '{} is not available while {} commands are disabled'.format(COMMAND_NAME, self.CMD_CAT_APP_MGMT)
self.disable_category(self.CMD_CAT_APP_MGMT, message_to_print)
self.poutput('The Application Management commands have been disabled') |
def geometry_hash(geometry):
"""
Get an MD5 for a geometry object
Parameters
------------
geometry : object
Returns
------------
MD5 : str
"""
if hasattr(geometry, 'md5'):
# for most of our trimesh objects
md5 = geometry.md5()
elif hasattr(geometry, 'tostring'):
# for unwrapped ndarray objects
md5 = str(hash(geometry.tostring()))
if hasattr(geometry, 'visual'):
# if visual properties are defined
md5 += str(geometry.visual.crc())
return md5 | def function[geometry_hash, parameter[geometry]]:
constant[
Get an MD5 for a geometry object
Parameters
------------
geometry : object
Returns
------------
MD5 : str
]
if call[name[hasattr], parameter[name[geometry], constant[md5]]] begin[:]
variable[md5] assign[=] call[name[geometry].md5, parameter[]]
if call[name[hasattr], parameter[name[geometry], constant[visual]]] begin[:]
<ast.AugAssign object at 0x7da20c991840>
return[name[md5]] | keyword[def] identifier[geometry_hash] ( identifier[geometry] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[geometry] , literal[string] ):
identifier[md5] = identifier[geometry] . identifier[md5] ()
keyword[elif] identifier[hasattr] ( identifier[geometry] , literal[string] ):
identifier[md5] = identifier[str] ( identifier[hash] ( identifier[geometry] . identifier[tostring] ()))
keyword[if] identifier[hasattr] ( identifier[geometry] , literal[string] ):
identifier[md5] += identifier[str] ( identifier[geometry] . identifier[visual] . identifier[crc] ())
keyword[return] identifier[md5] | def geometry_hash(geometry):
"""
Get an MD5 for a geometry object
Parameters
------------
geometry : object
Returns
------------
MD5 : str
"""
if hasattr(geometry, 'md5'):
# for most of our trimesh objects
md5 = geometry.md5() # depends on [control=['if'], data=[]]
elif hasattr(geometry, 'tostring'):
# for unwrapped ndarray objects
md5 = str(hash(geometry.tostring())) # depends on [control=['if'], data=[]]
if hasattr(geometry, 'visual'):
# if visual properties are defined
md5 += str(geometry.visual.crc()) # depends on [control=['if'], data=[]]
return md5 |
def load_plugins(self):
"""Refresh the list of available collectors and auditors
Returns:
`None`
"""
for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.collectors']['plugins']:
cls = entry_point.load()
if cls.enabled():
self.log.debug('Collector loaded: {} in module {}'.format(cls.__name__, cls.__module__))
self.collectors.setdefault(cls.type, []).append(Worker(
cls.name,
cls.interval,
{
'name': entry_point.name,
'module_name': entry_point.module_name,
'attrs': entry_point.attrs
}
))
else:
self.log.debug('Collector disabled: {} in module {}'.format(cls.__name__, cls.__module__))
for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.auditors']['plugins']:
cls = entry_point.load()
if cls.enabled():
self.log.debug('Auditor loaded: {} in module {}'.format(cls.__name__, cls.__module__))
self.auditors.append(Worker(
cls.name,
cls.interval,
{
'name': entry_point.name,
'module_name': entry_point.module_name,
'attrs': entry_point.attrs
}
))
else:
self.log.debug('Auditor disabled: {} in module {}'.format(cls.__name__, cls.__module__))
collector_count = sum(len(x) for x in self.collectors.values())
auditor_count = len(self.auditors)
if collector_count + auditor_count == 0:
raise Exception('No auditors or collectors loaded, aborting scheduler')
self.log.info('Scheduler loaded {} collectors and {} auditors'.format(collector_count, auditor_count)) | def function[load_plugins, parameter[self]]:
constant[Refresh the list of available collectors and auditors
Returns:
`None`
]
for taget[name[entry_point]] in starred[call[call[name[CINQ_PLUGINS]][constant[cloud_inquisitor.plugins.collectors]]][constant[plugins]]] begin[:]
variable[cls] assign[=] call[name[entry_point].load, parameter[]]
if call[name[cls].enabled, parameter[]] begin[:]
call[name[self].log.debug, parameter[call[constant[Collector loaded: {} in module {}].format, parameter[name[cls].__name__, name[cls].__module__]]]]
call[call[name[self].collectors.setdefault, parameter[name[cls].type, list[[]]]].append, parameter[call[name[Worker], parameter[name[cls].name, name[cls].interval, dictionary[[<ast.Constant object at 0x7da1b2017850>, <ast.Constant object at 0x7da1b20178b0>, <ast.Constant object at 0x7da1b20177c0>], [<ast.Attribute object at 0x7da1b2017640>, <ast.Attribute object at 0x7da1b2017760>, <ast.Attribute object at 0x7da1b2017670>]]]]]]
for taget[name[entry_point]] in starred[call[call[name[CINQ_PLUGINS]][constant[cloud_inquisitor.plugins.auditors]]][constant[plugins]]] begin[:]
variable[cls] assign[=] call[name[entry_point].load, parameter[]]
if call[name[cls].enabled, parameter[]] begin[:]
call[name[self].log.debug, parameter[call[constant[Auditor loaded: {} in module {}].format, parameter[name[cls].__name__, name[cls].__module__]]]]
call[name[self].auditors.append, parameter[call[name[Worker], parameter[name[cls].name, name[cls].interval, dictionary[[<ast.Constant object at 0x7da1b2016d70>, <ast.Constant object at 0x7da1b2016c80>, <ast.Constant object at 0x7da1b2016ad0>], [<ast.Attribute object at 0x7da1b2016bc0>, <ast.Attribute object at 0x7da1b2016bf0>, <ast.Attribute object at 0x7da1b2016b90>]]]]]]
variable[collector_count] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b2016350>]]
variable[auditor_count] assign[=] call[name[len], parameter[name[self].auditors]]
if compare[binary_operation[name[collector_count] + name[auditor_count]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b2015990>
call[name[self].log.info, parameter[call[constant[Scheduler loaded {} collectors and {} auditors].format, parameter[name[collector_count], name[auditor_count]]]]] | keyword[def] identifier[load_plugins] ( identifier[self] ):
literal[string]
keyword[for] identifier[entry_point] keyword[in] identifier[CINQ_PLUGINS] [ literal[string] ][ literal[string] ]:
identifier[cls] = identifier[entry_point] . identifier[load] ()
keyword[if] identifier[cls] . identifier[enabled] ():
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] , identifier[cls] . identifier[__module__] ))
identifier[self] . identifier[collectors] . identifier[setdefault] ( identifier[cls] . identifier[type] ,[]). identifier[append] ( identifier[Worker] (
identifier[cls] . identifier[name] ,
identifier[cls] . identifier[interval] ,
{
literal[string] : identifier[entry_point] . identifier[name] ,
literal[string] : identifier[entry_point] . identifier[module_name] ,
literal[string] : identifier[entry_point] . identifier[attrs]
}
))
keyword[else] :
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] , identifier[cls] . identifier[__module__] ))
keyword[for] identifier[entry_point] keyword[in] identifier[CINQ_PLUGINS] [ literal[string] ][ literal[string] ]:
identifier[cls] = identifier[entry_point] . identifier[load] ()
keyword[if] identifier[cls] . identifier[enabled] ():
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] , identifier[cls] . identifier[__module__] ))
identifier[self] . identifier[auditors] . identifier[append] ( identifier[Worker] (
identifier[cls] . identifier[name] ,
identifier[cls] . identifier[interval] ,
{
literal[string] : identifier[entry_point] . identifier[name] ,
literal[string] : identifier[entry_point] . identifier[module_name] ,
literal[string] : identifier[entry_point] . identifier[attrs]
}
))
keyword[else] :
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] , identifier[cls] . identifier[__module__] ))
identifier[collector_count] = identifier[sum] ( identifier[len] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[collectors] . identifier[values] ())
identifier[auditor_count] = identifier[len] ( identifier[self] . identifier[auditors] )
keyword[if] identifier[collector_count] + identifier[auditor_count] == literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[collector_count] , identifier[auditor_count] )) | def load_plugins(self):
"""Refresh the list of available collectors and auditors
Returns:
`None`
"""
for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.collectors']['plugins']:
cls = entry_point.load()
if cls.enabled():
self.log.debug('Collector loaded: {} in module {}'.format(cls.__name__, cls.__module__))
self.collectors.setdefault(cls.type, []).append(Worker(cls.name, cls.interval, {'name': entry_point.name, 'module_name': entry_point.module_name, 'attrs': entry_point.attrs})) # depends on [control=['if'], data=[]]
else:
self.log.debug('Collector disabled: {} in module {}'.format(cls.__name__, cls.__module__)) # depends on [control=['for'], data=['entry_point']]
for entry_point in CINQ_PLUGINS['cloud_inquisitor.plugins.auditors']['plugins']:
cls = entry_point.load()
if cls.enabled():
self.log.debug('Auditor loaded: {} in module {}'.format(cls.__name__, cls.__module__))
self.auditors.append(Worker(cls.name, cls.interval, {'name': entry_point.name, 'module_name': entry_point.module_name, 'attrs': entry_point.attrs})) # depends on [control=['if'], data=[]]
else:
self.log.debug('Auditor disabled: {} in module {}'.format(cls.__name__, cls.__module__)) # depends on [control=['for'], data=['entry_point']]
collector_count = sum((len(x) for x in self.collectors.values()))
auditor_count = len(self.auditors)
if collector_count + auditor_count == 0:
raise Exception('No auditors or collectors loaded, aborting scheduler') # depends on [control=['if'], data=[]]
self.log.info('Scheduler loaded {} collectors and {} auditors'.format(collector_count, auditor_count)) |
def describe_snapshots(kwargs=None, call=None):
'''
Describe a snapshot (or snapshots)
snapshot_id
One or more snapshot IDs. Multiple IDs must be separated by ",".
owner
Return the snapshots owned by the specified owner. Valid values
include: self, amazon, <AWS Account ID>. Multiple values must be
separated by ",".
restorable_by
One or more AWS accounts IDs that can create volumes from the snapshot.
Multiple aws account IDs must be separated by ",".
TODO: Add all of the filters.
'''
if call != 'function':
log.error(
'The describe_snapshot function must be called with -f '
'or --function.'
)
return False
params = {'Action': 'DescribeSnapshots'}
# The AWS correct way is to use non-plurals like snapshot_id INSTEAD of snapshot_ids.
if 'snapshot_ids' in kwargs:
kwargs['snapshot_id'] = kwargs['snapshot_ids']
if 'snapshot_id' in kwargs:
snapshot_ids = kwargs['snapshot_id'].split(',')
for snapshot_index, snapshot_id in enumerate(snapshot_ids):
params['SnapshotId.{0}'.format(snapshot_index)] = snapshot_id
if 'owner' in kwargs:
owners = kwargs['owner'].split(',')
for owner_index, owner in enumerate(owners):
params['Owner.{0}'.format(owner_index)] = owner
if 'restorable_by' in kwargs:
restorable_bys = kwargs['restorable_by'].split(',')
for restorable_by_index, restorable_by in enumerate(restorable_bys):
params[
'RestorableBy.{0}'.format(restorable_by_index)
] = restorable_by
log.debug(params)
data = aws.query(params,
return_url=True,
location=get_location(),
provider=get_provider(),
opts=__opts__,
sigver='4')
return data | def function[describe_snapshots, parameter[kwargs, call]]:
constant[
Describe a snapshot (or snapshots)
snapshot_id
One or more snapshot IDs. Multiple IDs must be separated by ",".
owner
Return the snapshots owned by the specified owner. Valid values
include: self, amazon, <AWS Account ID>. Multiple values must be
separated by ",".
restorable_by
One or more AWS accounts IDs that can create volumes from the snapshot.
Multiple aws account IDs must be separated by ",".
TODO: Add all of the filters.
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
call[name[log].error, parameter[constant[The describe_snapshot function must be called with -f or --function.]]]
return[constant[False]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da207f993f0>], [<ast.Constant object at 0x7da207f988b0>]]
if compare[constant[snapshot_ids] in name[kwargs]] begin[:]
call[name[kwargs]][constant[snapshot_id]] assign[=] call[name[kwargs]][constant[snapshot_ids]]
if compare[constant[snapshot_id] in name[kwargs]] begin[:]
variable[snapshot_ids] assign[=] call[call[name[kwargs]][constant[snapshot_id]].split, parameter[constant[,]]]
for taget[tuple[[<ast.Name object at 0x7da207f98580>, <ast.Name object at 0x7da207f9ba30>]]] in starred[call[name[enumerate], parameter[name[snapshot_ids]]]] begin[:]
call[name[params]][call[constant[SnapshotId.{0}].format, parameter[name[snapshot_index]]]] assign[=] name[snapshot_id]
if compare[constant[owner] in name[kwargs]] begin[:]
variable[owners] assign[=] call[call[name[kwargs]][constant[owner]].split, parameter[constant[,]]]
for taget[tuple[[<ast.Name object at 0x7da18bc73a60>, <ast.Name object at 0x7da18bc70820>]]] in starred[call[name[enumerate], parameter[name[owners]]]] begin[:]
call[name[params]][call[constant[Owner.{0}].format, parameter[name[owner_index]]]] assign[=] name[owner]
if compare[constant[restorable_by] in name[kwargs]] begin[:]
variable[restorable_bys] assign[=] call[call[name[kwargs]][constant[restorable_by]].split, parameter[constant[,]]]
for taget[tuple[[<ast.Name object at 0x7da1b23473d0>, <ast.Name object at 0x7da1b2344760>]]] in starred[call[name[enumerate], parameter[name[restorable_bys]]]] begin[:]
call[name[params]][call[constant[RestorableBy.{0}].format, parameter[name[restorable_by_index]]]] assign[=] name[restorable_by]
call[name[log].debug, parameter[name[params]]]
variable[data] assign[=] call[name[aws].query, parameter[name[params]]]
return[name[data]] | keyword[def] identifier[describe_snapshots] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
identifier[log] . identifier[error] (
literal[string]
literal[string]
)
keyword[return] keyword[False]
identifier[params] ={ literal[string] : literal[string] }
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[snapshot_ids] = identifier[kwargs] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[snapshot_index] , identifier[snapshot_id] keyword[in] identifier[enumerate] ( identifier[snapshot_ids] ):
identifier[params] [ literal[string] . identifier[format] ( identifier[snapshot_index] )]= identifier[snapshot_id]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[owners] = identifier[kwargs] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[owner_index] , identifier[owner] keyword[in] identifier[enumerate] ( identifier[owners] ):
identifier[params] [ literal[string] . identifier[format] ( identifier[owner_index] )]= identifier[owner]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[restorable_bys] = identifier[kwargs] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[restorable_by_index] , identifier[restorable_by] keyword[in] identifier[enumerate] ( identifier[restorable_bys] ):
identifier[params] [
literal[string] . identifier[format] ( identifier[restorable_by_index] )
]= identifier[restorable_by]
identifier[log] . identifier[debug] ( identifier[params] )
identifier[data] = identifier[aws] . identifier[query] ( identifier[params] ,
identifier[return_url] = keyword[True] ,
identifier[location] = identifier[get_location] (),
identifier[provider] = identifier[get_provider] (),
identifier[opts] = identifier[__opts__] ,
identifier[sigver] = literal[string] )
keyword[return] identifier[data] | def describe_snapshots(kwargs=None, call=None):
"""
Describe a snapshot (or snapshots)
snapshot_id
One or more snapshot IDs. Multiple IDs must be separated by ",".
owner
Return the snapshots owned by the specified owner. Valid values
include: self, amazon, <AWS Account ID>. Multiple values must be
separated by ",".
restorable_by
One or more AWS accounts IDs that can create volumes from the snapshot.
Multiple aws account IDs must be separated by ",".
TODO: Add all of the filters.
"""
if call != 'function':
log.error('The describe_snapshot function must be called with -f or --function.')
return False # depends on [control=['if'], data=[]]
params = {'Action': 'DescribeSnapshots'}
# The AWS correct way is to use non-plurals like snapshot_id INSTEAD of snapshot_ids.
if 'snapshot_ids' in kwargs:
kwargs['snapshot_id'] = kwargs['snapshot_ids'] # depends on [control=['if'], data=['kwargs']]
if 'snapshot_id' in kwargs:
snapshot_ids = kwargs['snapshot_id'].split(',')
for (snapshot_index, snapshot_id) in enumerate(snapshot_ids):
params['SnapshotId.{0}'.format(snapshot_index)] = snapshot_id # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['kwargs']]
if 'owner' in kwargs:
owners = kwargs['owner'].split(',')
for (owner_index, owner) in enumerate(owners):
params['Owner.{0}'.format(owner_index)] = owner # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['kwargs']]
if 'restorable_by' in kwargs:
restorable_bys = kwargs['restorable_by'].split(',')
for (restorable_by_index, restorable_by) in enumerate(restorable_bys):
params['RestorableBy.{0}'.format(restorable_by_index)] = restorable_by # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['kwargs']]
log.debug(params)
data = aws.query(params, return_url=True, location=get_location(), provider=get_provider(), opts=__opts__, sigver='4')
return data |
def candidates(self):
"""A list of candidate addresses (as dictionaries) from a geocode
operation"""
# convert x['location'] to a point from a json point struct
def cditer():
for candidate in self._json_struct['candidates']:
newcandidate = candidate.copy()
newcandidate['location'] = \
geometry.fromJson(newcandidate['location'])
yield newcandidate
return list(cditer()) | def function[candidates, parameter[self]]:
constant[A list of candidate addresses (as dictionaries) from a geocode
operation]
def function[cditer, parameter[]]:
for taget[name[candidate]] in starred[call[name[self]._json_struct][constant[candidates]]] begin[:]
variable[newcandidate] assign[=] call[name[candidate].copy, parameter[]]
call[name[newcandidate]][constant[location]] assign[=] call[name[geometry].fromJson, parameter[call[name[newcandidate]][constant[location]]]]
<ast.Yield object at 0x7da18bc71300>
return[call[name[list], parameter[call[name[cditer], parameter[]]]]] | keyword[def] identifier[candidates] ( identifier[self] ):
literal[string]
keyword[def] identifier[cditer] ():
keyword[for] identifier[candidate] keyword[in] identifier[self] . identifier[_json_struct] [ literal[string] ]:
identifier[newcandidate] = identifier[candidate] . identifier[copy] ()
identifier[newcandidate] [ literal[string] ]= identifier[geometry] . identifier[fromJson] ( identifier[newcandidate] [ literal[string] ])
keyword[yield] identifier[newcandidate]
keyword[return] identifier[list] ( identifier[cditer] ()) | def candidates(self):
"""A list of candidate addresses (as dictionaries) from a geocode
operation"""
# convert x['location'] to a point from a json point struct
def cditer():
for candidate in self._json_struct['candidates']:
newcandidate = candidate.copy()
newcandidate['location'] = geometry.fromJson(newcandidate['location'])
yield newcandidate # depends on [control=['for'], data=['candidate']]
return list(cditer()) |
def zfill(self, width):
"""Pad a numeric string with zeros on the left, to fill a field of the specified width.
The string is never truncated.
:param int width: Length of output string.
"""
if not self.value_no_colors:
result = self.value_no_colors.zfill(width)
else:
result = self.value_colors.replace(self.value_no_colors, self.value_no_colors.zfill(width))
return self.__class__(result, keep_tags=True) | def function[zfill, parameter[self, width]]:
constant[Pad a numeric string with zeros on the left, to fill a field of the specified width.
The string is never truncated.
:param int width: Length of output string.
]
if <ast.UnaryOp object at 0x7da18bcc8820> begin[:]
variable[result] assign[=] call[name[self].value_no_colors.zfill, parameter[name[width]]]
return[call[name[self].__class__, parameter[name[result]]]] | keyword[def] identifier[zfill] ( identifier[self] , identifier[width] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[value_no_colors] :
identifier[result] = identifier[self] . identifier[value_no_colors] . identifier[zfill] ( identifier[width] )
keyword[else] :
identifier[result] = identifier[self] . identifier[value_colors] . identifier[replace] ( identifier[self] . identifier[value_no_colors] , identifier[self] . identifier[value_no_colors] . identifier[zfill] ( identifier[width] ))
keyword[return] identifier[self] . identifier[__class__] ( identifier[result] , identifier[keep_tags] = keyword[True] ) | def zfill(self, width):
"""Pad a numeric string with zeros on the left, to fill a field of the specified width.
The string is never truncated.
:param int width: Length of output string.
"""
if not self.value_no_colors:
result = self.value_no_colors.zfill(width) # depends on [control=['if'], data=[]]
else:
result = self.value_colors.replace(self.value_no_colors, self.value_no_colors.zfill(width))
return self.__class__(result, keep_tags=True) |
def to_json(val, allow_pickle=False, pretty=False):
    r"""
    Converts a python object to a JSON string using the utool convention
    Args:
        val (object):
    Returns:
        str: json_str
    References:
        http://stackoverflow.com/questions/11561932/why-does-json-dumpslistnp
    CommandLine:
        python -m utool.util_cache --test-to_json
        python3 -m utool.util_cache --test-to_json
    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_cache import *  # NOQA
        >>> import utool as ut
        >>> import numpy as np
        >>> import uuid
        >>> val = [
        >>>     '{"foo": "not a dict"}',
        >>>     1.3,
        >>>     [1],
        >>>     # {1: 1, 2: 2, 3: 3}, cant use integer keys
        >>>     {1, 2, 3},
        >>>     slice(1, None, 1),
        >>>     b'an ascii string',
        >>>     np.array([1, 2, 3]),
        >>>     ut.get_zero_uuid(),
        >>>     ut.LazyDict(x='fo'),
        >>>     ut.LazyDict,
        >>>     {'x': {'a', 'b', 'cde'}, 'y': [1]}
        >>> ]
        >>> #val = ut.LazyDict(x='fo')
        >>> allow_pickle = True
        >>> if not allow_pickle:
        >>>     val = val[:-2]
        >>> json_str = ut.to_json(val, allow_pickle=allow_pickle)
        >>> result = ut.repr3(json_str)
        >>> reload_val = ut.from_json(json_str, allow_pickle=allow_pickle)
        >>> # Make sure pickle doesnt happen by default
        >>> try:
        >>>     json_str = ut.to_json(val)
        >>>     assert False or not allow_pickle, 'expected a type error'
        >>> except TypeError:
        >>>     print('Correctly got type error')
        >>> try:
        >>>     json_str = ut.from_json(val)
        >>>     assert False, 'expected a type error'
        >>> except TypeError:
        >>>     print('Correctly got type error')
        >>> print(result)
        >>> print('original = ' + ut.repr3(val, nl=1))
        >>> print('reconstructed = ' + ut.repr3(reload_val, nl=1))
        >>> assert reload_val[6] == val[6].tolist()
        >>> assert reload_val[6] is not val[6]
    """
    # Build the encoder class for this call; pickling support is opt-in.
    encoder_cls = make_utool_json_encoder(allow_pickle)
    dump_kwargs = {'cls': encoder_cls}
    if pretty:
        # Human-readable layout: 4-space indent, no trailing space after commas.
        dump_kwargs['indent'] = 4
        dump_kwargs['separators'] = (',', ': ')
    return json.dumps(val, **dump_kwargs)
constant[
Converts a python object to a JSON string using the utool convention
Args:
val (object):
Returns:
str: json_str
References:
http://stackoverflow.com/questions/11561932/why-does-json-dumpslistnp
CommandLine:
python -m utool.util_cache --test-to_json
python3 -m utool.util_cache --test-to_json
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_cache import * # NOQA
>>> import utool as ut
>>> import numpy as np
>>> import uuid
>>> val = [
>>> '{"foo": "not a dict"}',
>>> 1.3,
>>> [1],
>>> # {1: 1, 2: 2, 3: 3}, cant use integer keys
>>> {1, 2, 3},
>>> slice(1, None, 1),
>>> b'an ascii string',
>>> np.array([1, 2, 3]),
>>> ut.get_zero_uuid(),
>>> ut.LazyDict(x='fo'),
>>> ut.LazyDict,
>>> {'x': {'a', 'b', 'cde'}, 'y': [1]}
>>> ]
>>> #val = ut.LazyDict(x='fo')
>>> allow_pickle = True
>>> if not allow_pickle:
>>> val = val[:-2]
>>> json_str = ut.to_json(val, allow_pickle=allow_pickle)
>>> result = ut.repr3(json_str)
>>> reload_val = ut.from_json(json_str, allow_pickle=allow_pickle)
>>> # Make sure pickle doesnt happen by default
>>> try:
>>> json_str = ut.to_json(val)
>>> assert False or not allow_pickle, 'expected a type error'
>>> except TypeError:
>>> print('Correctly got type error')
>>> try:
>>> json_str = ut.from_json(val)
>>> assert False, 'expected a type error'
>>> except TypeError:
>>> print('Correctly got type error')
>>> print(result)
>>> print('original = ' + ut.repr3(val, nl=1))
>>> print('reconstructed = ' + ut.repr3(reload_val, nl=1))
>>> assert reload_val[6] == val[6].tolist()
>>> assert reload_val[6] is not val[6]
]
variable[UtoolJSONEncoder] assign[=] call[name[make_utool_json_encoder], parameter[name[allow_pickle]]]
variable[json_kw] assign[=] dictionary[[], []]
call[name[json_kw]][constant[cls]] assign[=] name[UtoolJSONEncoder]
if name[pretty] begin[:]
call[name[json_kw]][constant[indent]] assign[=] constant[4]
call[name[json_kw]][constant[separators]] assign[=] tuple[[<ast.Constant object at 0x7da1b246b9d0>, <ast.Constant object at 0x7da1b246a980>]]
variable[json_str] assign[=] call[name[json].dumps, parameter[name[val]]]
return[name[json_str]] | keyword[def] identifier[to_json] ( identifier[val] , identifier[allow_pickle] = keyword[False] , identifier[pretty] = keyword[False] ):
literal[string]
identifier[UtoolJSONEncoder] = identifier[make_utool_json_encoder] ( identifier[allow_pickle] )
identifier[json_kw] ={}
identifier[json_kw] [ literal[string] ]= identifier[UtoolJSONEncoder]
keyword[if] identifier[pretty] :
identifier[json_kw] [ literal[string] ]= literal[int]
identifier[json_kw] [ literal[string] ]=( literal[string] , literal[string] )
identifier[json_str] = identifier[json] . identifier[dumps] ( identifier[val] ,** identifier[json_kw] )
keyword[return] identifier[json_str] | def to_json(val, allow_pickle=False, pretty=False):
"""
Converts a python object to a JSON string using the utool convention
Args:
val (object):
Returns:
str: json_str
References:
http://stackoverflow.com/questions/11561932/why-does-json-dumpslistnp
CommandLine:
python -m utool.util_cache --test-to_json
python3 -m utool.util_cache --test-to_json
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_cache import * # NOQA
>>> import utool as ut
>>> import numpy as np
>>> import uuid
>>> val = [
>>> '{"foo": "not a dict"}',
>>> 1.3,
>>> [1],
>>> # {1: 1, 2: 2, 3: 3}, cant use integer keys
>>> {1, 2, 3},
>>> slice(1, None, 1),
>>> b'an ascii string',
>>> np.array([1, 2, 3]),
>>> ut.get_zero_uuid(),
>>> ut.LazyDict(x='fo'),
>>> ut.LazyDict,
>>> {'x': {'a', 'b', 'cde'}, 'y': [1]}
>>> ]
>>> #val = ut.LazyDict(x='fo')
>>> allow_pickle = True
>>> if not allow_pickle:
>>> val = val[:-2]
>>> json_str = ut.to_json(val, allow_pickle=allow_pickle)
>>> result = ut.repr3(json_str)
>>> reload_val = ut.from_json(json_str, allow_pickle=allow_pickle)
>>> # Make sure pickle doesnt happen by default
>>> try:
>>> json_str = ut.to_json(val)
>>> assert False or not allow_pickle, 'expected a type error'
>>> except TypeError:
>>> print('Correctly got type error')
>>> try:
>>> json_str = ut.from_json(val)
>>> assert False, 'expected a type error'
>>> except TypeError:
>>> print('Correctly got type error')
>>> print(result)
>>> print('original = ' + ut.repr3(val, nl=1))
>>> print('reconstructed = ' + ut.repr3(reload_val, nl=1))
>>> assert reload_val[6] == val[6].tolist()
>>> assert reload_val[6] is not val[6]
"""
UtoolJSONEncoder = make_utool_json_encoder(allow_pickle)
json_kw = {}
json_kw['cls'] = UtoolJSONEncoder
if pretty:
json_kw['indent'] = 4
json_kw['separators'] = (',', ': ') # depends on [control=['if'], data=[]]
json_str = json.dumps(val, **json_kw)
return json_str |
def Take(self: Iterable, n):
    """
    [
        {
            'self': [1, 2, 3],
            'n': 2,
            'assert': lambda ret: list(ret) == [1, 2]
        }
    ]
    """
    # Yield at most `n` leading elements, stopping early if the source
    # is exhausted first.
    remaining = n
    for item in self:
        if remaining == 0:
            break
        yield item
        remaining -= 1
constant[
[
{
'self': [1, 2, 3],
'n': 2,
'assert': lambda ret: list(ret) == [1, 2]
}
]
]
for taget[tuple[[<ast.Name object at 0x7da1b0f51960>, <ast.Name object at 0x7da1b0f52e00>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:]
if compare[name[i] equal[==] name[n]] begin[:]
break
<ast.Yield object at 0x7da1b0f5a290> | keyword[def] identifier[Take] ( identifier[self] : identifier[Iterable] , identifier[n] ):
literal[string]
keyword[for] identifier[i] , identifier[e] keyword[in] identifier[enumerate] ( identifier[self] ):
keyword[if] identifier[i] == identifier[n] :
keyword[break]
keyword[yield] identifier[e] | def Take(self: Iterable, n):
"""
[
{
'self': [1, 2, 3],
'n': 2,
'assert': lambda ret: list(ret) == [1, 2]
}
]
"""
for (i, e) in enumerate(self):
if i == n:
break # depends on [control=['if'], data=[]]
yield e # depends on [control=['for'], data=[]] |
def unix_time(self, end_datetime=None, start_datetime=None):
    """
    Get a timestamp between January 1, 1970 and now, unless passed
    explicit start_datetime or end_datetime values.
    :example 1061306726
    """
    # Normalize both bounds (defaults applied by the parse helpers), then
    # draw a uniform integer timestamp from the inclusive range.
    return self.generator.random.randint(
        self._parse_start_datetime(start_datetime),
        self._parse_end_datetime(end_datetime),
    )
constant[
Get a timestamp between January 1, 1970 and now, unless passed
explicit start_datetime or end_datetime values.
:example 1061306726
]
variable[start_datetime] assign[=] call[name[self]._parse_start_datetime, parameter[name[start_datetime]]]
variable[end_datetime] assign[=] call[name[self]._parse_end_datetime, parameter[name[end_datetime]]]
return[call[name[self].generator.random.randint, parameter[name[start_datetime], name[end_datetime]]]] | keyword[def] identifier[unix_time] ( identifier[self] , identifier[end_datetime] = keyword[None] , identifier[start_datetime] = keyword[None] ):
literal[string]
identifier[start_datetime] = identifier[self] . identifier[_parse_start_datetime] ( identifier[start_datetime] )
identifier[end_datetime] = identifier[self] . identifier[_parse_end_datetime] ( identifier[end_datetime] )
keyword[return] identifier[self] . identifier[generator] . identifier[random] . identifier[randint] ( identifier[start_datetime] , identifier[end_datetime] ) | def unix_time(self, end_datetime=None, start_datetime=None):
"""
Get a timestamp between January 1, 1970 and now, unless passed
explicit start_datetime or end_datetime values.
:example 1061306726
"""
start_datetime = self._parse_start_datetime(start_datetime)
end_datetime = self._parse_end_datetime(end_datetime)
return self.generator.random.randint(start_datetime, end_datetime) |
def blocking(func, *args, **kwargs):
    """Run a function that uses blocking IO.

    The call is submitted to the IO thread pool and this function blocks
    until its result is available, returning it (or re-raising any
    exception the call produced).
    """
    return get_io_pool().submit(func, *args, **kwargs).result()
constant[Run a function that uses blocking IO.
The function is run in the IO thread pool.
]
variable[pool] assign[=] call[name[get_io_pool], parameter[]]
variable[fut] assign[=] call[name[pool].submit, parameter[name[func], <ast.Starred object at 0x7da1b033b190>]]
return[call[name[fut].result, parameter[]]] | keyword[def] identifier[blocking] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[pool] = identifier[get_io_pool] ()
identifier[fut] = identifier[pool] . identifier[submit] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[fut] . identifier[result] () | def blocking(func, *args, **kwargs):
"""Run a function that uses blocking IO.
The function is run in the IO thread pool.
"""
pool = get_io_pool()
fut = pool.submit(func, *args, **kwargs)
return fut.result() |
def from_ascii_hex(text: str) -> int:
    """Converts to an int value from both ASCII and regular hex.

    The format used appears to vary based on whether the command was to
    get an existing value (regular hex) or set a new value (ASCII hex
    mirrored back from original command).

    Regular hex: 0123456789abcdef
    ASCII hex:   0123456789:;<=>?

    :param text: hex digits in either alphabet (empty string yields 0).
    :return: the decoded non-negative integer.
    :raises ValueError: if any character is outside both alphabets
        (note: uppercase A-F are deliberately rejected).
    """
    value = 0
    # Iterate characters directly instead of index+slice; accumulate
    # big-endian, one nibble per character.
    for char in text:
        code = ord(char)
        if ord('0') <= code <= ord('?'):
            # '0'-'9' map to 0-9; ':'-'?' are the ASCII-hex forms of 10-15.
            digit = code - ord('0')
        elif ord('a') <= code <= ord('f'):
            digit = 0xa + (code - ord('a'))
        else:
            raise ValueError(
                "Response contains invalid character.")
        value = (value * 0x10) + digit
    return value
constant[Converts to an int value from both ASCII and regular hex.
The format used appears to vary based on whether the command was to
get an existing value (regular hex) or set a new value (ASCII hex
mirrored back from original command).
Regular hex: 0123456789abcdef
ASCII hex: 0123456789:;<=>? ]
variable[value] assign[=] constant[0]
for taget[name[index]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[text]]]]]] begin[:]
variable[char_ord] assign[=] call[name[ord], parameter[call[name[text]][<ast.Slice object at 0x7da204346050>]]]
if compare[name[char_ord] in call[name[range], parameter[call[name[ord], parameter[constant[0]]], binary_operation[call[name[ord], parameter[constant[?]]] + constant[1]]]]] begin[:]
variable[digit] assign[=] binary_operation[name[char_ord] - call[name[ord], parameter[constant[0]]]]
variable[value] assign[=] binary_operation[binary_operation[name[value] * constant[16]] + name[digit]]
return[name[value]] | keyword[def] identifier[from_ascii_hex] ( identifier[text] : identifier[str] )-> identifier[int] :
literal[string]
identifier[value] = literal[int]
keyword[for] identifier[index] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[text] )):
identifier[char_ord] = identifier[ord] ( identifier[text] [ identifier[index] : identifier[index] + literal[int] ])
keyword[if] identifier[char_ord] keyword[in] identifier[range] ( identifier[ord] ( literal[string] ), identifier[ord] ( literal[string] )+ literal[int] ):
identifier[digit] = identifier[char_ord] - identifier[ord] ( literal[string] )
keyword[elif] identifier[char_ord] keyword[in] identifier[range] ( identifier[ord] ( literal[string] ), identifier[ord] ( literal[string] )+ literal[int] ):
identifier[digit] = literal[int] +( identifier[char_ord] - identifier[ord] ( literal[string] ))
keyword[else] :
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[value] =( identifier[value] * literal[int] )+ identifier[digit]
keyword[return] identifier[value] | def from_ascii_hex(text: str) -> int:
"""Converts to an int value from both ASCII and regular hex.
The format used appears to vary based on whether the command was to
get an existing value (regular hex) or set a new value (ASCII hex
mirrored back from original command).
Regular hex: 0123456789abcdef
ASCII hex: 0123456789:;<=>? """
value = 0
for index in range(0, len(text)):
char_ord = ord(text[index:index + 1])
if char_ord in range(ord('0'), ord('?') + 1):
digit = char_ord - ord('0') # depends on [control=['if'], data=['char_ord']]
elif char_ord in range(ord('a'), ord('f') + 1):
digit = 10 + (char_ord - ord('a')) # depends on [control=['if'], data=['char_ord']]
else:
raise ValueError('Response contains invalid character.')
value = value * 16 + digit # depends on [control=['for'], data=['index']]
return value |
def graphdata(data):
    """Return ratings and running episode numbers for making graphs.

    :param data: raw show data accepted by ``jh.get_ratings``.
    :return: tuple ``(rating_final, episode_final)`` of parallel
        lists-of-lists — one float-rating list and one episode-number list
        per group (presumably per season — confirm against jh.get_ratings).
    """
    data = jh.get_ratings(data)
    num = 1  # running episode counter, shared across all groups
    rating_final = []
    episode_final = []
    for _key, group_ratings in data.iteritems():
        rating = []
        epinum = []
        for r in group_ratings:
            # NOTE(review): the counter only advances for rated episodes,
            # so unrated (None) entries are skipped entirely — preserved
            # from the original; confirm this numbering is intended.
            if r is not None:
                rating.append(float(r))
                epinum.append(num)
                num += 1
        rating_final.append(rating)
        episode_final.append(epinum)
    return rating_final, episode_final
constant[returns ratings and episode number
to be used for making graphs]
variable[data] assign[=] call[name[jh].get_ratings, parameter[name[data]]]
variable[num] assign[=] constant[1]
variable[rating_final] assign[=] list[[]]
variable[episode_final] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18c4ce350>, <ast.Name object at 0x7da18c4cf3d0>]]] in starred[call[name[data].iteritems, parameter[]]] begin[:]
variable[rating] assign[=] list[[]]
variable[epinum] assign[=] list[[]]
for taget[name[r]] in starred[name[v]] begin[:]
if compare[name[r] not_equal[!=] constant[None]] begin[:]
call[name[rating].append, parameter[call[name[float], parameter[name[r]]]]]
call[name[epinum].append, parameter[name[num]]]
<ast.AugAssign object at 0x7da18f813f40>
call[name[rating_final].append, parameter[name[rating]]]
call[name[episode_final].append, parameter[name[epinum]]]
return[tuple[[<ast.Name object at 0x7da18eb54a90>, <ast.Name object at 0x7da18eb57040>]]] | keyword[def] identifier[graphdata] ( identifier[data] ):
literal[string]
identifier[data] = identifier[jh] . identifier[get_ratings] ( identifier[data] )
identifier[num] = literal[int]
identifier[rating_final] =[]
identifier[episode_final] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[data] . identifier[iteritems] ():
identifier[rating] =[]
identifier[epinum] =[]
keyword[for] identifier[r] keyword[in] identifier[v] :
keyword[if] identifier[r] != keyword[None] :
identifier[rating] . identifier[append] ( identifier[float] ( identifier[r] ))
identifier[epinum] . identifier[append] ( identifier[num] )
identifier[num] += literal[int]
identifier[rating_final] . identifier[append] ( identifier[rating] )
identifier[episode_final] . identifier[append] ( identifier[epinum] )
keyword[return] identifier[rating_final] , identifier[episode_final] | def graphdata(data):
"""returns ratings and episode number
to be used for making graphs"""
data = jh.get_ratings(data)
num = 1
rating_final = []
episode_final = []
for (k, v) in data.iteritems():
rating = []
epinum = []
for r in v:
if r != None:
rating.append(float(r))
epinum.append(num)
num += 1 # depends on [control=['if'], data=['r']] # depends on [control=['for'], data=['r']]
rating_final.append(rating)
episode_final.append(epinum) # depends on [control=['for'], data=[]]
return (rating_final, episode_final) |
def read_input(self):
    """Section 3 - Read Input File (.m, file)

    Parses the .uwg parameter CSV into ``self._init_param_dict`` and then
    copies each value onto the matching instance attribute, but only when
    that attribute is still ``None`` (explicitly-set values win over file
    values).

    Note: UWG_Matlab input files are xlsm, XML, .m, file.
    properties:
        self._init_param_dict # dictionary of simulation initialization parameters
        self.sensAnth # non-building sensible heat (W/m^2)
        self.SchTraffic # Traffic schedule
        self.BEM # list of BEMDef objects extracted from readDOE
        self.Sch # list of Schedule objects extracted from readDOE
    """
    uwg_param_file_path = os.path.join(self.uwgParamDir, self.uwgParamFileName)
    if not os.path.exists(uwg_param_file_path):
        raise Exception("Param file: '{}' does not exist.".format(uwg_param_file_path))
    # Open .uwg file and feed csv data to initializeDataFile
    try:
        uwg_param_data = utilities.read_csv(uwg_param_file_path)
    except Exception as e:
        # NOTE(review): `e.message` is Python-2-only; on Python 3 this line
        # itself raises AttributeError — confirm target interpreter.
        raise Exception("Failed to read .uwg file! {}".format(e.message))
    # The initialize.uwg is read with a dictionary so that users changing
    # line endings or line numbers doesn't make reading input incorrect
    self._init_param_dict = {}
    count = 0
    # Manual cursor (not a for-loop) because matrix parameters consume
    # several rows at once.
    while count < len(uwg_param_data):
        row = uwg_param_data[count]
        row = [row[i].replace(" ", "") for i in range(len(row))] # strip white spaces
        # Optional parameters might be empty so handle separately
        is_optional_parameter = (
            row != [] and
            (
                row[0] == "albRoof" or
                row[0] == "vegRoof" or
                row[0] == "glzR" or
                row[0] == "hvac" or
                row[0] == "albWall" or
                row[0] == "SHGC"
            )
        )
        try:
            # Skip blank rows and comment rows (any cell-0 containing '#').
            if row == [] or "#" in row[0]:
                count += 1
                continue
            elif row[0] == "SchTraffic":
                # SchTraffic: 3 x 24 matrix
                trafficrows = uwg_param_data[count+1:count+4]
                self._init_param_dict[row[0]] = [utilities.str2fl(r[:24]) for r in trafficrows]
                count += 4
            elif row[0] == "bld":
                # bld: 17 x 3 matrix
                # NOTE(review): count+1:count+17 slices only 16 rows even
                # though the comment (and the skip of 17) says 17 — confirm
                # the expected row count of the bld matrix.
                bldrows = uwg_param_data[count+1:count+17]
                self._init_param_dict[row[0]] = [utilities.str2fl(r[:3]) for r in bldrows]
                count += 17
            elif is_optional_parameter:
                # Optional scalars may be left blank -> stored as None.
                self._init_param_dict[row[0]] = float(row[1]) if row[1] != "" else None
                count += 1
            else:
                self._init_param_dict[row[0]] = float(row[1])
                count += 1
        except ValueError:
            # NOTE(review): on a ValueError `count` is never advanced, so a
            # malformed scalar row makes this loop spin forever — confirm
            # whether the handler should also increment `count`.
            print("Error while reading parameter at {} {}".format(count, row))
    ipd = self._init_param_dict
    # Define Simulation and Weather parameters
    if self.Month is None: self.Month = ipd['Month']
    if self.Day is None: self.Day = ipd['Day']
    if self.nDay is None: self.nDay = ipd['nDay']
    if self.dtSim is None: self.dtSim = ipd['dtSim']
    if self.dtWeather is None: self.dtWeather = ipd['dtWeather']
    # HVAC system and internal load
    if self.autosize is None: self.autosize = ipd['autosize']
    if self.sensOcc is None: self.sensOcc = ipd['sensOcc']
    if self.LatFOcc is None: self.LatFOcc = ipd['LatFOcc']
    if self.RadFOcc is None: self.RadFOcc = ipd['RadFOcc']
    if self.RadFEquip is None: self.RadFEquip = ipd['RadFEquip']
    if self.RadFLight is None: self.RadFLight = ipd['RadFLight']
    # Define Urban microclimate parameters
    if self.h_ubl1 is None: self.h_ubl1 = ipd['h_ubl1']
    if self.h_ubl2 is None: self.h_ubl2 = ipd['h_ubl2']
    if self.h_ref is None: self.h_ref = ipd['h_ref']
    if self.h_temp is None: self.h_temp = ipd['h_temp']
    if self.h_wind is None: self.h_wind = ipd['h_wind']
    if self.c_circ is None: self.c_circ = ipd['c_circ']
    if self.c_exch is None: self.c_exch = ipd['c_exch']
    if self.maxDay is None: self.maxDay = ipd['maxDay']
    if self.maxNight is None: self.maxNight = ipd['maxNight']
    if self.windMin is None: self.windMin = ipd['windMin']
    if self.h_obs is None: self.h_obs = ipd['h_obs']
    # Urban characteristics
    if self.bldHeight is None: self.bldHeight = ipd['bldHeight']
    if self.h_mix is None: self.h_mix = ipd['h_mix']
    if self.bldDensity is None: self.bldDensity = ipd['bldDensity']
    if self.verToHor is None: self.verToHor = ipd['verToHor']
    if self.charLength is None: self.charLength = ipd['charLength']
    # Attribute names differ from file keys for the road parameters.
    if self.alb_road is None: self.alb_road = ipd['albRoad']
    if self.d_road is None: self.d_road = ipd['dRoad']
    if self.sensAnth is None: self.sensAnth = ipd['sensAnth']
    # if self.latAnth is None: self.latAnth = ipd['latAnth'] # Not used, taken out by JH.
    # climate Zone
    if self.zone is None: self.zone = ipd['zone']
    # Vegetation parameters
    if self.vegCover is None: self.vegCover = ipd['vegCover']
    if self.treeCoverage is None: self.treeCoverage = ipd['treeCoverage']
    if self.vegStart is None: self.vegStart = ipd['vegStart']
    if self.vegEnd is None: self.vegEnd = ipd['vegEnd']
    if self.albVeg is None: self.albVeg = ipd['albVeg']
    if self.rurVegCover is None: self.rurVegCover = ipd['rurVegCover']
    if self.latGrss is None: self.latGrss = ipd['latGrss']
    if self.latTree is None: self.latTree = ipd['latTree']
    # Define Traffic schedule
    if self.SchTraffic is None: self.SchTraffic = ipd['SchTraffic']
    # Define Road (Assume 0.5m of asphalt)
    if self.kRoad is None: self.kRoad = ipd['kRoad']
    if self.cRoad is None: self.cRoad = ipd['cRoad']
    # Building stock fraction
    if self.bld is None: self.bld = ipd['bld']
    # Optional parameters (may be None if left blank in the file)
    if self.albRoof is None: self.albRoof = ipd['albRoof']
    if self.vegRoof is None: self.vegRoof = ipd['vegRoof']
    if self.glzR is None: self.glzR = ipd['glzR']
    if self.albWall is None: self.albWall = ipd['albWall']
    if self.SHGC is None: self.SHGC = ipd['SHGC']
constant[Section 3 - Read Input File (.m, file)
Note: UWG_Matlab input files are xlsm, XML, .m, file.
properties:
self._init_param_dict # dictionary of simulation initialization parameters
self.sensAnth # non-building sensible heat (W/m^2)
self.SchTraffic # Traffice schedule
self.BEM # list of BEMDef objects extracted from readDOE
self.Sch # list of Schedule objects extracted from readDOE
]
variable[uwg_param_file_path] assign[=] call[name[os].path.join, parameter[name[self].uwgParamDir, name[self].uwgParamFileName]]
if <ast.UnaryOp object at 0x7da1b192fc70> begin[:]
<ast.Raise object at 0x7da1b192fb50>
<ast.Try object at 0x7da1b192fa00>
name[self]._init_param_dict assign[=] dictionary[[], []]
variable[count] assign[=] constant[0]
while compare[name[count] less[<] call[name[len], parameter[name[uwg_param_data]]]] begin[:]
variable[row] assign[=] call[name[uwg_param_data]][name[count]]
variable[row] assign[=] <ast.ListComp object at 0x7da1b192f310>
variable[is_optional_parameter] assign[=] <ast.BoolOp object at 0x7da1b192efb0>
<ast.Try object at 0x7da1b192e8f0>
variable[ipd] assign[=] name[self]._init_param_dict
if compare[name[self].Month is constant[None]] begin[:]
name[self].Month assign[=] call[name[ipd]][constant[Month]]
if compare[name[self].Day is constant[None]] begin[:]
name[self].Day assign[=] call[name[ipd]][constant[Day]]
if compare[name[self].nDay is constant[None]] begin[:]
name[self].nDay assign[=] call[name[ipd]][constant[nDay]]
if compare[name[self].dtSim is constant[None]] begin[:]
name[self].dtSim assign[=] call[name[ipd]][constant[dtSim]]
if compare[name[self].dtWeather is constant[None]] begin[:]
name[self].dtWeather assign[=] call[name[ipd]][constant[dtWeather]]
if compare[name[self].autosize is constant[None]] begin[:]
name[self].autosize assign[=] call[name[ipd]][constant[autosize]]
if compare[name[self].sensOcc is constant[None]] begin[:]
name[self].sensOcc assign[=] call[name[ipd]][constant[sensOcc]]
if compare[name[self].LatFOcc is constant[None]] begin[:]
name[self].LatFOcc assign[=] call[name[ipd]][constant[LatFOcc]]
if compare[name[self].RadFOcc is constant[None]] begin[:]
name[self].RadFOcc assign[=] call[name[ipd]][constant[RadFOcc]]
if compare[name[self].RadFEquip is constant[None]] begin[:]
name[self].RadFEquip assign[=] call[name[ipd]][constant[RadFEquip]]
if compare[name[self].RadFLight is constant[None]] begin[:]
name[self].RadFLight assign[=] call[name[ipd]][constant[RadFLight]]
if compare[name[self].h_ubl1 is constant[None]] begin[:]
name[self].h_ubl1 assign[=] call[name[ipd]][constant[h_ubl1]]
if compare[name[self].h_ubl2 is constant[None]] begin[:]
name[self].h_ubl2 assign[=] call[name[ipd]][constant[h_ubl2]]
if compare[name[self].h_ref is constant[None]] begin[:]
name[self].h_ref assign[=] call[name[ipd]][constant[h_ref]]
if compare[name[self].h_temp is constant[None]] begin[:]
name[self].h_temp assign[=] call[name[ipd]][constant[h_temp]]
if compare[name[self].h_wind is constant[None]] begin[:]
name[self].h_wind assign[=] call[name[ipd]][constant[h_wind]]
if compare[name[self].c_circ is constant[None]] begin[:]
name[self].c_circ assign[=] call[name[ipd]][constant[c_circ]]
if compare[name[self].c_exch is constant[None]] begin[:]
name[self].c_exch assign[=] call[name[ipd]][constant[c_exch]]
if compare[name[self].maxDay is constant[None]] begin[:]
name[self].maxDay assign[=] call[name[ipd]][constant[maxDay]]
if compare[name[self].maxNight is constant[None]] begin[:]
name[self].maxNight assign[=] call[name[ipd]][constant[maxNight]]
if compare[name[self].windMin is constant[None]] begin[:]
name[self].windMin assign[=] call[name[ipd]][constant[windMin]]
if compare[name[self].h_obs is constant[None]] begin[:]
name[self].h_obs assign[=] call[name[ipd]][constant[h_obs]]
if compare[name[self].bldHeight is constant[None]] begin[:]
name[self].bldHeight assign[=] call[name[ipd]][constant[bldHeight]]
if compare[name[self].h_mix is constant[None]] begin[:]
name[self].h_mix assign[=] call[name[ipd]][constant[h_mix]]
if compare[name[self].bldDensity is constant[None]] begin[:]
name[self].bldDensity assign[=] call[name[ipd]][constant[bldDensity]]
if compare[name[self].verToHor is constant[None]] begin[:]
name[self].verToHor assign[=] call[name[ipd]][constant[verToHor]]
if compare[name[self].charLength is constant[None]] begin[:]
name[self].charLength assign[=] call[name[ipd]][constant[charLength]]
if compare[name[self].alb_road is constant[None]] begin[:]
name[self].alb_road assign[=] call[name[ipd]][constant[albRoad]]
if compare[name[self].d_road is constant[None]] begin[:]
name[self].d_road assign[=] call[name[ipd]][constant[dRoad]]
if compare[name[self].sensAnth is constant[None]] begin[:]
name[self].sensAnth assign[=] call[name[ipd]][constant[sensAnth]]
if compare[name[self].zone is constant[None]] begin[:]
name[self].zone assign[=] call[name[ipd]][constant[zone]]
if compare[name[self].vegCover is constant[None]] begin[:]
name[self].vegCover assign[=] call[name[ipd]][constant[vegCover]]
if compare[name[self].treeCoverage is constant[None]] begin[:]
name[self].treeCoverage assign[=] call[name[ipd]][constant[treeCoverage]]
if compare[name[self].vegStart is constant[None]] begin[:]
name[self].vegStart assign[=] call[name[ipd]][constant[vegStart]]
if compare[name[self].vegEnd is constant[None]] begin[:]
name[self].vegEnd assign[=] call[name[ipd]][constant[vegEnd]]
if compare[name[self].albVeg is constant[None]] begin[:]
name[self].albVeg assign[=] call[name[ipd]][constant[albVeg]]
if compare[name[self].rurVegCover is constant[None]] begin[:]
name[self].rurVegCover assign[=] call[name[ipd]][constant[rurVegCover]]
if compare[name[self].latGrss is constant[None]] begin[:]
name[self].latGrss assign[=] call[name[ipd]][constant[latGrss]]
if compare[name[self].latTree is constant[None]] begin[:]
name[self].latTree assign[=] call[name[ipd]][constant[latTree]]
if compare[name[self].SchTraffic is constant[None]] begin[:]
name[self].SchTraffic assign[=] call[name[ipd]][constant[SchTraffic]]
if compare[name[self].kRoad is constant[None]] begin[:]
name[self].kRoad assign[=] call[name[ipd]][constant[kRoad]]
if compare[name[self].cRoad is constant[None]] begin[:]
name[self].cRoad assign[=] call[name[ipd]][constant[cRoad]]
if compare[name[self].bld is constant[None]] begin[:]
name[self].bld assign[=] call[name[ipd]][constant[bld]]
if compare[name[self].albRoof is constant[None]] begin[:]
name[self].albRoof assign[=] call[name[ipd]][constant[albRoof]]
if compare[name[self].vegRoof is constant[None]] begin[:]
name[self].vegRoof assign[=] call[name[ipd]][constant[vegRoof]]
if compare[name[self].glzR is constant[None]] begin[:]
name[self].glzR assign[=] call[name[ipd]][constant[glzR]]
if compare[name[self].albWall is constant[None]] begin[:]
name[self].albWall assign[=] call[name[ipd]][constant[albWall]]
if compare[name[self].SHGC is constant[None]] begin[:]
name[self].SHGC assign[=] call[name[ipd]][constant[SHGC]] | keyword[def] identifier[read_input] ( identifier[self] ):
literal[string]
identifier[uwg_param_file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[uwgParamDir] , identifier[self] . identifier[uwgParamFileName] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[uwg_param_file_path] ):
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[uwg_param_file_path] ))
keyword[try] :
identifier[uwg_param_data] = identifier[utilities] . identifier[read_csv] ( identifier[uwg_param_file_path] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[e] . identifier[message] ))
identifier[self] . identifier[_init_param_dict] ={}
identifier[count] = literal[int]
keyword[while] identifier[count] < identifier[len] ( identifier[uwg_param_data] ):
identifier[row] = identifier[uwg_param_data] [ identifier[count] ]
identifier[row] =[ identifier[row] [ identifier[i] ]. identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[row] ))]
identifier[is_optional_parameter] =(
identifier[row] !=[] keyword[and]
(
identifier[row] [ literal[int] ]== literal[string] keyword[or]
identifier[row] [ literal[int] ]== literal[string] keyword[or]
identifier[row] [ literal[int] ]== literal[string] keyword[or]
identifier[row] [ literal[int] ]== literal[string] keyword[or]
identifier[row] [ literal[int] ]== literal[string] keyword[or]
identifier[row] [ literal[int] ]== literal[string]
)
)
keyword[try] :
keyword[if] identifier[row] ==[] keyword[or] literal[string] keyword[in] identifier[row] [ literal[int] ]:
identifier[count] += literal[int]
keyword[continue]
keyword[elif] identifier[row] [ literal[int] ]== literal[string] :
identifier[trafficrows] = identifier[uwg_param_data] [ identifier[count] + literal[int] : identifier[count] + literal[int] ]
identifier[self] . identifier[_init_param_dict] [ identifier[row] [ literal[int] ]]=[ identifier[utilities] . identifier[str2fl] ( identifier[r] [: literal[int] ]) keyword[for] identifier[r] keyword[in] identifier[trafficrows] ]
identifier[count] += literal[int]
keyword[elif] identifier[row] [ literal[int] ]== literal[string] :
identifier[bldrows] = identifier[uwg_param_data] [ identifier[count] + literal[int] : identifier[count] + literal[int] ]
identifier[self] . identifier[_init_param_dict] [ identifier[row] [ literal[int] ]]=[ identifier[utilities] . identifier[str2fl] ( identifier[r] [: literal[int] ]) keyword[for] identifier[r] keyword[in] identifier[bldrows] ]
identifier[count] += literal[int]
keyword[elif] identifier[is_optional_parameter] :
identifier[self] . identifier[_init_param_dict] [ identifier[row] [ literal[int] ]]= identifier[float] ( identifier[row] [ literal[int] ]) keyword[if] identifier[row] [ literal[int] ]!= literal[string] keyword[else] keyword[None]
identifier[count] += literal[int]
keyword[else] :
identifier[self] . identifier[_init_param_dict] [ identifier[row] [ literal[int] ]]= identifier[float] ( identifier[row] [ literal[int] ])
identifier[count] += literal[int]
keyword[except] identifier[ValueError] :
identifier[print] ( literal[string] . identifier[format] ( identifier[count] , identifier[row] ))
identifier[ipd] = identifier[self] . identifier[_init_param_dict]
keyword[if] identifier[self] . identifier[Month] keyword[is] keyword[None] : identifier[self] . identifier[Month] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[Day] keyword[is] keyword[None] : identifier[self] . identifier[Day] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[nDay] keyword[is] keyword[None] : identifier[self] . identifier[nDay] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[dtSim] keyword[is] keyword[None] : identifier[self] . identifier[dtSim] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[dtWeather] keyword[is] keyword[None] : identifier[self] . identifier[dtWeather] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[autosize] keyword[is] keyword[None] : identifier[self] . identifier[autosize] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[sensOcc] keyword[is] keyword[None] : identifier[self] . identifier[sensOcc] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[LatFOcc] keyword[is] keyword[None] : identifier[self] . identifier[LatFOcc] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[RadFOcc] keyword[is] keyword[None] : identifier[self] . identifier[RadFOcc] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[RadFEquip] keyword[is] keyword[None] : identifier[self] . identifier[RadFEquip] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[RadFLight] keyword[is] keyword[None] : identifier[self] . identifier[RadFLight] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_ubl1] keyword[is] keyword[None] : identifier[self] . identifier[h_ubl1] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_ubl2] keyword[is] keyword[None] : identifier[self] . identifier[h_ubl2] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_ref] keyword[is] keyword[None] : identifier[self] . identifier[h_ref] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_temp] keyword[is] keyword[None] : identifier[self] . identifier[h_temp] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_wind] keyword[is] keyword[None] : identifier[self] . identifier[h_wind] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[c_circ] keyword[is] keyword[None] : identifier[self] . identifier[c_circ] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[c_exch] keyword[is] keyword[None] : identifier[self] . identifier[c_exch] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[maxDay] keyword[is] keyword[None] : identifier[self] . identifier[maxDay] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[maxNight] keyword[is] keyword[None] : identifier[self] . identifier[maxNight] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[windMin] keyword[is] keyword[None] : identifier[self] . identifier[windMin] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_obs] keyword[is] keyword[None] : identifier[self] . identifier[h_obs] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[bldHeight] keyword[is] keyword[None] : identifier[self] . identifier[bldHeight] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[h_mix] keyword[is] keyword[None] : identifier[self] . identifier[h_mix] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[bldDensity] keyword[is] keyword[None] : identifier[self] . identifier[bldDensity] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[verToHor] keyword[is] keyword[None] : identifier[self] . identifier[verToHor] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[charLength] keyword[is] keyword[None] : identifier[self] . identifier[charLength] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[alb_road] keyword[is] keyword[None] : identifier[self] . identifier[alb_road] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[d_road] keyword[is] keyword[None] : identifier[self] . identifier[d_road] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[sensAnth] keyword[is] keyword[None] : identifier[self] . identifier[sensAnth] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[zone] keyword[is] keyword[None] : identifier[self] . identifier[zone] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[vegCover] keyword[is] keyword[None] : identifier[self] . identifier[vegCover] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[treeCoverage] keyword[is] keyword[None] : identifier[self] . identifier[treeCoverage] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[vegStart] keyword[is] keyword[None] : identifier[self] . identifier[vegStart] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[vegEnd] keyword[is] keyword[None] : identifier[self] . identifier[vegEnd] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[albVeg] keyword[is] keyword[None] : identifier[self] . identifier[albVeg] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[rurVegCover] keyword[is] keyword[None] : identifier[self] . identifier[rurVegCover] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[latGrss] keyword[is] keyword[None] : identifier[self] . identifier[latGrss] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[latTree] keyword[is] keyword[None] : identifier[self] . identifier[latTree] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[SchTraffic] keyword[is] keyword[None] : identifier[self] . identifier[SchTraffic] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[kRoad] keyword[is] keyword[None] : identifier[self] . identifier[kRoad] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[cRoad] keyword[is] keyword[None] : identifier[self] . identifier[cRoad] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[bld] keyword[is] keyword[None] : identifier[self] . identifier[bld] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[albRoof] keyword[is] keyword[None] : identifier[self] . identifier[albRoof] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[vegRoof] keyword[is] keyword[None] : identifier[self] . identifier[vegRoof] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[glzR] keyword[is] keyword[None] : identifier[self] . identifier[glzR] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[albWall] keyword[is] keyword[None] : identifier[self] . identifier[albWall] = identifier[ipd] [ literal[string] ]
keyword[if] identifier[self] . identifier[SHGC] keyword[is] keyword[None] : identifier[self] . identifier[SHGC] = identifier[ipd] [ literal[string] ] | def read_input(self):
"""Section 3 - Read Input File (.m, file)
Note: UWG_Matlab input files are xlsm, XML, .m, file.
properties:
self._init_param_dict # dictionary of simulation initialization parameters
self.sensAnth # non-building sensible heat (W/m^2)
self.SchTraffic # Traffice schedule
self.BEM # list of BEMDef objects extracted from readDOE
self.Sch # list of Schedule objects extracted from readDOE
"""
uwg_param_file_path = os.path.join(self.uwgParamDir, self.uwgParamFileName)
if not os.path.exists(uwg_param_file_path):
raise Exception("Param file: '{}' does not exist.".format(uwg_param_file_path)) # depends on [control=['if'], data=[]] # Open .uwg file and feed csv data to initializeDataFile
try:
uwg_param_data = utilities.read_csv(uwg_param_file_path) # depends on [control=['try'], data=[]]
except Exception as e:
raise Exception('Failed to read .uwg file! {}'.format(e.message)) # depends on [control=['except'], data=['e']] # The initialize.uwg is read with a dictionary so that users changing
# line endings or line numbers doesn't make reading input incorrect
self._init_param_dict = {}
count = 0
while count < len(uwg_param_data):
row = uwg_param_data[count]
row = [row[i].replace(' ', '') for i in range(len(row))] # strip white spaces
# Optional parameters might be empty so handle separately
is_optional_parameter = row != [] and (row[0] == 'albRoof' or row[0] == 'vegRoof' or row[0] == 'glzR' or (row[0] == 'hvac') or (row[0] == 'albWall') or (row[0] == 'SHGC'))
try:
if row == [] or '#' in row[0]:
count += 1
continue # depends on [control=['if'], data=[]]
elif row[0] == 'SchTraffic': # SchTraffic: 3 x 24 matrix
trafficrows = uwg_param_data[count + 1:count + 4]
self._init_param_dict[row[0]] = [utilities.str2fl(r[:24]) for r in trafficrows]
count += 4 # depends on [control=['if'], data=[]]
elif row[0] == 'bld': # bld: 17 x 3 matrix
bldrows = uwg_param_data[count + 1:count + 17]
self._init_param_dict[row[0]] = [utilities.str2fl(r[:3]) for r in bldrows]
count += 17 # depends on [control=['if'], data=[]]
elif is_optional_parameter:
self._init_param_dict[row[0]] = float(row[1]) if row[1] != '' else None
count += 1 # depends on [control=['if'], data=[]]
else:
self._init_param_dict[row[0]] = float(row[1])
count += 1 # depends on [control=['try'], data=[]]
except ValueError:
print('Error while reading parameter at {} {}'.format(count, row)) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=['count']]
ipd = self._init_param_dict # Define Simulation and Weather parameters
if self.Month is None:
self.Month = ipd['Month'] # depends on [control=['if'], data=[]]
if self.Day is None:
self.Day = ipd['Day'] # depends on [control=['if'], data=[]]
if self.nDay is None:
self.nDay = ipd['nDay'] # depends on [control=['if'], data=[]]
if self.dtSim is None:
self.dtSim = ipd['dtSim'] # depends on [control=['if'], data=[]]
if self.dtWeather is None:
self.dtWeather = ipd['dtWeather'] # depends on [control=['if'], data=[]] # HVAC system and internal laod
if self.autosize is None:
self.autosize = ipd['autosize'] # depends on [control=['if'], data=[]]
if self.sensOcc is None:
self.sensOcc = ipd['sensOcc'] # depends on [control=['if'], data=[]]
if self.LatFOcc is None:
self.LatFOcc = ipd['LatFOcc'] # depends on [control=['if'], data=[]]
if self.RadFOcc is None:
self.RadFOcc = ipd['RadFOcc'] # depends on [control=['if'], data=[]]
if self.RadFEquip is None:
self.RadFEquip = ipd['RadFEquip'] # depends on [control=['if'], data=[]]
if self.RadFLight is None:
self.RadFLight = ipd['RadFLight'] # depends on [control=['if'], data=[]] # Define Urban microclimate parameters
if self.h_ubl1 is None:
self.h_ubl1 = ipd['h_ubl1'] # depends on [control=['if'], data=[]]
if self.h_ubl2 is None:
self.h_ubl2 = ipd['h_ubl2'] # depends on [control=['if'], data=[]]
if self.h_ref is None:
self.h_ref = ipd['h_ref'] # depends on [control=['if'], data=[]]
if self.h_temp is None:
self.h_temp = ipd['h_temp'] # depends on [control=['if'], data=[]]
if self.h_wind is None:
self.h_wind = ipd['h_wind'] # depends on [control=['if'], data=[]]
if self.c_circ is None:
self.c_circ = ipd['c_circ'] # depends on [control=['if'], data=[]]
if self.c_exch is None:
self.c_exch = ipd['c_exch'] # depends on [control=['if'], data=[]]
if self.maxDay is None:
self.maxDay = ipd['maxDay'] # depends on [control=['if'], data=[]]
if self.maxNight is None:
self.maxNight = ipd['maxNight'] # depends on [control=['if'], data=[]]
if self.windMin is None:
self.windMin = ipd['windMin'] # depends on [control=['if'], data=[]]
if self.h_obs is None:
self.h_obs = ipd['h_obs'] # depends on [control=['if'], data=[]] # Urban characteristics
if self.bldHeight is None:
self.bldHeight = ipd['bldHeight'] # depends on [control=['if'], data=[]]
if self.h_mix is None:
self.h_mix = ipd['h_mix'] # depends on [control=['if'], data=[]]
if self.bldDensity is None:
self.bldDensity = ipd['bldDensity'] # depends on [control=['if'], data=[]]
if self.verToHor is None:
self.verToHor = ipd['verToHor'] # depends on [control=['if'], data=[]]
if self.charLength is None:
self.charLength = ipd['charLength'] # depends on [control=['if'], data=[]]
if self.alb_road is None:
self.alb_road = ipd['albRoad'] # depends on [control=['if'], data=[]]
if self.d_road is None:
self.d_road = ipd['dRoad'] # depends on [control=['if'], data=[]]
if self.sensAnth is None:
self.sensAnth = ipd['sensAnth'] # depends on [control=['if'], data=[]] # if self.latAnth is None: self.latAnth = ipd['latAnth'] # Not used, taken out by JH.
# climate Zone
if self.zone is None:
self.zone = ipd['zone'] # depends on [control=['if'], data=[]] # Vegetation parameters
if self.vegCover is None:
self.vegCover = ipd['vegCover'] # depends on [control=['if'], data=[]]
if self.treeCoverage is None:
self.treeCoverage = ipd['treeCoverage'] # depends on [control=['if'], data=[]]
if self.vegStart is None:
self.vegStart = ipd['vegStart'] # depends on [control=['if'], data=[]]
if self.vegEnd is None:
self.vegEnd = ipd['vegEnd'] # depends on [control=['if'], data=[]]
if self.albVeg is None:
self.albVeg = ipd['albVeg'] # depends on [control=['if'], data=[]]
if self.rurVegCover is None:
self.rurVegCover = ipd['rurVegCover'] # depends on [control=['if'], data=[]]
if self.latGrss is None:
self.latGrss = ipd['latGrss'] # depends on [control=['if'], data=[]]
if self.latTree is None:
self.latTree = ipd['latTree'] # depends on [control=['if'], data=[]] # Define Traffic schedule
if self.SchTraffic is None:
self.SchTraffic = ipd['SchTraffic'] # depends on [control=['if'], data=[]] # Define Road (Assume 0.5m of asphalt)
if self.kRoad is None:
self.kRoad = ipd['kRoad'] # depends on [control=['if'], data=[]]
if self.cRoad is None:
self.cRoad = ipd['cRoad'] # depends on [control=['if'], data=[]] # Building stock fraction
if self.bld is None:
self.bld = ipd['bld'] # depends on [control=['if'], data=[]] # Optional parameters
if self.albRoof is None:
self.albRoof = ipd['albRoof'] # depends on [control=['if'], data=[]]
if self.vegRoof is None:
self.vegRoof = ipd['vegRoof'] # depends on [control=['if'], data=[]]
if self.glzR is None:
self.glzR = ipd['glzR'] # depends on [control=['if'], data=[]]
if self.albWall is None:
self.albWall = ipd['albWall'] # depends on [control=['if'], data=[]]
if self.SHGC is None:
self.SHGC = ipd['SHGC'] # depends on [control=['if'], data=[]] |
def run(self):
"""
1. count the words for each of the :py:meth:`~.InputText.output` targets created by :py:class:`~.InputText`
2. write the count into the :py:meth:`~.WordCount.output` target
"""
count = {}
# NOTE: self.input() actually returns an element for the InputText.output() target
for f in self.input(): # The input() method is a wrapper around requires() that returns Target objects
for line in f.open('r'): # Target objects are a file system/format abstraction and this will return a file stream object
for word in line.strip().split():
count[word] = count.get(word, 0) + 1
# output data
f = self.output().open('w')
for word, count in six.iteritems(count):
f.write("%s\t%d\n" % (word, count))
f.close() | def function[run, parameter[self]]:
constant[
1. count the words for each of the :py:meth:`~.InputText.output` targets created by :py:class:`~.InputText`
2. write the count into the :py:meth:`~.WordCount.output` target
]
variable[count] assign[=] dictionary[[], []]
for taget[name[f]] in starred[call[name[self].input, parameter[]]] begin[:]
for taget[name[line]] in starred[call[name[f].open, parameter[constant[r]]]] begin[:]
for taget[name[word]] in starred[call[call[name[line].strip, parameter[]].split, parameter[]]] begin[:]
call[name[count]][name[word]] assign[=] binary_operation[call[name[count].get, parameter[name[word], constant[0]]] + constant[1]]
variable[f] assign[=] call[call[name[self].output, parameter[]].open, parameter[constant[w]]]
for taget[tuple[[<ast.Name object at 0x7da2041d9600>, <ast.Name object at 0x7da2041da320>]]] in starred[call[name[six].iteritems, parameter[name[count]]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[%s %d
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2041d9780>, <ast.Name object at 0x7da2041d8280>]]]]]
call[name[f].close, parameter[]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[count] ={}
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[input] ():
keyword[for] identifier[line] keyword[in] identifier[f] . identifier[open] ( literal[string] ):
keyword[for] identifier[word] keyword[in] identifier[line] . identifier[strip] (). identifier[split] ():
identifier[count] [ identifier[word] ]= identifier[count] . identifier[get] ( identifier[word] , literal[int] )+ literal[int]
identifier[f] = identifier[self] . identifier[output] (). identifier[open] ( literal[string] )
keyword[for] identifier[word] , identifier[count] keyword[in] identifier[six] . identifier[iteritems] ( identifier[count] ):
identifier[f] . identifier[write] ( literal[string] %( identifier[word] , identifier[count] ))
identifier[f] . identifier[close] () | def run(self):
"""
1. count the words for each of the :py:meth:`~.InputText.output` targets created by :py:class:`~.InputText`
2. write the count into the :py:meth:`~.WordCount.output` target
"""
count = {}
# NOTE: self.input() actually returns an element for the InputText.output() target
for f in self.input(): # The input() method is a wrapper around requires() that returns Target objects
for line in f.open('r'): # Target objects are a file system/format abstraction and this will return a file stream object
for word in line.strip().split():
count[word] = count.get(word, 0) + 1 # depends on [control=['for'], data=['word']] # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['f']]
# output data
f = self.output().open('w')
for (word, count) in six.iteritems(count):
f.write('%s\t%d\n' % (word, count)) # depends on [control=['for'], data=[]]
f.close() |
def _deleted_files():
'''
Iterates over /proc/PID/maps and /proc/PID/fd links and returns list of desired deleted files.
Returns:
List of deleted files to analyze, False on failure.
'''
deleted_files = []
for proc in psutil.process_iter(): # pylint: disable=too-many-nested-blocks
try:
pinfo = proc.as_dict(attrs=['pid', 'name'])
try:
with salt.utils.files.fopen('/proc/{0}/maps'.format(pinfo['pid'])) as maps: # pylint: disable=resource-leakage
dirpath = '/proc/' + six.text_type(pinfo['pid']) + '/fd/'
listdir = os.listdir(dirpath)
maplines = maps.readlines()
except (OSError, IOError):
yield False
# /proc/PID/maps
mapline = re.compile(r'^[\da-f]+-[\da-f]+ [r-][w-][x-][sp-] '
r'[\da-f]+ [\da-f]{2}:[\da-f]{2} (\d+) *(.+)( \(deleted\))?\n$')
for line in maplines:
line = salt.utils.stringutils.to_unicode(line)
matched = mapline.match(line)
if not matched:
continue
path = matched.group(2)
if not path:
continue
valid = _valid_deleted_file(path)
if not valid:
continue
val = (pinfo['name'], pinfo['pid'], path[0:-10])
if val not in deleted_files:
deleted_files.append(val)
yield val
# /proc/PID/fd
try:
for link in listdir:
path = dirpath + link
readlink = os.readlink(path)
filenames = []
if os.path.isfile(readlink):
filenames.append(readlink)
elif os.path.isdir(readlink) and readlink != '/':
for root, dummy_dirs, files in salt.utils.path.os_walk(readlink, followlinks=True):
for name in files:
filenames.append(os.path.join(root, name))
for filename in filenames:
valid = _valid_deleted_file(filename)
if not valid:
continue
val = (pinfo['name'], pinfo['pid'], filename)
if val not in deleted_files:
deleted_files.append(val)
yield val
except OSError:
pass
except psutil.NoSuchProcess:
pass | def function[_deleted_files, parameter[]]:
constant[
Iterates over /proc/PID/maps and /proc/PID/fd links and returns list of desired deleted files.
Returns:
List of deleted files to analyze, False on failure.
]
variable[deleted_files] assign[=] list[[]]
for taget[name[proc]] in starred[call[name[psutil].process_iter, parameter[]]] begin[:]
<ast.Try object at 0x7da18f00dc90> | keyword[def] identifier[_deleted_files] ():
literal[string]
identifier[deleted_files] =[]
keyword[for] identifier[proc] keyword[in] identifier[psutil] . identifier[process_iter] ():
keyword[try] :
identifier[pinfo] = identifier[proc] . identifier[as_dict] ( identifier[attrs] =[ literal[string] , literal[string] ])
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] . identifier[format] ( identifier[pinfo] [ literal[string] ])) keyword[as] identifier[maps] :
identifier[dirpath] = literal[string] + identifier[six] . identifier[text_type] ( identifier[pinfo] [ literal[string] ])+ literal[string]
identifier[listdir] = identifier[os] . identifier[listdir] ( identifier[dirpath] )
identifier[maplines] = identifier[maps] . identifier[readlines] ()
keyword[except] ( identifier[OSError] , identifier[IOError] ):
keyword[yield] keyword[False]
identifier[mapline] = identifier[re] . identifier[compile] ( literal[string]
literal[string] )
keyword[for] identifier[line] keyword[in] identifier[maplines] :
identifier[line] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[line] )
identifier[matched] = identifier[mapline] . identifier[match] ( identifier[line] )
keyword[if] keyword[not] identifier[matched] :
keyword[continue]
identifier[path] = identifier[matched] . identifier[group] ( literal[int] )
keyword[if] keyword[not] identifier[path] :
keyword[continue]
identifier[valid] = identifier[_valid_deleted_file] ( identifier[path] )
keyword[if] keyword[not] identifier[valid] :
keyword[continue]
identifier[val] =( identifier[pinfo] [ literal[string] ], identifier[pinfo] [ literal[string] ], identifier[path] [ literal[int] :- literal[int] ])
keyword[if] identifier[val] keyword[not] keyword[in] identifier[deleted_files] :
identifier[deleted_files] . identifier[append] ( identifier[val] )
keyword[yield] identifier[val]
keyword[try] :
keyword[for] identifier[link] keyword[in] identifier[listdir] :
identifier[path] = identifier[dirpath] + identifier[link]
identifier[readlink] = identifier[os] . identifier[readlink] ( identifier[path] )
identifier[filenames] =[]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[readlink] ):
identifier[filenames] . identifier[append] ( identifier[readlink] )
keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[readlink] ) keyword[and] identifier[readlink] != literal[string] :
keyword[for] identifier[root] , identifier[dummy_dirs] , identifier[files] keyword[in] identifier[salt] . identifier[utils] . identifier[path] . identifier[os_walk] ( identifier[readlink] , identifier[followlinks] = keyword[True] ):
keyword[for] identifier[name] keyword[in] identifier[files] :
identifier[filenames] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[name] ))
keyword[for] identifier[filename] keyword[in] identifier[filenames] :
identifier[valid] = identifier[_valid_deleted_file] ( identifier[filename] )
keyword[if] keyword[not] identifier[valid] :
keyword[continue]
identifier[val] =( identifier[pinfo] [ literal[string] ], identifier[pinfo] [ literal[string] ], identifier[filename] )
keyword[if] identifier[val] keyword[not] keyword[in] identifier[deleted_files] :
identifier[deleted_files] . identifier[append] ( identifier[val] )
keyword[yield] identifier[val]
keyword[except] identifier[OSError] :
keyword[pass]
keyword[except] identifier[psutil] . identifier[NoSuchProcess] :
keyword[pass] | def _deleted_files():
"""
Iterates over /proc/PID/maps and /proc/PID/fd links and returns list of desired deleted files.
Returns:
List of deleted files to analyze, False on failure.
"""
deleted_files = []
for proc in psutil.process_iter(): # pylint: disable=too-many-nested-blocks
try:
pinfo = proc.as_dict(attrs=['pid', 'name'])
try:
with salt.utils.files.fopen('/proc/{0}/maps'.format(pinfo['pid'])) as maps: # pylint: disable=resource-leakage
dirpath = '/proc/' + six.text_type(pinfo['pid']) + '/fd/'
listdir = os.listdir(dirpath)
maplines = maps.readlines() # depends on [control=['with'], data=['maps']] # depends on [control=['try'], data=[]]
except (OSError, IOError):
yield False # depends on [control=['except'], data=[]]
# /proc/PID/maps
mapline = re.compile('^[\\da-f]+-[\\da-f]+ [r-][w-][x-][sp-] [\\da-f]+ [\\da-f]{2}:[\\da-f]{2} (\\d+) *(.+)( \\(deleted\\))?\\n$')
for line in maplines:
line = salt.utils.stringutils.to_unicode(line)
matched = mapline.match(line)
if not matched:
continue # depends on [control=['if'], data=[]]
path = matched.group(2)
if not path:
continue # depends on [control=['if'], data=[]]
valid = _valid_deleted_file(path)
if not valid:
continue # depends on [control=['if'], data=[]]
val = (pinfo['name'], pinfo['pid'], path[0:-10])
if val not in deleted_files:
deleted_files.append(val)
yield val # depends on [control=['if'], data=['val', 'deleted_files']] # depends on [control=['for'], data=['line']]
# /proc/PID/fd
try:
for link in listdir:
path = dirpath + link
readlink = os.readlink(path)
filenames = []
if os.path.isfile(readlink):
filenames.append(readlink) # depends on [control=['if'], data=[]]
elif os.path.isdir(readlink) and readlink != '/':
for (root, dummy_dirs, files) in salt.utils.path.os_walk(readlink, followlinks=True):
for name in files:
filenames.append(os.path.join(root, name)) # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
for filename in filenames:
valid = _valid_deleted_file(filename)
if not valid:
continue # depends on [control=['if'], data=[]]
val = (pinfo['name'], pinfo['pid'], filename)
if val not in deleted_files:
deleted_files.append(val)
yield val # depends on [control=['if'], data=['val', 'deleted_files']] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=['link']] # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]]
except psutil.NoSuchProcess:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['proc']] |
def get_unique_field_values(dcm_file_list, field_name):
"""Return a set of unique field values from a list of DICOM files
Parameters
----------
dcm_file_list: iterable of DICOM file paths
field_name: str
Name of the field from where to get each value
Returns
-------
Set of field values
"""
field_values = set()
for dcm in dcm_file_list:
field_values.add(str(DicomFile(dcm).get_attributes(field_name)))
return field_values | def function[get_unique_field_values, parameter[dcm_file_list, field_name]]:
constant[Return a set of unique field values from a list of DICOM files
Parameters
----------
dcm_file_list: iterable of DICOM file paths
field_name: str
Name of the field from where to get each value
Returns
-------
Set of field values
]
variable[field_values] assign[=] call[name[set], parameter[]]
for taget[name[dcm]] in starred[name[dcm_file_list]] begin[:]
call[name[field_values].add, parameter[call[name[str], parameter[call[call[name[DicomFile], parameter[name[dcm]]].get_attributes, parameter[name[field_name]]]]]]]
return[name[field_values]] | keyword[def] identifier[get_unique_field_values] ( identifier[dcm_file_list] , identifier[field_name] ):
literal[string]
identifier[field_values] = identifier[set] ()
keyword[for] identifier[dcm] keyword[in] identifier[dcm_file_list] :
identifier[field_values] . identifier[add] ( identifier[str] ( identifier[DicomFile] ( identifier[dcm] ). identifier[get_attributes] ( identifier[field_name] )))
keyword[return] identifier[field_values] | def get_unique_field_values(dcm_file_list, field_name):
"""Return a set of unique field values from a list of DICOM files
Parameters
----------
dcm_file_list: iterable of DICOM file paths
field_name: str
Name of the field from where to get each value
Returns
-------
Set of field values
"""
field_values = set()
for dcm in dcm_file_list:
field_values.add(str(DicomFile(dcm).get_attributes(field_name))) # depends on [control=['for'], data=['dcm']]
return field_values |
def _put(self, *args, **kwargs):
"""Wrapper around Requests for PUT requests
Returns:
Response:
A Requests Response object
"""
if 'timeout' not in kwargs:
kwargs['timeout'] = self.timeout
req = self.session.put(*args, **kwargs)
return req | def function[_put, parameter[self]]:
constant[Wrapper around Requests for PUT requests
Returns:
Response:
A Requests Response object
]
if compare[constant[timeout] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[timeout]] assign[=] name[self].timeout
variable[req] assign[=] call[name[self].session.put, parameter[<ast.Starred object at 0x7da1b0f40370>]]
return[name[req]] | keyword[def] identifier[_put] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[timeout]
identifier[req] = identifier[self] . identifier[session] . identifier[put] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[req] | def _put(self, *args, **kwargs):
"""Wrapper around Requests for PUT requests
Returns:
Response:
A Requests Response object
"""
if 'timeout' not in kwargs:
kwargs['timeout'] = self.timeout # depends on [control=['if'], data=['kwargs']]
req = self.session.put(*args, **kwargs)
return req |
def append(self, item):
"""Adds a new item to the end of the collection."""
if len(self) == 0:
# Special case, we make this the current item
self.index = 0
self.items.append(item) | def function[append, parameter[self, item]]:
constant[Adds a new item to the end of the collection.]
if compare[call[name[len], parameter[name[self]]] equal[==] constant[0]] begin[:]
name[self].index assign[=] constant[0]
call[name[self].items.append, parameter[name[item]]] | keyword[def] identifier[append] ( identifier[self] , identifier[item] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] )== literal[int] :
identifier[self] . identifier[index] = literal[int]
identifier[self] . identifier[items] . identifier[append] ( identifier[item] ) | def append(self, item):
"""Adds a new item to the end of the collection."""
if len(self) == 0:
# Special case, we make this the current item
self.index = 0 # depends on [control=['if'], data=[]]
self.items.append(item) |
def match_filter(self, idx_list, pattern, dict_type=False,
dict_key='name'):
"""Return Matched items in indexed files.
:param idx_list:
:return list
"""
if dict_type is False:
return self._return_deque([
obj for obj in idx_list
if re.search(pattern, obj)
])
elif dict_type is True:
return self._return_deque([
obj for obj in idx_list
if re.search(pattern, obj.get(dict_key))
])
else:
return self._return_deque() | def function[match_filter, parameter[self, idx_list, pattern, dict_type, dict_key]]:
constant[Return Matched items in indexed files.
:param idx_list:
:return list
]
if compare[name[dict_type] is constant[False]] begin[:]
return[call[name[self]._return_deque, parameter[<ast.ListComp object at 0x7da2054a5e10>]]] | keyword[def] identifier[match_filter] ( identifier[self] , identifier[idx_list] , identifier[pattern] , identifier[dict_type] = keyword[False] ,
identifier[dict_key] = literal[string] ):
literal[string]
keyword[if] identifier[dict_type] keyword[is] keyword[False] :
keyword[return] identifier[self] . identifier[_return_deque] ([
identifier[obj] keyword[for] identifier[obj] keyword[in] identifier[idx_list]
keyword[if] identifier[re] . identifier[search] ( identifier[pattern] , identifier[obj] )
])
keyword[elif] identifier[dict_type] keyword[is] keyword[True] :
keyword[return] identifier[self] . identifier[_return_deque] ([
identifier[obj] keyword[for] identifier[obj] keyword[in] identifier[idx_list]
keyword[if] identifier[re] . identifier[search] ( identifier[pattern] , identifier[obj] . identifier[get] ( identifier[dict_key] ))
])
keyword[else] :
keyword[return] identifier[self] . identifier[_return_deque] () | def match_filter(self, idx_list, pattern, dict_type=False, dict_key='name'):
"""Return Matched items in indexed files.
:param idx_list:
:return list
"""
if dict_type is False:
return self._return_deque([obj for obj in idx_list if re.search(pattern, obj)]) # depends on [control=['if'], data=[]]
elif dict_type is True:
return self._return_deque([obj for obj in idx_list if re.search(pattern, obj.get(dict_key))]) # depends on [control=['if'], data=[]]
else:
return self._return_deque() |
def refresh_existing_encodings(self):
"""
Refresh existing encodings for messages, when encoding was changed by user in dialog
:return:
"""
update = False
for msg in self.proto_analyzer.messages:
i = next((i for i, d in enumerate(self.decodings) if d.name == msg.decoder.name), 0)
if msg.decoder != self.decodings[i]:
update = True
msg.decoder = self.decodings[i]
msg.clear_decoded_bits()
msg.clear_encoded_bits()
if update:
self.protocol_model.update()
self.label_value_model.update() | def function[refresh_existing_encodings, parameter[self]]:
constant[
Refresh existing encodings for messages, when encoding was changed by user in dialog
:return:
]
variable[update] assign[=] constant[False]
for taget[name[msg]] in starred[name[self].proto_analyzer.messages] begin[:]
variable[i] assign[=] call[name[next], parameter[<ast.GeneratorExp object at 0x7da2054a4df0>, constant[0]]]
if compare[name[msg].decoder not_equal[!=] call[name[self].decodings][name[i]]] begin[:]
variable[update] assign[=] constant[True]
name[msg].decoder assign[=] call[name[self].decodings][name[i]]
call[name[msg].clear_decoded_bits, parameter[]]
call[name[msg].clear_encoded_bits, parameter[]]
if name[update] begin[:]
call[name[self].protocol_model.update, parameter[]]
call[name[self].label_value_model.update, parameter[]] | keyword[def] identifier[refresh_existing_encodings] ( identifier[self] ):
literal[string]
identifier[update] = keyword[False]
keyword[for] identifier[msg] keyword[in] identifier[self] . identifier[proto_analyzer] . identifier[messages] :
identifier[i] = identifier[next] (( identifier[i] keyword[for] identifier[i] , identifier[d] keyword[in] identifier[enumerate] ( identifier[self] . identifier[decodings] ) keyword[if] identifier[d] . identifier[name] == identifier[msg] . identifier[decoder] . identifier[name] ), literal[int] )
keyword[if] identifier[msg] . identifier[decoder] != identifier[self] . identifier[decodings] [ identifier[i] ]:
identifier[update] = keyword[True]
identifier[msg] . identifier[decoder] = identifier[self] . identifier[decodings] [ identifier[i] ]
identifier[msg] . identifier[clear_decoded_bits] ()
identifier[msg] . identifier[clear_encoded_bits] ()
keyword[if] identifier[update] :
identifier[self] . identifier[protocol_model] . identifier[update] ()
identifier[self] . identifier[label_value_model] . identifier[update] () | def refresh_existing_encodings(self):
"""
Refresh existing encodings for messages, when encoding was changed by user in dialog
:return:
"""
update = False
for msg in self.proto_analyzer.messages:
i = next((i for (i, d) in enumerate(self.decodings) if d.name == msg.decoder.name), 0)
if msg.decoder != self.decodings[i]:
update = True
msg.decoder = self.decodings[i]
msg.clear_decoded_bits()
msg.clear_encoded_bits() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['msg']]
if update:
self.protocol_model.update()
self.label_value_model.update() # depends on [control=['if'], data=[]] |
def sync_status(self):
"""Synchronize DOI status DataCite MDS.
:returns: `True` if is sync successfully.
"""
status = None
try:
try:
self.api.doi_get(self.pid.pid_value)
status = PIDStatus.REGISTERED
except DataCiteGoneError:
status = PIDStatus.DELETED
except DataCiteNoContentError:
status = PIDStatus.REGISTERED
except DataCiteNotFoundError:
pass
if status is None:
try:
self.api.metadata_get(self.pid.pid_value)
status = PIDStatus.RESERVED
except DataCiteGoneError:
status = PIDStatus.DELETED
except DataCiteNoContentError:
status = PIDStatus.REGISTERED
except DataCiteNotFoundError:
pass
except (DataCiteError, HttpError):
logger.exception("Failed to sync status from DataCite",
extra=dict(pid=self.pid))
raise
if status is None:
status = PIDStatus.NEW
self.pid.sync_status(status)
logger.info("Successfully synced status from DataCite",
extra=dict(pid=self.pid))
return True | def function[sync_status, parameter[self]]:
constant[Synchronize DOI status DataCite MDS.
:returns: `True` if is sync successfully.
]
variable[status] assign[=] constant[None]
<ast.Try object at 0x7da18f721150>
if compare[name[status] is constant[None]] begin[:]
variable[status] assign[=] name[PIDStatus].NEW
call[name[self].pid.sync_status, parameter[name[status]]]
call[name[logger].info, parameter[constant[Successfully synced status from DataCite]]]
return[constant[True]] | keyword[def] identifier[sync_status] ( identifier[self] ):
literal[string]
identifier[status] = keyword[None]
keyword[try] :
keyword[try] :
identifier[self] . identifier[api] . identifier[doi_get] ( identifier[self] . identifier[pid] . identifier[pid_value] )
identifier[status] = identifier[PIDStatus] . identifier[REGISTERED]
keyword[except] identifier[DataCiteGoneError] :
identifier[status] = identifier[PIDStatus] . identifier[DELETED]
keyword[except] identifier[DataCiteNoContentError] :
identifier[status] = identifier[PIDStatus] . identifier[REGISTERED]
keyword[except] identifier[DataCiteNotFoundError] :
keyword[pass]
keyword[if] identifier[status] keyword[is] keyword[None] :
keyword[try] :
identifier[self] . identifier[api] . identifier[metadata_get] ( identifier[self] . identifier[pid] . identifier[pid_value] )
identifier[status] = identifier[PIDStatus] . identifier[RESERVED]
keyword[except] identifier[DataCiteGoneError] :
identifier[status] = identifier[PIDStatus] . identifier[DELETED]
keyword[except] identifier[DataCiteNoContentError] :
identifier[status] = identifier[PIDStatus] . identifier[REGISTERED]
keyword[except] identifier[DataCiteNotFoundError] :
keyword[pass]
keyword[except] ( identifier[DataCiteError] , identifier[HttpError] ):
identifier[logger] . identifier[exception] ( literal[string] ,
identifier[extra] = identifier[dict] ( identifier[pid] = identifier[self] . identifier[pid] ))
keyword[raise]
keyword[if] identifier[status] keyword[is] keyword[None] :
identifier[status] = identifier[PIDStatus] . identifier[NEW]
identifier[self] . identifier[pid] . identifier[sync_status] ( identifier[status] )
identifier[logger] . identifier[info] ( literal[string] ,
identifier[extra] = identifier[dict] ( identifier[pid] = identifier[self] . identifier[pid] ))
keyword[return] keyword[True] | def sync_status(self):
"""Synchronize DOI status DataCite MDS.
:returns: `True` if is sync successfully.
"""
status = None
try:
try:
self.api.doi_get(self.pid.pid_value)
status = PIDStatus.REGISTERED # depends on [control=['try'], data=[]]
except DataCiteGoneError:
status = PIDStatus.DELETED # depends on [control=['except'], data=[]]
except DataCiteNoContentError:
status = PIDStatus.REGISTERED # depends on [control=['except'], data=[]]
except DataCiteNotFoundError:
pass # depends on [control=['except'], data=[]]
if status is None:
try:
self.api.metadata_get(self.pid.pid_value)
status = PIDStatus.RESERVED # depends on [control=['try'], data=[]]
except DataCiteGoneError:
status = PIDStatus.DELETED # depends on [control=['except'], data=[]]
except DataCiteNoContentError:
status = PIDStatus.REGISTERED # depends on [control=['except'], data=[]]
except DataCiteNotFoundError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['status']] # depends on [control=['try'], data=[]]
except (DataCiteError, HttpError):
logger.exception('Failed to sync status from DataCite', extra=dict(pid=self.pid))
raise # depends on [control=['except'], data=[]]
if status is None:
status = PIDStatus.NEW # depends on [control=['if'], data=['status']]
self.pid.sync_status(status)
logger.info('Successfully synced status from DataCite', extra=dict(pid=self.pid))
return True |
def _MultiStream(cls, fds):
"""Effectively streams data from multiple opened BlobImage objects.
Args:
fds: A list of opened AFF4Stream (or AFF4Stream descendants) objects.
Yields:
Tuples (chunk, fd, exception) where chunk is a binary blob of data and fd
is an object from the fds argument.
If one or more chunks are missing, exception is a MissingBlobsError object
and chunk is None. _MultiStream does its best to skip the file entirely if
one of its chunks is missing, but in case of very large files it's still
possible to yield a truncated file.
"""
broken_fds = set()
missing_blobs_fd_pairs = []
for chunk_fd_pairs in collection.Batch(
cls._GenerateChunkIds(fds), cls.MULTI_STREAM_CHUNKS_READ_AHEAD):
chunk_fds = list(map(operator.itemgetter(0), chunk_fd_pairs))
results_map = data_store.BLOBS.ReadBlobs(chunk_fds)
for chunk_id, fd in chunk_fd_pairs:
if chunk_id not in results_map or results_map[chunk_id] is None:
missing_blobs_fd_pairs.append((chunk_id, fd))
broken_fds.add(fd)
for chunk, fd in chunk_fd_pairs:
if fd in broken_fds:
continue
yield fd, results_map[chunk], None
if missing_blobs_fd_pairs:
missing_blobs_by_fd = {}
for chunk_id, fd in missing_blobs_fd_pairs:
missing_blobs_by_fd.setdefault(fd, []).append(chunk_id)
for fd, missing_blobs in iteritems(missing_blobs_by_fd):
e = MissingBlobsError(
"%d missing blobs (multi-stream)" % len(missing_blobs),
missing_chunks=missing_blobs)
yield fd, None, e | def function[_MultiStream, parameter[cls, fds]]:
constant[Effectively streams data from multiple opened BlobImage objects.
Args:
fds: A list of opened AFF4Stream (or AFF4Stream descendants) objects.
Yields:
Tuples (chunk, fd, exception) where chunk is a binary blob of data and fd
is an object from the fds argument.
If one or more chunks are missing, exception is a MissingBlobsError object
and chunk is None. _MultiStream does its best to skip the file entirely if
one of its chunks is missing, but in case of very large files it's still
possible to yield a truncated file.
]
variable[broken_fds] assign[=] call[name[set], parameter[]]
variable[missing_blobs_fd_pairs] assign[=] list[[]]
for taget[name[chunk_fd_pairs]] in starred[call[name[collection].Batch, parameter[call[name[cls]._GenerateChunkIds, parameter[name[fds]]], name[cls].MULTI_STREAM_CHUNKS_READ_AHEAD]]] begin[:]
variable[chunk_fds] assign[=] call[name[list], parameter[call[name[map], parameter[call[name[operator].itemgetter, parameter[constant[0]]], name[chunk_fd_pairs]]]]]
variable[results_map] assign[=] call[name[data_store].BLOBS.ReadBlobs, parameter[name[chunk_fds]]]
for taget[tuple[[<ast.Name object at 0x7da18fe93790>, <ast.Name object at 0x7da18fe90f70>]]] in starred[name[chunk_fd_pairs]] begin[:]
if <ast.BoolOp object at 0x7da18fe92530> begin[:]
call[name[missing_blobs_fd_pairs].append, parameter[tuple[[<ast.Name object at 0x7da18fe90e20>, <ast.Name object at 0x7da18fe91ff0>]]]]
call[name[broken_fds].add, parameter[name[fd]]]
for taget[tuple[[<ast.Name object at 0x7da18fe91780>, <ast.Name object at 0x7da18fe90c70>]]] in starred[name[chunk_fd_pairs]] begin[:]
if compare[name[fd] in name[broken_fds]] begin[:]
continue
<ast.Yield object at 0x7da18fe90340>
if name[missing_blobs_fd_pairs] begin[:]
variable[missing_blobs_by_fd] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18fe93e50>, <ast.Name object at 0x7da18fe91d20>]]] in starred[name[missing_blobs_fd_pairs]] begin[:]
call[call[name[missing_blobs_by_fd].setdefault, parameter[name[fd], list[[]]]].append, parameter[name[chunk_id]]]
for taget[tuple[[<ast.Name object at 0x7da18fe928f0>, <ast.Name object at 0x7da18fe93700>]]] in starred[call[name[iteritems], parameter[name[missing_blobs_by_fd]]]] begin[:]
variable[e] assign[=] call[name[MissingBlobsError], parameter[binary_operation[constant[%d missing blobs (multi-stream)] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[missing_blobs]]]]]]
<ast.Yield object at 0x7da18dc07700> | keyword[def] identifier[_MultiStream] ( identifier[cls] , identifier[fds] ):
literal[string]
identifier[broken_fds] = identifier[set] ()
identifier[missing_blobs_fd_pairs] =[]
keyword[for] identifier[chunk_fd_pairs] keyword[in] identifier[collection] . identifier[Batch] (
identifier[cls] . identifier[_GenerateChunkIds] ( identifier[fds] ), identifier[cls] . identifier[MULTI_STREAM_CHUNKS_READ_AHEAD] ):
identifier[chunk_fds] = identifier[list] ( identifier[map] ( identifier[operator] . identifier[itemgetter] ( literal[int] ), identifier[chunk_fd_pairs] ))
identifier[results_map] = identifier[data_store] . identifier[BLOBS] . identifier[ReadBlobs] ( identifier[chunk_fds] )
keyword[for] identifier[chunk_id] , identifier[fd] keyword[in] identifier[chunk_fd_pairs] :
keyword[if] identifier[chunk_id] keyword[not] keyword[in] identifier[results_map] keyword[or] identifier[results_map] [ identifier[chunk_id] ] keyword[is] keyword[None] :
identifier[missing_blobs_fd_pairs] . identifier[append] (( identifier[chunk_id] , identifier[fd] ))
identifier[broken_fds] . identifier[add] ( identifier[fd] )
keyword[for] identifier[chunk] , identifier[fd] keyword[in] identifier[chunk_fd_pairs] :
keyword[if] identifier[fd] keyword[in] identifier[broken_fds] :
keyword[continue]
keyword[yield] identifier[fd] , identifier[results_map] [ identifier[chunk] ], keyword[None]
keyword[if] identifier[missing_blobs_fd_pairs] :
identifier[missing_blobs_by_fd] ={}
keyword[for] identifier[chunk_id] , identifier[fd] keyword[in] identifier[missing_blobs_fd_pairs] :
identifier[missing_blobs_by_fd] . identifier[setdefault] ( identifier[fd] ,[]). identifier[append] ( identifier[chunk_id] )
keyword[for] identifier[fd] , identifier[missing_blobs] keyword[in] identifier[iteritems] ( identifier[missing_blobs_by_fd] ):
identifier[e] = identifier[MissingBlobsError] (
literal[string] % identifier[len] ( identifier[missing_blobs] ),
identifier[missing_chunks] = identifier[missing_blobs] )
keyword[yield] identifier[fd] , keyword[None] , identifier[e] | def _MultiStream(cls, fds):
"""Effectively streams data from multiple opened BlobImage objects.
Args:
fds: A list of opened AFF4Stream (or AFF4Stream descendants) objects.
Yields:
Tuples (chunk, fd, exception) where chunk is a binary blob of data and fd
is an object from the fds argument.
If one or more chunks are missing, exception is a MissingBlobsError object
and chunk is None. _MultiStream does its best to skip the file entirely if
one of its chunks is missing, but in case of very large files it's still
possible to yield a truncated file.
"""
broken_fds = set()
missing_blobs_fd_pairs = []
for chunk_fd_pairs in collection.Batch(cls._GenerateChunkIds(fds), cls.MULTI_STREAM_CHUNKS_READ_AHEAD):
chunk_fds = list(map(operator.itemgetter(0), chunk_fd_pairs))
results_map = data_store.BLOBS.ReadBlobs(chunk_fds)
for (chunk_id, fd) in chunk_fd_pairs:
if chunk_id not in results_map or results_map[chunk_id] is None:
missing_blobs_fd_pairs.append((chunk_id, fd))
broken_fds.add(fd) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for (chunk, fd) in chunk_fd_pairs:
if fd in broken_fds:
continue # depends on [control=['if'], data=[]]
yield (fd, results_map[chunk], None) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['chunk_fd_pairs']]
if missing_blobs_fd_pairs:
missing_blobs_by_fd = {}
for (chunk_id, fd) in missing_blobs_fd_pairs:
missing_blobs_by_fd.setdefault(fd, []).append(chunk_id) # depends on [control=['for'], data=[]]
for (fd, missing_blobs) in iteritems(missing_blobs_by_fd):
e = MissingBlobsError('%d missing blobs (multi-stream)' % len(missing_blobs), missing_chunks=missing_blobs)
yield (fd, None, e) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
def length(self):
"""Array of vector lengths"""
return np.sqrt(np.sum(self**2, axis=1)).view(np.ndarray) | def function[length, parameter[self]]:
constant[Array of vector lengths]
return[call[call[name[np].sqrt, parameter[call[name[np].sum, parameter[binary_operation[name[self] ** constant[2]]]]]].view, parameter[name[np].ndarray]]] | keyword[def] identifier[length] ( identifier[self] ):
literal[string]
keyword[return] identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] ( identifier[self] ** literal[int] , identifier[axis] = literal[int] )). identifier[view] ( identifier[np] . identifier[ndarray] ) | def length(self):
"""Array of vector lengths"""
return np.sqrt(np.sum(self ** 2, axis=1)).view(np.ndarray) |
def returner(ret):
'''
Send a message to Nagios with the data
'''
_options = _get_options(ret)
log.debug('_options %s', _options)
_options['hostname'] = ret.get('id')
if 'url' not in _options or _options['url'] == '':
log.error('nagios_nrdp.url not defined in salt config')
return
if 'token' not in _options or _options['token'] == '':
log.error('nagios_nrdp.token not defined in salt config')
return
xml = _prepare_xml(options=_options, state=ret['return'])
res = _post_data(options=_options, xml=xml)
return res | def function[returner, parameter[ret]]:
constant[
Send a message to Nagios with the data
]
variable[_options] assign[=] call[name[_get_options], parameter[name[ret]]]
call[name[log].debug, parameter[constant[_options %s], name[_options]]]
call[name[_options]][constant[hostname]] assign[=] call[name[ret].get, parameter[constant[id]]]
if <ast.BoolOp object at 0x7da204620f10> begin[:]
call[name[log].error, parameter[constant[nagios_nrdp.url not defined in salt config]]]
return[None]
if <ast.BoolOp object at 0x7da1b2184790> begin[:]
call[name[log].error, parameter[constant[nagios_nrdp.token not defined in salt config]]]
return[None]
variable[xml] assign[=] call[name[_prepare_xml], parameter[]]
variable[res] assign[=] call[name[_post_data], parameter[]]
return[name[res]] | keyword[def] identifier[returner] ( identifier[ret] ):
literal[string]
identifier[_options] = identifier[_get_options] ( identifier[ret] )
identifier[log] . identifier[debug] ( literal[string] , identifier[_options] )
identifier[_options] [ literal[string] ]= identifier[ret] . identifier[get] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[_options] keyword[or] identifier[_options] [ literal[string] ]== literal[string] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return]
keyword[if] literal[string] keyword[not] keyword[in] identifier[_options] keyword[or] identifier[_options] [ literal[string] ]== literal[string] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return]
identifier[xml] = identifier[_prepare_xml] ( identifier[options] = identifier[_options] , identifier[state] = identifier[ret] [ literal[string] ])
identifier[res] = identifier[_post_data] ( identifier[options] = identifier[_options] , identifier[xml] = identifier[xml] )
keyword[return] identifier[res] | def returner(ret):
"""
Send a message to Nagios with the data
"""
_options = _get_options(ret)
log.debug('_options %s', _options)
_options['hostname'] = ret.get('id')
if 'url' not in _options or _options['url'] == '':
log.error('nagios_nrdp.url not defined in salt config')
return # depends on [control=['if'], data=[]]
if 'token' not in _options or _options['token'] == '':
log.error('nagios_nrdp.token not defined in salt config')
return # depends on [control=['if'], data=[]]
xml = _prepare_xml(options=_options, state=ret['return'])
res = _post_data(options=_options, xml=xml)
return res |
def get_anime(self, anime_id, title_language='canonical'):
"""Fetches the Anime Object of the given id or slug.
:param anime_id: The Anime ID or Slug.
:type anime_id: int or str
:param str title_language: The PREFERED title language can be any of
`'canonical'`, `'english'`, `'romanized'`
:returns: Anime Object -- The Anime you requested.
"""
r = self._query_('/anime/%s' % anime_id, 'GET',
params={'title_language_preference': title_language})
return Anime(r.json()) | def function[get_anime, parameter[self, anime_id, title_language]]:
constant[Fetches the Anime Object of the given id or slug.
:param anime_id: The Anime ID or Slug.
:type anime_id: int or str
:param str title_language: The PREFERED title language can be any of
`'canonical'`, `'english'`, `'romanized'`
:returns: Anime Object -- The Anime you requested.
]
variable[r] assign[=] call[name[self]._query_, parameter[binary_operation[constant[/anime/%s] <ast.Mod object at 0x7da2590d6920> name[anime_id]], constant[GET]]]
return[call[name[Anime], parameter[call[name[r].json, parameter[]]]]] | keyword[def] identifier[get_anime] ( identifier[self] , identifier[anime_id] , identifier[title_language] = literal[string] ):
literal[string]
identifier[r] = identifier[self] . identifier[_query_] ( literal[string] % identifier[anime_id] , literal[string] ,
identifier[params] ={ literal[string] : identifier[title_language] })
keyword[return] identifier[Anime] ( identifier[r] . identifier[json] ()) | def get_anime(self, anime_id, title_language='canonical'):
"""Fetches the Anime Object of the given id or slug.
:param anime_id: The Anime ID or Slug.
:type anime_id: int or str
:param str title_language: The PREFERED title language can be any of
`'canonical'`, `'english'`, `'romanized'`
:returns: Anime Object -- The Anime you requested.
"""
r = self._query_('/anime/%s' % anime_id, 'GET', params={'title_language_preference': title_language})
return Anime(r.json()) |
def apply(self, incoming):
"""
Store the incoming activation, apply the activation function and store
the result as outgoing activation.
"""
assert len(incoming) == self.size
self.incoming = incoming
outgoing = self.activation(self.incoming)
assert len(outgoing) == self.size
self.outgoing = outgoing | def function[apply, parameter[self, incoming]]:
constant[
Store the incoming activation, apply the activation function and store
the result as outgoing activation.
]
assert[compare[call[name[len], parameter[name[incoming]]] equal[==] name[self].size]]
name[self].incoming assign[=] name[incoming]
variable[outgoing] assign[=] call[name[self].activation, parameter[name[self].incoming]]
assert[compare[call[name[len], parameter[name[outgoing]]] equal[==] name[self].size]]
name[self].outgoing assign[=] name[outgoing] | keyword[def] identifier[apply] ( identifier[self] , identifier[incoming] ):
literal[string]
keyword[assert] identifier[len] ( identifier[incoming] )== identifier[self] . identifier[size]
identifier[self] . identifier[incoming] = identifier[incoming]
identifier[outgoing] = identifier[self] . identifier[activation] ( identifier[self] . identifier[incoming] )
keyword[assert] identifier[len] ( identifier[outgoing] )== identifier[self] . identifier[size]
identifier[self] . identifier[outgoing] = identifier[outgoing] | def apply(self, incoming):
"""
Store the incoming activation, apply the activation function and store
the result as outgoing activation.
"""
assert len(incoming) == self.size
self.incoming = incoming
outgoing = self.activation(self.incoming)
assert len(outgoing) == self.size
self.outgoing = outgoing |
def remove_isolated_clusters(labels, inlets):
r"""
Finds cluster labels not attached to the inlets, and sets them to
unoccupied (-1)
Parameters
----------
labels : tuple of site and bond labels
This information is provided by the ``site_percolation`` or
``bond_percolation`` functions
inlets : array_like
A list of which sites are inlets. Can be a boolean mask or an
array of indices.
Returns
-------
A tuple containing a list of site and bond labels, with all clusters
not connected to the inlet sites set to not occupied.
"""
# Identify clusters of invasion sites
inv_clusters = sp.unique(labels.sites[inlets])
# Remove cluster numbers == -1, if any
inv_clusters = inv_clusters[inv_clusters >= 0]
# Find all pores in invading clusters
p_invading = sp.in1d(labels.sites, inv_clusters)
labels.sites[~p_invading] = -1
t_invading = sp.in1d(labels.bonds, inv_clusters)
labels.bonds[~t_invading] = -1
return labels | def function[remove_isolated_clusters, parameter[labels, inlets]]:
constant[
Finds cluster labels not attached to the inlets, and sets them to
unoccupied (-1)
Parameters
----------
labels : tuple of site and bond labels
This information is provided by the ``site_percolation`` or
``bond_percolation`` functions
inlets : array_like
A list of which sites are inlets. Can be a boolean mask or an
array of indices.
Returns
-------
A tuple containing a list of site and bond labels, with all clusters
not connected to the inlet sites set to not occupied.
]
variable[inv_clusters] assign[=] call[name[sp].unique, parameter[call[name[labels].sites][name[inlets]]]]
variable[inv_clusters] assign[=] call[name[inv_clusters]][compare[name[inv_clusters] greater_or_equal[>=] constant[0]]]
variable[p_invading] assign[=] call[name[sp].in1d, parameter[name[labels].sites, name[inv_clusters]]]
call[name[labels].sites][<ast.UnaryOp object at 0x7da18f58d930>] assign[=] <ast.UnaryOp object at 0x7da18f58dcf0>
variable[t_invading] assign[=] call[name[sp].in1d, parameter[name[labels].bonds, name[inv_clusters]]]
call[name[labels].bonds][<ast.UnaryOp object at 0x7da18f58f8e0>] assign[=] <ast.UnaryOp object at 0x7da18f58e980>
return[name[labels]] | keyword[def] identifier[remove_isolated_clusters] ( identifier[labels] , identifier[inlets] ):
literal[string]
identifier[inv_clusters] = identifier[sp] . identifier[unique] ( identifier[labels] . identifier[sites] [ identifier[inlets] ])
identifier[inv_clusters] = identifier[inv_clusters] [ identifier[inv_clusters] >= literal[int] ]
identifier[p_invading] = identifier[sp] . identifier[in1d] ( identifier[labels] . identifier[sites] , identifier[inv_clusters] )
identifier[labels] . identifier[sites] [~ identifier[p_invading] ]=- literal[int]
identifier[t_invading] = identifier[sp] . identifier[in1d] ( identifier[labels] . identifier[bonds] , identifier[inv_clusters] )
identifier[labels] . identifier[bonds] [~ identifier[t_invading] ]=- literal[int]
keyword[return] identifier[labels] | def remove_isolated_clusters(labels, inlets):
"""
Finds cluster labels not attached to the inlets, and sets them to
unoccupied (-1)
Parameters
----------
labels : tuple of site and bond labels
This information is provided by the ``site_percolation`` or
``bond_percolation`` functions
inlets : array_like
A list of which sites are inlets. Can be a boolean mask or an
array of indices.
Returns
-------
A tuple containing a list of site and bond labels, with all clusters
not connected to the inlet sites set to not occupied.
"""
# Identify clusters of invasion sites
inv_clusters = sp.unique(labels.sites[inlets])
# Remove cluster numbers == -1, if any
inv_clusters = inv_clusters[inv_clusters >= 0]
# Find all pores in invading clusters
p_invading = sp.in1d(labels.sites, inv_clusters)
labels.sites[~p_invading] = -1
t_invading = sp.in1d(labels.bonds, inv_clusters)
labels.bonds[~t_invading] = -1
return labels |
def divisors(n):
"""
From a given natural integer, returns the list of divisors in ascending order
:param n: Natural integer
:return: List of divisors of n in ascending order
"""
factors = _factor_generator(n)
_divisors = []
listexponents = [[k**x for x in range(0, factors[k]+1)] for k in list(factors.keys())]
listfactors = _cartesian_product(listexponents)
for f in listfactors:
_divisors.append(reduce(lambda x, y: x*y, f, 1))
_divisors.sort()
return _divisors | def function[divisors, parameter[n]]:
constant[
From a given natural integer, returns the list of divisors in ascending order
:param n: Natural integer
:return: List of divisors of n in ascending order
]
variable[factors] assign[=] call[name[_factor_generator], parameter[name[n]]]
variable[_divisors] assign[=] list[[]]
variable[listexponents] assign[=] <ast.ListComp object at 0x7da18dc05b10>
variable[listfactors] assign[=] call[name[_cartesian_product], parameter[name[listexponents]]]
for taget[name[f]] in starred[name[listfactors]] begin[:]
call[name[_divisors].append, parameter[call[name[reduce], parameter[<ast.Lambda object at 0x7da18dc07e20>, name[f], constant[1]]]]]
call[name[_divisors].sort, parameter[]]
return[name[_divisors]] | keyword[def] identifier[divisors] ( identifier[n] ):
literal[string]
identifier[factors] = identifier[_factor_generator] ( identifier[n] )
identifier[_divisors] =[]
identifier[listexponents] =[[ identifier[k] ** identifier[x] keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[factors] [ identifier[k] ]+ literal[int] )] keyword[for] identifier[k] keyword[in] identifier[list] ( identifier[factors] . identifier[keys] ())]
identifier[listfactors] = identifier[_cartesian_product] ( identifier[listexponents] )
keyword[for] identifier[f] keyword[in] identifier[listfactors] :
identifier[_divisors] . identifier[append] ( identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] * identifier[y] , identifier[f] , literal[int] ))
identifier[_divisors] . identifier[sort] ()
keyword[return] identifier[_divisors] | def divisors(n):
"""
From a given natural integer, returns the list of divisors in ascending order
:param n: Natural integer
:return: List of divisors of n in ascending order
"""
factors = _factor_generator(n)
_divisors = []
listexponents = [[k ** x for x in range(0, factors[k] + 1)] for k in list(factors.keys())]
listfactors = _cartesian_product(listexponents)
for f in listfactors:
_divisors.append(reduce(lambda x, y: x * y, f, 1)) # depends on [control=['for'], data=['f']]
_divisors.sort()
return _divisors |
def setitem_without_overwrite(d, key, value):
"""
@param d: An instance of dict, that is: isinstance(d, dict)
@param key: a key
@param value: a value to associate with the key
@return: None
@raise: OverwriteError if the key is already present in d.
"""
if key in d:
raise OverwriteError(key, value, d[key])
else:
dict.__setitem__(d, key, value) | def function[setitem_without_overwrite, parameter[d, key, value]]:
constant[
@param d: An instance of dict, that is: isinstance(d, dict)
@param key: a key
@param value: a value to associate with the key
@return: None
@raise: OverwriteError if the key is already present in d.
]
if compare[name[key] in name[d]] begin[:]
<ast.Raise object at 0x7da1b1304130> | keyword[def] identifier[setitem_without_overwrite] ( identifier[d] , identifier[key] , identifier[value] ):
literal[string]
keyword[if] identifier[key] keyword[in] identifier[d] :
keyword[raise] identifier[OverwriteError] ( identifier[key] , identifier[value] , identifier[d] [ identifier[key] ])
keyword[else] :
identifier[dict] . identifier[__setitem__] ( identifier[d] , identifier[key] , identifier[value] ) | def setitem_without_overwrite(d, key, value):
"""
@param d: An instance of dict, that is: isinstance(d, dict)
@param key: a key
@param value: a value to associate with the key
@return: None
@raise: OverwriteError if the key is already present in d.
"""
if key in d:
raise OverwriteError(key, value, d[key]) # depends on [control=['if'], data=['key', 'd']]
else:
dict.__setitem__(d, key, value) |
def save(self, sender):
"""
Save the message and send it out into the wide world.
:param sender:
The :class:`User` that sends the message.
:param parent_msg:
The :class:`Message` that preceded this message in the thread.
:return: The saved :class:`Message`.
"""
um_to_user_list = self.cleaned_data['to']
body = self.cleaned_data['body']
msg = Message.objects.send_message(sender,
um_to_user_list,
body)
return msg | def function[save, parameter[self, sender]]:
constant[
Save the message and send it out into the wide world.
:param sender:
The :class:`User` that sends the message.
:param parent_msg:
The :class:`Message` that preceded this message in the thread.
:return: The saved :class:`Message`.
]
variable[um_to_user_list] assign[=] call[name[self].cleaned_data][constant[to]]
variable[body] assign[=] call[name[self].cleaned_data][constant[body]]
variable[msg] assign[=] call[name[Message].objects.send_message, parameter[name[sender], name[um_to_user_list], name[body]]]
return[name[msg]] | keyword[def] identifier[save] ( identifier[self] , identifier[sender] ):
literal[string]
identifier[um_to_user_list] = identifier[self] . identifier[cleaned_data] [ literal[string] ]
identifier[body] = identifier[self] . identifier[cleaned_data] [ literal[string] ]
identifier[msg] = identifier[Message] . identifier[objects] . identifier[send_message] ( identifier[sender] ,
identifier[um_to_user_list] ,
identifier[body] )
keyword[return] identifier[msg] | def save(self, sender):
"""
Save the message and send it out into the wide world.
:param sender:
The :class:`User` that sends the message.
:param parent_msg:
The :class:`Message` that preceded this message in the thread.
:return: The saved :class:`Message`.
"""
um_to_user_list = self.cleaned_data['to']
body = self.cleaned_data['body']
msg = Message.objects.send_message(sender, um_to_user_list, body)
return msg |
def sort(self, key_or_list, direction=None):
"""
Sorts a cursor object based on the input
:param key_or_list: a list/tuple containing the sort specification,
i.e. ('user_number': -1), or a basestring
:param direction: sorting direction, 1 or -1, needed if key_or_list
is a basestring
:return:
"""
# checking input format
sort_specifier = list()
if isinstance(key_or_list, list):
if direction is not None:
raise ValueError('direction can not be set separately '
'if sorting by multiple fields.')
for pair in key_or_list:
if not (isinstance(pair, list) or isinstance(pair, tuple)):
raise TypeError('key pair should be a list or tuple.')
if not len(pair) == 2:
raise ValueError('Need to be (key, direction) pair')
if not isinstance(pair[0], basestring):
raise TypeError('first item in each key pair must '
'be a string')
if not isinstance(pair[1], int) or not abs(pair[1]) == 1:
raise TypeError('bad sort specification.')
sort_specifier = key_or_list
elif isinstance(key_or_list, basestring):
if direction is not None:
if not isinstance(direction, int) or not abs(direction) == 1:
raise TypeError('bad sort specification.')
else:
# default ASCENDING
direction = 1
sort_specifier = [(key_or_list, direction)]
else:
raise ValueError('Wrong input, pass a field name and a direction,'
' or pass a list of (key, direction) pairs.')
# sorting
_cursordat = self.cursordat
total = len(_cursordat)
pre_sect_stack = list()
for pair in sort_specifier:
is_reverse = bool(1-pair[1])
value_stack = list()
for index, data in enumerate(_cursordat):
# get field value
not_found = None
for key in pair[0].split('.'):
not_found = True
if isinstance(data, dict) and key in data:
data = copy.deepcopy(data[key])
not_found = False
elif isinstance(data, list):
if not is_reverse and len(data) == 1:
# MongoDB treat [{data}] as {data}
# when finding fields
if isinstance(data[0], dict) and key in data[0]:
data = copy.deepcopy(data[0][key])
not_found = False
elif is_reverse:
# MongoDB will keep finding field in reverse mode
for _d in data:
if isinstance(_d, dict) and key in _d:
data = copy.deepcopy(_d[key])
not_found = False
break
if not_found:
break
# parsing data for sorting
if not_found:
# treat no match as None
data = None
value = self._order(data, is_reverse)
# read previous section
pre_sect = pre_sect_stack[index] if pre_sect_stack else 0
# inverse if in reverse mode
# for keeping order as ASCENDING after sort
pre_sect = (total - pre_sect) if is_reverse else pre_sect
_ind = (total - index) if is_reverse else index
value_stack.append((pre_sect, value, _ind))
# sorting cursor data
value_stack.sort(reverse=is_reverse)
ordereddat = list()
sect_stack = list()
sect_id = -1
last_dat = None
for dat in value_stack:
# restore if in reverse mode
_ind = (total - dat[-1]) if is_reverse else dat[-1]
ordereddat.append(_cursordat[_ind])
# define section
# maintain the sorting result in next level sorting
if not dat[1] == last_dat:
sect_id += 1
sect_stack.append(sect_id)
last_dat = dat[1]
# save result for next level sorting
_cursordat = ordereddat
pre_sect_stack = sect_stack
# done
self.cursordat = _cursordat
return self | def function[sort, parameter[self, key_or_list, direction]]:
constant[
Sorts a cursor object based on the input
:param key_or_list: a list/tuple containing the sort specification,
i.e. ('user_number': -1), or a basestring
:param direction: sorting direction, 1 or -1, needed if key_or_list
is a basestring
:return:
]
variable[sort_specifier] assign[=] call[name[list], parameter[]]
if call[name[isinstance], parameter[name[key_or_list], name[list]]] begin[:]
if compare[name[direction] is_not constant[None]] begin[:]
<ast.Raise object at 0x7da20c796530>
for taget[name[pair]] in starred[name[key_or_list]] begin[:]
if <ast.UnaryOp object at 0x7da20c7944c0> begin[:]
<ast.Raise object at 0x7da20c794a00>
if <ast.UnaryOp object at 0x7da20c796260> begin[:]
<ast.Raise object at 0x7da20c7962f0>
if <ast.UnaryOp object at 0x7da20c794520> begin[:]
<ast.Raise object at 0x7da20c796860>
if <ast.BoolOp object at 0x7da20c7950f0> begin[:]
<ast.Raise object at 0x7da20c795330>
variable[sort_specifier] assign[=] name[key_or_list]
variable[_cursordat] assign[=] name[self].cursordat
variable[total] assign[=] call[name[len], parameter[name[_cursordat]]]
variable[pre_sect_stack] assign[=] call[name[list], parameter[]]
for taget[name[pair]] in starred[name[sort_specifier]] begin[:]
variable[is_reverse] assign[=] call[name[bool], parameter[binary_operation[constant[1] - call[name[pair]][constant[1]]]]]
variable[value_stack] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c993ac0>, <ast.Name object at 0x7da20c993dc0>]]] in starred[call[name[enumerate], parameter[name[_cursordat]]]] begin[:]
variable[not_found] assign[=] constant[None]
for taget[name[key]] in starred[call[call[name[pair]][constant[0]].split, parameter[constant[.]]]] begin[:]
variable[not_found] assign[=] constant[True]
if <ast.BoolOp object at 0x7da20c993a60> begin[:]
variable[data] assign[=] call[name[copy].deepcopy, parameter[call[name[data]][name[key]]]]
variable[not_found] assign[=] constant[False]
if name[not_found] begin[:]
break
if name[not_found] begin[:]
variable[data] assign[=] constant[None]
variable[value] assign[=] call[name[self]._order, parameter[name[data], name[is_reverse]]]
variable[pre_sect] assign[=] <ast.IfExp object at 0x7da20e9b3520>
variable[pre_sect] assign[=] <ast.IfExp object at 0x7da20e9b03a0>
variable[_ind] assign[=] <ast.IfExp object at 0x7da20e9b3cd0>
call[name[value_stack].append, parameter[tuple[[<ast.Name object at 0x7da20e9b1ba0>, <ast.Name object at 0x7da20e9b2f80>, <ast.Name object at 0x7da20e9b1990>]]]]
call[name[value_stack].sort, parameter[]]
variable[ordereddat] assign[=] call[name[list], parameter[]]
variable[sect_stack] assign[=] call[name[list], parameter[]]
variable[sect_id] assign[=] <ast.UnaryOp object at 0x7da20e9b2fb0>
variable[last_dat] assign[=] constant[None]
for taget[name[dat]] in starred[name[value_stack]] begin[:]
variable[_ind] assign[=] <ast.IfExp object at 0x7da20e9b3760>
call[name[ordereddat].append, parameter[call[name[_cursordat]][name[_ind]]]]
if <ast.UnaryOp object at 0x7da20e9b0280> begin[:]
<ast.AugAssign object at 0x7da20e9b0bb0>
call[name[sect_stack].append, parameter[name[sect_id]]]
variable[last_dat] assign[=] call[name[dat]][constant[1]]
variable[_cursordat] assign[=] name[ordereddat]
variable[pre_sect_stack] assign[=] name[sect_stack]
name[self].cursordat assign[=] name[_cursordat]
return[name[self]] | keyword[def] identifier[sort] ( identifier[self] , identifier[key_or_list] , identifier[direction] = keyword[None] ):
literal[string]
identifier[sort_specifier] = identifier[list] ()
keyword[if] identifier[isinstance] ( identifier[key_or_list] , identifier[list] ):
keyword[if] identifier[direction] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[for] identifier[pair] keyword[in] identifier[key_or_list] :
keyword[if] keyword[not] ( identifier[isinstance] ( identifier[pair] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[pair] , identifier[tuple] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[len] ( identifier[pair] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[pair] [ literal[int] ], identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[pair] [ literal[int] ], identifier[int] ) keyword[or] keyword[not] identifier[abs] ( identifier[pair] [ literal[int] ])== literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[sort_specifier] = identifier[key_or_list]
keyword[elif] identifier[isinstance] ( identifier[key_or_list] , identifier[basestring] ):
keyword[if] identifier[direction] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[direction] , identifier[int] ) keyword[or] keyword[not] identifier[abs] ( identifier[direction] )== literal[int] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[else] :
identifier[direction] = literal[int]
identifier[sort_specifier] =[( identifier[key_or_list] , identifier[direction] )]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[_cursordat] = identifier[self] . identifier[cursordat]
identifier[total] = identifier[len] ( identifier[_cursordat] )
identifier[pre_sect_stack] = identifier[list] ()
keyword[for] identifier[pair] keyword[in] identifier[sort_specifier] :
identifier[is_reverse] = identifier[bool] ( literal[int] - identifier[pair] [ literal[int] ])
identifier[value_stack] = identifier[list] ()
keyword[for] identifier[index] , identifier[data] keyword[in] identifier[enumerate] ( identifier[_cursordat] ):
identifier[not_found] = keyword[None]
keyword[for] identifier[key] keyword[in] identifier[pair] [ literal[int] ]. identifier[split] ( literal[string] ):
identifier[not_found] = keyword[True]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[dict] ) keyword[and] identifier[key] keyword[in] identifier[data] :
identifier[data] = identifier[copy] . identifier[deepcopy] ( identifier[data] [ identifier[key] ])
identifier[not_found] = keyword[False]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[list] ):
keyword[if] keyword[not] identifier[is_reverse] keyword[and] identifier[len] ( identifier[data] )== literal[int] :
keyword[if] identifier[isinstance] ( identifier[data] [ literal[int] ], identifier[dict] ) keyword[and] identifier[key] keyword[in] identifier[data] [ literal[int] ]:
identifier[data] = identifier[copy] . identifier[deepcopy] ( identifier[data] [ literal[int] ][ identifier[key] ])
identifier[not_found] = keyword[False]
keyword[elif] identifier[is_reverse] :
keyword[for] identifier[_d] keyword[in] identifier[data] :
keyword[if] identifier[isinstance] ( identifier[_d] , identifier[dict] ) keyword[and] identifier[key] keyword[in] identifier[_d] :
identifier[data] = identifier[copy] . identifier[deepcopy] ( identifier[_d] [ identifier[key] ])
identifier[not_found] = keyword[False]
keyword[break]
keyword[if] identifier[not_found] :
keyword[break]
keyword[if] identifier[not_found] :
identifier[data] = keyword[None]
identifier[value] = identifier[self] . identifier[_order] ( identifier[data] , identifier[is_reverse] )
identifier[pre_sect] = identifier[pre_sect_stack] [ identifier[index] ] keyword[if] identifier[pre_sect_stack] keyword[else] literal[int]
identifier[pre_sect] =( identifier[total] - identifier[pre_sect] ) keyword[if] identifier[is_reverse] keyword[else] identifier[pre_sect]
identifier[_ind] =( identifier[total] - identifier[index] ) keyword[if] identifier[is_reverse] keyword[else] identifier[index]
identifier[value_stack] . identifier[append] (( identifier[pre_sect] , identifier[value] , identifier[_ind] ))
identifier[value_stack] . identifier[sort] ( identifier[reverse] = identifier[is_reverse] )
identifier[ordereddat] = identifier[list] ()
identifier[sect_stack] = identifier[list] ()
identifier[sect_id] =- literal[int]
identifier[last_dat] = keyword[None]
keyword[for] identifier[dat] keyword[in] identifier[value_stack] :
identifier[_ind] =( identifier[total] - identifier[dat] [- literal[int] ]) keyword[if] identifier[is_reverse] keyword[else] identifier[dat] [- literal[int] ]
identifier[ordereddat] . identifier[append] ( identifier[_cursordat] [ identifier[_ind] ])
keyword[if] keyword[not] identifier[dat] [ literal[int] ]== identifier[last_dat] :
identifier[sect_id] += literal[int]
identifier[sect_stack] . identifier[append] ( identifier[sect_id] )
identifier[last_dat] = identifier[dat] [ literal[int] ]
identifier[_cursordat] = identifier[ordereddat]
identifier[pre_sect_stack] = identifier[sect_stack]
identifier[self] . identifier[cursordat] = identifier[_cursordat]
keyword[return] identifier[self] | def sort(self, key_or_list, direction=None):
"""
Sorts a cursor object based on the input
:param key_or_list: a list/tuple containing the sort specification,
i.e. ('user_number': -1), or a basestring
:param direction: sorting direction, 1 or -1, needed if key_or_list
is a basestring
:return:
"""
# checking input format
sort_specifier = list()
if isinstance(key_or_list, list):
if direction is not None:
raise ValueError('direction can not be set separately if sorting by multiple fields.') # depends on [control=['if'], data=[]]
for pair in key_or_list:
if not (isinstance(pair, list) or isinstance(pair, tuple)):
raise TypeError('key pair should be a list or tuple.') # depends on [control=['if'], data=[]]
if not len(pair) == 2:
raise ValueError('Need to be (key, direction) pair') # depends on [control=['if'], data=[]]
if not isinstance(pair[0], basestring):
raise TypeError('first item in each key pair must be a string') # depends on [control=['if'], data=[]]
if not isinstance(pair[1], int) or not abs(pair[1]) == 1:
raise TypeError('bad sort specification.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pair']]
sort_specifier = key_or_list # depends on [control=['if'], data=[]]
elif isinstance(key_or_list, basestring):
if direction is not None:
if not isinstance(direction, int) or not abs(direction) == 1:
raise TypeError('bad sort specification.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['direction']]
else:
# default ASCENDING
direction = 1
sort_specifier = [(key_or_list, direction)] # depends on [control=['if'], data=[]]
else:
raise ValueError('Wrong input, pass a field name and a direction, or pass a list of (key, direction) pairs.')
# sorting
_cursordat = self.cursordat
total = len(_cursordat)
pre_sect_stack = list()
for pair in sort_specifier:
is_reverse = bool(1 - pair[1])
value_stack = list()
for (index, data) in enumerate(_cursordat):
# get field value
not_found = None
for key in pair[0].split('.'):
not_found = True
if isinstance(data, dict) and key in data:
data = copy.deepcopy(data[key])
not_found = False # depends on [control=['if'], data=[]]
elif isinstance(data, list):
if not is_reverse and len(data) == 1:
# MongoDB treat [{data}] as {data}
# when finding fields
if isinstance(data[0], dict) and key in data[0]:
data = copy.deepcopy(data[0][key])
not_found = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif is_reverse:
# MongoDB will keep finding field in reverse mode
for _d in data:
if isinstance(_d, dict) and key in _d:
data = copy.deepcopy(_d[key])
not_found = False
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_d']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not_found:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# parsing data for sorting
if not_found:
# treat no match as None
data = None # depends on [control=['if'], data=[]]
value = self._order(data, is_reverse)
# read previous section
pre_sect = pre_sect_stack[index] if pre_sect_stack else 0
# inverse if in reverse mode
# for keeping order as ASCENDING after sort
pre_sect = total - pre_sect if is_reverse else pre_sect
_ind = total - index if is_reverse else index
value_stack.append((pre_sect, value, _ind)) # depends on [control=['for'], data=[]]
# sorting cursor data
value_stack.sort(reverse=is_reverse)
ordereddat = list()
sect_stack = list()
sect_id = -1
last_dat = None
for dat in value_stack:
# restore if in reverse mode
_ind = total - dat[-1] if is_reverse else dat[-1]
ordereddat.append(_cursordat[_ind])
# define section
# maintain the sorting result in next level sorting
if not dat[1] == last_dat:
sect_id += 1 # depends on [control=['if'], data=[]]
sect_stack.append(sect_id)
last_dat = dat[1] # depends on [control=['for'], data=['dat']]
# save result for next level sorting
_cursordat = ordereddat
pre_sect_stack = sect_stack # depends on [control=['for'], data=['pair']]
# done
self.cursordat = _cursordat
return self |
def get_template_dir():
"""Find and return the ntc-templates/templates dir."""
try:
template_dir = os.path.expanduser(os.environ["NET_TEXTFSM"])
index = os.path.join(template_dir, "index")
if not os.path.isfile(index):
# Assume only base ./ntc-templates specified
template_dir = os.path.join(template_dir, "templates")
except KeyError:
# Construct path ~/ntc-templates/templates
home_dir = os.path.expanduser("~")
template_dir = os.path.join(home_dir, "ntc-templates", "templates")
index = os.path.join(template_dir, "index")
if not os.path.isdir(template_dir) or not os.path.isfile(index):
msg = """
Valid ntc-templates not found, please install https://github.com/networktocode/ntc-templates
and then set the NET_TEXTFSM environment variable to point to the ./ntc-templates/templates
directory."""
raise ValueError(msg)
return os.path.abspath(template_dir) | def function[get_template_dir, parameter[]]:
constant[Find and return the ntc-templates/templates dir.]
<ast.Try object at 0x7da2054a6ce0>
variable[index] assign[=] call[name[os].path.join, parameter[name[template_dir], constant[index]]]
if <ast.BoolOp object at 0x7da2054a7880> begin[:]
variable[msg] assign[=] constant[
Valid ntc-templates not found, please install https://github.com/networktocode/ntc-templates
and then set the NET_TEXTFSM environment variable to point to the ./ntc-templates/templates
directory.]
<ast.Raise object at 0x7da1b1f25930>
return[call[name[os].path.abspath, parameter[name[template_dir]]]] | keyword[def] identifier[get_template_dir] ():
literal[string]
keyword[try] :
identifier[template_dir] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[os] . identifier[environ] [ literal[string] ])
identifier[index] = identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[index] ):
identifier[template_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] )
keyword[except] identifier[KeyError] :
identifier[home_dir] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
identifier[template_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[home_dir] , literal[string] , literal[string] )
identifier[index] = identifier[os] . identifier[path] . identifier[join] ( identifier[template_dir] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[template_dir] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[index] ):
identifier[msg] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[return] identifier[os] . identifier[path] . identifier[abspath] ( identifier[template_dir] ) | def get_template_dir():
"""Find and return the ntc-templates/templates dir."""
try:
template_dir = os.path.expanduser(os.environ['NET_TEXTFSM'])
index = os.path.join(template_dir, 'index')
if not os.path.isfile(index):
# Assume only base ./ntc-templates specified
template_dir = os.path.join(template_dir, 'templates') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
# Construct path ~/ntc-templates/templates
home_dir = os.path.expanduser('~')
template_dir = os.path.join(home_dir, 'ntc-templates', 'templates') # depends on [control=['except'], data=[]]
index = os.path.join(template_dir, 'index')
if not os.path.isdir(template_dir) or not os.path.isfile(index):
msg = '\nValid ntc-templates not found, please install https://github.com/networktocode/ntc-templates\nand then set the NET_TEXTFSM environment variable to point to the ./ntc-templates/templates\ndirectory.'
raise ValueError(msg) # depends on [control=['if'], data=[]]
return os.path.abspath(template_dir) |
def sample(self, size=1):
""" Sample rigid transform random variables.
Parameters
----------
size : int
number of sample to take
Returns
-------
:obj:`list` of :obj:`RigidTransform`
sampled rigid transformations
"""
samples = []
for i in range(size):
# sample random pose
xi = self._r_xi_rv.rvs(size=1)
S_xi = skew(xi)
R_sample = scipy.linalg.expm(S_xi)
t_sample = self._t_rv.rvs(size=1)
samples.append(RigidTransform(rotation=R_sample,
translation=t_sample,
from_frame=self._from_frame,
to_frame=self._to_frame))
# not a list if only 1 sample
if size == 1 and len(samples) > 0:
return samples[0]
return samples | def function[sample, parameter[self, size]]:
constant[ Sample rigid transform random variables.
Parameters
----------
size : int
number of sample to take
Returns
-------
:obj:`list` of :obj:`RigidTransform`
sampled rigid transformations
]
variable[samples] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[size]]]] begin[:]
variable[xi] assign[=] call[name[self]._r_xi_rv.rvs, parameter[]]
variable[S_xi] assign[=] call[name[skew], parameter[name[xi]]]
variable[R_sample] assign[=] call[name[scipy].linalg.expm, parameter[name[S_xi]]]
variable[t_sample] assign[=] call[name[self]._t_rv.rvs, parameter[]]
call[name[samples].append, parameter[call[name[RigidTransform], parameter[]]]]
if <ast.BoolOp object at 0x7da1b12f3190> begin[:]
return[call[name[samples]][constant[0]]]
return[name[samples]] | keyword[def] identifier[sample] ( identifier[self] , identifier[size] = literal[int] ):
literal[string]
identifier[samples] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[size] ):
identifier[xi] = identifier[self] . identifier[_r_xi_rv] . identifier[rvs] ( identifier[size] = literal[int] )
identifier[S_xi] = identifier[skew] ( identifier[xi] )
identifier[R_sample] = identifier[scipy] . identifier[linalg] . identifier[expm] ( identifier[S_xi] )
identifier[t_sample] = identifier[self] . identifier[_t_rv] . identifier[rvs] ( identifier[size] = literal[int] )
identifier[samples] . identifier[append] ( identifier[RigidTransform] ( identifier[rotation] = identifier[R_sample] ,
identifier[translation] = identifier[t_sample] ,
identifier[from_frame] = identifier[self] . identifier[_from_frame] ,
identifier[to_frame] = identifier[self] . identifier[_to_frame] ))
keyword[if] identifier[size] == literal[int] keyword[and] identifier[len] ( identifier[samples] )> literal[int] :
keyword[return] identifier[samples] [ literal[int] ]
keyword[return] identifier[samples] | def sample(self, size=1):
""" Sample rigid transform random variables.
Parameters
----------
size : int
number of sample to take
Returns
-------
:obj:`list` of :obj:`RigidTransform`
sampled rigid transformations
"""
samples = []
for i in range(size):
# sample random pose
xi = self._r_xi_rv.rvs(size=1)
S_xi = skew(xi)
R_sample = scipy.linalg.expm(S_xi)
t_sample = self._t_rv.rvs(size=1)
samples.append(RigidTransform(rotation=R_sample, translation=t_sample, from_frame=self._from_frame, to_frame=self._to_frame)) # depends on [control=['for'], data=[]]
# not a list if only 1 sample
if size == 1 and len(samples) > 0:
return samples[0] # depends on [control=['if'], data=[]]
return samples |
def _parse_playlist(self, playlist):
'''
Parse search json-data and create a :class:`Playlist` object.
'''
if playlist['Picture']:
cover_url = '%s70_%s' % (grooveshark.const.PLAYLIST_COVER_URL,
playlist['Picture'])
else:
cover_url = None
return Playlist(
playlist['PlaylistID'],
playlist['Name'],
cover_url,
self.connection) | def function[_parse_playlist, parameter[self, playlist]]:
constant[
Parse search json-data and create a :class:`Playlist` object.
]
if call[name[playlist]][constant[Picture]] begin[:]
variable[cover_url] assign[=] binary_operation[constant[%s70_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b287e3b0>, <ast.Subscript object at 0x7da1b287e4a0>]]]
return[call[name[Playlist], parameter[call[name[playlist]][constant[PlaylistID]], call[name[playlist]][constant[Name]], name[cover_url], name[self].connection]]] | keyword[def] identifier[_parse_playlist] ( identifier[self] , identifier[playlist] ):
literal[string]
keyword[if] identifier[playlist] [ literal[string] ]:
identifier[cover_url] = literal[string] %( identifier[grooveshark] . identifier[const] . identifier[PLAYLIST_COVER_URL] ,
identifier[playlist] [ literal[string] ])
keyword[else] :
identifier[cover_url] = keyword[None]
keyword[return] identifier[Playlist] (
identifier[playlist] [ literal[string] ],
identifier[playlist] [ literal[string] ],
identifier[cover_url] ,
identifier[self] . identifier[connection] ) | def _parse_playlist(self, playlist):
"""
Parse search json-data and create a :class:`Playlist` object.
"""
if playlist['Picture']:
cover_url = '%s70_%s' % (grooveshark.const.PLAYLIST_COVER_URL, playlist['Picture']) # depends on [control=['if'], data=[]]
else:
cover_url = None
return Playlist(playlist['PlaylistID'], playlist['Name'], cover_url, self.connection) |
def boundary_cell_fractions(self):
"""Return a tuple of contained fractions of boundary cells.
Since the outermost grid points can have any distance to the
boundary of the partitioned set, the "natural" outermost cell
around these points can either be cropped or extended. This
property is a tuple of (float, float) tuples, one entry per
dimension, where the fractions of the left- and rightmost
cells inside the set are stored. If a grid point lies exactly
on the boundary, the value is 1/2 since the cell is cut in half.
Otherwise, any value larger than 1/2 is possible.
Returns
-------
on_bdry : tuple of 2-tuples of floats
Each 2-tuple contains the fraction of the leftmost
(first entry) and rightmost (second entry) cell in the
partitioned set in the corresponding dimension.
See Also
--------
cell_boundary_vecs
Examples
--------
We create a partition of the rectangle [0, 1.5] x [-2, 2] with
the grid points [0, 1] x [-1, 0, 2]. The "natural" cells at the
boundary would be:
[-0.5, 0.5] and [0.5, 1.5] in the first axis
[-1.5, -0.5] and [1, 3] in the second axis
Thus, in the first axis, the fractions contained in [0, 1.5]
are 0.5 and 1, and in the second axis, [-2, 2] contains the
fractions 1.5 and 0.5.
>>> rect = odl.IntervalProd([0, -2], [1.5, 2])
>>> grid = odl.RectGrid([0, 1], [-1, 0, 2])
>>> part = odl.RectPartition(rect, grid)
>>> part.boundary_cell_fractions
((0.5, 1.0), (1.5, 0.5))
"""
frac_list = []
for ax, (cvec, bmin, bmax) in enumerate(zip(
self.grid.coord_vectors, self.set.min_pt, self.set.max_pt)):
# Degenerate axes have a value of 1.0 (this is used as weight
# in integration formulas later)
if len(cvec) == 1:
frac_list.append((1.0, 1.0))
else:
left_frac = 0.5 + (cvec[0] - bmin) / (cvec[1] - cvec[0])
right_frac = 0.5 + (bmax - cvec[-1]) / (cvec[-1] - cvec[-2])
frac_list.append((left_frac, right_frac))
return tuple(frac_list) | def function[boundary_cell_fractions, parameter[self]]:
constant[Return a tuple of contained fractions of boundary cells.
Since the outermost grid points can have any distance to the
boundary of the partitioned set, the "natural" outermost cell
around these points can either be cropped or extended. This
property is a tuple of (float, float) tuples, one entry per
dimension, where the fractions of the left- and rightmost
cells inside the set are stored. If a grid point lies exactly
on the boundary, the value is 1/2 since the cell is cut in half.
Otherwise, any value larger than 1/2 is possible.
Returns
-------
on_bdry : tuple of 2-tuples of floats
Each 2-tuple contains the fraction of the leftmost
(first entry) and rightmost (second entry) cell in the
partitioned set in the corresponding dimension.
See Also
--------
cell_boundary_vecs
Examples
--------
We create a partition of the rectangle [0, 1.5] x [-2, 2] with
the grid points [0, 1] x [-1, 0, 2]. The "natural" cells at the
boundary would be:
[-0.5, 0.5] and [0.5, 1.5] in the first axis
[-1.5, -0.5] and [1, 3] in the second axis
Thus, in the first axis, the fractions contained in [0, 1.5]
are 0.5 and 1, and in the second axis, [-2, 2] contains the
fractions 1.5 and 0.5.
>>> rect = odl.IntervalProd([0, -2], [1.5, 2])
>>> grid = odl.RectGrid([0, 1], [-1, 0, 2])
>>> part = odl.RectPartition(rect, grid)
>>> part.boundary_cell_fractions
((0.5, 1.0), (1.5, 0.5))
]
variable[frac_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1ea1c00>, <ast.Tuple object at 0x7da1b1ea1cf0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[self].grid.coord_vectors, name[self].set.min_pt, name[self].set.max_pt]]]]] begin[:]
if compare[call[name[len], parameter[name[cvec]]] equal[==] constant[1]] begin[:]
call[name[frac_list].append, parameter[tuple[[<ast.Constant object at 0x7da1b1e9b6d0>, <ast.Constant object at 0x7da1b1e997b0>]]]]
return[call[name[tuple], parameter[name[frac_list]]]] | keyword[def] identifier[boundary_cell_fractions] ( identifier[self] ):
literal[string]
identifier[frac_list] =[]
keyword[for] identifier[ax] ,( identifier[cvec] , identifier[bmin] , identifier[bmax] ) keyword[in] identifier[enumerate] ( identifier[zip] (
identifier[self] . identifier[grid] . identifier[coord_vectors] , identifier[self] . identifier[set] . identifier[min_pt] , identifier[self] . identifier[set] . identifier[max_pt] )):
keyword[if] identifier[len] ( identifier[cvec] )== literal[int] :
identifier[frac_list] . identifier[append] (( literal[int] , literal[int] ))
keyword[else] :
identifier[left_frac] = literal[int] +( identifier[cvec] [ literal[int] ]- identifier[bmin] )/( identifier[cvec] [ literal[int] ]- identifier[cvec] [ literal[int] ])
identifier[right_frac] = literal[int] +( identifier[bmax] - identifier[cvec] [- literal[int] ])/( identifier[cvec] [- literal[int] ]- identifier[cvec] [- literal[int] ])
identifier[frac_list] . identifier[append] (( identifier[left_frac] , identifier[right_frac] ))
keyword[return] identifier[tuple] ( identifier[frac_list] ) | def boundary_cell_fractions(self):
"""Return a tuple of contained fractions of boundary cells.
Since the outermost grid points can have any distance to the
boundary of the partitioned set, the "natural" outermost cell
around these points can either be cropped or extended. This
property is a tuple of (float, float) tuples, one entry per
dimension, where the fractions of the left- and rightmost
cells inside the set are stored. If a grid point lies exactly
on the boundary, the value is 1/2 since the cell is cut in half.
Otherwise, any value larger than 1/2 is possible.
Returns
-------
on_bdry : tuple of 2-tuples of floats
Each 2-tuple contains the fraction of the leftmost
(first entry) and rightmost (second entry) cell in the
partitioned set in the corresponding dimension.
See Also
--------
cell_boundary_vecs
Examples
--------
We create a partition of the rectangle [0, 1.5] x [-2, 2] with
the grid points [0, 1] x [-1, 0, 2]. The "natural" cells at the
boundary would be:
[-0.5, 0.5] and [0.5, 1.5] in the first axis
[-1.5, -0.5] and [1, 3] in the second axis
Thus, in the first axis, the fractions contained in [0, 1.5]
are 0.5 and 1, and in the second axis, [-2, 2] contains the
fractions 1.5 and 0.5.
>>> rect = odl.IntervalProd([0, -2], [1.5, 2])
>>> grid = odl.RectGrid([0, 1], [-1, 0, 2])
>>> part = odl.RectPartition(rect, grid)
>>> part.boundary_cell_fractions
((0.5, 1.0), (1.5, 0.5))
"""
frac_list = []
for (ax, (cvec, bmin, bmax)) in enumerate(zip(self.grid.coord_vectors, self.set.min_pt, self.set.max_pt)):
# Degenerate axes have a value of 1.0 (this is used as weight
# in integration formulas later)
if len(cvec) == 1:
frac_list.append((1.0, 1.0)) # depends on [control=['if'], data=[]]
else:
left_frac = 0.5 + (cvec[0] - bmin) / (cvec[1] - cvec[0])
right_frac = 0.5 + (bmax - cvec[-1]) / (cvec[-1] - cvec[-2])
frac_list.append((left_frac, right_frac)) # depends on [control=['for'], data=[]]
return tuple(frac_list) |
def get_middleware_resolvers(middlewares: Tuple[Any, ...]) -> Iterator[Callable]:
"""Get a list of resolver functions from a list of classes or functions."""
for middleware in middlewares:
if isfunction(middleware):
yield middleware
else: # middleware provided as object with 'resolve' method
resolver_func = getattr(middleware, "resolve", None)
if resolver_func is not None:
yield resolver_func | def function[get_middleware_resolvers, parameter[middlewares]]:
constant[Get a list of resolver functions from a list of classes or functions.]
for taget[name[middleware]] in starred[name[middlewares]] begin[:]
if call[name[isfunction], parameter[name[middleware]]] begin[:]
<ast.Yield object at 0x7da1b1d482b0> | keyword[def] identifier[get_middleware_resolvers] ( identifier[middlewares] : identifier[Tuple] [ identifier[Any] ,...])-> identifier[Iterator] [ identifier[Callable] ]:
literal[string]
keyword[for] identifier[middleware] keyword[in] identifier[middlewares] :
keyword[if] identifier[isfunction] ( identifier[middleware] ):
keyword[yield] identifier[middleware]
keyword[else] :
identifier[resolver_func] = identifier[getattr] ( identifier[middleware] , literal[string] , keyword[None] )
keyword[if] identifier[resolver_func] keyword[is] keyword[not] keyword[None] :
keyword[yield] identifier[resolver_func] | def get_middleware_resolvers(middlewares: Tuple[Any, ...]) -> Iterator[Callable]:
"""Get a list of resolver functions from a list of classes or functions."""
for middleware in middlewares:
if isfunction(middleware):
yield middleware # depends on [control=['if'], data=[]]
else: # middleware provided as object with 'resolve' method
resolver_func = getattr(middleware, 'resolve', None)
if resolver_func is not None:
yield resolver_func # depends on [control=['if'], data=['resolver_func']] # depends on [control=['for'], data=['middleware']] |
def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613
'''
Find the first file to match the path and ref, read the file out of git
and send the path to the newly cached file
'''
return _gitfs().find_file(path, tgt_env=tgt_env, **kwargs) | def function[find_file, parameter[path, tgt_env]]:
constant[
Find the first file to match the path and ref, read the file out of git
and send the path to the newly cached file
]
return[call[call[name[_gitfs], parameter[]].find_file, parameter[name[path]]]] | keyword[def] identifier[find_file] ( identifier[path] , identifier[tgt_env] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[_gitfs] (). identifier[find_file] ( identifier[path] , identifier[tgt_env] = identifier[tgt_env] ,** identifier[kwargs] ) | def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613
'\n Find the first file to match the path and ref, read the file out of git\n and send the path to the newly cached file\n '
return _gitfs().find_file(path, tgt_env=tgt_env, **kwargs) |
def create_q(token):
"""
Creates the Q() object.
"""
meta = getattr(token, 'meta', None)
query = getattr(token, 'query', '')
wildcards = None
if isinstance(query, six.string_types): # Unicode -> Quoted string
search = query
else: # List -> No quoted string (possible wildcards)
if len(query) == 1:
search = query[0]
elif len(query) == 3:
wildcards = 'BOTH'
search = query[1]
elif len(query) == 2:
if query[0] == '*':
wildcards = 'START'
search = query[1]
else:
wildcards = 'END'
search = query[0]
# Ignore short term and stop words
if (len(search) < 3 and not search.isdigit()) or search in STOP_WORDS:
return Q()
if not meta:
q = Q()
for field in SEARCH_FIELDS:
q |= Q(**{'%s__icontains' % field: search})
return q
if meta == 'category':
if wildcards == 'BOTH':
return (Q(categories__title__icontains=search) |
Q(categories__slug__icontains=search))
elif wildcards == 'START':
return (Q(categories__title__iendswith=search) |
Q(categories__slug__iendswith=search))
elif wildcards == 'END':
return (Q(categories__title__istartswith=search) |
Q(categories__slug__istartswith=search))
else:
return (Q(categories__title__iexact=search) |
Q(categories__slug__iexact=search))
elif meta == 'author':
if wildcards == 'BOTH':
return Q(**{'authors__%s__icontains' % Author.USERNAME_FIELD:
search})
elif wildcards == 'START':
return Q(**{'authors__%s__iendswith' % Author.USERNAME_FIELD:
search})
elif wildcards == 'END':
return Q(**{'authors__%s__istartswith' % Author.USERNAME_FIELD:
search})
else:
return Q(**{'authors__%s__iexact' % Author.USERNAME_FIELD:
search})
elif meta == 'tag': # TODO: tags ignore wildcards
return Q(tags__icontains=search) | def function[create_q, parameter[token]]:
constant[
Creates the Q() object.
]
variable[meta] assign[=] call[name[getattr], parameter[name[token], constant[meta], constant[None]]]
variable[query] assign[=] call[name[getattr], parameter[name[token], constant[query], constant[]]]
variable[wildcards] assign[=] constant[None]
if call[name[isinstance], parameter[name[query], name[six].string_types]] begin[:]
variable[search] assign[=] name[query]
if <ast.BoolOp object at 0x7da1b1d45ba0> begin[:]
return[call[name[Q], parameter[]]]
if <ast.UnaryOp object at 0x7da1b1d47c70> begin[:]
variable[q] assign[=] call[name[Q], parameter[]]
for taget[name[field]] in starred[name[SEARCH_FIELDS]] begin[:]
<ast.AugAssign object at 0x7da1b1d44df0>
return[name[q]]
if compare[name[meta] equal[==] constant[category]] begin[:]
if compare[name[wildcards] equal[==] constant[BOTH]] begin[:]
return[binary_operation[call[name[Q], parameter[]] <ast.BitOr object at 0x7da2590d6aa0> call[name[Q], parameter[]]]] | keyword[def] identifier[create_q] ( identifier[token] ):
literal[string]
identifier[meta] = identifier[getattr] ( identifier[token] , literal[string] , keyword[None] )
identifier[query] = identifier[getattr] ( identifier[token] , literal[string] , literal[string] )
identifier[wildcards] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[query] , identifier[six] . identifier[string_types] ):
identifier[search] = identifier[query]
keyword[else] :
keyword[if] identifier[len] ( identifier[query] )== literal[int] :
identifier[search] = identifier[query] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[query] )== literal[int] :
identifier[wildcards] = literal[string]
identifier[search] = identifier[query] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[query] )== literal[int] :
keyword[if] identifier[query] [ literal[int] ]== literal[string] :
identifier[wildcards] = literal[string]
identifier[search] = identifier[query] [ literal[int] ]
keyword[else] :
identifier[wildcards] = literal[string]
identifier[search] = identifier[query] [ literal[int] ]
keyword[if] ( identifier[len] ( identifier[search] )< literal[int] keyword[and] keyword[not] identifier[search] . identifier[isdigit] ()) keyword[or] identifier[search] keyword[in] identifier[STOP_WORDS] :
keyword[return] identifier[Q] ()
keyword[if] keyword[not] identifier[meta] :
identifier[q] = identifier[Q] ()
keyword[for] identifier[field] keyword[in] identifier[SEARCH_FIELDS] :
identifier[q] |= identifier[Q] (**{ literal[string] % identifier[field] : identifier[search] })
keyword[return] identifier[q]
keyword[if] identifier[meta] == literal[string] :
keyword[if] identifier[wildcards] == literal[string] :
keyword[return] ( identifier[Q] ( identifier[categories__title__icontains] = identifier[search] )|
identifier[Q] ( identifier[categories__slug__icontains] = identifier[search] ))
keyword[elif] identifier[wildcards] == literal[string] :
keyword[return] ( identifier[Q] ( identifier[categories__title__iendswith] = identifier[search] )|
identifier[Q] ( identifier[categories__slug__iendswith] = identifier[search] ))
keyword[elif] identifier[wildcards] == literal[string] :
keyword[return] ( identifier[Q] ( identifier[categories__title__istartswith] = identifier[search] )|
identifier[Q] ( identifier[categories__slug__istartswith] = identifier[search] ))
keyword[else] :
keyword[return] ( identifier[Q] ( identifier[categories__title__iexact] = identifier[search] )|
identifier[Q] ( identifier[categories__slug__iexact] = identifier[search] ))
keyword[elif] identifier[meta] == literal[string] :
keyword[if] identifier[wildcards] == literal[string] :
keyword[return] identifier[Q] (**{ literal[string] % identifier[Author] . identifier[USERNAME_FIELD] :
identifier[search] })
keyword[elif] identifier[wildcards] == literal[string] :
keyword[return] identifier[Q] (**{ literal[string] % identifier[Author] . identifier[USERNAME_FIELD] :
identifier[search] })
keyword[elif] identifier[wildcards] == literal[string] :
keyword[return] identifier[Q] (**{ literal[string] % identifier[Author] . identifier[USERNAME_FIELD] :
identifier[search] })
keyword[else] :
keyword[return] identifier[Q] (**{ literal[string] % identifier[Author] . identifier[USERNAME_FIELD] :
identifier[search] })
keyword[elif] identifier[meta] == literal[string] :
keyword[return] identifier[Q] ( identifier[tags__icontains] = identifier[search] ) | def create_q(token):
"""
Creates the Q() object.
"""
meta = getattr(token, 'meta', None)
query = getattr(token, 'query', '')
wildcards = None
if isinstance(query, six.string_types): # Unicode -> Quoted string
search = query # depends on [control=['if'], data=[]] # List -> No quoted string (possible wildcards)
elif len(query) == 1:
search = query[0] # depends on [control=['if'], data=[]]
elif len(query) == 3:
wildcards = 'BOTH'
search = query[1] # depends on [control=['if'], data=[]]
elif len(query) == 2:
if query[0] == '*':
wildcards = 'START'
search = query[1] # depends on [control=['if'], data=[]]
else:
wildcards = 'END'
search = query[0] # depends on [control=['if'], data=[]]
# Ignore short term and stop words
if len(search) < 3 and (not search.isdigit()) or search in STOP_WORDS:
return Q() # depends on [control=['if'], data=[]]
if not meta:
q = Q()
for field in SEARCH_FIELDS:
q |= Q(**{'%s__icontains' % field: search}) # depends on [control=['for'], data=['field']]
return q # depends on [control=['if'], data=[]]
if meta == 'category':
if wildcards == 'BOTH':
return Q(categories__title__icontains=search) | Q(categories__slug__icontains=search) # depends on [control=['if'], data=[]]
elif wildcards == 'START':
return Q(categories__title__iendswith=search) | Q(categories__slug__iendswith=search) # depends on [control=['if'], data=[]]
elif wildcards == 'END':
return Q(categories__title__istartswith=search) | Q(categories__slug__istartswith=search) # depends on [control=['if'], data=[]]
else:
return Q(categories__title__iexact=search) | Q(categories__slug__iexact=search) # depends on [control=['if'], data=[]]
elif meta == 'author':
if wildcards == 'BOTH':
return Q(**{'authors__%s__icontains' % Author.USERNAME_FIELD: search}) # depends on [control=['if'], data=[]]
elif wildcards == 'START':
return Q(**{'authors__%s__iendswith' % Author.USERNAME_FIELD: search}) # depends on [control=['if'], data=[]]
elif wildcards == 'END':
return Q(**{'authors__%s__istartswith' % Author.USERNAME_FIELD: search}) # depends on [control=['if'], data=[]]
else:
return Q(**{'authors__%s__iexact' % Author.USERNAME_FIELD: search}) # depends on [control=['if'], data=[]]
elif meta == 'tag': # TODO: tags ignore wildcards
return Q(tags__icontains=search) # depends on [control=['if'], data=[]] |
def filter(self, record):
"""Change the severity of selected log records."""
if isinstance(record.msg, basestring):
message = record.msg.lower()
if all(kw in message for kw in self.KEYWORDS):
record.levelname = 'DEBUG'
record.levelno = logging.DEBUG
return 1 | def function[filter, parameter[self, record]]:
constant[Change the severity of selected log records.]
if call[name[isinstance], parameter[name[record].msg, name[basestring]]] begin[:]
variable[message] assign[=] call[name[record].msg.lower, parameter[]]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b031d450>]] begin[:]
name[record].levelname assign[=] constant[DEBUG]
name[record].levelno assign[=] name[logging].DEBUG
return[constant[1]] | keyword[def] identifier[filter] ( identifier[self] , identifier[record] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[record] . identifier[msg] , identifier[basestring] ):
identifier[message] = identifier[record] . identifier[msg] . identifier[lower] ()
keyword[if] identifier[all] ( identifier[kw] keyword[in] identifier[message] keyword[for] identifier[kw] keyword[in] identifier[self] . identifier[KEYWORDS] ):
identifier[record] . identifier[levelname] = literal[string]
identifier[record] . identifier[levelno] = identifier[logging] . identifier[DEBUG]
keyword[return] literal[int] | def filter(self, record):
"""Change the severity of selected log records."""
if isinstance(record.msg, basestring):
message = record.msg.lower()
if all((kw in message for kw in self.KEYWORDS)):
record.levelname = 'DEBUG'
record.levelno = logging.DEBUG # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return 1 |
def plotcommand(cosmology='WMAP5', plotname=None):
""" Example ways to interrogate the dataset and plot the commah output """
# Plot the c-M relation as a functon of redshift
xarray = 10**(np.arange(1, 15, 0.2))
yval = 'c'
# Specify the redshift range
zarray = np.arange(0, 5, 0.5)
xtitle = r"Halo Mass (M$_{sol}$)"
ytitle = r"Concentration"
linelabel = "z="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
plt.ylim([2, 30])
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray)
# Access the column yval from the data file
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray, label=linelabel+str(zval), color=colors[zind])
# Overplot the D08 predictions in black
ax.plot(xarray, commah.commah.cduffy(zval, xarray), color="black")
ax.set_xscale('log')
ax.set_yscale('log')
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_CM_relation.png'" % (plotname))
fig.savefig(plotname+"_CM_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the c-z relation as a function of mass (so always Mz=M0)
xarray = 10**(np.arange(0, 1, 0.05)) - 1
yval = 'c'
# Specify the mass range
zarray = 10**np.arange(6, 14, 2)
xtitle = r"Redshift"
ytitle = r"NFW Concentration"
linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval)
# Access the column yval from the data file
yarray = output[yval].flatten()
# Plot each line in turn with different colours
ax.plot(xarray, yarray,
label=linelabel+"{0:.1f}".format(np.log10(zval)),
color=colors[zind],)
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_Cz_relation.png'" % (plotname))
fig.savefig(plotname+"_Cz_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the zf-z relation for different masses (so always Mz=M0)
xarray = 10**(np.arange(0, 1, 0.05)) - 1
yval = 'zf'
# Specify the mass range
zarray = 10**np.arange(6, 14, 2)
xtitle = r"Redshift"
ytitle = r"Formation Redshift"
linelabel = r"log$_{10}$ M$_{z}$(M$_{sol}$)="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray,
label=linelabel+"{0:.1f}".format(np.log10(zval)),
color=colors[zind],)
leg = ax.legend(loc=2)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_zfz_relation.png'" % (plotname))
fig.savefig(plotname+"_zfz_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the dM/dt-z relation for different masses (so always Mz=M0)
xarray = 10**(np.arange(0, 1, 0.05)) - 1
yval = 'dMdt'
# Specify the mass range
zarray = 10**np.arange(10, 14, 0.5)
xtitle = r"log$_{10}$ (1+z)"
ytitle = r"log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$"
linelabel = r"log$_{10}$ M$_z$(M$_{sol}$)="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
cosmo = commah.getcosmo(cosmology)
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval,
com=False, mah=True)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(np.log10(xarray+1.), np.log10(yarray),
label=linelabel+"{0:.1f}".format(np.log10(zval)),
color=colors[zind],)
# Plot the semi-analytic approximate formula from Correa et al 2015b
semianalytic_approx = 71.6 * (zval / 1e12) * (cosmo['h'] / 0.7) *\
(-0.24 + 0.75 * (xarray + 1)) * np.sqrt(
cosmo['omega_M_0'] * (xarray + 1)**3 + cosmo['omega_lambda_0'])
ax.plot(np.log10(xarray + 1), np.log10(semianalytic_approx),
color='black')
leg = ax.legend(loc=2)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_dMdtz_relation.png'" % (plotname))
fig.savefig(plotname+"_dMdtz_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the dMdt-M relation as a function of redshift
xarray = 10**(np.arange(10, 14, 0.5))
yval = 'dMdt'
# Specify the redshift range
zarray = np.arange(0, 5, 0.5)
xtitle = r"Halo Mass M$_{sol}$"
ytitle = r"Accretion Rate M$_{sol}$ yr$^{-1}$"
linelabel = "z="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray,
com=False, mah=True)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray, label=linelabel+str(zval),
color=colors[zind],)
ax.set_xscale('log')
ax.set_yscale('log')
leg = ax.legend(loc=2)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_MAH_M_relation.png'" % (plotname))
fig.savefig(plotname+"_MAH_M_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the (dM/M)dt-M relation as a function of redshift
xarray = 10**(np.arange(10, 14, 0.5))
yval = 'dMdt'
# Specify the redshift range
zarray = np.arange(0, 5, 0.5)
xtitle = r"Halo Mass M$_{sol}$"
ytitle = r"Specific Accretion Rate yr$^{-1}$"
linelabel = "z="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray,
mah=True, com=False)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray/xarray, label=linelabel+str(zval),
color=colors[zind],)
ax.set_xscale('log')
ax.set_yscale('log')
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_specificMAH_M_relation.png'" % (plotname))
fig.savefig(plotname+"_specificMAH_M_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the Mz-z relation as a function of mass
# (so mass is decreasing to zero as z-> inf)
xarray = 10**(np.arange(0, 1, 0.05)) - 1
yval = 'Mz'
# Specify the mass range
zarray = 10**np.arange(10, 14, 0.5)
xtitle = r"Redshift"
ytitle = r"M(z) (M$_{sol}$)"
linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray,
label=linelabel+"{0:.1f}".format(np.log10(zval)),
color=colors[zind],)
ax.set_yscale('log')
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_Mzz_relation.png'" % (plotname))
fig.savefig(plotname+"_Mzz_relation.png", dpi=fig.dpi*5)
else:
plt.show()
# Plot the Mz/M0-z relation as a function of mass
xarray = 10**(np.arange(0, 1, 0.02)) - 1
yval = 'Mz'
# Specify the mass range
zarray = 10**np.arange(10, 14, 0.5)
xtitle = r"Redshift"
ytitle = r"log$_{10}$ M(z)/M$_{0}$"
linelabel = r"log$_{10}$ M$_{0}$(M$_{sol}$)="
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for zind, zval in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, np.log10(yarray/zval),
label=linelabel+"{0:.1f}".format(np.log10(zval)),
color=colors[zind],)
leg = ax.legend(loc=3)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_MzM0z_relation.png'" % (plotname))
fig.savefig(plotname+"_MzM0z_relation.png", dpi=fig.dpi*5)
else:
plt.show()
return("Done") | def function[plotcommand, parameter[cosmology, plotname]]:
constant[ Example ways to interrogate the dataset and plot the commah output ]
variable[xarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[1], constant[15], constant[0.2]]]]
variable[yval] assign[=] constant[c]
variable[zarray] assign[=] call[name[np].arange, parameter[constant[0], constant[5], constant[0.5]]]
variable[xtitle] assign[=] constant[Halo Mass (M$_{sol}$)]
variable[ytitle] assign[=] constant[Concentration]
variable[linelabel] assign[=] constant[z=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
call[name[plt].ylim, parameter[list[[<ast.Constant object at 0x7da1b1252d10>, <ast.Constant object at 0x7da1b1252ce0>]]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b12529b0>, <ast.Name object at 0x7da1b1252980>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], name[yarray]]]
call[name[ax].plot, parameter[name[xarray], call[name[commah].commah.cduffy, parameter[name[zval], name[xarray]]]]]
call[name[ax].set_xscale, parameter[constant[log]]]
call[name[ax].set_yscale, parameter[constant[log]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_CM_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_CM_relation.png]]]]
variable[xarray] assign[=] binary_operation[binary_operation[constant[10] ** call[name[np].arange, parameter[constant[0], constant[1], constant[0.05]]]] - constant[1]]
variable[yval] assign[=] constant[c]
variable[zarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[6], constant[14], constant[2]]]]
variable[xtitle] assign[=] constant[Redshift]
variable[ytitle] assign[=] constant[NFW Concentration]
variable[linelabel] assign[=] constant[log$_{10}$ M$_{z}$(M$_{sol}$)=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1250100>, <ast.Name object at 0x7da1b12500d0>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], name[yarray]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_Cz_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_Cz_relation.png]]]]
variable[xarray] assign[=] binary_operation[binary_operation[constant[10] ** call[name[np].arange, parameter[constant[0], constant[1], constant[0.05]]]] - constant[1]]
variable[yval] assign[=] constant[zf]
variable[zarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[6], constant[14], constant[2]]]]
variable[xtitle] assign[=] constant[Redshift]
variable[ytitle] assign[=] constant[Formation Redshift]
variable[linelabel] assign[=] constant[log$_{10}$ M$_{z}$(M$_{sol}$)=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da207f98ca0>, <ast.Name object at 0x7da207f9a110>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], name[yarray]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_zfz_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_zfz_relation.png]]]]
variable[xarray] assign[=] binary_operation[binary_operation[constant[10] ** call[name[np].arange, parameter[constant[0], constant[1], constant[0.05]]]] - constant[1]]
variable[yval] assign[=] constant[dMdt]
variable[zarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[10], constant[14], constant[0.5]]]]
variable[xtitle] assign[=] constant[log$_{10}$ (1+z)]
variable[ytitle] assign[=] constant[log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$]
variable[linelabel] assign[=] constant[log$_{10}$ M$_z$(M$_{sol}$)=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
variable[cosmo] assign[=] call[name[commah].getcosmo, parameter[name[cosmology]]]
for taget[tuple[[<ast.Name object at 0x7da20c7c8790>, <ast.Name object at 0x7da20c7c8f40>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[call[name[np].log10, parameter[binary_operation[name[xarray] + constant[1.0]]]], call[name[np].log10, parameter[name[yarray]]]]]
variable[semianalytic_approx] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[71.6] * binary_operation[name[zval] / constant[1000000000000.0]]] * binary_operation[call[name[cosmo]][constant[h]] / constant[0.7]]] * binary_operation[<ast.UnaryOp object at 0x7da2041d9120> + binary_operation[constant[0.75] * binary_operation[name[xarray] + constant[1]]]]] * call[name[np].sqrt, parameter[binary_operation[binary_operation[call[name[cosmo]][constant[omega_M_0]] * binary_operation[binary_operation[name[xarray] + constant[1]] ** constant[3]]] + call[name[cosmo]][constant[omega_lambda_0]]]]]]
call[name[ax].plot, parameter[call[name[np].log10, parameter[binary_operation[name[xarray] + constant[1]]]], call[name[np].log10, parameter[name[semianalytic_approx]]]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_dMdtz_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_dMdtz_relation.png]]]]
variable[xarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[10], constant[14], constant[0.5]]]]
variable[yval] assign[=] constant[dMdt]
variable[zarray] assign[=] call[name[np].arange, parameter[constant[0], constant[5], constant[0.5]]]
variable[xtitle] assign[=] constant[Halo Mass M$_{sol}$]
variable[ytitle] assign[=] constant[Accretion Rate M$_{sol}$ yr$^{-1}$]
variable[linelabel] assign[=] constant[z=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da20e954e80>, <ast.Name object at 0x7da20e956a10>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], name[yarray]]]
call[name[ax].set_xscale, parameter[constant[log]]]
call[name[ax].set_yscale, parameter[constant[log]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_MAH_M_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_MAH_M_relation.png]]]]
variable[xarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[10], constant[14], constant[0.5]]]]
variable[yval] assign[=] constant[dMdt]
variable[zarray] assign[=] call[name[np].arange, parameter[constant[0], constant[5], constant[0.5]]]
variable[xtitle] assign[=] constant[Halo Mass M$_{sol}$]
variable[ytitle] assign[=] constant[Specific Accretion Rate yr$^{-1}$]
variable[linelabel] assign[=] constant[z=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da204620910>, <ast.Name object at 0x7da204623d90>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], binary_operation[name[yarray] / name[xarray]]]]
call[name[ax].set_xscale, parameter[constant[log]]]
call[name[ax].set_yscale, parameter[constant[log]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_specificMAH_M_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_specificMAH_M_relation.png]]]]
variable[xarray] assign[=] binary_operation[binary_operation[constant[10] ** call[name[np].arange, parameter[constant[0], constant[1], constant[0.05]]]] - constant[1]]
variable[yval] assign[=] constant[Mz]
variable[zarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[10], constant[14], constant[0.5]]]]
variable[xtitle] assign[=] constant[Redshift]
variable[ytitle] assign[=] constant[M(z) (M$_{sol}$)]
variable[linelabel] assign[=] constant[log$_{10}$ M$_{0}$(M$_{sol}$)=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da18bc72d40>, <ast.Name object at 0x7da18bc71e10>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], name[yarray]]]
call[name[ax].set_yscale, parameter[constant[log]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_Mzz_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_Mzz_relation.png]]]]
variable[xarray] assign[=] binary_operation[binary_operation[constant[10] ** call[name[np].arange, parameter[constant[0], constant[1], constant[0.02]]]] - constant[1]]
variable[yval] assign[=] constant[Mz]
variable[zarray] assign[=] binary_operation[constant[10] ** call[name[np].arange, parameter[constant[10], constant[14], constant[0.5]]]]
variable[xtitle] assign[=] constant[Redshift]
variable[ytitle] assign[=] constant[log$_{10}$ M(z)/M$_{0}$]
variable[linelabel] assign[=] constant[log$_{10}$ M$_{0}$(M$_{sol}$)=]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].set_xlabel, parameter[name[xtitle]]]
call[name[ax].set_ylabel, parameter[name[ytitle]]]
variable[colors] assign[=] call[name[cm].rainbow, parameter[call[name[np].linspace, parameter[constant[0], constant[1], call[name[len], parameter[name[zarray]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1041ed0>, <ast.Name object at 0x7da1b1043520>]]] in starred[call[name[enumerate], parameter[name[zarray]]]] begin[:]
variable[output] assign[=] call[name[commah].run, parameter[]]
variable[yarray] assign[=] call[call[name[output]][name[yval]].flatten, parameter[]]
call[name[ax].plot, parameter[name[xarray], call[name[np].log10, parameter[binary_operation[name[yarray] / name[zval]]]]]]
variable[leg] assign[=] call[name[ax].legend, parameter[]]
call[call[name[leg].get_frame, parameter[]].set_alpha, parameter[constant[0]]]
call[call[name[leg].get_frame, parameter[]].set_edgecolor, parameter[constant[white]]]
for taget[name[label]] in starred[call[name[leg].get_texts, parameter[]]] begin[:]
call[name[label].set_fontsize, parameter[constant[small]]]
for taget[name[label]] in starred[call[name[leg].get_lines, parameter[]]] begin[:]
call[name[label].set_linewidth, parameter[constant[4]]]
if name[plotname] begin[:]
call[name[fig].tight_layout, parameter[]]
call[name[print], parameter[binary_operation[constant[Plotting to '%s_MzM0z_relation.png'] <ast.Mod object at 0x7da2590d6920> name[plotname]]]]
call[name[fig].savefig, parameter[binary_operation[name[plotname] + constant[_MzM0z_relation.png]]]]
return[constant[Done]] | keyword[def] identifier[plotcommand] ( identifier[cosmology] = literal[string] , identifier[plotname] = keyword[None] ):
literal[string]
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))
identifier[yval] = literal[string]
identifier[zarray] = identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[plt] . identifier[ylim] ([ literal[int] , literal[int] ])
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = identifier[zval] , identifier[Mi] = identifier[xarray] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[yarray] , identifier[label] = identifier[linelabel] + identifier[str] ( identifier[zval] ), identifier[color] = identifier[colors] [ identifier[zind] ])
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[commah] . identifier[commah] . identifier[cduffy] ( identifier[zval] , identifier[xarray] ), identifier[color] = literal[string] )
identifier[ax] . identifier[set_xscale] ( literal[string] )
identifier[ax] . identifier[set_yscale] ( literal[string] )
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))- literal[int]
identifier[yval] = literal[string]
identifier[zarray] = literal[int] ** identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = identifier[xarray] , identifier[Mi] = identifier[zval] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[yarray] ,
identifier[label] = identifier[linelabel] + literal[string] . identifier[format] ( identifier[np] . identifier[log10] ( identifier[zval] )),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))- literal[int]
identifier[yval] = literal[string]
identifier[zarray] = literal[int] ** identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = identifier[xarray] , identifier[Mi] = identifier[zval] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[yarray] ,
identifier[label] = identifier[linelabel] + literal[string] . identifier[format] ( identifier[np] . identifier[log10] ( identifier[zval] )),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))- literal[int]
identifier[yval] = literal[string]
identifier[zarray] = literal[int] ** identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
identifier[cosmo] = identifier[commah] . identifier[getcosmo] ( identifier[cosmology] )
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = identifier[xarray] , identifier[Mi] = identifier[zval] ,
identifier[com] = keyword[False] , identifier[mah] = keyword[True] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[np] . identifier[log10] ( identifier[xarray] + literal[int] ), identifier[np] . identifier[log10] ( identifier[yarray] ),
identifier[label] = identifier[linelabel] + literal[string] . identifier[format] ( identifier[np] . identifier[log10] ( identifier[zval] )),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[semianalytic_approx] = literal[int] *( identifier[zval] / literal[int] )*( identifier[cosmo] [ literal[string] ]/ literal[int] )*(- literal[int] + literal[int] *( identifier[xarray] + literal[int] ))* identifier[np] . identifier[sqrt] (
identifier[cosmo] [ literal[string] ]*( identifier[xarray] + literal[int] )** literal[int] + identifier[cosmo] [ literal[string] ])
identifier[ax] . identifier[plot] ( identifier[np] . identifier[log10] ( identifier[xarray] + literal[int] ), identifier[np] . identifier[log10] ( identifier[semianalytic_approx] ),
identifier[color] = literal[string] )
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))
identifier[yval] = literal[string]
identifier[zarray] = identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = identifier[zval] , identifier[Mi] = identifier[xarray] ,
identifier[com] = keyword[False] , identifier[mah] = keyword[True] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[yarray] , identifier[label] = identifier[linelabel] + identifier[str] ( identifier[zval] ),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[ax] . identifier[set_xscale] ( literal[string] )
identifier[ax] . identifier[set_yscale] ( literal[string] )
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))
identifier[yval] = literal[string]
identifier[zarray] = identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = identifier[zval] , identifier[Mi] = identifier[xarray] ,
identifier[mah] = keyword[True] , identifier[com] = keyword[False] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[yarray] / identifier[xarray] , identifier[label] = identifier[linelabel] + identifier[str] ( identifier[zval] ),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[ax] . identifier[set_xscale] ( literal[string] )
identifier[ax] . identifier[set_yscale] ( literal[string] )
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))- literal[int]
identifier[yval] = literal[string]
identifier[zarray] = literal[int] ** identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = literal[int] , identifier[Mi] = identifier[zval] , identifier[z] = identifier[xarray] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[yarray] ,
identifier[label] = identifier[linelabel] + literal[string] . identifier[format] ( identifier[np] . identifier[log10] ( identifier[zval] )),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[ax] . identifier[set_yscale] ( literal[string] )
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
identifier[xarray] = literal[int] **( identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] ))- literal[int]
identifier[yval] = literal[string]
identifier[zarray] = literal[int] ** identifier[np] . identifier[arange] ( literal[int] , literal[int] , literal[int] )
identifier[xtitle] = literal[string]
identifier[ytitle] = literal[string]
identifier[linelabel] = literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[set_xlabel] ( identifier[xtitle] )
identifier[ax] . identifier[set_ylabel] ( identifier[ytitle] )
identifier[colors] = identifier[cm] . identifier[rainbow] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[zarray] )))
keyword[for] identifier[zind] , identifier[zval] keyword[in] identifier[enumerate] ( identifier[zarray] ):
identifier[output] = identifier[commah] . identifier[run] ( identifier[cosmology] = identifier[cosmology] , identifier[zi] = literal[int] , identifier[Mi] = identifier[zval] , identifier[z] = identifier[xarray] )
identifier[yarray] = identifier[output] [ identifier[yval] ]. identifier[flatten] ()
identifier[ax] . identifier[plot] ( identifier[xarray] , identifier[np] . identifier[log10] ( identifier[yarray] / identifier[zval] ),
identifier[label] = identifier[linelabel] + literal[string] . identifier[format] ( identifier[np] . identifier[log10] ( identifier[zval] )),
identifier[color] = identifier[colors] [ identifier[zind] ],)
identifier[leg] = identifier[ax] . identifier[legend] ( identifier[loc] = literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_alpha] ( literal[int] )
identifier[leg] . identifier[get_frame] (). identifier[set_edgecolor] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_texts] ():
identifier[label] . identifier[set_fontsize] ( literal[string] )
keyword[for] identifier[label] keyword[in] identifier[leg] . identifier[get_lines] ():
identifier[label] . identifier[set_linewidth] ( literal[int] )
keyword[if] identifier[plotname] :
identifier[fig] . identifier[tight_layout] ( identifier[pad] = literal[int] )
identifier[print] ( literal[string] %( identifier[plotname] ))
identifier[fig] . identifier[savefig] ( identifier[plotname] + literal[string] , identifier[dpi] = identifier[fig] . identifier[dpi] * literal[int] )
keyword[else] :
identifier[plt] . identifier[show] ()
keyword[return] ( literal[string] ) | def plotcommand(cosmology='WMAP5', plotname=None):
""" Example ways to interrogate the dataset and plot the commah output """
# Plot the c-M relation as a functon of redshift
xarray = 10 ** np.arange(1, 15, 0.2)
yval = 'c'
# Specify the redshift range
zarray = np.arange(0, 5, 0.5)
xtitle = 'Halo Mass (M$_{sol}$)'
ytitle = 'Concentration'
linelabel = 'z='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
plt.ylim([2, 30])
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray)
# Access the column yval from the data file
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray, label=linelabel + str(zval), color=colors[zind])
# Overplot the D08 predictions in black
ax.plot(xarray, commah.commah.cduffy(zval, xarray), color='black') # depends on [control=['for'], data=[]]
ax.set_xscale('log')
ax.set_yscale('log')
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_CM_relation.png'" % plotname)
fig.savefig(plotname + '_CM_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the c-z relation as a function of mass (so always Mz=M0)
xarray = 10 ** np.arange(0, 1, 0.05) - 1
yval = 'c'
# Specify the mass range
zarray = 10 ** np.arange(6, 14, 2)
xtitle = 'Redshift'
ytitle = 'NFW Concentration'
linelabel = 'log$_{10}$ M$_{z}$(M$_{sol}$)='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval)
# Access the column yval from the data file
yarray = output[yval].flatten()
# Plot each line in turn with different colours
ax.plot(xarray, yarray, label=linelabel + '{0:.1f}'.format(np.log10(zval)), color=colors[zind]) # depends on [control=['for'], data=[]]
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_Cz_relation.png'" % plotname)
fig.savefig(plotname + '_Cz_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the zf-z relation for different masses (so always Mz=M0)
xarray = 10 ** np.arange(0, 1, 0.05) - 1
yval = 'zf'
# Specify the mass range
zarray = 10 ** np.arange(6, 14, 2)
xtitle = 'Redshift'
ytitle = 'Formation Redshift'
linelabel = 'log$_{10}$ M$_{z}$(M$_{sol}$)='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray, label=linelabel + '{0:.1f}'.format(np.log10(zval)), color=colors[zind]) # depends on [control=['for'], data=[]]
leg = ax.legend(loc=2)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_zfz_relation.png'" % plotname)
fig.savefig(plotname + '_zfz_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the dM/dt-z relation for different masses (so always Mz=M0)
xarray = 10 ** np.arange(0, 1, 0.05) - 1
yval = 'dMdt'
# Specify the mass range
zarray = 10 ** np.arange(10, 14, 0.5)
xtitle = 'log$_{10}$ (1+z)'
ytitle = 'log$_{10}$ Accretion Rate M$_{sol}$ yr$^{-1}$'
linelabel = 'log$_{10}$ M$_z$(M$_{sol}$)='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
cosmo = commah.getcosmo(cosmology)
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=xarray, Mi=zval, com=False, mah=True)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(np.log10(xarray + 1.0), np.log10(yarray), label=linelabel + '{0:.1f}'.format(np.log10(zval)), color=colors[zind])
# Plot the semi-analytic approximate formula from Correa et al 2015b
semianalytic_approx = 71.6 * (zval / 1000000000000.0) * (cosmo['h'] / 0.7) * (-0.24 + 0.75 * (xarray + 1)) * np.sqrt(cosmo['omega_M_0'] * (xarray + 1) ** 3 + cosmo['omega_lambda_0'])
ax.plot(np.log10(xarray + 1), np.log10(semianalytic_approx), color='black') # depends on [control=['for'], data=[]]
leg = ax.legend(loc=2)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_dMdtz_relation.png'" % plotname)
fig.savefig(plotname + '_dMdtz_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the dMdt-M relation as a function of redshift
xarray = 10 ** np.arange(10, 14, 0.5)
yval = 'dMdt'
# Specify the redshift range
zarray = np.arange(0, 5, 0.5)
xtitle = 'Halo Mass M$_{sol}$'
ytitle = 'Accretion Rate M$_{sol}$ yr$^{-1}$'
linelabel = 'z='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, com=False, mah=True)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray, label=linelabel + str(zval), color=colors[zind]) # depends on [control=['for'], data=[]]
ax.set_xscale('log')
ax.set_yscale('log')
leg = ax.legend(loc=2)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_MAH_M_relation.png'" % plotname)
fig.savefig(plotname + '_MAH_M_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the (dM/M)dt-M relation as a function of redshift
xarray = 10 ** np.arange(10, 14, 0.5)
yval = 'dMdt'
# Specify the redshift range
zarray = np.arange(0, 5, 0.5)
xtitle = 'Halo Mass M$_{sol}$'
ytitle = 'Specific Accretion Rate yr$^{-1}$'
linelabel = 'z='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=zval, Mi=xarray, mah=True, com=False)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray / xarray, label=linelabel + str(zval), color=colors[zind]) # depends on [control=['for'], data=[]]
ax.set_xscale('log')
ax.set_yscale('log')
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_specificMAH_M_relation.png'" % plotname)
fig.savefig(plotname + '_specificMAH_M_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the Mz-z relation as a function of mass
# (so mass is decreasing to zero as z-> inf)
xarray = 10 ** np.arange(0, 1, 0.05) - 1
yval = 'Mz'
# Specify the mass range
zarray = 10 ** np.arange(10, 14, 0.5)
xtitle = 'Redshift'
ytitle = 'M(z) (M$_{sol}$)'
linelabel = 'log$_{10}$ M$_{0}$(M$_{sol}$)='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, yarray, label=linelabel + '{0:.1f}'.format(np.log10(zval)), color=colors[zind]) # depends on [control=['for'], data=[]]
ax.set_yscale('log')
leg = ax.legend(loc=1)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_Mzz_relation.png'" % plotname)
fig.savefig(plotname + '_Mzz_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
# Plot the Mz/M0-z relation as a function of mass
xarray = 10 ** np.arange(0, 1, 0.02) - 1
yval = 'Mz'
# Specify the mass range
zarray = 10 ** np.arange(10, 14, 0.5)
xtitle = 'Redshift'
ytitle = 'log$_{10}$ M(z)/M$_{0}$'
linelabel = 'log$_{10}$ M$_{0}$(M$_{sol}$)='
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel(xtitle)
ax.set_ylabel(ytitle)
colors = cm.rainbow(np.linspace(0, 1, len(zarray)))
for (zind, zval) in enumerate(zarray):
output = commah.run(cosmology=cosmology, zi=0, Mi=zval, z=xarray)
yarray = output[yval].flatten()
# Plot each line in turn with different colour
ax.plot(xarray, np.log10(yarray / zval), label=linelabel + '{0:.1f}'.format(np.log10(zval)), color=colors[zind]) # depends on [control=['for'], data=[]]
leg = ax.legend(loc=3)
# Make box totally transparent
leg.get_frame().set_alpha(0)
leg.get_frame().set_edgecolor('white')
for label in leg.get_texts():
label.set_fontsize('small') # the font size # depends on [control=['for'], data=['label']]
for label in leg.get_lines():
label.set_linewidth(4) # the legend line width # depends on [control=['for'], data=['label']]
if plotname:
fig.tight_layout(pad=0.2)
print("Plotting to '%s_MzM0z_relation.png'" % plotname)
fig.savefig(plotname + '_MzM0z_relation.png', dpi=fig.dpi * 5) # depends on [control=['if'], data=[]]
else:
plt.show()
return 'Done' |
def pa_naxis(self, viewer, event, msg=True):
"""Interactively change the slice of the image in a data cube
by pan gesture.
"""
event = self._pa_synth_scroll_event(event)
if event.state != 'move':
return False
# TODO: be able to pick axis
axis = 2
direction = self.get_direction(event.direction)
return self._nav_naxis(viewer, axis, direction, msg=msg) | def function[pa_naxis, parameter[self, viewer, event, msg]]:
constant[Interactively change the slice of the image in a data cube
by pan gesture.
]
variable[event] assign[=] call[name[self]._pa_synth_scroll_event, parameter[name[event]]]
if compare[name[event].state not_equal[!=] constant[move]] begin[:]
return[constant[False]]
variable[axis] assign[=] constant[2]
variable[direction] assign[=] call[name[self].get_direction, parameter[name[event].direction]]
return[call[name[self]._nav_naxis, parameter[name[viewer], name[axis], name[direction]]]] | keyword[def] identifier[pa_naxis] ( identifier[self] , identifier[viewer] , identifier[event] , identifier[msg] = keyword[True] ):
literal[string]
identifier[event] = identifier[self] . identifier[_pa_synth_scroll_event] ( identifier[event] )
keyword[if] identifier[event] . identifier[state] != literal[string] :
keyword[return] keyword[False]
identifier[axis] = literal[int]
identifier[direction] = identifier[self] . identifier[get_direction] ( identifier[event] . identifier[direction] )
keyword[return] identifier[self] . identifier[_nav_naxis] ( identifier[viewer] , identifier[axis] , identifier[direction] , identifier[msg] = identifier[msg] ) | def pa_naxis(self, viewer, event, msg=True):
"""Interactively change the slice of the image in a data cube
by pan gesture.
"""
event = self._pa_synth_scroll_event(event)
if event.state != 'move':
return False # depends on [control=['if'], data=[]]
# TODO: be able to pick axis
axis = 2
direction = self.get_direction(event.direction)
return self._nav_naxis(viewer, axis, direction, msg=msg) |
def get_vector(self):
"""Return the vector for this survey."""
vec = {}
for dim in ['forbidden', 'required', 'permitted']:
if self.survey[dim] is None:
continue
dim_vec = map(lambda x: (x['tag'], x['answer']),
self.survey[dim])
vec[dim] = dict(dim_vec)
return vec | def function[get_vector, parameter[self]]:
constant[Return the vector for this survey.]
variable[vec] assign[=] dictionary[[], []]
for taget[name[dim]] in starred[list[[<ast.Constant object at 0x7da1b26aec50>, <ast.Constant object at 0x7da1b26ac460>, <ast.Constant object at 0x7da1b26aea10>]]] begin[:]
if compare[call[name[self].survey][name[dim]] is constant[None]] begin[:]
continue
variable[dim_vec] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b26ad360>, call[name[self].survey][name[dim]]]]
call[name[vec]][name[dim]] assign[=] call[name[dict], parameter[name[dim_vec]]]
return[name[vec]] | keyword[def] identifier[get_vector] ( identifier[self] ):
literal[string]
identifier[vec] ={}
keyword[for] identifier[dim] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[self] . identifier[survey] [ identifier[dim] ] keyword[is] keyword[None] :
keyword[continue]
identifier[dim_vec] = identifier[map] ( keyword[lambda] identifier[x] :( identifier[x] [ literal[string] ], identifier[x] [ literal[string] ]),
identifier[self] . identifier[survey] [ identifier[dim] ])
identifier[vec] [ identifier[dim] ]= identifier[dict] ( identifier[dim_vec] )
keyword[return] identifier[vec] | def get_vector(self):
"""Return the vector for this survey."""
vec = {}
for dim in ['forbidden', 'required', 'permitted']:
if self.survey[dim] is None:
continue # depends on [control=['if'], data=[]]
dim_vec = map(lambda x: (x['tag'], x['answer']), self.survey[dim])
vec[dim] = dict(dim_vec) # depends on [control=['for'], data=['dim']]
return vec |
def create_logger(level=logging.NOTSET):
"""Create a logger for python-gnupg at a specific message level.
:type level: :obj:`int` or :obj:`str`
:param level: A string or an integer for the lowest level to include in
logs.
**Available levels:**
==== ======== ========================================
int str description
==== ======== ========================================
0 NOTSET Disable all logging.
9 GNUPG Log GnuPG's internal status messages.
10 DEBUG Log module level debuging messages.
20 INFO Normal user-level messages.
30 WARN Warning messages.
40 ERROR Error messages and tracebacks.
50 CRITICAL Unhandled exceptions and tracebacks.
==== ======== ========================================
"""
_test = os.path.join(os.path.join(os.getcwd(), 'pretty_bad_protocol'), 'test')
_now = datetime.now().strftime("%Y-%m-%d_%H%M%S")
_fn = os.path.join(_test, "%s_test_gnupg.log" % _now)
_fmt = "%(relativeCreated)-4d L%(lineno)-4d:%(funcName)-18.18s %(levelname)-7.7s %(message)s"
## Add the GNUPG_STATUS_LEVEL LogRecord to all Loggers in the module:
logging.addLevelName(GNUPG_STATUS_LEVEL, "GNUPG")
logging.Logger.status = status
if level > logging.NOTSET:
logging.basicConfig(level=level, filename=_fn,
filemode="a", format=_fmt)
logging.logThreads = True
if hasattr(logging,'captureWarnings'):
logging.captureWarnings(True)
colouriser = _ansistrm.ColorizingStreamHandler
colouriser.level_map[9] = (None, 'blue', False)
colouriser.level_map[10] = (None, 'cyan', False)
handler = colouriser(sys.stderr)
handler.setLevel(level)
formatr = logging.Formatter(_fmt)
handler.setFormatter(formatr)
else:
handler = NullHandler()
log = logging.getLogger('gnupg')
log.addHandler(handler)
log.setLevel(level)
log.info("Log opened: %s UTC" % datetime.ctime(datetime.utcnow()))
return log | def function[create_logger, parameter[level]]:
constant[Create a logger for python-gnupg at a specific message level.
:type level: :obj:`int` or :obj:`str`
:param level: A string or an integer for the lowest level to include in
logs.
**Available levels:**
==== ======== ========================================
int str description
==== ======== ========================================
0 NOTSET Disable all logging.
9 GNUPG Log GnuPG's internal status messages.
10 DEBUG Log module level debuging messages.
20 INFO Normal user-level messages.
30 WARN Warning messages.
40 ERROR Error messages and tracebacks.
50 CRITICAL Unhandled exceptions and tracebacks.
==== ======== ========================================
]
variable[_test] assign[=] call[name[os].path.join, parameter[call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], constant[pretty_bad_protocol]]], constant[test]]]
variable[_now] assign[=] call[call[name[datetime].now, parameter[]].strftime, parameter[constant[%Y-%m-%d_%H%M%S]]]
variable[_fn] assign[=] call[name[os].path.join, parameter[name[_test], binary_operation[constant[%s_test_gnupg.log] <ast.Mod object at 0x7da2590d6920> name[_now]]]]
variable[_fmt] assign[=] constant[%(relativeCreated)-4d L%(lineno)-4d:%(funcName)-18.18s %(levelname)-7.7s %(message)s]
call[name[logging].addLevelName, parameter[name[GNUPG_STATUS_LEVEL], constant[GNUPG]]]
name[logging].Logger.status assign[=] name[status]
if compare[name[level] greater[>] name[logging].NOTSET] begin[:]
call[name[logging].basicConfig, parameter[]]
name[logging].logThreads assign[=] constant[True]
if call[name[hasattr], parameter[name[logging], constant[captureWarnings]]] begin[:]
call[name[logging].captureWarnings, parameter[constant[True]]]
variable[colouriser] assign[=] name[_ansistrm].ColorizingStreamHandler
call[name[colouriser].level_map][constant[9]] assign[=] tuple[[<ast.Constant object at 0x7da20c7c8100>, <ast.Constant object at 0x7da20c7ca410>, <ast.Constant object at 0x7da20c7ca7a0>]]
call[name[colouriser].level_map][constant[10]] assign[=] tuple[[<ast.Constant object at 0x7da20c7ca800>, <ast.Constant object at 0x7da20c7ca620>, <ast.Constant object at 0x7da20c7c8250>]]
variable[handler] assign[=] call[name[colouriser], parameter[name[sys].stderr]]
call[name[handler].setLevel, parameter[name[level]]]
variable[formatr] assign[=] call[name[logging].Formatter, parameter[name[_fmt]]]
call[name[handler].setFormatter, parameter[name[formatr]]]
variable[log] assign[=] call[name[logging].getLogger, parameter[constant[gnupg]]]
call[name[log].addHandler, parameter[name[handler]]]
call[name[log].setLevel, parameter[name[level]]]
call[name[log].info, parameter[binary_operation[constant[Log opened: %s UTC] <ast.Mod object at 0x7da2590d6920> call[name[datetime].ctime, parameter[call[name[datetime].utcnow, parameter[]]]]]]]
return[name[log]] | keyword[def] identifier[create_logger] ( identifier[level] = identifier[logging] . identifier[NOTSET] ):
literal[string]
identifier[_test] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), literal[string] ), literal[string] )
identifier[_now] = identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] )
identifier[_fn] = identifier[os] . identifier[path] . identifier[join] ( identifier[_test] , literal[string] % identifier[_now] )
identifier[_fmt] = literal[string]
identifier[logging] . identifier[addLevelName] ( identifier[GNUPG_STATUS_LEVEL] , literal[string] )
identifier[logging] . identifier[Logger] . identifier[status] = identifier[status]
keyword[if] identifier[level] > identifier[logging] . identifier[NOTSET] :
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[level] , identifier[filename] = identifier[_fn] ,
identifier[filemode] = literal[string] , identifier[format] = identifier[_fmt] )
identifier[logging] . identifier[logThreads] = keyword[True]
keyword[if] identifier[hasattr] ( identifier[logging] , literal[string] ):
identifier[logging] . identifier[captureWarnings] ( keyword[True] )
identifier[colouriser] = identifier[_ansistrm] . identifier[ColorizingStreamHandler]
identifier[colouriser] . identifier[level_map] [ literal[int] ]=( keyword[None] , literal[string] , keyword[False] )
identifier[colouriser] . identifier[level_map] [ literal[int] ]=( keyword[None] , literal[string] , keyword[False] )
identifier[handler] = identifier[colouriser] ( identifier[sys] . identifier[stderr] )
identifier[handler] . identifier[setLevel] ( identifier[level] )
identifier[formatr] = identifier[logging] . identifier[Formatter] ( identifier[_fmt] )
identifier[handler] . identifier[setFormatter] ( identifier[formatr] )
keyword[else] :
identifier[handler] = identifier[NullHandler] ()
identifier[log] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[log] . identifier[addHandler] ( identifier[handler] )
identifier[log] . identifier[setLevel] ( identifier[level] )
identifier[log] . identifier[info] ( literal[string] % identifier[datetime] . identifier[ctime] ( identifier[datetime] . identifier[utcnow] ()))
keyword[return] identifier[log] | def create_logger(level=logging.NOTSET):
"""Create a logger for python-gnupg at a specific message level.
:type level: :obj:`int` or :obj:`str`
:param level: A string or an integer for the lowest level to include in
logs.
**Available levels:**
==== ======== ========================================
int str description
==== ======== ========================================
0 NOTSET Disable all logging.
9 GNUPG Log GnuPG's internal status messages.
10 DEBUG Log module level debuging messages.
20 INFO Normal user-level messages.
30 WARN Warning messages.
40 ERROR Error messages and tracebacks.
50 CRITICAL Unhandled exceptions and tracebacks.
==== ======== ========================================
"""
_test = os.path.join(os.path.join(os.getcwd(), 'pretty_bad_protocol'), 'test')
_now = datetime.now().strftime('%Y-%m-%d_%H%M%S')
_fn = os.path.join(_test, '%s_test_gnupg.log' % _now)
_fmt = '%(relativeCreated)-4d L%(lineno)-4d:%(funcName)-18.18s %(levelname)-7.7s %(message)s'
## Add the GNUPG_STATUS_LEVEL LogRecord to all Loggers in the module:
logging.addLevelName(GNUPG_STATUS_LEVEL, 'GNUPG')
logging.Logger.status = status
if level > logging.NOTSET:
logging.basicConfig(level=level, filename=_fn, filemode='a', format=_fmt)
logging.logThreads = True
if hasattr(logging, 'captureWarnings'):
logging.captureWarnings(True) # depends on [control=['if'], data=[]]
colouriser = _ansistrm.ColorizingStreamHandler
colouriser.level_map[9] = (None, 'blue', False)
colouriser.level_map[10] = (None, 'cyan', False)
handler = colouriser(sys.stderr)
handler.setLevel(level)
formatr = logging.Formatter(_fmt)
handler.setFormatter(formatr) # depends on [control=['if'], data=['level']]
else:
handler = NullHandler()
log = logging.getLogger('gnupg')
log.addHandler(handler)
log.setLevel(level)
log.info('Log opened: %s UTC' % datetime.ctime(datetime.utcnow()))
return log |
def dict_from_qs(qs):
''' Slightly introverted parser for lists of dot-notation nested fields
i.e. "period.di,period.fhr" => {"period": {"di": {}, "fhr": {}}}
'''
entries = qs.split(',') if qs.strip() else []
entries = [entry.strip() for entry in entries]
def _dict_from_qs(line, d):
if '.' in line:
key, value = line.split('.', 1)
d.setdefault(key, {})
return _dict_from_qs(value, d[key])
else:
d[line] = {}
def _default():
return defaultdict(_default)
d = defaultdict(_default)
for line in entries:
_dict_from_qs(line, d)
return d | def function[dict_from_qs, parameter[qs]]:
constant[ Slightly introverted parser for lists of dot-notation nested fields
i.e. "period.di,period.fhr" => {"period": {"di": {}, "fhr": {}}}
]
variable[entries] assign[=] <ast.IfExp object at 0x7da2043466e0>
variable[entries] assign[=] <ast.ListComp object at 0x7da2043478b0>
def function[_dict_from_qs, parameter[line, d]]:
if compare[constant[.] in name[line]] begin[:]
<ast.Tuple object at 0x7da1b0ed5ae0> assign[=] call[name[line].split, parameter[constant[.], constant[1]]]
call[name[d].setdefault, parameter[name[key], dictionary[[], []]]]
return[call[name[_dict_from_qs], parameter[name[value], call[name[d]][name[key]]]]]
def function[_default, parameter[]]:
return[call[name[defaultdict], parameter[name[_default]]]]
variable[d] assign[=] call[name[defaultdict], parameter[name[_default]]]
for taget[name[line]] in starred[name[entries]] begin[:]
call[name[_dict_from_qs], parameter[name[line], name[d]]]
return[name[d]] | keyword[def] identifier[dict_from_qs] ( identifier[qs] ):
literal[string]
identifier[entries] = identifier[qs] . identifier[split] ( literal[string] ) keyword[if] identifier[qs] . identifier[strip] () keyword[else] []
identifier[entries] =[ identifier[entry] . identifier[strip] () keyword[for] identifier[entry] keyword[in] identifier[entries] ]
keyword[def] identifier[_dict_from_qs] ( identifier[line] , identifier[d] ):
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[key] , identifier[value] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
identifier[d] . identifier[setdefault] ( identifier[key] ,{})
keyword[return] identifier[_dict_from_qs] ( identifier[value] , identifier[d] [ identifier[key] ])
keyword[else] :
identifier[d] [ identifier[line] ]={}
keyword[def] identifier[_default] ():
keyword[return] identifier[defaultdict] ( identifier[_default] )
identifier[d] = identifier[defaultdict] ( identifier[_default] )
keyword[for] identifier[line] keyword[in] identifier[entries] :
identifier[_dict_from_qs] ( identifier[line] , identifier[d] )
keyword[return] identifier[d] | def dict_from_qs(qs):
""" Slightly introverted parser for lists of dot-notation nested fields
i.e. "period.di,period.fhr" => {"period": {"di": {}, "fhr": {}}}
"""
entries = qs.split(',') if qs.strip() else []
entries = [entry.strip() for entry in entries]
def _dict_from_qs(line, d):
if '.' in line:
(key, value) = line.split('.', 1)
d.setdefault(key, {})
return _dict_from_qs(value, d[key]) # depends on [control=['if'], data=['line']]
else:
d[line] = {}
def _default():
return defaultdict(_default)
d = defaultdict(_default)
for line in entries:
_dict_from_qs(line, d) # depends on [control=['for'], data=['line']]
return d |
def disk_vmag(hemi, retinotopy='any', to=None, **kw):
'''
disk_vmag(mesh) yields the visual magnification based on the projection of disks on the cortical
surface into the visual field.
All options accepted by mag_data() are accepted by disk_vmag().
'''
mdat = mag_data(hemi, retinotopy=retinotopy, **kw)
if pimms.is_vector(mdat): return tuple([face_vmag(m, to=to) for m in mdat])
elif pimms.is_vector(mdat.keys(), 'int'):
return pimms.lazy_map({k: curry(lambda k: face_vmag(mdat[k], to=to), k)
for k in six.iterkeys(mdat)})
#TODO: implement the disk_vmag calculation using mdat
# convert to the appropriate type according to the to param
raise NotImplementedError() | def function[disk_vmag, parameter[hemi, retinotopy, to]]:
constant[
disk_vmag(mesh) yields the visual magnification based on the projection of disks on the cortical
surface into the visual field.
All options accepted by mag_data() are accepted by disk_vmag().
]
variable[mdat] assign[=] call[name[mag_data], parameter[name[hemi]]]
if call[name[pimms].is_vector, parameter[name[mdat]]] begin[:]
return[call[name[tuple], parameter[<ast.ListComp object at 0x7da18bc71cc0>]]]
<ast.Raise object at 0x7da18bc71000> | keyword[def] identifier[disk_vmag] ( identifier[hemi] , identifier[retinotopy] = literal[string] , identifier[to] = keyword[None] ,** identifier[kw] ):
literal[string]
identifier[mdat] = identifier[mag_data] ( identifier[hemi] , identifier[retinotopy] = identifier[retinotopy] ,** identifier[kw] )
keyword[if] identifier[pimms] . identifier[is_vector] ( identifier[mdat] ): keyword[return] identifier[tuple] ([ identifier[face_vmag] ( identifier[m] , identifier[to] = identifier[to] ) keyword[for] identifier[m] keyword[in] identifier[mdat] ])
keyword[elif] identifier[pimms] . identifier[is_vector] ( identifier[mdat] . identifier[keys] (), literal[string] ):
keyword[return] identifier[pimms] . identifier[lazy_map] ({ identifier[k] : identifier[curry] ( keyword[lambda] identifier[k] : identifier[face_vmag] ( identifier[mdat] [ identifier[k] ], identifier[to] = identifier[to] ), identifier[k] )
keyword[for] identifier[k] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[mdat] )})
keyword[raise] identifier[NotImplementedError] () | def disk_vmag(hemi, retinotopy='any', to=None, **kw):
"""
disk_vmag(mesh) yields the visual magnification based on the projection of disks on the cortical
surface into the visual field.
All options accepted by mag_data() are accepted by disk_vmag().
"""
mdat = mag_data(hemi, retinotopy=retinotopy, **kw)
if pimms.is_vector(mdat):
return tuple([face_vmag(m, to=to) for m in mdat]) # depends on [control=['if'], data=[]]
elif pimms.is_vector(mdat.keys(), 'int'):
return pimms.lazy_map({k: curry(lambda k: face_vmag(mdat[k], to=to), k) for k in six.iterkeys(mdat)}) # depends on [control=['if'], data=[]]
#TODO: implement the disk_vmag calculation using mdat
# convert to the appropriate type according to the to param
raise NotImplementedError() |
def sign_direct(self, request, authheaders, secret):
"""Signs a request directly with an appropriate signature. The request's Authorization header will change.
Keyword arguments:
request -- A request object which can be consumed by this API.
authheaders -- A string-indexable object which contains the headers appropriate for this signature version.
secret -- The base64-encoded secret key for the HMAC authorization.
"""
sig = self.sign(request, authheaders, secret)
return request.with_header("Authorization", "Acquia {0}:{1}".format(authheaders["id"], sig)) | def function[sign_direct, parameter[self, request, authheaders, secret]]:
constant[Signs a request directly with an appropriate signature. The request's Authorization header will change.
Keyword arguments:
request -- A request object which can be consumed by this API.
authheaders -- A string-indexable object which contains the headers appropriate for this signature version.
secret -- The base64-encoded secret key for the HMAC authorization.
]
variable[sig] assign[=] call[name[self].sign, parameter[name[request], name[authheaders], name[secret]]]
return[call[name[request].with_header, parameter[constant[Authorization], call[constant[Acquia {0}:{1}].format, parameter[call[name[authheaders]][constant[id]], name[sig]]]]]] | keyword[def] identifier[sign_direct] ( identifier[self] , identifier[request] , identifier[authheaders] , identifier[secret] ):
literal[string]
identifier[sig] = identifier[self] . identifier[sign] ( identifier[request] , identifier[authheaders] , identifier[secret] )
keyword[return] identifier[request] . identifier[with_header] ( literal[string] , literal[string] . identifier[format] ( identifier[authheaders] [ literal[string] ], identifier[sig] )) | def sign_direct(self, request, authheaders, secret):
"""Signs a request directly with an appropriate signature. The request's Authorization header will change.
Keyword arguments:
request -- A request object which can be consumed by this API.
authheaders -- A string-indexable object which contains the headers appropriate for this signature version.
secret -- The base64-encoded secret key for the HMAC authorization.
"""
sig = self.sign(request, authheaders, secret)
return request.with_header('Authorization', 'Acquia {0}:{1}'.format(authheaders['id'], sig)) |
def shell(ctx, package, working_dir, sudo):
"""Runs a Canari interactive python shell"""
ctx.mode = CanariMode.LocalShellDebug
from canari.commands.shell import shell
shell(package, working_dir, sudo) | def function[shell, parameter[ctx, package, working_dir, sudo]]:
constant[Runs a Canari interactive python shell]
name[ctx].mode assign[=] name[CanariMode].LocalShellDebug
from relative_module[canari.commands.shell] import module[shell]
call[name[shell], parameter[name[package], name[working_dir], name[sudo]]] | keyword[def] identifier[shell] ( identifier[ctx] , identifier[package] , identifier[working_dir] , identifier[sudo] ):
literal[string]
identifier[ctx] . identifier[mode] = identifier[CanariMode] . identifier[LocalShellDebug]
keyword[from] identifier[canari] . identifier[commands] . identifier[shell] keyword[import] identifier[shell]
identifier[shell] ( identifier[package] , identifier[working_dir] , identifier[sudo] ) | def shell(ctx, package, working_dir, sudo):
"""Runs a Canari interactive python shell"""
ctx.mode = CanariMode.LocalShellDebug
from canari.commands.shell import shell
shell(package, working_dir, sudo) |
def connect_bulk(self, si, logger, vcenter_data_model, request):
"""
:param si:
:param logger:
:param VMwarevCenterResourceModel vcenter_data_model:
:param request:
:return:
"""
self.logger = logger
self.logger.info('Apply connectivity changes has started')
self.logger.debug('Apply connectivity changes has started with the requet: {0}'.format(request))
holder = DeployDataHolder(jsonpickle.decode(request))
self.vcenter_data_model = vcenter_data_model
if vcenter_data_model.reserved_networks:
self.reserved_networks = [name.strip() for name in vcenter_data_model.reserved_networks.split(',')]
if not vcenter_data_model.default_dvswitch:
return self._handle_no_dvswitch_error(holder)
dvswitch_location = VMLocation.create_from_full_path(vcenter_data_model.default_dvswitch)
self.dv_switch_path = VMLocation.combine([vcenter_data_model.default_datacenter, dvswitch_location.path])
self.dv_switch_name = dvswitch_location.name
self.default_network = VMLocation.combine(
[vcenter_data_model.default_datacenter, vcenter_data_model.holding_network])
mappings = self._map_requsets(holder.driverRequest.actions)
self.logger.debug('Connectivity actions mappings: {0}'.format(jsonpickle.encode(mappings, unpicklable=False)))
pool = ThreadPool()
async_results = self._run_async_connection_actions(si, mappings, pool, logger)
results = self._get_async_results(async_results, pool)
self.logger.info('Apply connectivity changes done')
self.logger.debug('Apply connectivity has finished with the results: {0}'.format(jsonpickle.encode(results,
unpicklable=False)))
return results | def function[connect_bulk, parameter[self, si, logger, vcenter_data_model, request]]:
constant[
:param si:
:param logger:
:param VMwarevCenterResourceModel vcenter_data_model:
:param request:
:return:
]
name[self].logger assign[=] name[logger]
call[name[self].logger.info, parameter[constant[Apply connectivity changes has started]]]
call[name[self].logger.debug, parameter[call[constant[Apply connectivity changes has started with the requet: {0}].format, parameter[name[request]]]]]
variable[holder] assign[=] call[name[DeployDataHolder], parameter[call[name[jsonpickle].decode, parameter[name[request]]]]]
name[self].vcenter_data_model assign[=] name[vcenter_data_model]
if name[vcenter_data_model].reserved_networks begin[:]
name[self].reserved_networks assign[=] <ast.ListComp object at 0x7da20c7c86d0>
if <ast.UnaryOp object at 0x7da20c7cbc70> begin[:]
return[call[name[self]._handle_no_dvswitch_error, parameter[name[holder]]]]
variable[dvswitch_location] assign[=] call[name[VMLocation].create_from_full_path, parameter[name[vcenter_data_model].default_dvswitch]]
name[self].dv_switch_path assign[=] call[name[VMLocation].combine, parameter[list[[<ast.Attribute object at 0x7da20c7cbc40>, <ast.Attribute object at 0x7da20c7c9510>]]]]
name[self].dv_switch_name assign[=] name[dvswitch_location].name
name[self].default_network assign[=] call[name[VMLocation].combine, parameter[list[[<ast.Attribute object at 0x7da20c7cb610>, <ast.Attribute object at 0x7da20c7c98a0>]]]]
variable[mappings] assign[=] call[name[self]._map_requsets, parameter[name[holder].driverRequest.actions]]
call[name[self].logger.debug, parameter[call[constant[Connectivity actions mappings: {0}].format, parameter[call[name[jsonpickle].encode, parameter[name[mappings]]]]]]]
variable[pool] assign[=] call[name[ThreadPool], parameter[]]
variable[async_results] assign[=] call[name[self]._run_async_connection_actions, parameter[name[si], name[mappings], name[pool], name[logger]]]
variable[results] assign[=] call[name[self]._get_async_results, parameter[name[async_results], name[pool]]]
call[name[self].logger.info, parameter[constant[Apply connectivity changes done]]]
call[name[self].logger.debug, parameter[call[constant[Apply connectivity has finished with the results: {0}].format, parameter[call[name[jsonpickle].encode, parameter[name[results]]]]]]]
return[name[results]] | keyword[def] identifier[connect_bulk] ( identifier[self] , identifier[si] , identifier[logger] , identifier[vcenter_data_model] , identifier[request] ):
literal[string]
identifier[self] . identifier[logger] = identifier[logger]
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[request] ))
identifier[holder] = identifier[DeployDataHolder] ( identifier[jsonpickle] . identifier[decode] ( identifier[request] ))
identifier[self] . identifier[vcenter_data_model] = identifier[vcenter_data_model]
keyword[if] identifier[vcenter_data_model] . identifier[reserved_networks] :
identifier[self] . identifier[reserved_networks] =[ identifier[name] . identifier[strip] () keyword[for] identifier[name] keyword[in] identifier[vcenter_data_model] . identifier[reserved_networks] . identifier[split] ( literal[string] )]
keyword[if] keyword[not] identifier[vcenter_data_model] . identifier[default_dvswitch] :
keyword[return] identifier[self] . identifier[_handle_no_dvswitch_error] ( identifier[holder] )
identifier[dvswitch_location] = identifier[VMLocation] . identifier[create_from_full_path] ( identifier[vcenter_data_model] . identifier[default_dvswitch] )
identifier[self] . identifier[dv_switch_path] = identifier[VMLocation] . identifier[combine] ([ identifier[vcenter_data_model] . identifier[default_datacenter] , identifier[dvswitch_location] . identifier[path] ])
identifier[self] . identifier[dv_switch_name] = identifier[dvswitch_location] . identifier[name]
identifier[self] . identifier[default_network] = identifier[VMLocation] . identifier[combine] (
[ identifier[vcenter_data_model] . identifier[default_datacenter] , identifier[vcenter_data_model] . identifier[holding_network] ])
identifier[mappings] = identifier[self] . identifier[_map_requsets] ( identifier[holder] . identifier[driverRequest] . identifier[actions] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[jsonpickle] . identifier[encode] ( identifier[mappings] , identifier[unpicklable] = keyword[False] )))
identifier[pool] = identifier[ThreadPool] ()
identifier[async_results] = identifier[self] . identifier[_run_async_connection_actions] ( identifier[si] , identifier[mappings] , identifier[pool] , identifier[logger] )
identifier[results] = identifier[self] . identifier[_get_async_results] ( identifier[async_results] , identifier[pool] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[jsonpickle] . identifier[encode] ( identifier[results] ,
identifier[unpicklable] = keyword[False] )))
keyword[return] identifier[results] | def connect_bulk(self, si, logger, vcenter_data_model, request):
"""
:param si:
:param logger:
:param VMwarevCenterResourceModel vcenter_data_model:
:param request:
:return:
"""
self.logger = logger
self.logger.info('Apply connectivity changes has started')
self.logger.debug('Apply connectivity changes has started with the requet: {0}'.format(request))
holder = DeployDataHolder(jsonpickle.decode(request))
self.vcenter_data_model = vcenter_data_model
if vcenter_data_model.reserved_networks:
self.reserved_networks = [name.strip() for name in vcenter_data_model.reserved_networks.split(',')] # depends on [control=['if'], data=[]]
if not vcenter_data_model.default_dvswitch:
return self._handle_no_dvswitch_error(holder) # depends on [control=['if'], data=[]]
dvswitch_location = VMLocation.create_from_full_path(vcenter_data_model.default_dvswitch)
self.dv_switch_path = VMLocation.combine([vcenter_data_model.default_datacenter, dvswitch_location.path])
self.dv_switch_name = dvswitch_location.name
self.default_network = VMLocation.combine([vcenter_data_model.default_datacenter, vcenter_data_model.holding_network])
mappings = self._map_requsets(holder.driverRequest.actions)
self.logger.debug('Connectivity actions mappings: {0}'.format(jsonpickle.encode(mappings, unpicklable=False)))
pool = ThreadPool()
async_results = self._run_async_connection_actions(si, mappings, pool, logger)
results = self._get_async_results(async_results, pool)
self.logger.info('Apply connectivity changes done')
self.logger.debug('Apply connectivity has finished with the results: {0}'.format(jsonpickle.encode(results, unpicklable=False)))
return results |
def generate_statistics_pdf(activities=None, start_date=None, all_years=False, year=None):
''' Accepts EighthActivity objects and outputs a PDF file. '''
if activities is None:
activities = EighthActivity.objects.all().order_by("name")
if year is None:
year = current_school_year()
if not isinstance(activities, list):
activities = activities.prefetch_related("rooms").prefetch_related("sponsors")
pdf_buffer = BytesIO()
h_margin = 1 * inch
v_margin = 0.5 * inch
doc = SimpleDocTemplate(pdf_buffer, pagesize=letter, rightMargin=h_margin, leftMargin=h_margin, topMargin=v_margin, bottomMargin=v_margin)
elements = []
styles = getSampleStyleSheet()
styles.add(ParagraphStyle(name="Indent", leftIndent=15))
empty_activities = []
for act in activities:
lelements = []
relements = []
act_stats = calculate_statistics(act, start_date=start_date, all_years=all_years, year=year)
if act_stats["total_blocks"] == 0:
empty_activities.append(act.name)
continue
elements.append(Paragraph(act.name, styles["Title"]))
sponsor_str = (", ".join([x.name for x in act.sponsors.all()])) if act.sponsors.count() > 0 else "None"
lelements.append(Paragraph("<b>Default Sponsors:</b> " + sponsor_str, styles["Normal"]))
lelements.append(Spacer(0, 0.025 * inch))
room_str = (", ".join([str(x) for x in act.rooms.all()])) if act.rooms.count() > 0 else "None"
relements.append(Paragraph("<b>Default Rooms:</b> " + room_str, styles["Normal"]))
relements.append(Spacer(0, 0.025 * inch))
relements.append(Paragraph("<b>Total blocks:</b> {}".format(act_stats["total_blocks"]), styles["Normal"]))
relements.append(Paragraph("<b>Scheduled blocks:</b> {}".format(act_stats["scheduled_blocks"]), styles["Indent"]))
relements.append(Paragraph("<b>Empty blocks:</b> {}".format(act_stats["empty_blocks"]), styles["Indent"]))
relements.append(Paragraph("<b>Cancelled blocks:</b> {}".format(act_stats["cancelled_blocks"]), styles["Indent"]))
lelements.append(Paragraph("<b>Total signups:</b> {}".format(act_stats["total_signups"]), styles["Normal"]))
lelements.append(Paragraph("<b>Average signups per block:</b> {}".format(act_stats["average_signups"]), styles["Indent"]))
lelements.append(Paragraph("<b>Average signups per student:</b> {}".format(act_stats["average_user_signups"]), styles["Indent"]))
lelements.append(
Paragraph("<b>Unique students:</b> {}, <b>Capacity:</b> {}".format(act_stats["students"], act_stats["capacity"]), styles["Normal"]))
elements.append(
Table([[lelements, relements]], style=[('LEFTPADDING', (0, 0), (-1, -1), 0), ('RIGHTPADDING', (0, 0), (-1, -1), 0), ('VALIGN', (0, 0),
(-1, -1), 'TOP')]))
parsed_members = [[x.username, y] for x, y in act_stats["members"]]
parsed_members = list(chunks(parsed_members, 30))[:3]
if parsed_members:
parsed_members = [[["Username", "Signups"]] + x for x in parsed_members]
parsed_members = [
Table(x, style=[('FONT', (0, 0), (1, 0), 'Helvetica-Bold'), ('ALIGN', (1, 0), (1, -1), 'RIGHT')]) for x in parsed_members
]
elements.append(Table([parsed_members], style=[('VALIGN', (-1, -1), (-1, -1), 'TOP')]))
if act_stats["students"] - 90 > 0:
elements.append(Paragraph("<b>{}</b> students were not shown on this page. ".format(act_stats["students"] - 90), styles["Normal"]))
else:
elements.append(Spacer(0, 0.20 * inch))
if start_date is not None:
elements.append(
Paragraph("<b>{}</b> block(s) are past the start date and are not included on this page.".format(act_stats["past_start_date"]),
styles["Normal"]))
elements.append(
Paragraph("<b>{}</b> block(s) not in the {}-{} school year are not included on this page.".format(
act_stats["old_blocks"], year - 1, year), styles["Normal"]))
elements.append(PageBreak())
if empty_activities:
empty_activities = [x[:37] + "..." if len(x) > 40 else x for x in empty_activities]
empty_activities = [[x] for x in empty_activities]
empty_activities = list(chunks(empty_activities, 35))
empty_activities = [[["Activity"]] + x for x in empty_activities]
empty_activities = [
Table(x, style=[('FONT', (0, 0), (-1, 0), 'Helvetica-Bold'), ('LEFTPADDING', (0, 0), (-1, -1), 0)]) for x in empty_activities
]
for i in range(0, len(empty_activities), 2):
elements.append(Paragraph("Empty Activities (Page {})".format(i // 2 + 1), styles["Title"]))
if all_years:
elements.append(Paragraph("The following activities have no 8th period blocks assigned to them.", styles["Normal"]))
else:
elements.append(
Paragraph("The following activities have no 8th period blocks assigned to them for the {}-{} school year.".format(year - 1, year),
styles["Normal"]))
elements.append(Spacer(0, 0.10 * inch))
ea = [empty_activities[i]]
if i + 1 < len(empty_activities):
ea.append(empty_activities[i + 1])
elements.append(
Table([ea], style=[
('LEFTPADDING', (0, 0), (-1, -1), 0),
('RIGHTPADDING', (0, 0), (-1, -1), 0),
('VALIGN', (0, 0), (-1, -1), 'TOP'),
], hAlign='LEFT'))
elements.append(PageBreak())
def first_page(canvas, _):
if len(activities) == 1:
canvas.setTitle("{} Statistics".format(activities[0].name))
else:
canvas.setTitle("8th Period Activity Statistics")
canvas.setAuthor("Generated by Ion")
doc.build(elements, onFirstPage=first_page)
pdf_buffer.seek(0)
return pdf_buffer | def function[generate_statistics_pdf, parameter[activities, start_date, all_years, year]]:
constant[ Accepts EighthActivity objects and outputs a PDF file. ]
if compare[name[activities] is constant[None]] begin[:]
variable[activities] assign[=] call[call[name[EighthActivity].objects.all, parameter[]].order_by, parameter[constant[name]]]
if compare[name[year] is constant[None]] begin[:]
variable[year] assign[=] call[name[current_school_year], parameter[]]
if <ast.UnaryOp object at 0x7da1b0558700> begin[:]
variable[activities] assign[=] call[call[name[activities].prefetch_related, parameter[constant[rooms]]].prefetch_related, parameter[constant[sponsors]]]
variable[pdf_buffer] assign[=] call[name[BytesIO], parameter[]]
variable[h_margin] assign[=] binary_operation[constant[1] * name[inch]]
variable[v_margin] assign[=] binary_operation[constant[0.5] * name[inch]]
variable[doc] assign[=] call[name[SimpleDocTemplate], parameter[name[pdf_buffer]]]
variable[elements] assign[=] list[[]]
variable[styles] assign[=] call[name[getSampleStyleSheet], parameter[]]
call[name[styles].add, parameter[call[name[ParagraphStyle], parameter[]]]]
variable[empty_activities] assign[=] list[[]]
for taget[name[act]] in starred[name[activities]] begin[:]
variable[lelements] assign[=] list[[]]
variable[relements] assign[=] list[[]]
variable[act_stats] assign[=] call[name[calculate_statistics], parameter[name[act]]]
if compare[call[name[act_stats]][constant[total_blocks]] equal[==] constant[0]] begin[:]
call[name[empty_activities].append, parameter[name[act].name]]
continue
call[name[elements].append, parameter[call[name[Paragraph], parameter[name[act].name, call[name[styles]][constant[Title]]]]]]
variable[sponsor_str] assign[=] <ast.IfExp object at 0x7da1b0559c60>
call[name[lelements].append, parameter[call[name[Paragraph], parameter[binary_operation[constant[<b>Default Sponsors:</b> ] + name[sponsor_str]], call[name[styles]][constant[Normal]]]]]]
call[name[lelements].append, parameter[call[name[Spacer], parameter[constant[0], binary_operation[constant[0.025] * name[inch]]]]]]
variable[room_str] assign[=] <ast.IfExp object at 0x7da1b055a530>
call[name[relements].append, parameter[call[name[Paragraph], parameter[binary_operation[constant[<b>Default Rooms:</b> ] + name[room_str]], call[name[styles]][constant[Normal]]]]]]
call[name[relements].append, parameter[call[name[Spacer], parameter[constant[0], binary_operation[constant[0.025] * name[inch]]]]]]
call[name[relements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Total blocks:</b> {}].format, parameter[call[name[act_stats]][constant[total_blocks]]]], call[name[styles]][constant[Normal]]]]]]
call[name[relements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Scheduled blocks:</b> {}].format, parameter[call[name[act_stats]][constant[scheduled_blocks]]]], call[name[styles]][constant[Indent]]]]]]
call[name[relements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Empty blocks:</b> {}].format, parameter[call[name[act_stats]][constant[empty_blocks]]]], call[name[styles]][constant[Indent]]]]]]
call[name[relements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Cancelled blocks:</b> {}].format, parameter[call[name[act_stats]][constant[cancelled_blocks]]]], call[name[styles]][constant[Indent]]]]]]
call[name[lelements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Total signups:</b> {}].format, parameter[call[name[act_stats]][constant[total_signups]]]], call[name[styles]][constant[Normal]]]]]]
call[name[lelements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Average signups per block:</b> {}].format, parameter[call[name[act_stats]][constant[average_signups]]]], call[name[styles]][constant[Indent]]]]]]
call[name[lelements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Average signups per student:</b> {}].format, parameter[call[name[act_stats]][constant[average_user_signups]]]], call[name[styles]][constant[Indent]]]]]]
call[name[lelements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>Unique students:</b> {}, <b>Capacity:</b> {}].format, parameter[call[name[act_stats]][constant[students]], call[name[act_stats]][constant[capacity]]]], call[name[styles]][constant[Normal]]]]]]
call[name[elements].append, parameter[call[name[Table], parameter[list[[<ast.List object at 0x7da1b052a080>]]]]]]
variable[parsed_members] assign[=] <ast.ListComp object at 0x7da1b05298d0>
variable[parsed_members] assign[=] call[call[name[list], parameter[call[name[chunks], parameter[name[parsed_members], constant[30]]]]]][<ast.Slice object at 0x7da1b05294b0>]
if name[parsed_members] begin[:]
variable[parsed_members] assign[=] <ast.ListComp object at 0x7da1b0529390>
variable[parsed_members] assign[=] <ast.ListComp object at 0x7da1b0529120>
call[name[elements].append, parameter[call[name[Table], parameter[list[[<ast.Name object at 0x7da1b0528a60>]]]]]]
if compare[binary_operation[call[name[act_stats]][constant[students]] - constant[90]] greater[>] constant[0]] begin[:]
call[name[elements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>{}</b> students were not shown on this page. ].format, parameter[binary_operation[call[name[act_stats]][constant[students]] - constant[90]]]], call[name[styles]][constant[Normal]]]]]]
if compare[name[start_date] is_not constant[None]] begin[:]
call[name[elements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>{}</b> block(s) are past the start date and are not included on this page.].format, parameter[call[name[act_stats]][constant[past_start_date]]]], call[name[styles]][constant[Normal]]]]]]
call[name[elements].append, parameter[call[name[Paragraph], parameter[call[constant[<b>{}</b> block(s) not in the {}-{} school year are not included on this page.].format, parameter[call[name[act_stats]][constant[old_blocks]], binary_operation[name[year] - constant[1]], name[year]]], call[name[styles]][constant[Normal]]]]]]
call[name[elements].append, parameter[call[name[PageBreak], parameter[]]]]
if name[empty_activities] begin[:]
variable[empty_activities] assign[=] <ast.ListComp object at 0x7da1b0431360>
variable[empty_activities] assign[=] <ast.ListComp object at 0x7da1b04326e0>
variable[empty_activities] assign[=] call[name[list], parameter[call[name[chunks], parameter[name[empty_activities], constant[35]]]]]
variable[empty_activities] assign[=] <ast.ListComp object at 0x7da1b0431630>
variable[empty_activities] assign[=] <ast.ListComp object at 0x7da1b04bf4f0>
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[empty_activities]]], constant[2]]]] begin[:]
call[name[elements].append, parameter[call[name[Paragraph], parameter[call[constant[Empty Activities (Page {})].format, parameter[binary_operation[binary_operation[name[i] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] + constant[1]]]], call[name[styles]][constant[Title]]]]]]
if name[all_years] begin[:]
call[name[elements].append, parameter[call[name[Paragraph], parameter[constant[The following activities have no 8th period blocks assigned to them.], call[name[styles]][constant[Normal]]]]]]
call[name[elements].append, parameter[call[name[Spacer], parameter[constant[0], binary_operation[constant[0.1] * name[inch]]]]]]
variable[ea] assign[=] list[[<ast.Subscript object at 0x7da1b04e0fd0>]]
if compare[binary_operation[name[i] + constant[1]] less[<] call[name[len], parameter[name[empty_activities]]]] begin[:]
call[name[ea].append, parameter[call[name[empty_activities]][binary_operation[name[i] + constant[1]]]]]
call[name[elements].append, parameter[call[name[Table], parameter[list[[<ast.Name object at 0x7da1b04e1690>]]]]]]
call[name[elements].append, parameter[call[name[PageBreak], parameter[]]]]
def function[first_page, parameter[canvas, _]]:
if compare[call[name[len], parameter[name[activities]]] equal[==] constant[1]] begin[:]
call[name[canvas].setTitle, parameter[call[constant[{} Statistics].format, parameter[call[name[activities]][constant[0]].name]]]]
call[name[canvas].setAuthor, parameter[constant[Generated by Ion]]]
call[name[doc].build, parameter[name[elements]]]
call[name[pdf_buffer].seek, parameter[constant[0]]]
return[name[pdf_buffer]] | keyword[def] identifier[generate_statistics_pdf] ( identifier[activities] = keyword[None] , identifier[start_date] = keyword[None] , identifier[all_years] = keyword[False] , identifier[year] = keyword[None] ):
literal[string]
keyword[if] identifier[activities] keyword[is] keyword[None] :
identifier[activities] = identifier[EighthActivity] . identifier[objects] . identifier[all] (). identifier[order_by] ( literal[string] )
keyword[if] identifier[year] keyword[is] keyword[None] :
identifier[year] = identifier[current_school_year] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[activities] , identifier[list] ):
identifier[activities] = identifier[activities] . identifier[prefetch_related] ( literal[string] ). identifier[prefetch_related] ( literal[string] )
identifier[pdf_buffer] = identifier[BytesIO] ()
identifier[h_margin] = literal[int] * identifier[inch]
identifier[v_margin] = literal[int] * identifier[inch]
identifier[doc] = identifier[SimpleDocTemplate] ( identifier[pdf_buffer] , identifier[pagesize] = identifier[letter] , identifier[rightMargin] = identifier[h_margin] , identifier[leftMargin] = identifier[h_margin] , identifier[topMargin] = identifier[v_margin] , identifier[bottomMargin] = identifier[v_margin] )
identifier[elements] =[]
identifier[styles] = identifier[getSampleStyleSheet] ()
identifier[styles] . identifier[add] ( identifier[ParagraphStyle] ( identifier[name] = literal[string] , identifier[leftIndent] = literal[int] ))
identifier[empty_activities] =[]
keyword[for] identifier[act] keyword[in] identifier[activities] :
identifier[lelements] =[]
identifier[relements] =[]
identifier[act_stats] = identifier[calculate_statistics] ( identifier[act] , identifier[start_date] = identifier[start_date] , identifier[all_years] = identifier[all_years] , identifier[year] = identifier[year] )
keyword[if] identifier[act_stats] [ literal[string] ]== literal[int] :
identifier[empty_activities] . identifier[append] ( identifier[act] . identifier[name] )
keyword[continue]
identifier[elements] . identifier[append] ( identifier[Paragraph] ( identifier[act] . identifier[name] , identifier[styles] [ literal[string] ]))
identifier[sponsor_str] =( literal[string] . identifier[join] ([ identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[act] . identifier[sponsors] . identifier[all] ()])) keyword[if] identifier[act] . identifier[sponsors] . identifier[count] ()> literal[int] keyword[else] literal[string]
identifier[lelements] . identifier[append] ( identifier[Paragraph] ( literal[string] + identifier[sponsor_str] , identifier[styles] [ literal[string] ]))
identifier[lelements] . identifier[append] ( identifier[Spacer] ( literal[int] , literal[int] * identifier[inch] ))
identifier[room_str] =( literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[act] . identifier[rooms] . identifier[all] ()])) keyword[if] identifier[act] . identifier[rooms] . identifier[count] ()> literal[int] keyword[else] literal[string]
identifier[relements] . identifier[append] ( identifier[Paragraph] ( literal[string] + identifier[room_str] , identifier[styles] [ literal[string] ]))
identifier[relements] . identifier[append] ( identifier[Spacer] ( literal[int] , literal[int] * identifier[inch] ))
identifier[relements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[relements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[relements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[relements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[lelements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[lelements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[lelements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[lelements] . identifier[append] (
identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ], identifier[act_stats] [ literal[string] ]), identifier[styles] [ literal[string] ]))
identifier[elements] . identifier[append] (
identifier[Table] ([[ identifier[lelements] , identifier[relements] ]], identifier[style] =[( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ),( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ),( literal[string] ,( literal[int] , literal[int] ),
(- literal[int] ,- literal[int] ), literal[string] )]))
identifier[parsed_members] =[[ identifier[x] . identifier[username] , identifier[y] ] keyword[for] identifier[x] , identifier[y] keyword[in] identifier[act_stats] [ literal[string] ]]
identifier[parsed_members] = identifier[list] ( identifier[chunks] ( identifier[parsed_members] , literal[int] ))[: literal[int] ]
keyword[if] identifier[parsed_members] :
identifier[parsed_members] =[[[ literal[string] , literal[string] ]]+ identifier[x] keyword[for] identifier[x] keyword[in] identifier[parsed_members] ]
identifier[parsed_members] =[
identifier[Table] ( identifier[x] , identifier[style] =[( literal[string] ,( literal[int] , literal[int] ),( literal[int] , literal[int] ), literal[string] ),( literal[string] ,( literal[int] , literal[int] ),( literal[int] ,- literal[int] ), literal[string] )]) keyword[for] identifier[x] keyword[in] identifier[parsed_members]
]
identifier[elements] . identifier[append] ( identifier[Table] ([ identifier[parsed_members] ], identifier[style] =[( literal[string] ,(- literal[int] ,- literal[int] ),(- literal[int] ,- literal[int] ), literal[string] )]))
keyword[if] identifier[act_stats] [ literal[string] ]- literal[int] > literal[int] :
identifier[elements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]- literal[int] ), identifier[styles] [ literal[string] ]))
keyword[else] :
identifier[elements] . identifier[append] ( identifier[Spacer] ( literal[int] , literal[int] * identifier[inch] ))
keyword[if] identifier[start_date] keyword[is] keyword[not] keyword[None] :
identifier[elements] . identifier[append] (
identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[act_stats] [ literal[string] ]),
identifier[styles] [ literal[string] ]))
identifier[elements] . identifier[append] (
identifier[Paragraph] ( literal[string] . identifier[format] (
identifier[act_stats] [ literal[string] ], identifier[year] - literal[int] , identifier[year] ), identifier[styles] [ literal[string] ]))
identifier[elements] . identifier[append] ( identifier[PageBreak] ())
keyword[if] identifier[empty_activities] :
identifier[empty_activities] =[ identifier[x] [: literal[int] ]+ literal[string] keyword[if] identifier[len] ( identifier[x] )> literal[int] keyword[else] identifier[x] keyword[for] identifier[x] keyword[in] identifier[empty_activities] ]
identifier[empty_activities] =[[ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[empty_activities] ]
identifier[empty_activities] = identifier[list] ( identifier[chunks] ( identifier[empty_activities] , literal[int] ))
identifier[empty_activities] =[[[ literal[string] ]]+ identifier[x] keyword[for] identifier[x] keyword[in] identifier[empty_activities] ]
identifier[empty_activities] =[
identifier[Table] ( identifier[x] , identifier[style] =[( literal[string] ,( literal[int] , literal[int] ),(- literal[int] , literal[int] ), literal[string] ),( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] )]) keyword[for] identifier[x] keyword[in] identifier[empty_activities]
]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[empty_activities] ), literal[int] ):
identifier[elements] . identifier[append] ( identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[i] // literal[int] + literal[int] ), identifier[styles] [ literal[string] ]))
keyword[if] identifier[all_years] :
identifier[elements] . identifier[append] ( identifier[Paragraph] ( literal[string] , identifier[styles] [ literal[string] ]))
keyword[else] :
identifier[elements] . identifier[append] (
identifier[Paragraph] ( literal[string] . identifier[format] ( identifier[year] - literal[int] , identifier[year] ),
identifier[styles] [ literal[string] ]))
identifier[elements] . identifier[append] ( identifier[Spacer] ( literal[int] , literal[int] * identifier[inch] ))
identifier[ea] =[ identifier[empty_activities] [ identifier[i] ]]
keyword[if] identifier[i] + literal[int] < identifier[len] ( identifier[empty_activities] ):
identifier[ea] . identifier[append] ( identifier[empty_activities] [ identifier[i] + literal[int] ])
identifier[elements] . identifier[append] (
identifier[Table] ([ identifier[ea] ], identifier[style] =[
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ),
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ),
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[string] ),
], identifier[hAlign] = literal[string] ))
identifier[elements] . identifier[append] ( identifier[PageBreak] ())
keyword[def] identifier[first_page] ( identifier[canvas] , identifier[_] ):
keyword[if] identifier[len] ( identifier[activities] )== literal[int] :
identifier[canvas] . identifier[setTitle] ( literal[string] . identifier[format] ( identifier[activities] [ literal[int] ]. identifier[name] ))
keyword[else] :
identifier[canvas] . identifier[setTitle] ( literal[string] )
identifier[canvas] . identifier[setAuthor] ( literal[string] )
identifier[doc] . identifier[build] ( identifier[elements] , identifier[onFirstPage] = identifier[first_page] )
identifier[pdf_buffer] . identifier[seek] ( literal[int] )
keyword[return] identifier[pdf_buffer] | def generate_statistics_pdf(activities=None, start_date=None, all_years=False, year=None):
""" Accepts EighthActivity objects and outputs a PDF file. """
if activities is None:
activities = EighthActivity.objects.all().order_by('name') # depends on [control=['if'], data=['activities']]
if year is None:
year = current_school_year() # depends on [control=['if'], data=['year']]
if not isinstance(activities, list):
activities = activities.prefetch_related('rooms').prefetch_related('sponsors') # depends on [control=['if'], data=[]]
pdf_buffer = BytesIO()
h_margin = 1 * inch
v_margin = 0.5 * inch
doc = SimpleDocTemplate(pdf_buffer, pagesize=letter, rightMargin=h_margin, leftMargin=h_margin, topMargin=v_margin, bottomMargin=v_margin)
elements = []
styles = getSampleStyleSheet()
styles.add(ParagraphStyle(name='Indent', leftIndent=15))
empty_activities = []
for act in activities:
lelements = []
relements = []
act_stats = calculate_statistics(act, start_date=start_date, all_years=all_years, year=year)
if act_stats['total_blocks'] == 0:
empty_activities.append(act.name)
continue # depends on [control=['if'], data=[]]
elements.append(Paragraph(act.name, styles['Title']))
sponsor_str = ', '.join([x.name for x in act.sponsors.all()]) if act.sponsors.count() > 0 else 'None'
lelements.append(Paragraph('<b>Default Sponsors:</b> ' + sponsor_str, styles['Normal']))
lelements.append(Spacer(0, 0.025 * inch))
room_str = ', '.join([str(x) for x in act.rooms.all()]) if act.rooms.count() > 0 else 'None'
relements.append(Paragraph('<b>Default Rooms:</b> ' + room_str, styles['Normal']))
relements.append(Spacer(0, 0.025 * inch))
relements.append(Paragraph('<b>Total blocks:</b> {}'.format(act_stats['total_blocks']), styles['Normal']))
relements.append(Paragraph('<b>Scheduled blocks:</b> {}'.format(act_stats['scheduled_blocks']), styles['Indent']))
relements.append(Paragraph('<b>Empty blocks:</b> {}'.format(act_stats['empty_blocks']), styles['Indent']))
relements.append(Paragraph('<b>Cancelled blocks:</b> {}'.format(act_stats['cancelled_blocks']), styles['Indent']))
lelements.append(Paragraph('<b>Total signups:</b> {}'.format(act_stats['total_signups']), styles['Normal']))
lelements.append(Paragraph('<b>Average signups per block:</b> {}'.format(act_stats['average_signups']), styles['Indent']))
lelements.append(Paragraph('<b>Average signups per student:</b> {}'.format(act_stats['average_user_signups']), styles['Indent']))
lelements.append(Paragraph('<b>Unique students:</b> {}, <b>Capacity:</b> {}'.format(act_stats['students'], act_stats['capacity']), styles['Normal']))
elements.append(Table([[lelements, relements]], style=[('LEFTPADDING', (0, 0), (-1, -1), 0), ('RIGHTPADDING', (0, 0), (-1, -1), 0), ('VALIGN', (0, 0), (-1, -1), 'TOP')]))
parsed_members = [[x.username, y] for (x, y) in act_stats['members']]
parsed_members = list(chunks(parsed_members, 30))[:3]
if parsed_members:
parsed_members = [[['Username', 'Signups']] + x for x in parsed_members]
parsed_members = [Table(x, style=[('FONT', (0, 0), (1, 0), 'Helvetica-Bold'), ('ALIGN', (1, 0), (1, -1), 'RIGHT')]) for x in parsed_members]
elements.append(Table([parsed_members], style=[('VALIGN', (-1, -1), (-1, -1), 'TOP')]))
if act_stats['students'] - 90 > 0:
elements.append(Paragraph('<b>{}</b> students were not shown on this page. '.format(act_stats['students'] - 90), styles['Normal'])) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
elements.append(Spacer(0, 0.2 * inch))
if start_date is not None:
elements.append(Paragraph('<b>{}</b> block(s) are past the start date and are not included on this page.'.format(act_stats['past_start_date']), styles['Normal'])) # depends on [control=['if'], data=[]]
elements.append(Paragraph('<b>{}</b> block(s) not in the {}-{} school year are not included on this page.'.format(act_stats['old_blocks'], year - 1, year), styles['Normal']))
elements.append(PageBreak()) # depends on [control=['for'], data=['act']]
if empty_activities:
empty_activities = [x[:37] + '...' if len(x) > 40 else x for x in empty_activities]
empty_activities = [[x] for x in empty_activities]
empty_activities = list(chunks(empty_activities, 35))
empty_activities = [[['Activity']] + x for x in empty_activities]
empty_activities = [Table(x, style=[('FONT', (0, 0), (-1, 0), 'Helvetica-Bold'), ('LEFTPADDING', (0, 0), (-1, -1), 0)]) for x in empty_activities]
for i in range(0, len(empty_activities), 2):
elements.append(Paragraph('Empty Activities (Page {})'.format(i // 2 + 1), styles['Title']))
if all_years:
elements.append(Paragraph('The following activities have no 8th period blocks assigned to them.', styles['Normal'])) # depends on [control=['if'], data=[]]
else:
elements.append(Paragraph('The following activities have no 8th period blocks assigned to them for the {}-{} school year.'.format(year - 1, year), styles['Normal']))
elements.append(Spacer(0, 0.1 * inch))
ea = [empty_activities[i]]
if i + 1 < len(empty_activities):
ea.append(empty_activities[i + 1]) # depends on [control=['if'], data=[]]
elements.append(Table([ea], style=[('LEFTPADDING', (0, 0), (-1, -1), 0), ('RIGHTPADDING', (0, 0), (-1, -1), 0), ('VALIGN', (0, 0), (-1, -1), 'TOP')], hAlign='LEFT'))
elements.append(PageBreak()) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
def first_page(canvas, _):
if len(activities) == 1:
canvas.setTitle('{} Statistics'.format(activities[0].name)) # depends on [control=['if'], data=[]]
else:
canvas.setTitle('8th Period Activity Statistics')
canvas.setAuthor('Generated by Ion')
doc.build(elements, onFirstPage=first_page)
pdf_buffer.seek(0)
return pdf_buffer |
def idx(self):
    """
    Return the integer index of a tuple partname, or |None| for a
    singleton partname.

    E.g. returns ``21`` for ``'/ppt/slides/slide21.xml'`` and |None|
    for ``'/ppt/presentation.xml'``.
    """
    fname = self.filename
    if not fname:
        # No filename at all -> cannot carry an index.
        return None
    # Strip the extension before matching, e.g. 'slide21.xml' -> 'slide21'.
    stem = posixpath.splitext(fname)[0]
    m = self._filename_re.match(stem)
    # No match, or a match whose index group is empty/absent, means a
    # singleton partname with no numeric suffix.
    if m is None or not m.group(2):
        return None
    return int(m.group(2))
constant[
Return partname index as integer for tuple partname or None for
singleton partname, e.g. ``21`` for ``'/ppt/slides/slide21.xml'`` and
|None| for ``'/ppt/presentation.xml'``.
]
variable[filename] assign[=] name[self].filename
if <ast.UnaryOp object at 0x7da1b1cb27d0> begin[:]
return[constant[None]]
variable[name_part] assign[=] call[call[name[posixpath].splitext, parameter[name[filename]]]][constant[0]]
variable[match] assign[=] call[name[self]._filename_re.match, parameter[name[name_part]]]
if compare[name[match] is constant[None]] begin[:]
return[constant[None]]
if call[name[match].group, parameter[constant[2]]] begin[:]
return[call[name[int], parameter[call[name[match].group, parameter[constant[2]]]]]]
return[constant[None]] | keyword[def] identifier[idx] ( identifier[self] ):
literal[string]
identifier[filename] = identifier[self] . identifier[filename]
keyword[if] keyword[not] identifier[filename] :
keyword[return] keyword[None]
identifier[name_part] = identifier[posixpath] . identifier[splitext] ( identifier[filename] )[ literal[int] ]
identifier[match] = identifier[self] . identifier[_filename_re] . identifier[match] ( identifier[name_part] )
keyword[if] identifier[match] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[match] . identifier[group] ( literal[int] ):
keyword[return] identifier[int] ( identifier[match] . identifier[group] ( literal[int] ))
keyword[return] keyword[None] | def idx(self):
"""
Return partname index as integer for tuple partname or None for
singleton partname, e.g. ``21`` for ``'/ppt/slides/slide21.xml'`` and
|None| for ``'/ppt/presentation.xml'``.
"""
filename = self.filename
if not filename:
return None # depends on [control=['if'], data=[]]
name_part = posixpath.splitext(filename)[0] # filename w/ext removed
match = self._filename_re.match(name_part)
if match is None:
return None # depends on [control=['if'], data=[]]
if match.group(2):
return int(match.group(2)) # depends on [control=['if'], data=[]]
return None |
def unflatten(obj):
    '''
    Rebuild a nested structure from a flattened dict-like *obj*.

    Keys containing no ``.`` or ``[`` are copied through to the result
    unchanged. Every other key is grouped by the prefix preceding its
    first ``.`` or ``[``, and each group (with the prefix stripped) is
    expanded recursively via ``_relunflatten``.

    :param obj: a dict-like mapping of flattened keys to values.
    :returns: a new dict with the structured values rebuilt.
    :raises ValueError: if *obj* is not dict-like, or if a plain scalar
        key collides with a structured prefix (e.g. both ``'a'`` and
        ``'a.b'`` are present).
    '''
    if not isdict(obj):
        raise ValueError(
            'only dict-like objects can be unflattened, not %r' % (obj,))
    ret = dict()
    sub = dict()
    for key, value in obj.items():
        if '.' not in key and '[' not in key:
            # Plain scalar key: pass straight through.
            ret[key] = value
            continue
        # Split at whichever structure marker ('.' or '[') comes first.
        if '.' in key and '[' in key:
            idx = min(key.find('.'), key.find('['))
        elif '.' in key:
            idx = key.find('.')
        else:
            idx = key.find('[')
        prefix = key[:idx]
        # Group the remainder of the key (marker included) under its prefix.
        sub.setdefault(prefix, dict())[key[idx:]] = value
    for pfx, values in sub.items():
        if pfx in ret:
            raise ValueError(
                'conflicting scalar vs. structure for prefix: %s' % (pfx,))
        ret[pfx] = _relunflatten(pfx, values)
    return ret
constant[
TODO: add docs
]
if <ast.UnaryOp object at 0x7da1b0aa2d70> begin[:]
<ast.Raise object at 0x7da1b0aa05e0>
variable[ret] assign[=] call[name[dict], parameter[]]
variable[sub] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0ac9a20>, <ast.Name object at 0x7da1b0ac9f90>]]] in starred[call[name[obj].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0ac8040> begin[:]
call[name[ret]][name[key]] assign[=] name[value]
continue
if <ast.BoolOp object at 0x7da1b0aa5ba0> begin[:]
variable[idx] assign[=] call[name[min], parameter[call[name[key].find, parameter[constant[.]]], call[name[key].find, parameter[constant[[]]]]]
variable[prefix] assign[=] call[name[key]][<ast.Slice object at 0x7da1b0aa3bb0>]
if compare[name[prefix] <ast.NotIn object at 0x7da2590d7190> name[sub]] begin[:]
call[name[sub]][name[prefix]] assign[=] call[name[dict], parameter[]]
call[call[name[sub]][name[prefix]]][call[name[key]][<ast.Slice object at 0x7da1b0aa0d00>]] assign[=] name[value]
for taget[tuple[[<ast.Name object at 0x7da1b0aa3520>, <ast.Name object at 0x7da1b0aa0940>]]] in starred[call[name[sub].items, parameter[]]] begin[:]
if compare[name[pfx] in name[ret]] begin[:]
<ast.Raise object at 0x7da1b0aa32e0>
call[name[ret]][name[pfx]] assign[=] call[name[_relunflatten], parameter[name[pfx], name[values]]]
return[name[ret]] | keyword[def] identifier[unflatten] ( identifier[obj] ):
literal[string]
keyword[if] keyword[not] identifier[isdict] ( identifier[obj] ):
keyword[raise] identifier[ValueError] (
literal[string] %( identifier[obj] ,))
identifier[ret] = identifier[dict] ()
identifier[sub] = identifier[dict] ()
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[obj] . identifier[items] ():
keyword[if] literal[string] keyword[not] keyword[in] identifier[key] keyword[and] literal[string] keyword[not] keyword[in] identifier[key] :
identifier[ret] [ identifier[key] ]= identifier[value]
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[key] keyword[and] literal[string] keyword[in] identifier[key] :
identifier[idx] = identifier[min] ( identifier[key] . identifier[find] ( literal[string] ), identifier[key] . identifier[find] ( literal[string] ))
keyword[elif] literal[string] keyword[in] identifier[key] :
identifier[idx] = identifier[key] . identifier[find] ( literal[string] )
keyword[else] :
identifier[idx] = identifier[key] . identifier[find] ( literal[string] )
identifier[prefix] = identifier[key] [: identifier[idx] ]
keyword[if] identifier[prefix] keyword[not] keyword[in] identifier[sub] :
identifier[sub] [ identifier[prefix] ]= identifier[dict] ()
identifier[sub] [ identifier[prefix] ][ identifier[key] [ identifier[idx] :]]= identifier[value]
keyword[for] identifier[pfx] , identifier[values] keyword[in] identifier[sub] . identifier[items] ():
keyword[if] identifier[pfx] keyword[in] identifier[ret] :
keyword[raise] identifier[ValueError] (
literal[string] %( identifier[pfx] ,))
identifier[ret] [ identifier[pfx] ]= identifier[_relunflatten] ( identifier[pfx] , identifier[values] )
keyword[return] identifier[ret] | def unflatten(obj):
"""
TODO: add docs
"""
if not isdict(obj):
raise ValueError('only dict-like objects can be unflattened, not %r' % (obj,)) # depends on [control=['if'], data=[]]
ret = dict()
sub = dict()
for (key, value) in obj.items():
if '.' not in key and '[' not in key:
ret[key] = value
continue # depends on [control=['if'], data=[]]
if '.' in key and '[' in key:
idx = min(key.find('.'), key.find('[')) # depends on [control=['if'], data=[]]
elif '.' in key:
idx = key.find('.') # depends on [control=['if'], data=['key']]
else:
idx = key.find('[')
prefix = key[:idx]
if prefix not in sub:
sub[prefix] = dict() # depends on [control=['if'], data=['prefix', 'sub']]
sub[prefix][key[idx:]] = value # depends on [control=['for'], data=[]]
for (pfx, values) in sub.items():
if pfx in ret:
raise ValueError('conflicting scalar vs. structure for prefix: %s' % (pfx,)) # depends on [control=['if'], data=['pfx']]
ret[pfx] = _relunflatten(pfx, values) # depends on [control=['for'], data=[]]
return ret |
def create_page(page_object_class_or_interface,
                webdriver=None, **kwargs):
    """
    Instantiate a page object from a given Interface or Abstract class.
    Args:
        page_object_class_or_interface (Class): PageObject class, AbstractBaseClass, or
            Interface to attempt to construct.
    Kwargs:
        webdriver (WebDriver): Selenium Webdriver to use to instantiate the page. If none
            is provided, then it will use the default from
            WTF_WEBDRIVER_MANAGER
    Returns:
        PageObject
    Raises:
        NoMatchingPageError
    Instantiating a Page from PageObject from class usage::
        my_page_instance = PageFactory.create_page(MyPageClass)
    Instantiating a Page from an Interface or base class::
        import pages.mysite.*  # Make sure you import classes first, or else PageFactory will not know about it.
        my_page_instance = PageFactory.create_page(MyPageInterfaceClass)
    Instantiating a Page from a list of classes.::
        my_page_instance = PageFactory.create_page([PossiblePage1, PossiblePage2])
    Note: It'll only be able to detect pages that are imported. So it's best to
    do an import of all pages implementing a base class or the interface inside the
    __init__.py of the package directory.
    """
    if not webdriver:
        webdriver = WTF_WEBDRIVER_MANAGER.get_driver()

    # Best-ranked page object matched so far.
    current_matched_page = None
    # Tracks whether at least one candidate implemented
    # 'PageObject._validate(self, webdriver)'.
    was_validate_called = False

    # Walk through all classes if a list was passed.
    if isinstance(page_object_class_or_interface, list):
        subclasses = []
        for page_class in page_object_class_or_interface:
            # Attempt to instantiate each candidate class.
            page = PageFactory.__instantiate_page_object(page_class,
                                                         webdriver,
                                                         **kwargs)
            if isinstance(page, PageObject):
                was_validate_called = True
                if current_matched_page is None or page > current_matched_page:
                    current_matched_page = page
            elif page is True:
                was_validate_called = True
            # Also consider subclasses of each candidate.
            subclasses += PageFactory.__itersubclasses(page_class)
    else:
        # A single class was passed in, try to instantiate the class.
        page_class = page_object_class_or_interface
        page = PageFactory.__instantiate_page_object(page_class,
                                                     webdriver,
                                                     **kwargs)
        # Check if we got a valid PageObject back.
        if isinstance(page, PageObject):
            was_validate_called = True
            current_matched_page = page
        elif page is True:
            was_validate_called = True
        # check for subclasses
        subclasses = PageFactory.__itersubclasses(
            page_object_class_or_interface)

    # Iterate over subclasses of the passed in classes to see if we have a
    # better match.
    for pageClass in subclasses:
        try:
            page = PageFactory.__instantiate_page_object(pageClass,
                                                         webdriver,
                                                         **kwargs)
            # If we get a valid PageObject match, check to see if the ranking
            # is higher than our current PageObject.
            if isinstance(page, PageObject):
                was_validate_called = True
                if current_matched_page is None or page > current_matched_page:
                    current_matched_page = page
            elif page is True:
                was_validate_called = True
        except InvalidPageError as e:
            # This happens when the page fails its validation check.
            _wtflog.debug("InvalidPageError: %s", e)
        except TypeError as e:
            # This happens when it tries to instantiate the original
            # abstract class.
            _wtflog.debug("TypeError: %s", e)
        except Exception as e:
            # Unexpected exception: log and re-raise, preserving the
            # original traceback (the old 'raise e' rewrote it).
            _wtflog.debug("Exception during page instantiation: %s", e)
            raise

    # If no matching classes.
    if not isinstance(current_matched_page, PageObject):
        # Check that there is at least 1 valid page object that was passed in.
        if was_validate_called is False:
            raise TypeError("Neither the PageObjects nor it's subclasses have implemented " +
                            "'PageObject._validate(self, webdriver)'.")
        try:
            current_url = webdriver.current_url
        except Exception:
            # The webdriver may be in a bad state; fall back to a message
            # without the URL.
            raise NoMatchingPageError(u("There's, no matching classes to this page. "))
        # BUG FIX: raise outside the try block. The original raised the
        # URL-bearing error inside its own try, so the bare 'except:'
        # swallowed it and the URL never reached the caller.
        raise NoMatchingPageError(u("There's, no matching classes to this page. URL:{0}")
                                  .format(current_url))
    return current_matched_page
constant[
Instantiate a page object from a given Interface or Abstract class.
Args:
page_object_class_or_interface (Class): PageObject class, AbstractBaseClass, or
Interface to attempt to consturct.
Kwargs:
webdriver (WebDriver): Selenium Webdriver to use to instantiate the page. If none
is provided, then it was use the default from
WTF_WEBDRIVER_MANAGER
Returns:
PageObject
Raises:
NoMatchingPageError
Instantiating a Page from PageObject from class usage::
my_page_instance = PageFactory.create_page(MyPageClass)
Instantiating a Page from an Interface or base class::
import pages.mysite.* # Make sure you import classes first, or else PageFactory will not know about it.
my_page_instance = PageFactory.create_page(MyPageInterfaceClass)
Instantiating a Page from a list of classes.::
my_page_instance = PageFactory.create_page([PossiblePage1, PossiblePage2])
Note: It'll only be able to detect pages that are imported. To it's best to
do an import of all pages implementing a base class or the interface inside the
__init__.py of the package directory.
]
if <ast.UnaryOp object at 0x7da1b11eece0> begin[:]
variable[webdriver] assign[=] call[name[WTF_WEBDRIVER_MANAGER].get_driver, parameter[]]
variable[current_matched_page] assign[=] constant[None]
variable[was_validate_called] assign[=] constant[False]
if compare[call[name[type], parameter[name[page_object_class_or_interface]]] equal[==] name[list]] begin[:]
variable[subclasses] assign[=] list[[]]
for taget[name[page_class]] in starred[name[page_object_class_or_interface]] begin[:]
variable[page] assign[=] call[name[PageFactory].__instantiate_page_object, parameter[name[page_class], name[webdriver]]]
if call[name[isinstance], parameter[name[page], name[PageObject]]] begin[:]
variable[was_validate_called] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b11edd50> begin[:]
variable[current_matched_page] assign[=] name[page]
<ast.AugAssign object at 0x7da1b11ec670>
for taget[name[pageClass]] in starred[name[subclasses]] begin[:]
<ast.Try object at 0x7da1b11db880>
if <ast.UnaryOp object at 0x7da1b11d8400> begin[:]
if compare[name[was_validate_called] is constant[False]] begin[:]
<ast.Raise object at 0x7da1b11daa10>
<ast.Try object at 0x7da1b11dbc70> | keyword[def] identifier[create_page] ( identifier[page_object_class_or_interface] ,
identifier[webdriver] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[webdriver] :
identifier[webdriver] = identifier[WTF_WEBDRIVER_MANAGER] . identifier[get_driver] ()
identifier[current_matched_page] = keyword[None]
identifier[was_validate_called] = keyword[False]
keyword[if] identifier[type] ( identifier[page_object_class_or_interface] )== identifier[list] :
identifier[subclasses] =[]
keyword[for] identifier[page_class] keyword[in] identifier[page_object_class_or_interface] :
identifier[page] = identifier[PageFactory] . identifier[__instantiate_page_object] ( identifier[page_class] ,
identifier[webdriver] ,
** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[page] , identifier[PageObject] ):
identifier[was_validate_called] = keyword[True]
keyword[if] ( identifier[current_matched_page] == keyword[None] keyword[or] identifier[page] > identifier[current_matched_page] ):
identifier[current_matched_page] = identifier[page]
keyword[elif] identifier[page] keyword[is] keyword[True] :
identifier[was_validate_called] = keyword[True]
identifier[subclasses] += identifier[PageFactory] . identifier[__itersubclasses] ( identifier[page_class] )
keyword[else] :
identifier[page_class] = identifier[page_object_class_or_interface]
identifier[page] = identifier[PageFactory] . identifier[__instantiate_page_object] ( identifier[page_class] ,
identifier[webdriver] ,
** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[page] , identifier[PageObject] ):
identifier[was_validate_called] = keyword[True]
identifier[current_matched_page] = identifier[page]
keyword[elif] identifier[page] keyword[is] keyword[True] :
identifier[was_validate_called] = keyword[True]
identifier[subclasses] = identifier[PageFactory] . identifier[__itersubclasses] (
identifier[page_object_class_or_interface] )
keyword[for] identifier[pageClass] keyword[in] identifier[subclasses] :
keyword[try] :
identifier[page] = identifier[PageFactory] . identifier[__instantiate_page_object] ( identifier[pageClass] ,
identifier[webdriver] ,
** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[page] , identifier[PageObject] ):
identifier[was_validate_called] = keyword[True]
keyword[if] identifier[current_matched_page] == keyword[None] keyword[or] identifier[page] > identifier[current_matched_page] :
identifier[current_matched_page] = identifier[page]
keyword[elif] identifier[page] keyword[is] keyword[True] :
identifier[was_validate_called] = keyword[True]
keyword[except] identifier[InvalidPageError] keyword[as] identifier[e] :
identifier[_wtflog] . identifier[debug] ( literal[string] , identifier[e] )
keyword[pass]
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
identifier[_wtflog] . identifier[debug] ( literal[string] , identifier[e] )
keyword[pass]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[_wtflog] . identifier[debug] ( literal[string] , identifier[e] )
keyword[raise] identifier[e]
keyword[if] keyword[not] identifier[isinstance] ( identifier[current_matched_page] , identifier[PageObject] ):
keyword[if] identifier[was_validate_called] keyword[is] keyword[False] :
keyword[raise] identifier[TypeError] ( literal[string] +
literal[string] )
keyword[try] :
identifier[current_url] = identifier[webdriver] . identifier[current_url]
keyword[raise] identifier[NoMatchingPageError] ( identifier[u] ( literal[string] )
. identifier[format] ( identifier[current_url] ))
keyword[except] :
keyword[raise] identifier[NoMatchingPageError] ( identifier[u] ( literal[string] ))
keyword[else] :
keyword[return] identifier[current_matched_page] | def create_page(page_object_class_or_interface, webdriver=None, **kwargs):
"""
Instantiate a page object from a given Interface or Abstract class.
Args:
page_object_class_or_interface (Class): PageObject class, AbstractBaseClass, or
Interface to attempt to consturct.
Kwargs:
webdriver (WebDriver): Selenium Webdriver to use to instantiate the page. If none
is provided, then it was use the default from
WTF_WEBDRIVER_MANAGER
Returns:
PageObject
Raises:
NoMatchingPageError
Instantiating a Page from PageObject from class usage::
my_page_instance = PageFactory.create_page(MyPageClass)
Instantiating a Page from an Interface or base class::
import pages.mysite.* # Make sure you import classes first, or else PageFactory will not know about it.
my_page_instance = PageFactory.create_page(MyPageInterfaceClass)
Instantiating a Page from a list of classes.::
my_page_instance = PageFactory.create_page([PossiblePage1, PossiblePage2])
Note: It'll only be able to detect pages that are imported. To it's best to
do an import of all pages implementing a base class or the interface inside the
__init__.py of the package directory.
"""
if not webdriver:
webdriver = WTF_WEBDRIVER_MANAGER.get_driver() # depends on [control=['if'], data=[]]
# will be used later when tracking best matched page.
current_matched_page = None
# used to track if there is a valid page object within the set of PageObjects searched.
was_validate_called = False
# Walk through all classes if a list was passed.
if type(page_object_class_or_interface) == list:
subclasses = []
for page_class in page_object_class_or_interface:
# attempt to instantiate class.
page = PageFactory.__instantiate_page_object(page_class, webdriver, **kwargs)
if isinstance(page, PageObject):
was_validate_called = True
if current_matched_page == None or page > current_matched_page:
current_matched_page = page # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif page is True:
was_validate_called = True # depends on [control=['if'], data=[]]
# check for subclasses
subclasses += PageFactory.__itersubclasses(page_class) # depends on [control=['for'], data=['page_class']] # depends on [control=['if'], data=[]]
else:
# A single class was passed in, try to instantiate the class.
page_class = page_object_class_or_interface
page = PageFactory.__instantiate_page_object(page_class, webdriver, **kwargs)
# Check if we got a valid PageObject back.
if isinstance(page, PageObject):
was_validate_called = True
current_matched_page = page # depends on [control=['if'], data=[]]
elif page is True:
was_validate_called = True # depends on [control=['if'], data=[]]
# check for subclasses
subclasses = PageFactory.__itersubclasses(page_object_class_or_interface)
# Iterate over subclasses of the passed in classes to see if we have a
# better match.
for pageClass in subclasses:
try:
page = PageFactory.__instantiate_page_object(pageClass, webdriver, **kwargs)
# If we get a valid PageObject match, check to see if the ranking is higher
# than our current PageObject.
if isinstance(page, PageObject):
was_validate_called = True
if current_matched_page == None or page > current_matched_page:
current_matched_page = page # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif page is True:
was_validate_called = True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except InvalidPageError as e:
_wtflog.debug('InvalidPageError: %s', e)
pass # This happens when the page fails check. # depends on [control=['except'], data=['e']]
except TypeError as e:
_wtflog.debug('TypeError: %s', e)
# this happens when it tries to instantiate the original
# abstract class.
pass # depends on [control=['except'], data=['e']]
except Exception as e:
_wtflog.debug('Exception during page instantiation: %s', e)
# Unexpected exception.
raise e # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['pageClass']]
# If no matching classes.
if not isinstance(current_matched_page, PageObject):
# Check that there is at least 1 valid page object that was passed in.
if was_validate_called is False:
raise TypeError("Neither the PageObjects nor it's subclasses have implemented " + "'PageObject._validate(self, webdriver)'.") # depends on [control=['if'], data=[]]
try:
current_url = webdriver.current_url
raise NoMatchingPageError(u("There's, no matching classes to this page. URL:{0}").format(current_url)) # depends on [control=['try'], data=[]]
except:
raise NoMatchingPageError(u("There's, no matching classes to this page. ")) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return current_matched_page |
def ratio(self, col: str, ratio_col: str="Ratio"):
    """
    Append a percentage-ratio column computed from an existing column.

    Each value of *col* is expressed as a percentage of the column's
    total and stored in a new column named *ratio_col*. Errors are
    reported through ``self.err`` rather than raised.

    :param col: column to calculate the ratio from
    :type col: str
    :param ratio_col: name of the new ratio column, defaults to "Ratio"
    :type ratio_col: str, optional
    :example: ``ds2 = ds.ratio("Col 1")``
    """
    try:
        updated = self.df.copy()
        # 100 * value / column-total gives the percentage share.
        updated[ratio_col] = updated[[col]].apply(
            lambda serie: 100 * serie / float(serie.sum()))
        self.df = updated
    except Exception as e:
        self.err(e, self.ratio, "Can not calculate ratio")
constant[
Add a column whith the percentages ratio from a column
:param col: column to calculate ratio from
:type col: str
:param ratio_col: new ratio column name, defaults to "Ratio"
:param ratio_col: str, optional
:example: ``ds2 = ds.ratio("Col 1")``
]
<ast.Try object at 0x7da204621c00> | keyword[def] identifier[ratio] ( identifier[self] , identifier[col] : identifier[str] , identifier[ratio_col] : identifier[str] = literal[string] ):
literal[string]
keyword[try] :
identifier[df] = identifier[self] . identifier[df] . identifier[copy] ()
identifier[df] [ identifier[ratio_col] ]= identifier[df] [[ identifier[col] ]]. identifier[apply] (
keyword[lambda] identifier[x] : literal[int] * identifier[x] / identifier[float] ( identifier[x] . identifier[sum] ()))
identifier[self] . identifier[df] = identifier[df]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[err] ( identifier[e] , identifier[self] . identifier[ratio] , literal[string] ) | def ratio(self, col: str, ratio_col: str='Ratio'):
"""
Add a column whith the percentages ratio from a column
:param col: column to calculate ratio from
:type col: str
:param ratio_col: new ratio column name, defaults to "Ratio"
:param ratio_col: str, optional
:example: ``ds2 = ds.ratio("Col 1")``
"""
try:
df = self.df.copy()
df[ratio_col] = df[[col]].apply(lambda x: 100 * x / float(x.sum()))
self.df = df # depends on [control=['try'], data=[]]
except Exception as e:
self.err(e, self.ratio, 'Can not calculate ratio') # depends on [control=['except'], data=['e']] |
def switch(self, idx, control):
    """Switch a single control of <idx>"""
    # Ordered (current-flag, next-flag) candidates for each control kind.
    # The first flag that is set wins, mirroring the if/elif cascade this
    # table replaces; an unknown control simply matches nothing.
    transitions = {
        'Q': (('PQ', 'PV'), ('vQ', 'vV')),
        'P': (('PQ', 'vQ'), ('PV', 'vV')),
        'V': (('PV', 'PQ'), ('vV', 'vQ')),
        'v': (('vQ', 'PQ'), ('vV', 'PV')),
    }
    for old, new in transitions.get(control, ()):
        if getattr(self, old)[idx] == 1:
            # Clear the old control flag and raise the new one for <idx>.
            self.__dict__[old][idx] = 0
            self.__dict__[new][idx] = 1
            return
constant[Switch a single control of <idx>]
variable[old] assign[=] constant[None]
variable[new] assign[=] constant[None]
if compare[name[control] equal[==] constant[Q]] begin[:]
if compare[call[name[self].PQ][name[idx]] equal[==] constant[1]] begin[:]
variable[old] assign[=] constant[PQ]
variable[new] assign[=] constant[PV]
if <ast.BoolOp object at 0x7da2044c3640> begin[:]
call[call[name[self].__dict__][name[old]]][name[idx]] assign[=] constant[0]
call[call[name[self].__dict__][name[new]]][name[idx]] assign[=] constant[1] | keyword[def] identifier[switch] ( identifier[self] , identifier[idx] , identifier[control] ):
literal[string]
identifier[old] = keyword[None]
identifier[new] = keyword[None]
keyword[if] identifier[control] == literal[string] :
keyword[if] identifier[self] . identifier[PQ] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[self] . identifier[vQ] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[control] == literal[string] :
keyword[if] identifier[self] . identifier[PQ] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[self] . identifier[PV] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[control] == literal[string] :
keyword[if] identifier[self] . identifier[PV] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[self] . identifier[vV] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[control] == literal[string] :
keyword[if] identifier[self] . identifier[vQ] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[elif] identifier[self] . identifier[vV] [ identifier[idx] ]== literal[int] :
identifier[old] = literal[string]
identifier[new] = literal[string]
keyword[if] identifier[old] keyword[and] identifier[new] :
identifier[self] . identifier[__dict__] [ identifier[old] ][ identifier[idx] ]= literal[int]
identifier[self] . identifier[__dict__] [ identifier[new] ][ identifier[idx] ]= literal[int] | def switch(self, idx, control):
"""Switch a single control of <idx>"""
old = None
new = None
if control == 'Q':
if self.PQ[idx] == 1:
old = 'PQ'
new = 'PV' # depends on [control=['if'], data=[]]
elif self.vQ[idx] == 1:
old = 'vQ'
new = 'vV' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif control == 'P':
if self.PQ[idx] == 1:
old = 'PQ'
new = 'vQ' # depends on [control=['if'], data=[]]
elif self.PV[idx] == 1:
old = 'PV'
new = 'vV' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif control == 'V':
if self.PV[idx] == 1:
old = 'PV'
new = 'PQ' # depends on [control=['if'], data=[]]
elif self.vV[idx] == 1:
old = 'vV'
new = 'vQ' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif control == 'v':
if self.vQ[idx] == 1:
old = 'vQ'
new = 'PQ' # depends on [control=['if'], data=[]]
elif self.vV[idx] == 1:
old = 'vV'
new = 'PV' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if old and new:
self.__dict__[old][idx] = 0
self.__dict__[new][idx] = 1 # depends on [control=['if'], data=[]] |
def commit(self):
    """Remove temporary save dir: rollback will no longer be possible."""
    saved = self.save_dir
    if saved is not None:
        rmtree(saved)
    # Reset tracking state unconditionally, exactly as before.
    self.save_dir = None
    self._moved_paths = []
constant[Remove temporary save dir: rollback will no longer be possible.]
if compare[name[self].save_dir is_not constant[None]] begin[:]
call[name[rmtree], parameter[name[self].save_dir]]
name[self].save_dir assign[=] constant[None]
name[self]._moved_paths assign[=] list[[]] | keyword[def] identifier[commit] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[save_dir] keyword[is] keyword[not] keyword[None] :
identifier[rmtree] ( identifier[self] . identifier[save_dir] )
identifier[self] . identifier[save_dir] = keyword[None]
identifier[self] . identifier[_moved_paths] =[] | def commit(self):
"""Remove temporary save dir: rollback will no longer be possible."""
if self.save_dir is not None:
rmtree(self.save_dir)
self.save_dir = None
self._moved_paths = [] # depends on [control=['if'], data=[]] |
def create_operator(operator, auth, url):
    """
    Function takes input of dictionary operator with the following keys
    operator = { "fullName" : "" ,
    "sessionTimeout" : "",
    "password" : "",
    "operatorGroupId" : "",
    "name" : "",
    "desc" : "",
    "defaultAcl" : "",
    "authType" : ""}
    converts to json and issues a HTTP POST request to the HPE IMC Restful API
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :param operator: dictionary with the required operator key-value pairs as defined above.
    :return: HTTP status code 201 (created) or 409 (already exists); an
        error string if the request itself failed; None for any other code.
    :rtype: int, str or None
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.operator import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> operator = { "fullName" : "test administrator",
                     "sessionTimeout" : "30",
                     "password" : "password",
                     "operatorGroupId" : "1",
                     "name" : "testadmin",
                     "desc" : "test admin account",
                     "defaultAcl" : "",
                     "authType" : "0"}
    >>> delete_if_exists = delete_plat_operator('testadmin', auth.creds, auth.url)
    >>> new_operator = create_operator(operator, auth.creds, auth.url)
    >>> assert type(new_operator) is int
    >>> assert new_operator == 201
    >>> fail_operator_create = create_operator(operator, auth.creds, auth.url)
    >>> assert type(fail_operator_create) is int
    >>> assert fail_operator_create == 409
    """
    f_url = url + '/imcrs/plat/operator'
    payload = json.dumps(operator, indent=4)
    try:
        # The network call is the only statement that can raise
        # RequestException, so it must live inside the try block; the
        # original guarded only the status-code checks, which never raise,
        # letting connection errors escape uncaught.
        response = requests.post(f_url, data=payload, auth=auth, headers=HEADERS)
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + ' create_operator: An Error has occured'
    if response.status_code in (201, 409):
        return response.status_code
    # Any other status code: keep the original implicit-None contract.
    return None
constant[
Function takes input of dictionary operator with the following keys
operator = { "fullName" : "" ,
"sessionTimeout" : "",
"password" : "",
"operatorGroupId" : "",
"name" : "",
"desc" : "",
"defaultAcl" : "",
"authType" : ""}
converts to json and issues a HTTP POST request to the HPE IMC Restful API
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param operator: dictionary with the required operator key-value pairs as defined above.
:return:
:rtype:
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.operator import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> operator = { "fullName" : "test administrator",
"sessionTimeout" : "30",
"password" : "password",
"operatorGroupId" : "1",
"name" : "testadmin",
"desc" : "test admin account",
"defaultAcl" : "",
"authType" : "0"}
>>> delete_if_exists = delete_plat_operator('testadmin', auth.creds, auth.url)
>>> new_operator = create_operator(operator, auth.creds, auth.url)
>>> assert type(new_operator) is int
>>> assert new_operator == 201
>>> fail_operator_create = create_operator(operator, auth.creds, auth.url)
>>> assert type(fail_operator_create) is int
>>> assert fail_operator_create == 409
]
variable[f_url] assign[=] binary_operation[name[url] + constant[/imcrs/plat/operator]]
variable[payload] assign[=] call[name[json].dumps, parameter[name[operator]]]
variable[response] assign[=] call[name[requests].post, parameter[name[f_url]]]
<ast.Try object at 0x7da204347670> | keyword[def] identifier[create_operator] ( identifier[operator] , identifier[auth] , identifier[url] ):
literal[string]
identifier[f_url] = identifier[url] + literal[string]
identifier[payload] = identifier[json] . identifier[dumps] ( identifier[operator] , identifier[indent] = literal[int] )
identifier[response] = identifier[requests] . identifier[post] ( identifier[f_url] , identifier[data] = identifier[payload] , identifier[auth] = identifier[auth] , identifier[headers] = identifier[HEADERS] )
keyword[try] :
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[response] . identifier[status_code]
keyword[elif] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[response] . identifier[status_code]
keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[error] :
keyword[return] literal[string] + identifier[str] ( identifier[error] )+ literal[string] | def create_operator(operator, auth, url):
"""
Function takes input of dictionary operator with the following keys
operator = { "fullName" : "" ,
"sessionTimeout" : "",
"password" : "",
"operatorGroupId" : "",
"name" : "",
"desc" : "",
"defaultAcl" : "",
"authType" : ""}
converts to json and issues a HTTP POST request to the HPE IMC Restful API
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param operator: dictionary with the required operator key-value pairs as defined above.
:return:
:rtype:
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.operator import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> operator = { "fullName" : "test administrator",
"sessionTimeout" : "30",
"password" : "password",
"operatorGroupId" : "1",
"name" : "testadmin",
"desc" : "test admin account",
"defaultAcl" : "",
"authType" : "0"}
>>> delete_if_exists = delete_plat_operator('testadmin', auth.creds, auth.url)
>>> new_operator = create_operator(operator, auth.creds, auth.url)
>>> assert type(new_operator) is int
>>> assert new_operator == 201
>>> fail_operator_create = create_operator(operator, auth.creds, auth.url)
>>> assert type(fail_operator_create) is int
>>> assert fail_operator_create == 409
"""
f_url = url + '/imcrs/plat/operator'
payload = json.dumps(operator, indent=4)
response = requests.post(f_url, data=payload, auth=auth, headers=HEADERS)
try:
if response.status_code == 409:
return response.status_code # depends on [control=['if'], data=[]]
elif response.status_code == 201:
return response.status_code # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except requests.exceptions.RequestException as error:
return 'Error:\n' + str(error) + ' create_operator: An Error has occured' # depends on [control=['except'], data=['error']] |
def diagram_layout(graph, height='freeenergy', sources=None, targets=None,
                   pos=None, scale=None, center=None, dim=2):
    """
    Position nodes such that paths are highlighted, from left to right.
    Parameters
    ----------
    graph : `networkx.Graph` or `list` of nodes
        A position will be assigned to every node in graph.
    height : `str` or `None`, optional
        The node attribute that holds the numerical value used for the node
        height. This defaults to ``'freeenergy'``. If `None`, all node heights
        are set to zero.
    sources : `list` of `str`
        All simple paths starting at members of `sources` are considered.
        Defaults to all nodes of graph.
    targets : `list` of `str`
        All simple paths ending at members of `targets` are considered.
        Defaults to all nodes of graph.
    pos : mapping, optional
        Initial positions for nodes as a mapping with node as keys and
        values as a coordinate `list` or `tuple`. If not specified (default),
        initial positions are computed with `tower_layout`.
    scale : number, optional
        Scale factor for positions.
    center : array-like, optional
        Coordinate pair around which to center the layout. Default is the
        origin.
    dim : `int`
        Dimension of layout. If `dim` > 2, the remaining dimensions are set to
        zero in the returned positions.
    Returns
    -------
    pos : mapping
        A mapping of positions keyed by node.
    Examples
    --------
    >>> import pandas as pd
    >>> from pyrrole import ChemicalSystem
    >>> from pyrrole.drawing import diagram_layout
    >>> data = pd.DataFrame(
    ...     [{"name": "Separated_Reactants", "freeenergy": 0.},
    ...      {"name": "mlC1", "freeenergy": -5.4},
    ...      {"name": "mlC2", "freeenergy": -15.6},
    ...      {"name": "mTS1", "freeenergy": 28.5, "color": "g"},
    ...      {"name": "mCARB1", "freeenergy": -9.7},
    ...      {"name": "mCARB2", "freeenergy": -19.8},
    ...      {"name": "mCARBX", "freeenergy": 20}]).set_index("name")
    >>> system = ChemicalSystem(
    ...     ["Separated_Reactants -> mlC1 -> mTS1",
    ...      "Separated_Reactants -> mlC2 -> mTS1",
    ...      "mCARB2 <- mTS1 -> mCARB1",
    ...      "Separated_Reactants -> mCARBX"], data)
    >>> digraph = system.to_digraph()
    >>> layout = diagram_layout(digraph)
    >>> layout['mCARB2']
    array([  3. , -19.8])
    Passing ``scale=1`` means scaling positions to ``(-1, 1)`` in all axes:
    >>> layout = diagram_layout(digraph, scale=1)
    >>> layout['mTS1'][1] <= 1.
    True
    """
    # TODO: private function of packages should not be used.
    # Normalise `graph` to a graph object and `center` to an array of
    # length `dim` (networkx private helper).
    graph, center = _nx.drawing.layout._process_params(graph, center, dim)
    num_nodes = len(graph)
    # Trivial graphs: nothing to lay out, or a single node pinned at center.
    if num_nodes == 0:
        return {}
    elif num_nodes == 1:
        return {_nx.utils.arbitrary_element(graph): center}
    # Default to considering paths between every pair of nodes.
    if sources is None:
        sources = graph.nodes()
    if targets is None:
        targets = graph.nodes()
    # Enumerate every simple path from any source to any target; `set()`
    # de-duplicates the endpoint lists before the pairwise expansion.
    simple_paths = [path for source in set(sources) for target in set(targets)
                    for path in _nx.all_simple_paths(graph, source, target)]
    # Seed positions from tower_layout (unscaled here; scaling is applied
    # once at the end, after x coordinates are adjusted).
    if pos is None:
        pos = tower_layout(graph, height=height, scale=None, center=center,
                           dim=dim)
    # Push each node's x coordinate right to the deepest step index at
    # which it appears on any simple path, so paths read left-to-right.
    for path in simple_paths:
        for n, step in enumerate(path):
            if pos[step][0] < n:
                pos[step][0] = n
    # Optionally rescale all coordinates into (-scale, scale) around center.
    if scale is not None:
        pos_arr = _np.array([pos[node] for node in graph])
        pos_arr = _nx.drawing.layout.rescale_layout(pos_arr,
                                                    scale=scale) + center
        pos = dict(zip(graph, pos_arr))
    # TODO: make test
    return pos
constant[
Position nodes such that paths are highlighted, from left to right.
Parameters
----------
graph : `networkx.Graph` or `list` of nodes
A position will be assigned to every node in graph.
height : `str` or `None`, optional
The node attribute that holds the numerical value used for the node
height. This defaults to ``'freeenergy'``. If `None`, all node heights
are set to zero.
sources : `list` of `str`
All simple paths starting at members of `sources` are considered.
Defaults to all nodes of graph.
targets : `list` of `str`
All simple paths ending at members of `targets` are considered.
Defaults to all nodes of graph.
pos : mapping, optional
Initial positions for nodes as a mapping with node as keys and
values as a coordinate `list` or `tuple`. If not specified (default),
initial positions are computed with `tower_layout`.
scale : number, optional
Scale factor for positions.
center : array-like, optional
Coordinate pair around which to center the layout. Default is the
origin.
dim : `int`
Dimension of layout. If `dim` > 2, the remaining dimensions are set to
zero in the returned positions.
Returns
-------
pos : mapping
A mapping of positions keyed by node.
Examples
--------
>>> import pandas as pd
>>> from pyrrole import ChemicalSystem
>>> from pyrrole.drawing import diagram_layout
>>> data = pd.DataFrame(
... [{"name": "Separated_Reactants", "freeenergy": 0.},
... {"name": "mlC1", "freeenergy": -5.4},
... {"name": "mlC2", "freeenergy": -15.6},
... {"name": "mTS1", "freeenergy": 28.5, "color": "g"},
... {"name": "mCARB1", "freeenergy": -9.7},
... {"name": "mCARB2", "freeenergy": -19.8},
... {"name": "mCARBX", "freeenergy": 20}]).set_index("name")
>>> system = ChemicalSystem(
... ["Separated_Reactants -> mlC1 -> mTS1",
... "Separated_Reactants -> mlC2 -> mTS1",
... "mCARB2 <- mTS1 -> mCARB1",
... "Separated_Reactants -> mCARBX"], data)
>>> digraph = system.to_digraph()
>>> layout = diagram_layout(digraph)
>>> layout['mCARB2']
array([ 3. , -19.8])
Passing ``scale=1`` means scaling positions to ``(-1, 1)`` in all axes:
>>> layout = diagram_layout(digraph, scale=1)
>>> layout['mTS1'][1] <= 1.
True
]
<ast.Tuple object at 0x7da1b0a4f4c0> assign[=] call[name[_nx].drawing.layout._process_params, parameter[name[graph], name[center], name[dim]]]
variable[num_nodes] assign[=] call[name[len], parameter[name[graph]]]
if compare[name[num_nodes] equal[==] constant[0]] begin[:]
return[dictionary[[], []]]
if compare[name[sources] is constant[None]] begin[:]
variable[sources] assign[=] call[name[graph].nodes, parameter[]]
if compare[name[targets] is constant[None]] begin[:]
variable[targets] assign[=] call[name[graph].nodes, parameter[]]
variable[simple_paths] assign[=] <ast.ListComp object at 0x7da1b0a4f2e0>
if compare[name[pos] is constant[None]] begin[:]
variable[pos] assign[=] call[name[tower_layout], parameter[name[graph]]]
for taget[name[path]] in starred[name[simple_paths]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0a4c190>, <ast.Name object at 0x7da1b0a4e440>]]] in starred[call[name[enumerate], parameter[name[path]]]] begin[:]
if compare[call[call[name[pos]][name[step]]][constant[0]] less[<] name[n]] begin[:]
call[call[name[pos]][name[step]]][constant[0]] assign[=] name[n]
if compare[name[scale] is_not constant[None]] begin[:]
variable[pos_arr] assign[=] call[name[_np].array, parameter[<ast.ListComp object at 0x7da1b0a4ce50>]]
variable[pos_arr] assign[=] binary_operation[call[name[_nx].drawing.layout.rescale_layout, parameter[name[pos_arr]]] + name[center]]
variable[pos] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[graph], name[pos_arr]]]]]
return[name[pos]] | keyword[def] identifier[diagram_layout] ( identifier[graph] , identifier[height] = literal[string] , identifier[sources] = keyword[None] , identifier[targets] = keyword[None] ,
identifier[pos] = keyword[None] , identifier[scale] = keyword[None] , identifier[center] = keyword[None] , identifier[dim] = literal[int] ):
literal[string]
identifier[graph] , identifier[center] = identifier[_nx] . identifier[drawing] . identifier[layout] . identifier[_process_params] ( identifier[graph] , identifier[center] , identifier[dim] )
identifier[num_nodes] = identifier[len] ( identifier[graph] )
keyword[if] identifier[num_nodes] == literal[int] :
keyword[return] {}
keyword[elif] identifier[num_nodes] == literal[int] :
keyword[return] { identifier[_nx] . identifier[utils] . identifier[arbitrary_element] ( identifier[graph] ): identifier[center] }
keyword[if] identifier[sources] keyword[is] keyword[None] :
identifier[sources] = identifier[graph] . identifier[nodes] ()
keyword[if] identifier[targets] keyword[is] keyword[None] :
identifier[targets] = identifier[graph] . identifier[nodes] ()
identifier[simple_paths] =[ identifier[path] keyword[for] identifier[source] keyword[in] identifier[set] ( identifier[sources] ) keyword[for] identifier[target] keyword[in] identifier[set] ( identifier[targets] )
keyword[for] identifier[path] keyword[in] identifier[_nx] . identifier[all_simple_paths] ( identifier[graph] , identifier[source] , identifier[target] )]
keyword[if] identifier[pos] keyword[is] keyword[None] :
identifier[pos] = identifier[tower_layout] ( identifier[graph] , identifier[height] = identifier[height] , identifier[scale] = keyword[None] , identifier[center] = identifier[center] ,
identifier[dim] = identifier[dim] )
keyword[for] identifier[path] keyword[in] identifier[simple_paths] :
keyword[for] identifier[n] , identifier[step] keyword[in] identifier[enumerate] ( identifier[path] ):
keyword[if] identifier[pos] [ identifier[step] ][ literal[int] ]< identifier[n] :
identifier[pos] [ identifier[step] ][ literal[int] ]= identifier[n]
keyword[if] identifier[scale] keyword[is] keyword[not] keyword[None] :
identifier[pos_arr] = identifier[_np] . identifier[array] ([ identifier[pos] [ identifier[node] ] keyword[for] identifier[node] keyword[in] identifier[graph] ])
identifier[pos_arr] = identifier[_nx] . identifier[drawing] . identifier[layout] . identifier[rescale_layout] ( identifier[pos_arr] ,
identifier[scale] = identifier[scale] )+ identifier[center]
identifier[pos] = identifier[dict] ( identifier[zip] ( identifier[graph] , identifier[pos_arr] ))
keyword[return] identifier[pos] | def diagram_layout(graph, height='freeenergy', sources=None, targets=None, pos=None, scale=None, center=None, dim=2):
"""
Position nodes such that paths are highlighted, from left to right.
Parameters
----------
graph : `networkx.Graph` or `list` of nodes
A position will be assigned to every node in graph.
height : `str` or `None`, optional
The node attribute that holds the numerical value used for the node
height. This defaults to ``'freeenergy'``. If `None`, all node heights
are set to zero.
sources : `list` of `str`
All simple paths starting at members of `sources` are considered.
Defaults to all nodes of graph.
targets : `list` of `str`
All simple paths ending at members of `targets` are considered.
Defaults to all nodes of graph.
pos : mapping, optional
Initial positions for nodes as a mapping with node as keys and
values as a coordinate `list` or `tuple`. If not specified (default),
initial positions are computed with `tower_layout`.
scale : number, optional
Scale factor for positions.
center : array-like, optional
Coordinate pair around which to center the layout. Default is the
origin.
dim : `int`
Dimension of layout. If `dim` > 2, the remaining dimensions are set to
zero in the returned positions.
Returns
-------
pos : mapping
A mapping of positions keyed by node.
Examples
--------
>>> import pandas as pd
>>> from pyrrole import ChemicalSystem
>>> from pyrrole.drawing import diagram_layout
>>> data = pd.DataFrame(
... [{"name": "Separated_Reactants", "freeenergy": 0.},
... {"name": "mlC1", "freeenergy": -5.4},
... {"name": "mlC2", "freeenergy": -15.6},
... {"name": "mTS1", "freeenergy": 28.5, "color": "g"},
... {"name": "mCARB1", "freeenergy": -9.7},
... {"name": "mCARB2", "freeenergy": -19.8},
... {"name": "mCARBX", "freeenergy": 20}]).set_index("name")
>>> system = ChemicalSystem(
... ["Separated_Reactants -> mlC1 -> mTS1",
... "Separated_Reactants -> mlC2 -> mTS1",
... "mCARB2 <- mTS1 -> mCARB1",
... "Separated_Reactants -> mCARBX"], data)
>>> digraph = system.to_digraph()
>>> layout = diagram_layout(digraph)
>>> layout['mCARB2']
array([ 3. , -19.8])
Passing ``scale=1`` means scaling positions to ``(-1, 1)`` in all axes:
>>> layout = diagram_layout(digraph, scale=1)
>>> layout['mTS1'][1] <= 1.
True
"""
# TODO: private function of packages should not be used.
(graph, center) = _nx.drawing.layout._process_params(graph, center, dim)
num_nodes = len(graph)
if num_nodes == 0:
return {} # depends on [control=['if'], data=[]]
elif num_nodes == 1:
return {_nx.utils.arbitrary_element(graph): center} # depends on [control=['if'], data=[]]
if sources is None:
sources = graph.nodes() # depends on [control=['if'], data=['sources']]
if targets is None:
targets = graph.nodes() # depends on [control=['if'], data=['targets']]
simple_paths = [path for source in set(sources) for target in set(targets) for path in _nx.all_simple_paths(graph, source, target)]
if pos is None:
pos = tower_layout(graph, height=height, scale=None, center=center, dim=dim) # depends on [control=['if'], data=['pos']]
for path in simple_paths:
for (n, step) in enumerate(path):
if pos[step][0] < n:
pos[step][0] = n # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['path']]
if scale is not None:
pos_arr = _np.array([pos[node] for node in graph])
pos_arr = _nx.drawing.layout.rescale_layout(pos_arr, scale=scale) + center
pos = dict(zip(graph, pos_arr)) # depends on [control=['if'], data=['scale']]
# TODO: make test
return pos |
def delete_thumbnails(relative_source_path, root=None, basedir=None,
                      subdir=None, prefix=None):
    """
    Delete every thumbnail generated for the given source image.
    """
    matching = thumbnails_for_file(relative_source_path, root, basedir,
                                   subdir, prefix)
    return _delete_using_thumbs_list(matching)
constant[
Delete all thumbnails for a source image.
]
variable[thumbs] assign[=] call[name[thumbnails_for_file], parameter[name[relative_source_path], name[root], name[basedir], name[subdir], name[prefix]]]
return[call[name[_delete_using_thumbs_list], parameter[name[thumbs]]]] | keyword[def] identifier[delete_thumbnails] ( identifier[relative_source_path] , identifier[root] = keyword[None] , identifier[basedir] = keyword[None] ,
identifier[subdir] = keyword[None] , identifier[prefix] = keyword[None] ):
literal[string]
identifier[thumbs] = identifier[thumbnails_for_file] ( identifier[relative_source_path] , identifier[root] , identifier[basedir] , identifier[subdir] ,
identifier[prefix] )
keyword[return] identifier[_delete_using_thumbs_list] ( identifier[thumbs] ) | def delete_thumbnails(relative_source_path, root=None, basedir=None, subdir=None, prefix=None):
"""
Delete all thumbnails for a source image.
"""
thumbs = thumbnails_for_file(relative_source_path, root, basedir, subdir, prefix)
return _delete_using_thumbs_list(thumbs) |
def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
"""Decode a single field from a pax record.
"""
try:
return value.decode(encoding, "strict")
except UnicodeDecodeError:
return value.decode(fallback_encoding, fallback_errors) | def function[_decode_pax_field, parameter[self, value, encoding, fallback_encoding, fallback_errors]]:
constant[Decode a single field from a pax record.
]
<ast.Try object at 0x7da1b20885b0> | keyword[def] identifier[_decode_pax_field] ( identifier[self] , identifier[value] , identifier[encoding] , identifier[fallback_encoding] , identifier[fallback_errors] ):
literal[string]
keyword[try] :
keyword[return] identifier[value] . identifier[decode] ( identifier[encoding] , literal[string] )
keyword[except] identifier[UnicodeDecodeError] :
keyword[return] identifier[value] . identifier[decode] ( identifier[fallback_encoding] , identifier[fallback_errors] ) | def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
"""Decode a single field from a pax record.
"""
try:
return value.decode(encoding, 'strict') # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
return value.decode(fallback_encoding, fallback_errors) # depends on [control=['except'], data=[]] |
def show_vcs_output_vcs_nodes_vcs_node_info_co_ordinator(self, **kwargs):
    """Auto Generated Code
    """
    # Build the element tree show_vcs/output/vcs-nodes/vcs-node-info/
    # co-ordinator and fill the leaf with the supplied value.
    root = ET.Element("show_vcs")
    out_el = ET.SubElement(root, "output")
    nodes_el = ET.SubElement(out_el, "vcs-nodes")
    info_el = ET.SubElement(nodes_el, "vcs-node-info")
    coord_el = ET.SubElement(info_el, "co-ordinator")
    coord_el.text = kwargs.pop('co_ordinator')
    # Dispatch through a caller-supplied callback, defaulting to the
    # instance's own transport callback.
    callback = kwargs.pop('callback', self._callback)
    return callback(root)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[show_vcs] assign[=] call[name[ET].Element, parameter[constant[show_vcs]]]
variable[config] assign[=] name[show_vcs]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[show_vcs], constant[output]]]
variable[vcs_nodes] assign[=] call[name[ET].SubElement, parameter[name[output], constant[vcs-nodes]]]
variable[vcs_node_info] assign[=] call[name[ET].SubElement, parameter[name[vcs_nodes], constant[vcs-node-info]]]
variable[co_ordinator] assign[=] call[name[ET].SubElement, parameter[name[vcs_node_info], constant[co-ordinator]]]
name[co_ordinator].text assign[=] call[name[kwargs].pop, parameter[constant[co_ordinator]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[show_vcs_output_vcs_nodes_vcs_node_info_co_ordinator] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[show_vcs] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[show_vcs]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[show_vcs] , literal[string] )
identifier[vcs_nodes] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[vcs_node_info] = identifier[ET] . identifier[SubElement] ( identifier[vcs_nodes] , literal[string] )
identifier[co_ordinator] = identifier[ET] . identifier[SubElement] ( identifier[vcs_node_info] , literal[string] )
identifier[co_ordinator] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def show_vcs_output_vcs_nodes_vcs_node_info_co_ordinator(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
show_vcs = ET.Element('show_vcs')
config = show_vcs
output = ET.SubElement(show_vcs, 'output')
vcs_nodes = ET.SubElement(output, 'vcs-nodes')
vcs_node_info = ET.SubElement(vcs_nodes, 'vcs-node-info')
co_ordinator = ET.SubElement(vcs_node_info, 'co-ordinator')
co_ordinator.text = kwargs.pop('co_ordinator')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def get_by_id(self, id):
    """
    Return the first child whose ``child_id_attribute`` equals *id*.
    Parameters
    ----------
    id : str
        Value to match against each child's ``child_id_attribute``.
    Returns
    -------
    object or None
        The matching child, or `None` when no child matches.
    Notes
    -----
    Based on `.get()`_ from `backbone.js`_.
    .. _backbone.js: http://backbonejs.org/
    .. _.get(): http://backbonejs.org/#Collection-get
    """
    key = self.child_id_attribute
    matches = (child for child in self.children if child[key] == id)
    return next(matches, None)
constant[
Return object based on ``child_id_attribute``.
Parameters
----------
val : str
Returns
-------
object
Notes
-----
Based on `.get()`_ from `backbone.js`_.
.. _backbone.js: http://backbonejs.org/
.. _.get(): http://backbonejs.org/#Collection-get
]
for taget[name[child]] in starred[name[self].children] begin[:]
if compare[call[name[child]][name[self].child_id_attribute] equal[==] name[id]] begin[:]
return[name[child]]
return[constant[None]] | keyword[def] identifier[get_by_id] ( identifier[self] , identifier[id] ):
literal[string]
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] :
keyword[if] identifier[child] [ identifier[self] . identifier[child_id_attribute] ]== identifier[id] :
keyword[return] identifier[child]
keyword[else] :
keyword[continue]
keyword[return] keyword[None] | def get_by_id(self, id):
"""
Return object based on ``child_id_attribute``.
Parameters
----------
val : str
Returns
-------
object
Notes
-----
Based on `.get()`_ from `backbone.js`_.
.. _backbone.js: http://backbonejs.org/
.. _.get(): http://backbonejs.org/#Collection-get
"""
for child in self.children:
if child[self.child_id_attribute] == id:
return child # depends on [control=['if'], data=[]]
else:
continue # depends on [control=['for'], data=['child']]
return None |
def interval_timer(interval, func, *args, **kwargs):
    '''Repeatedly invoke ``func(*args, **kwargs)`` every ``interval`` seconds
    on a background thread, and return a callable that stops the loop.
    Taken from: http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708
    '''
    halt = Event()

    def _worker():
        # Event.wait(interval) returns False on timeout (keep ticking) and
        # True once the returned stop-callable has been invoked; note the
        # first invocation of func happens only after one full interval.
        while not halt.wait(interval):
            func(*args, **kwargs)

    Thread(name='IntervalTimerThread', target=_worker).start()
    return halt.set
constant[Interval timer function.
Taken from: http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708
]
variable[stopped] assign[=] call[name[Event], parameter[]]
def function[loop, parameter[]]:
while <ast.UnaryOp object at 0x7da1b11ec280> begin[:]
call[name[func], parameter[<ast.Starred object at 0x7da1b11ef6a0>]]
call[call[name[Thread], parameter[]].start, parameter[]]
return[name[stopped].set] | keyword[def] identifier[interval_timer] ( identifier[interval] , identifier[func] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[stopped] = identifier[Event] ()
keyword[def] identifier[loop] ():
keyword[while] keyword[not] identifier[stopped] . identifier[wait] ( identifier[interval] ):
identifier[func] (* identifier[args] ,** identifier[kwargs] )
identifier[Thread] ( identifier[name] = literal[string] , identifier[target] = identifier[loop] ). identifier[start] ()
keyword[return] identifier[stopped] . identifier[set] | def interval_timer(interval, func, *args, **kwargs):
"""Interval timer function.
Taken from: http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708
"""
stopped = Event()
def loop():
while not stopped.wait(interval): # the first call is after interval
func(*args, **kwargs) # depends on [control=['while'], data=[]]
Thread(name='IntervalTimerThread', target=loop).start()
return stopped.set |
def _get_manifest_data(self):
"""
Return the list of items in the manifest
:return: list
"""
with tempfile.NamedTemporaryFile(delete=True) as tmp:
try:
self.s3.download_fileobj(self.sitename, self.manifest_file, tmp)
tmp.seek(0)
data = tmp.read()
if data is not None:
return data.split(",")
except Exception as ex:
pass
return [] | def function[_get_manifest_data, parameter[self]]:
constant[
Return the list of items in the manifest
:return: list
]
with call[name[tempfile].NamedTemporaryFile, parameter[]] begin[:]
<ast.Try object at 0x7da18f812f80>
return[list[[]]] | keyword[def] identifier[_get_manifest_data] ( identifier[self] ):
literal[string]
keyword[with] identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[delete] = keyword[True] ) keyword[as] identifier[tmp] :
keyword[try] :
identifier[self] . identifier[s3] . identifier[download_fileobj] ( identifier[self] . identifier[sitename] , identifier[self] . identifier[manifest_file] , identifier[tmp] )
identifier[tmp] . identifier[seek] ( literal[int] )
identifier[data] = identifier[tmp] . identifier[read] ()
keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[data] . identifier[split] ( literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
keyword[pass]
keyword[return] [] | def _get_manifest_data(self):
"""
Return the list of items in the manifest
:return: list
"""
with tempfile.NamedTemporaryFile(delete=True) as tmp:
try:
self.s3.download_fileobj(self.sitename, self.manifest_file, tmp)
tmp.seek(0)
data = tmp.read()
if data is not None:
return data.split(',') # depends on [control=['if'], data=['data']] # depends on [control=['try'], data=[]]
except Exception as ex:
pass # depends on [control=['except'], data=[]] # depends on [control=['with'], data=['tmp']]
return [] |
def _parse_cli_args():
    """Parse the arguments from CLI using ArgumentParser
    :return: The arguments parsed by ArgumentParser
    :rtype: Namespace
    """
    parser = argparse.ArgumentParser()
    # -g: id of the photoset to download; no default, so presumably expected
    # unless -u is given -- TODO confirm against the caller's handling.
    parser.add_argument(
        '-g',
        help='The photoset id to be downloaded',
        metavar='<photoset_id>'
    )
    # NOTE(review): the help text says "12 is smallest, 1 is original size"
    # but `choices` only accepts 0-9 -- one of the two looks wrong; confirm
    # against the size mapping used by the downloader.
    parser.add_argument(
        '-s',
        default=1,
        help=(
            'Image size. 12 is smallest, 1 is original size. '
            'Default: 1'
        ),
        type=int,
        choices=xrange(0, 10),  # `xrange`: this module targets Python 2
        metavar='<num>'
    )
    # -d: destination directory; created on demand, defaults to a folder
    # named after the photoset id under the current path.
    parser.add_argument(
        '-d',
        default=None,
        help=(
            'The path to store the downloaded images. '
            'Automatically create it if not exist. '
            'Default use the photoset id as folder name under current path'
        ),
        metavar='<path>'
    )
    # -O: download strategy (0 single process, 1 multithread, 2 event driven).
    parser.add_argument(
        '-O',
        default=1,
        help=(
            '0 for single process, '
            '1 for multithread. '
            '2 for event driven. '
            'Default: 1'
        ),
        type=int,
        choices=xrange(0, 3),
        metavar='<num>'
    )
    # -u: boolean flag that triggers API-key setup instead of downloading.
    parser.add_argument(
        '-u',
        help=(
            'Set your API key'
        ),
        action='store_true'
    )
    args = parser.parse_args()
    logger.debug(args)  # module-level logger; records the parsed namespace
return args | def function[_parse_cli_args, parameter[]]:
constant[Parse the arguments from CLI using ArgumentParser
:return: The arguments parsed by ArgumentParser
:rtype: Namespace
]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[-g]]]
call[name[parser].add_argument, parameter[constant[-s]]]
call[name[parser].add_argument, parameter[constant[-d]]]
call[name[parser].add_argument, parameter[constant[-O]]]
call[name[parser].add_argument, parameter[constant[-u]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
call[name[logger].debug, parameter[name[args]]]
return[name[args]] | keyword[def] identifier[_parse_cli_args] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[help] = literal[string] ,
identifier[metavar] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[default] = literal[int] ,
identifier[help] =(
literal[string]
literal[string]
),
identifier[type] = identifier[int] ,
identifier[choices] = identifier[xrange] ( literal[int] , literal[int] ),
identifier[metavar] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[default] = keyword[None] ,
identifier[help] =(
literal[string]
literal[string]
literal[string]
),
identifier[metavar] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[default] = literal[int] ,
identifier[help] =(
literal[string]
literal[string]
literal[string]
literal[string]
),
identifier[type] = identifier[int] ,
identifier[choices] = identifier[xrange] ( literal[int] , literal[int] ),
identifier[metavar] = literal[string]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[help] =(
literal[string]
),
identifier[action] = literal[string]
)
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[logger] . identifier[debug] ( identifier[args] )
keyword[return] identifier[args] | def _parse_cli_args():
"""Parse the arguments from CLI using ArgumentParser
:return: The arguments parsed by ArgumentParser
:rtype: Namespace
"""
parser = argparse.ArgumentParser()
parser.add_argument('-g', help='The photoset id to be downloaded', metavar='<photoset_id>')
parser.add_argument('-s', default=1, help='Image size. 12 is smallest, 1 is original size. Default: 1', type=int, choices=xrange(0, 10), metavar='<num>')
parser.add_argument('-d', default=None, help='The path to store the downloaded images. Automatically create it if not exist. Default use the photoset id as folder name under current path', metavar='<path>')
parser.add_argument('-O', default=1, help='0 for single process, 1 for multithread. 2 for event driven. Default: 1', type=int, choices=xrange(0, 3), metavar='<num>')
parser.add_argument('-u', help='Set your API key', action='store_true')
args = parser.parse_args()
logger.debug(args)
return args |
def parse_args(args=None):
    """Parse command line arguments and return a dictionary of options
    for ttfautohint.ttfautohint function.
    `args` can be either None, a list of strings, or a single string,
    that is split into individual options with `shlex.split`.
    When `args` is None, the console's default sys.argv are used, and any
    SystemExit exceptions raised by argparse are propagated.
    If args is a string list or a string, it is assumed that the function
    was not called from a console script's `main` entry point, but from
    other client code, and thus the SystemExit exceptions are muted and
    a `None` value is returned.
    """
    # Imported lazily so merely importing this module stays cheap.
    import argparse
    from ttfautohint import __version__, libttfautohint
    from ttfautohint.cli import USAGE, DESCRIPTION, EPILOG
    version_string = "ttfautohint-py %s (libttfautohint %s)" % (
        __version__, libttfautohint.version_string)
    # Programmatic invocation (args given) captures argparse's SystemExit
    # and turns it into a `None` return instead of exiting the process.
    if args is None:
        capture_sys_exit = False
    else:
        capture_sys_exit = True
        if isinstance(args, basestring):  # `basestring`: Python 2 compat
            import shlex
            args = shlex.split(args)
    parser = argparse.ArgumentParser(
        prog="ttfautohint",
        usage=USAGE,
        description=DESCRIPTION,
        epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "in_file", nargs="?", metavar="IN-FILE", default="-",
        type=stdin_or_input_path_type,
        help="input file (default: standard input)")
    parser.add_argument(
        "out_file", nargs="?", metavar="OUT-FILE", default="-",
        type=stdout_or_output_path_type,
        help="output file (default: standard output)")
    parser.add_argument(
        "--debug", action="store_true", help="print debugging information")
    # -a and the deprecated -w are mutually exclusive ways to choose stem
    # width modes; -w input is translated into the -a form further below.
    stem_width_group = parser.add_mutually_exclusive_group(required=False)
    stem_width_group.add_argument(
        "-a", "--stem-width-mode", type=stem_width_mode, metavar="S",
        default=STEM_WIDTH_MODE_OPTIONS,
        help=("select stem width mode for grayscale, GDI ClearType, and DW "
              "ClearType, where S is a string of three letters with possible "
              "values 'n' for natural, 'q' for quantized, and 's' for strong "
              "(default: qsq)"))
    stem_width_group.add_argument(  # deprecated
        "-w", "--strong-stem-width", type=strong_stem_width, metavar="S",
        help=argparse.SUPPRESS)
    parser.add_argument(
        "-c", "--composites", dest="hint_composites", action="store_true",
        help="hint glyph composites also")
    parser.add_argument(
        "-d", "--dehint", action="store_true", help="remove all hints")
    parser.add_argument(
        "-D", "--default-script", metavar="SCRIPT",
        default=USER_OPTIONS["default_script"],
        help="set default OpenType script (default: %(default)s)")
    parser.add_argument(
        "-f", "--fallback-script", metavar="SCRIPT",
        default=USER_OPTIONS["fallback_script"],
        help="set fallback script (default: %(default)s)")
    parser.add_argument(
        "-F", "--family-suffix", metavar="SUFFIX",
        help="append SUFFIX to the family name string(s) in the `name' table")
    parser.add_argument(
        "-G", "--hinting-limit", type=int, metavar="PPEM",
        default=USER_OPTIONS["hinting_limit"],
        help=("switch off hinting above this PPEM value (default: "
              "%(default)s); value 0 means no limit"))
    parser.add_argument(
        "-H", "--fallback-stem-width", type=int, metavar="UNITS",
        default=USER_OPTIONS["fallback_stem_width"],
        help=("set fallback stem width (default: %(default)s font units at "
              "2048 UPEM)"))
    parser.add_argument(
        "-i", "--ignore-restrictions", action="store_true",
        help="override font license restrictions")
    parser.add_argument(
        "-I", "--detailed-info", action="store_true",
        help=("add detailed ttfautohint info to the version string(s) in "
              "the `name' table"))
    parser.add_argument(
        "-l", "--hinting-range-min", type=int, metavar="PPEM",
        default=USER_OPTIONS["hinting_range_min"],
        help="the minimum PPEM value for hint sets (default: %(default)s)")
    parser.add_argument(
        "-m", "--control-file", metavar="FILE",
        help="get control instructions from FILE")
    parser.add_argument(
        "-n", "--no-info", action="store_true",
        help=("don't add ttfautohint info to the version string(s) in the "
              "`name' table"))
    parser.add_argument(
        "-p", "--adjust-subglyphs", action="store_true",
        help="handle subglyph adjustments in exotic fonts")
    parser.add_argument(
        "-r", "--hinting-range-max", type=int, metavar="PPEM",
        default=USER_OPTIONS["hinting_range_max"],
        help="the maximum PPEM value for hint sets (default: %(default)s)")
    parser.add_argument(
        "-R", "--reference", dest="reference_file", metavar="FILE",
        help="derive blue zones from reference font FILE")
    parser.add_argument(
        "-s", "--symbol", action="store_true",
        help="input is symbol font")
    parser.add_argument(
        "-S", "--fallback-scaling", action="store_true",
        help="use fallback scaling, not hinting")
    parser.add_argument(
        "-t", "--ttfa-table", action="store_true", dest="TTFA_info",
        help="add TTFA information table")
    parser.add_argument(
        "-T", "--ttfa-info", dest="show_TTFA_info", action="store_true",
        help="display TTFA table in IN-FILE and exit")
    parser.add_argument(
        "-v", "--verbose", action="store_true",
        help="show progress information")
    parser.add_argument(
        "-V", "--version", action="version",
        version=version_string,
        help="print version information and exit")
    parser.add_argument(
        "-W", "--windows-compatibility", action="store_true",
        help=("add blue zones for `usWinAscent' and `usWinDescent' to avoid "
              "clipping"))
    parser.add_argument(
        "-x", "--increase-x-height", type=int, metavar="PPEM",
        default=USER_OPTIONS["increase_x_height"],
        help=("increase x height for sizes in the range 6<=PPEM<=N; value "
              "0 switches off this feature (default: %(default)s)"))
    parser.add_argument(
        "-X", "--x-height-snapping-exceptions", metavar="STRING",
        default=USER_OPTIONS["x_height_snapping_exceptions"],
        help=('specify a comma-separated list of x-height snapping exceptions'
              ', for example "-9, 13-17, 19" (default: "%(default)s")'))
    parser.add_argument(
        "-Z", "--reference-index", type=int, metavar="NUMBER",
        default=USER_OPTIONS["reference_index"],
        help="face index of reference font (default: %(default)s)")
    try:
        options = vars(parser.parse_args(args))
    except SystemExit:
        if capture_sys_exit:
            return None
        raise
    # if either input/output are interactive, print help and exit
    if (not capture_sys_exit and
            (options["in_file"] is None or options["out_file"] is None)):
        parser.print_help()
        parser.exit(1)
    # check SOURCE_DATE_EPOCH environment variable (reproducible-builds
    # convention); a non-integer value is warned about and ignored.
    source_date_epoch = os.environ.get("SOURCE_DATE_EPOCH")
    if source_date_epoch:
        try:
            options["epoch"] = int(source_date_epoch)
        except ValueError:
            import warnings
            warnings.warn(
                UserWarning("invalid SOURCE_DATE_EPOCH: %r" % source_date_epoch))
    if options.pop("show_TTFA_info"):
        # TODO use fonttools to dump TTFA table?
        raise NotImplementedError()
    # Fold the deprecated -w value into the -a representation; both options
    # are popped so only the merged stem-width keys remain in `options`.
    stem_width_options = options.pop("stem_width_mode")
    strong_stem_width_options = options.pop("strong_stem_width")
    if strong_stem_width_options:
        import warnings
        warnings.warn(
            UserWarning("Option '-w' is deprecated! Use option '-a' instead"))
        stem_width_options = strong_stem_width_options
    options.update(stem_width_options)
return options | def function[parse_args, parameter[args]]:
constant[Parse command line arguments and return a dictionary of options
for ttfautohint.ttfautohint function.
`args` can be either None, a list of strings, or a single string,
that is split into individual options with `shlex.split`.
When `args` is None, the console's default sys.argv are used, and any
SystemExit exceptions raised by argparse are propagated.
If args is a string list or a string, it is assumed that the function
was not called from a console script's `main` entry point, but from
other client code, and thus the SystemExit exceptions are muted and
a `None` value is returned.
]
import module[argparse]
from relative_module[ttfautohint] import module[__version__], module[libttfautohint]
from relative_module[ttfautohint.cli] import module[USAGE], module[DESCRIPTION], module[EPILOG]
variable[version_string] assign[=] binary_operation[constant[ttfautohint-py %s (libttfautohint %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204621120>, <ast.Attribute object at 0x7da204622fb0>]]]
if compare[name[args] is constant[None]] begin[:]
variable[capture_sys_exit] assign[=] constant[False]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[in_file]]]
call[name[parser].add_argument, parameter[constant[out_file]]]
call[name[parser].add_argument, parameter[constant[--debug]]]
variable[stem_width_group] assign[=] call[name[parser].add_mutually_exclusive_group, parameter[]]
call[name[stem_width_group].add_argument, parameter[constant[-a], constant[--stem-width-mode]]]
call[name[stem_width_group].add_argument, parameter[constant[-w], constant[--strong-stem-width]]]
call[name[parser].add_argument, parameter[constant[-c], constant[--composites]]]
call[name[parser].add_argument, parameter[constant[-d], constant[--dehint]]]
call[name[parser].add_argument, parameter[constant[-D], constant[--default-script]]]
call[name[parser].add_argument, parameter[constant[-f], constant[--fallback-script]]]
call[name[parser].add_argument, parameter[constant[-F], constant[--family-suffix]]]
call[name[parser].add_argument, parameter[constant[-G], constant[--hinting-limit]]]
call[name[parser].add_argument, parameter[constant[-H], constant[--fallback-stem-width]]]
call[name[parser].add_argument, parameter[constant[-i], constant[--ignore-restrictions]]]
call[name[parser].add_argument, parameter[constant[-I], constant[--detailed-info]]]
call[name[parser].add_argument, parameter[constant[-l], constant[--hinting-range-min]]]
call[name[parser].add_argument, parameter[constant[-m], constant[--control-file]]]
call[name[parser].add_argument, parameter[constant[-n], constant[--no-info]]]
call[name[parser].add_argument, parameter[constant[-p], constant[--adjust-subglyphs]]]
call[name[parser].add_argument, parameter[constant[-r], constant[--hinting-range-max]]]
call[name[parser].add_argument, parameter[constant[-R], constant[--reference]]]
call[name[parser].add_argument, parameter[constant[-s], constant[--symbol]]]
call[name[parser].add_argument, parameter[constant[-S], constant[--fallback-scaling]]]
call[name[parser].add_argument, parameter[constant[-t], constant[--ttfa-table]]]
call[name[parser].add_argument, parameter[constant[-T], constant[--ttfa-info]]]
call[name[parser].add_argument, parameter[constant[-v], constant[--verbose]]]
call[name[parser].add_argument, parameter[constant[-V], constant[--version]]]
call[name[parser].add_argument, parameter[constant[-W], constant[--windows-compatibility]]]
call[name[parser].add_argument, parameter[constant[-x], constant[--increase-x-height]]]
call[name[parser].add_argument, parameter[constant[-X], constant[--x-height-snapping-exceptions]]]
call[name[parser].add_argument, parameter[constant[-Z], constant[--reference-index]]]
<ast.Try object at 0x7da20e9b3e50>
if <ast.BoolOp object at 0x7da20e9b3820> begin[:]
call[name[parser].print_help, parameter[]]
call[name[parser].exit, parameter[constant[1]]]
variable[source_date_epoch] assign[=] call[name[os].environ.get, parameter[constant[SOURCE_DATE_EPOCH]]]
if name[source_date_epoch] begin[:]
<ast.Try object at 0x7da20e9b0550>
if call[name[options].pop, parameter[constant[show_TTFA_info]]] begin[:]
<ast.Raise object at 0x7da20c6e77c0>
variable[stem_width_options] assign[=] call[name[options].pop, parameter[constant[stem_width_mode]]]
variable[strong_stem_width_options] assign[=] call[name[options].pop, parameter[constant[strong_stem_width]]]
if name[strong_stem_width_options] begin[:]
import module[warnings]
call[name[warnings].warn, parameter[call[name[UserWarning], parameter[constant[Option '-w' is deprecated! Use option '-a' instead]]]]]
variable[stem_width_options] assign[=] name[strong_stem_width_options]
call[name[options].update, parameter[name[stem_width_options]]]
return[name[options]] | keyword[def] identifier[parse_args] ( identifier[args] = keyword[None] ):
literal[string]
keyword[import] identifier[argparse]
keyword[from] identifier[ttfautohint] keyword[import] identifier[__version__] , identifier[libttfautohint]
keyword[from] identifier[ttfautohint] . identifier[cli] keyword[import] identifier[USAGE] , identifier[DESCRIPTION] , identifier[EPILOG]
identifier[version_string] = literal[string] %(
identifier[__version__] , identifier[libttfautohint] . identifier[version_string] )
keyword[if] identifier[args] keyword[is] keyword[None] :
identifier[capture_sys_exit] = keyword[False]
keyword[else] :
identifier[capture_sys_exit] = keyword[True]
keyword[if] identifier[isinstance] ( identifier[args] , identifier[basestring] ):
keyword[import] identifier[shlex]
identifier[args] = identifier[shlex] . identifier[split] ( identifier[args] )
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[prog] = literal[string] ,
identifier[usage] = identifier[USAGE] ,
identifier[description] = identifier[DESCRIPTION] ,
identifier[epilog] = identifier[EPILOG] ,
identifier[formatter_class] = identifier[argparse] . identifier[RawDescriptionHelpFormatter] ,
)
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[nargs] = literal[string] , identifier[metavar] = literal[string] , identifier[default] = literal[string] ,
identifier[type] = identifier[stdin_or_input_path_type] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[nargs] = literal[string] , identifier[metavar] = literal[string] , identifier[default] = literal[string] ,
identifier[type] = identifier[stdout_or_output_path_type] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] )
identifier[stem_width_group] = identifier[parser] . identifier[add_mutually_exclusive_group] ( identifier[required] = keyword[False] )
identifier[stem_width_group] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[stem_width_mode] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[STEM_WIDTH_MODE_OPTIONS] ,
identifier[help] =( literal[string]
literal[string]
literal[string]
literal[string] ))
identifier[stem_width_group] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[strong_stem_width] , identifier[metavar] = literal[string] ,
identifier[help] = identifier[argparse] . identifier[SUPPRESS] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[metavar] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[version] = identifier[version_string] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[action] = literal[string] ,
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , literal[string] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] ,
identifier[default] = identifier[USER_OPTIONS] [ literal[string] ],
identifier[help] = literal[string] )
keyword[try] :
identifier[options] = identifier[vars] ( identifier[parser] . identifier[parse_args] ( identifier[args] ))
keyword[except] identifier[SystemExit] :
keyword[if] identifier[capture_sys_exit] :
keyword[return] keyword[None]
keyword[raise]
keyword[if] ( keyword[not] identifier[capture_sys_exit] keyword[and]
( identifier[options] [ literal[string] ] keyword[is] keyword[None] keyword[or] identifier[options] [ literal[string] ] keyword[is] keyword[None] )):
identifier[parser] . identifier[print_help] ()
identifier[parser] . identifier[exit] ( literal[int] )
identifier[source_date_epoch] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] )
keyword[if] identifier[source_date_epoch] :
keyword[try] :
identifier[options] [ literal[string] ]= identifier[int] ( identifier[source_date_epoch] )
keyword[except] identifier[ValueError] :
keyword[import] identifier[warnings]
identifier[warnings] . identifier[warn] (
identifier[UserWarning] ( literal[string] % identifier[source_date_epoch] ))
keyword[if] identifier[options] . identifier[pop] ( literal[string] ):
keyword[raise] identifier[NotImplementedError] ()
identifier[stem_width_options] = identifier[options] . identifier[pop] ( literal[string] )
identifier[strong_stem_width_options] = identifier[options] . identifier[pop] ( literal[string] )
keyword[if] identifier[strong_stem_width_options] :
keyword[import] identifier[warnings]
identifier[warnings] . identifier[warn] (
identifier[UserWarning] ( literal[string] ))
identifier[stem_width_options] = identifier[strong_stem_width_options]
identifier[options] . identifier[update] ( identifier[stem_width_options] )
keyword[return] identifier[options] | def parse_args(args=None):
"""Parse command line arguments and return a dictionary of options
for ttfautohint.ttfautohint function.
`args` can be either None, a list of strings, or a single string,
that is split into individual options with `shlex.split`.
When `args` is None, the console's default sys.argv are used, and any
SystemExit exceptions raised by argparse are propagated.
If args is a string list or a string, it is assumed that the function
was not called from a console script's `main` entry point, but from
other client code, and thus the SystemExit exceptions are muted and
a `None` value is returned.
"""
import argparse
from ttfautohint import __version__, libttfautohint
from ttfautohint.cli import USAGE, DESCRIPTION, EPILOG
version_string = 'ttfautohint-py %s (libttfautohint %s)' % (__version__, libttfautohint.version_string)
if args is None:
capture_sys_exit = False # depends on [control=['if'], data=[]]
else:
capture_sys_exit = True
if isinstance(args, basestring):
import shlex
args = shlex.split(args) # depends on [control=['if'], data=[]]
parser = argparse.ArgumentParser(prog='ttfautohint', usage=USAGE, description=DESCRIPTION, epilog=EPILOG, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('in_file', nargs='?', metavar='IN-FILE', default='-', type=stdin_or_input_path_type, help='input file (default: standard input)')
parser.add_argument('out_file', nargs='?', metavar='OUT-FILE', default='-', type=stdout_or_output_path_type, help='output file (default: standard output)')
parser.add_argument('--debug', action='store_true', help='print debugging information')
stem_width_group = parser.add_mutually_exclusive_group(required=False)
stem_width_group.add_argument('-a', '--stem-width-mode', type=stem_width_mode, metavar='S', default=STEM_WIDTH_MODE_OPTIONS, help="select stem width mode for grayscale, GDI ClearType, and DW ClearType, where S is a string of three letters with possible values 'n' for natural, 'q' for quantized, and 's' for strong (default: qsq)") # deprecated
stem_width_group.add_argument('-w', '--strong-stem-width', type=strong_stem_width, metavar='S', help=argparse.SUPPRESS)
parser.add_argument('-c', '--composites', dest='hint_composites', action='store_true', help='hint glyph composites also')
parser.add_argument('-d', '--dehint', action='store_true', help='remove all hints')
parser.add_argument('-D', '--default-script', metavar='SCRIPT', default=USER_OPTIONS['default_script'], help='set default OpenType script (default: %(default)s)')
parser.add_argument('-f', '--fallback-script', metavar='SCRIPT', default=USER_OPTIONS['fallback_script'], help='set fallback script (default: %(default)s)')
parser.add_argument('-F', '--family-suffix', metavar='SUFFIX', help="append SUFFIX to the family name string(s) in the `name' table")
parser.add_argument('-G', '--hinting-limit', type=int, metavar='PPEM', default=USER_OPTIONS['hinting_limit'], help='switch off hinting above this PPEM value (default: %(default)s); value 0 means no limit')
parser.add_argument('-H', '--fallback-stem-width', type=int, metavar='UNITS', default=USER_OPTIONS['fallback_stem_width'], help='set fallback stem width (default: %(default)s font units at 2048 UPEM)')
parser.add_argument('-i', '--ignore-restrictions', action='store_true', help='override font license restrictions')
parser.add_argument('-I', '--detailed-info', action='store_true', help="add detailed ttfautohint info to the version string(s) in the `name' table")
parser.add_argument('-l', '--hinting-range-min', type=int, metavar='PPEM', default=USER_OPTIONS['hinting_range_min'], help='the minimum PPEM value for hint sets (default: %(default)s)')
parser.add_argument('-m', '--control-file', metavar='FILE', help='get control instructions from FILE')
parser.add_argument('-n', '--no-info', action='store_true', help="don't add ttfautohint info to the version string(s) in the `name' table")
parser.add_argument('-p', '--adjust-subglyphs', action='store_true', help='handle subglyph adjustments in exotic fonts')
parser.add_argument('-r', '--hinting-range-max', type=int, metavar='PPEM', default=USER_OPTIONS['hinting_range_max'], help='the maximum PPEM value for hint sets (default: %(default)s)')
parser.add_argument('-R', '--reference', dest='reference_file', metavar='FILE', help='derive blue zones from reference font FILE')
parser.add_argument('-s', '--symbol', action='store_true', help='input is symbol font')
parser.add_argument('-S', '--fallback-scaling', action='store_true', help='use fallback scaling, not hinting')
parser.add_argument('-t', '--ttfa-table', action='store_true', dest='TTFA_info', help='add TTFA information table')
parser.add_argument('-T', '--ttfa-info', dest='show_TTFA_info', action='store_true', help='display TTFA table in IN-FILE and exit')
parser.add_argument('-v', '--verbose', action='store_true', help='show progress information')
parser.add_argument('-V', '--version', action='version', version=version_string, help='print version information and exit')
parser.add_argument('-W', '--windows-compatibility', action='store_true', help="add blue zones for `usWinAscent' and `usWinDescent' to avoid clipping")
parser.add_argument('-x', '--increase-x-height', type=int, metavar='PPEM', default=USER_OPTIONS['increase_x_height'], help='increase x height for sizes in the range 6<=PPEM<=N; value 0 switches off this feature (default: %(default)s)')
parser.add_argument('-X', '--x-height-snapping-exceptions', metavar='STRING', default=USER_OPTIONS['x_height_snapping_exceptions'], help='specify a comma-separated list of x-height snapping exceptions, for example "-9, 13-17, 19" (default: "%(default)s")')
parser.add_argument('-Z', '--reference-index', type=int, metavar='NUMBER', default=USER_OPTIONS['reference_index'], help='face index of reference font (default: %(default)s)')
try:
options = vars(parser.parse_args(args)) # depends on [control=['try'], data=[]]
except SystemExit:
if capture_sys_exit:
return None # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=[]]
# if either input/output are interactive, print help and exit
if not capture_sys_exit and (options['in_file'] is None or options['out_file'] is None):
parser.print_help()
parser.exit(1) # depends on [control=['if'], data=[]]
# check SOURCE_DATE_EPOCH environment variable
source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
if source_date_epoch:
try:
options['epoch'] = int(source_date_epoch) # depends on [control=['try'], data=[]]
except ValueError:
import warnings
warnings.warn(UserWarning('invalid SOURCE_DATE_EPOCH: %r' % source_date_epoch)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if options.pop('show_TTFA_info'):
# TODO use fonttools to dump TTFA table?
raise NotImplementedError() # depends on [control=['if'], data=[]]
stem_width_options = options.pop('stem_width_mode')
strong_stem_width_options = options.pop('strong_stem_width')
if strong_stem_width_options:
import warnings
warnings.warn(UserWarning("Option '-w' is deprecated! Use option '-a' instead"))
stem_width_options = strong_stem_width_options # depends on [control=['if'], data=[]]
options.update(stem_width_options)
return options |
def set_max_priority(self, infohash_list):
"""
Set torrents to maximum priority level.
:param infohash_list: Single or list() of infohashes.
"""
data = self._process_infohash_list(infohash_list)
return self._post('command/topPrio', data=data) | def function[set_max_priority, parameter[self, infohash_list]]:
constant[
Set torrents to maximum priority level.
:param infohash_list: Single or list() of infohashes.
]
variable[data] assign[=] call[name[self]._process_infohash_list, parameter[name[infohash_list]]]
return[call[name[self]._post, parameter[constant[command/topPrio]]]] | keyword[def] identifier[set_max_priority] ( identifier[self] , identifier[infohash_list] ):
literal[string]
identifier[data] = identifier[self] . identifier[_process_infohash_list] ( identifier[infohash_list] )
keyword[return] identifier[self] . identifier[_post] ( literal[string] , identifier[data] = identifier[data] ) | def set_max_priority(self, infohash_list):
"""
Set torrents to maximum priority level.
:param infohash_list: Single or list() of infohashes.
"""
data = self._process_infohash_list(infohash_list)
return self._post('command/topPrio', data=data) |
def parse_200_row(row: list) -> NmiDetails:
    """ Parse NMI data details record (200) """
    # Fields 1-7 are passed through verbatim; field 8 is converted to an
    # int and field 9 is parsed as a datetime.
    passthrough = row[1:8]
    return NmiDetails(*passthrough, int(row[8]), parse_datetime(row[9]))
constant[ Parse NMI data details record (200) ]
return[call[name[NmiDetails], parameter[call[name[row]][constant[1]], call[name[row]][constant[2]], call[name[row]][constant[3]], call[name[row]][constant[4]], call[name[row]][constant[5]], call[name[row]][constant[6]], call[name[row]][constant[7]], call[name[int], parameter[call[name[row]][constant[8]]]], call[name[parse_datetime], parameter[call[name[row]][constant[9]]]]]]] | keyword[def] identifier[parse_200_row] ( identifier[row] : identifier[list] )-> identifier[NmiDetails] :
literal[string]
keyword[return] identifier[NmiDetails] ( identifier[row] [ literal[int] ], identifier[row] [ literal[int] ], identifier[row] [ literal[int] ], identifier[row] [ literal[int] ], identifier[row] [ literal[int] ], identifier[row] [ literal[int] ],
identifier[row] [ literal[int] ], identifier[int] ( identifier[row] [ literal[int] ]), identifier[parse_datetime] ( identifier[row] [ literal[int] ])) | def parse_200_row(row: list) -> NmiDetails:
""" Parse NMI data details record (200) """
return NmiDetails(row[1], row[2], row[3], row[4], row[5], row[6], row[7], int(row[8]), parse_datetime(row[9])) |
def get_interpolation_function(self, times, data):
""" Initializes interpolation model
:param times: Array of reference times in second relative to the first timestamp
:type times: numpy.array
:param data: One dimensional array of time series
:type data: numpy.array
:return: Initialized interpolation model class
"""
return self.interpolation_object(times, data, axis=0, **self.interpolation_parameters) | def function[get_interpolation_function, parameter[self, times, data]]:
constant[ Initializes interpolation model
:param times: Array of reference times in second relative to the first timestamp
:type times: numpy.array
:param data: One dimensional array of time series
:type data: numpy.array
:return: Initialized interpolation model class
]
return[call[name[self].interpolation_object, parameter[name[times], name[data]]]] | keyword[def] identifier[get_interpolation_function] ( identifier[self] , identifier[times] , identifier[data] ):
literal[string]
keyword[return] identifier[self] . identifier[interpolation_object] ( identifier[times] , identifier[data] , identifier[axis] = literal[int] ,** identifier[self] . identifier[interpolation_parameters] ) | def get_interpolation_function(self, times, data):
""" Initializes interpolation model
:param times: Array of reference times in second relative to the first timestamp
:type times: numpy.array
:param data: One dimensional array of time series
:type data: numpy.array
:return: Initialized interpolation model class
"""
return self.interpolation_object(times, data, axis=0, **self.interpolation_parameters) |
def get_context_menu(self):
"""
Gets the editor context menu.
:return: QMenu
"""
mnu = QtWidgets.QMenu()
mnu.addActions(self._actions)
mnu.addSeparator()
for menu in self._menus:
mnu.addMenu(menu)
return mnu | def function[get_context_menu, parameter[self]]:
constant[
Gets the editor context menu.
:return: QMenu
]
variable[mnu] assign[=] call[name[QtWidgets].QMenu, parameter[]]
call[name[mnu].addActions, parameter[name[self]._actions]]
call[name[mnu].addSeparator, parameter[]]
for taget[name[menu]] in starred[name[self]._menus] begin[:]
call[name[mnu].addMenu, parameter[name[menu]]]
return[name[mnu]] | keyword[def] identifier[get_context_menu] ( identifier[self] ):
literal[string]
identifier[mnu] = identifier[QtWidgets] . identifier[QMenu] ()
identifier[mnu] . identifier[addActions] ( identifier[self] . identifier[_actions] )
identifier[mnu] . identifier[addSeparator] ()
keyword[for] identifier[menu] keyword[in] identifier[self] . identifier[_menus] :
identifier[mnu] . identifier[addMenu] ( identifier[menu] )
keyword[return] identifier[mnu] | def get_context_menu(self):
"""
Gets the editor context menu.
:return: QMenu
"""
mnu = QtWidgets.QMenu()
mnu.addActions(self._actions)
mnu.addSeparator()
for menu in self._menus:
mnu.addMenu(menu) # depends on [control=['for'], data=['menu']]
return mnu |
def get_options_menu(self):
"""Return options menu"""
env_action = create_action(
self,
_("Show environment variables"),
icon=ima.icon('environ'),
triggered=self.shellwidget.get_env
)
syspath_action = create_action(
self,
_("Show sys.path contents"),
icon=ima.icon('syspath'),
triggered=self.shellwidget.get_syspath
)
self.show_time_action.setChecked(self.show_elapsed_time)
additional_actions = [MENU_SEPARATOR,
env_action,
syspath_action,
self.show_time_action]
if self.menu_actions is not None:
console_menu = self.menu_actions + additional_actions
return console_menu
else:
return additional_actions | def function[get_options_menu, parameter[self]]:
constant[Return options menu]
variable[env_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Show environment variables]]]]]
variable[syspath_action] assign[=] call[name[create_action], parameter[name[self], call[name[_], parameter[constant[Show sys.path contents]]]]]
call[name[self].show_time_action.setChecked, parameter[name[self].show_elapsed_time]]
variable[additional_actions] assign[=] list[[<ast.Name object at 0x7da18dc06500>, <ast.Name object at 0x7da18dc07b50>, <ast.Name object at 0x7da18dc06800>, <ast.Attribute object at 0x7da18dc05e10>]]
if compare[name[self].menu_actions is_not constant[None]] begin[:]
variable[console_menu] assign[=] binary_operation[name[self].menu_actions + name[additional_actions]]
return[name[console_menu]] | keyword[def] identifier[get_options_menu] ( identifier[self] ):
literal[string]
identifier[env_action] = identifier[create_action] (
identifier[self] ,
identifier[_] ( literal[string] ),
identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ),
identifier[triggered] = identifier[self] . identifier[shellwidget] . identifier[get_env]
)
identifier[syspath_action] = identifier[create_action] (
identifier[self] ,
identifier[_] ( literal[string] ),
identifier[icon] = identifier[ima] . identifier[icon] ( literal[string] ),
identifier[triggered] = identifier[self] . identifier[shellwidget] . identifier[get_syspath]
)
identifier[self] . identifier[show_time_action] . identifier[setChecked] ( identifier[self] . identifier[show_elapsed_time] )
identifier[additional_actions] =[ identifier[MENU_SEPARATOR] ,
identifier[env_action] ,
identifier[syspath_action] ,
identifier[self] . identifier[show_time_action] ]
keyword[if] identifier[self] . identifier[menu_actions] keyword[is] keyword[not] keyword[None] :
identifier[console_menu] = identifier[self] . identifier[menu_actions] + identifier[additional_actions]
keyword[return] identifier[console_menu]
keyword[else] :
keyword[return] identifier[additional_actions] | def get_options_menu(self):
"""Return options menu"""
env_action = create_action(self, _('Show environment variables'), icon=ima.icon('environ'), triggered=self.shellwidget.get_env)
syspath_action = create_action(self, _('Show sys.path contents'), icon=ima.icon('syspath'), triggered=self.shellwidget.get_syspath)
self.show_time_action.setChecked(self.show_elapsed_time)
additional_actions = [MENU_SEPARATOR, env_action, syspath_action, self.show_time_action]
if self.menu_actions is not None:
console_menu = self.menu_actions + additional_actions
return console_menu # depends on [control=['if'], data=[]]
else:
return additional_actions |
def download_sparse_points():
    """Used with ``download_saddle_surface``"""
    filename, _ = _download_file('sparsePoints.txt')
    # Read the tab-separated table with an x/y/z header row.
    reader = vtk.vtkDelimitedTextReader()
    reader.SetFileName(filename)
    reader.DetectNumericColumnsOn()
    reader.SetFieldDelimiterCharacters('\t')
    reader.SetHaveHeaders(True)
    # Convert the named table columns into a point cloud.
    to_poly = vtk.vtkTableToPolyData()
    to_poly.SetInputConnection(reader.GetOutputPort())
    to_poly.SetXColumn('x')
    to_poly.SetYColumn('y')
    to_poly.SetZColumn('z')
    to_poly.Update()
    return vtki.wrap(to_poly.GetOutput())
constant[Used with ``download_saddle_surface``]
<ast.Tuple object at 0x7da18f58f1f0> assign[=] call[name[_download_file], parameter[constant[sparsePoints.txt]]]
variable[points_reader] assign[=] call[name[vtk].vtkDelimitedTextReader, parameter[]]
call[name[points_reader].SetFileName, parameter[name[saved_file]]]
call[name[points_reader].DetectNumericColumnsOn, parameter[]]
call[name[points_reader].SetFieldDelimiterCharacters, parameter[constant[ ]]]
call[name[points_reader].SetHaveHeaders, parameter[constant[True]]]
variable[table_points] assign[=] call[name[vtk].vtkTableToPolyData, parameter[]]
call[name[table_points].SetInputConnection, parameter[call[name[points_reader].GetOutputPort, parameter[]]]]
call[name[table_points].SetXColumn, parameter[constant[x]]]
call[name[table_points].SetYColumn, parameter[constant[y]]]
call[name[table_points].SetZColumn, parameter[constant[z]]]
call[name[table_points].Update, parameter[]]
return[call[name[vtki].wrap, parameter[call[name[table_points].GetOutput, parameter[]]]]] | keyword[def] identifier[download_sparse_points] ():
literal[string]
identifier[saved_file] , identifier[_] = identifier[_download_file] ( literal[string] )
identifier[points_reader] = identifier[vtk] . identifier[vtkDelimitedTextReader] ()
identifier[points_reader] . identifier[SetFileName] ( identifier[saved_file] )
identifier[points_reader] . identifier[DetectNumericColumnsOn] ()
identifier[points_reader] . identifier[SetFieldDelimiterCharacters] ( literal[string] )
identifier[points_reader] . identifier[SetHaveHeaders] ( keyword[True] )
identifier[table_points] = identifier[vtk] . identifier[vtkTableToPolyData] ()
identifier[table_points] . identifier[SetInputConnection] ( identifier[points_reader] . identifier[GetOutputPort] ())
identifier[table_points] . identifier[SetXColumn] ( literal[string] )
identifier[table_points] . identifier[SetYColumn] ( literal[string] )
identifier[table_points] . identifier[SetZColumn] ( literal[string] )
identifier[table_points] . identifier[Update] ()
keyword[return] identifier[vtki] . identifier[wrap] ( identifier[table_points] . identifier[GetOutput] ()) | def download_sparse_points():
"""Used with ``download_saddle_surface``"""
(saved_file, _) = _download_file('sparsePoints.txt')
points_reader = vtk.vtkDelimitedTextReader()
points_reader.SetFileName(saved_file)
points_reader.DetectNumericColumnsOn()
points_reader.SetFieldDelimiterCharacters('\t')
points_reader.SetHaveHeaders(True)
table_points = vtk.vtkTableToPolyData()
table_points.SetInputConnection(points_reader.GetOutputPort())
table_points.SetXColumn('x')
table_points.SetYColumn('y')
table_points.SetZColumn('z')
table_points.Update()
return vtki.wrap(table_points.GetOutput()) |
def ParseOptions(self, options):
"""Parses the options and initializes the front-end.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
"""
# The data location is required to list signatures.
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=['data_location'])
# Check the list options first otherwise required options will raise.
signature_identifiers = self.ParseStringOption(
options, 'signature_identifiers')
if signature_identifiers == 'list':
self.list_signature_identifiers = True
if self.list_signature_identifiers:
return
self._ParseInformationalOptions(options)
self._ParseLogFileOptions(options)
self._ParseStorageMediaOptions(options)
self._destination_path = self.ParseStringOption(
options, 'path', default_value='export')
if not self._data_location:
logger.warning('Unable to automatically determine data location.')
argument_helper_names = ['artifact_definitions', 'process_resources']
helpers_manager.ArgumentHelperManager.ParseOptions(
options, self, names=argument_helper_names)
self._ParseFilterOptions(options)
if (getattr(options, 'no_vss', False) or
getattr(options, 'include_duplicates', False)):
self._skip_duplicates = False
self._EnforceProcessMemoryLimit(self._process_memory_limit) | def function[ParseOptions, parameter[self, options]]:
constant[Parses the options and initializes the front-end.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
variable[signature_identifiers] assign[=] call[name[self].ParseStringOption, parameter[name[options], constant[signature_identifiers]]]
if compare[name[signature_identifiers] equal[==] constant[list]] begin[:]
name[self].list_signature_identifiers assign[=] constant[True]
if name[self].list_signature_identifiers begin[:]
return[None]
call[name[self]._ParseInformationalOptions, parameter[name[options]]]
call[name[self]._ParseLogFileOptions, parameter[name[options]]]
call[name[self]._ParseStorageMediaOptions, parameter[name[options]]]
name[self]._destination_path assign[=] call[name[self].ParseStringOption, parameter[name[options], constant[path]]]
if <ast.UnaryOp object at 0x7da2054a7a60> begin[:]
call[name[logger].warning, parameter[constant[Unable to automatically determine data location.]]]
variable[argument_helper_names] assign[=] list[[<ast.Constant object at 0x7da2054a6440>, <ast.Constant object at 0x7da2054a6920>]]
call[name[helpers_manager].ArgumentHelperManager.ParseOptions, parameter[name[options], name[self]]]
call[name[self]._ParseFilterOptions, parameter[name[options]]]
if <ast.BoolOp object at 0x7da204620af0> begin[:]
name[self]._skip_duplicates assign[=] constant[False]
call[name[self]._EnforceProcessMemoryLimit, parameter[name[self]._process_memory_limit]] | keyword[def] identifier[ParseOptions] ( identifier[self] , identifier[options] ):
literal[string]
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] =[ literal[string] ])
identifier[signature_identifiers] = identifier[self] . identifier[ParseStringOption] (
identifier[options] , literal[string] )
keyword[if] identifier[signature_identifiers] == literal[string] :
identifier[self] . identifier[list_signature_identifiers] = keyword[True]
keyword[if] identifier[self] . identifier[list_signature_identifiers] :
keyword[return]
identifier[self] . identifier[_ParseInformationalOptions] ( identifier[options] )
identifier[self] . identifier[_ParseLogFileOptions] ( identifier[options] )
identifier[self] . identifier[_ParseStorageMediaOptions] ( identifier[options] )
identifier[self] . identifier[_destination_path] = identifier[self] . identifier[ParseStringOption] (
identifier[options] , literal[string] , identifier[default_value] = literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[_data_location] :
identifier[logger] . identifier[warning] ( literal[string] )
identifier[argument_helper_names] =[ literal[string] , literal[string] ]
identifier[helpers_manager] . identifier[ArgumentHelperManager] . identifier[ParseOptions] (
identifier[options] , identifier[self] , identifier[names] = identifier[argument_helper_names] )
identifier[self] . identifier[_ParseFilterOptions] ( identifier[options] )
keyword[if] ( identifier[getattr] ( identifier[options] , literal[string] , keyword[False] ) keyword[or]
identifier[getattr] ( identifier[options] , literal[string] , keyword[False] )):
identifier[self] . identifier[_skip_duplicates] = keyword[False]
identifier[self] . identifier[_EnforceProcessMemoryLimit] ( identifier[self] . identifier[_process_memory_limit] ) | def ParseOptions(self, options):
"""Parses the options and initializes the front-end.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
"""
# The data location is required to list signatures.
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=['data_location'])
# Check the list options first otherwise required options will raise.
signature_identifiers = self.ParseStringOption(options, 'signature_identifiers')
if signature_identifiers == 'list':
self.list_signature_identifiers = True # depends on [control=['if'], data=[]]
if self.list_signature_identifiers:
return # depends on [control=['if'], data=[]]
self._ParseInformationalOptions(options)
self._ParseLogFileOptions(options)
self._ParseStorageMediaOptions(options)
self._destination_path = self.ParseStringOption(options, 'path', default_value='export')
if not self._data_location:
logger.warning('Unable to automatically determine data location.') # depends on [control=['if'], data=[]]
argument_helper_names = ['artifact_definitions', 'process_resources']
helpers_manager.ArgumentHelperManager.ParseOptions(options, self, names=argument_helper_names)
self._ParseFilterOptions(options)
if getattr(options, 'no_vss', False) or getattr(options, 'include_duplicates', False):
self._skip_duplicates = False # depends on [control=['if'], data=[]]
self._EnforceProcessMemoryLimit(self._process_memory_limit) |
def BC_0Displacement0Slope(self):
"""
0Displacement0Slope boundary condition for 0 deflection.
This requires that nothing be done to the edges of the solution array,
because the lack of the off-grid terms implies that they go to 0
Here we just turn the cells outside the array into nan, to ensure that
we are not accidentally including the wrong cells here (and for consistency
with the other solution types -- this takes negligible time)
"""
if self.BC_W == '0Displacement0Slope':
i=0
self.l2[i] = np.nan
self.l1[i] = np.nan
self.c0[i] += 0
self.r1[i] += 0
self.r2[i] += 0
i=1
self.l2[i] = np.nan
self.l1[i] += 0
self.c0[i] += 0
self.r1[i] += 0
self.r2[i] += 0
if self.BC_E == '0Displacement0Slope':
i=-2
self.l2[i] += 0
self.l1[i] += 0
self.c0[i] += 0
self.r1[i] += 0
self.r2[i] = np.nan
i=-1
self.l2[i] += 0
self.l1[i] += 0
self.c0[i] += 0
self.r1[i] = np.nan
self.r2[i] = np.nan | def function[BC_0Displacement0Slope, parameter[self]]:
constant[
0Displacement0Slope boundary condition for 0 deflection.
This requires that nothing be done to the edges of the solution array,
because the lack of the off-grid terms implies that they go to 0
Here we just turn the cells outside the array into nan, to ensure that
we are not accidentally including the wrong cells here (and for consistency
with the other solution types -- this takes negligible time)
]
if compare[name[self].BC_W equal[==] constant[0Displacement0Slope]] begin[:]
variable[i] assign[=] constant[0]
call[name[self].l2][name[i]] assign[=] name[np].nan
call[name[self].l1][name[i]] assign[=] name[np].nan
<ast.AugAssign object at 0x7da20c6abdc0>
<ast.AugAssign object at 0x7da20c6a8940>
<ast.AugAssign object at 0x7da1b25588e0>
variable[i] assign[=] constant[1]
call[name[self].l2][name[i]] assign[=] name[np].nan
<ast.AugAssign object at 0x7da1b2558be0>
<ast.AugAssign object at 0x7da1b255bc40>
<ast.AugAssign object at 0x7da1b255b610>
<ast.AugAssign object at 0x7da1b2559120>
if compare[name[self].BC_E equal[==] constant[0Displacement0Slope]] begin[:]
variable[i] assign[=] <ast.UnaryOp object at 0x7da1b251e740>
<ast.AugAssign object at 0x7da1b251fa00>
<ast.AugAssign object at 0x7da1b251e380>
<ast.AugAssign object at 0x7da1b251edd0>
<ast.AugAssign object at 0x7da1b251d900>
call[name[self].r2][name[i]] assign[=] name[np].nan
variable[i] assign[=] <ast.UnaryOp object at 0x7da1b251ddb0>
<ast.AugAssign object at 0x7da1b251c6d0>
<ast.AugAssign object at 0x7da1b251fdf0>
<ast.AugAssign object at 0x7da1b251f5e0>
call[name[self].r1][name[i]] assign[=] name[np].nan
call[name[self].r2][name[i]] assign[=] name[np].nan | keyword[def] identifier[BC_0Displacement0Slope] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[BC_W] == literal[string] :
identifier[i] = literal[int]
identifier[self] . identifier[l2] [ identifier[i] ]= identifier[np] . identifier[nan]
identifier[self] . identifier[l1] [ identifier[i] ]= identifier[np] . identifier[nan]
identifier[self] . identifier[c0] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r1] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r2] [ identifier[i] ]+= literal[int]
identifier[i] = literal[int]
identifier[self] . identifier[l2] [ identifier[i] ]= identifier[np] . identifier[nan]
identifier[self] . identifier[l1] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[c0] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r1] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r2] [ identifier[i] ]+= literal[int]
keyword[if] identifier[self] . identifier[BC_E] == literal[string] :
identifier[i] =- literal[int]
identifier[self] . identifier[l2] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[l1] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[c0] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r1] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r2] [ identifier[i] ]= identifier[np] . identifier[nan]
identifier[i] =- literal[int]
identifier[self] . identifier[l2] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[l1] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[c0] [ identifier[i] ]+= literal[int]
identifier[self] . identifier[r1] [ identifier[i] ]= identifier[np] . identifier[nan]
identifier[self] . identifier[r2] [ identifier[i] ]= identifier[np] . identifier[nan] | def BC_0Displacement0Slope(self):
"""
0Displacement0Slope boundary condition for 0 deflection.
This requires that nothing be done to the edges of the solution array,
because the lack of the off-grid terms implies that they go to 0
Here we just turn the cells outside the array into nan, to ensure that
we are not accidentally including the wrong cells here (and for consistency
with the other solution types -- this takes negligible time)
"""
if self.BC_W == '0Displacement0Slope':
i = 0
self.l2[i] = np.nan
self.l1[i] = np.nan
self.c0[i] += 0
self.r1[i] += 0
self.r2[i] += 0
i = 1
self.l2[i] = np.nan
self.l1[i] += 0
self.c0[i] += 0
self.r1[i] += 0
self.r2[i] += 0 # depends on [control=['if'], data=[]]
if self.BC_E == '0Displacement0Slope':
i = -2
self.l2[i] += 0
self.l1[i] += 0
self.c0[i] += 0
self.r1[i] += 0
self.r2[i] = np.nan
i = -1
self.l2[i] += 0
self.l1[i] += 0
self.c0[i] += 0
self.r1[i] = np.nan
self.r2[i] = np.nan # depends on [control=['if'], data=[]] |
def _find_existing_instance(self):
"""
I find existing VMs that are already running that might be orphaned instances of this worker.
"""
if not self.connection:
return None
domains = yield self.connection.all()
for d in domains:
name = yield d.name()
if name.startswith(self.workername):
self.domain = d
break
self.ready = True | def function[_find_existing_instance, parameter[self]]:
constant[
I find existing VMs that are already running that might be orphaned instances of this worker.
]
if <ast.UnaryOp object at 0x7da18c4cdd50> begin[:]
return[constant[None]]
variable[domains] assign[=] <ast.Yield object at 0x7da18c4cf820>
for taget[name[d]] in starred[name[domains]] begin[:]
variable[name] assign[=] <ast.Yield object at 0x7da18c4cda50>
if call[name[name].startswith, parameter[name[self].workername]] begin[:]
name[self].domain assign[=] name[d]
break
name[self].ready assign[=] constant[True] | keyword[def] identifier[_find_existing_instance] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[connection] :
keyword[return] keyword[None]
identifier[domains] = keyword[yield] identifier[self] . identifier[connection] . identifier[all] ()
keyword[for] identifier[d] keyword[in] identifier[domains] :
identifier[name] = keyword[yield] identifier[d] . identifier[name] ()
keyword[if] identifier[name] . identifier[startswith] ( identifier[self] . identifier[workername] ):
identifier[self] . identifier[domain] = identifier[d]
keyword[break]
identifier[self] . identifier[ready] = keyword[True] | def _find_existing_instance(self):
"""
I find existing VMs that are already running that might be orphaned instances of this worker.
"""
if not self.connection:
return None # depends on [control=['if'], data=[]]
domains = (yield self.connection.all())
for d in domains:
name = (yield d.name())
if name.startswith(self.workername):
self.domain = d
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
self.ready = True |
def _validate_oneof(self, definitions, field, value):
    """ {'type': 'list', 'logical': 'oneof'} """
    # NOTE(review): Cerberus-style rule method -- the docstring above is
    # machine-read as this rule's own constraint schema (confirm against
    # the validator base class); do not reword it.
    # Count how many of the alternative schemas in ``definitions`` the
    # value satisfies; 'oneof' demands exactly one match.
    valids, _errors = \
        self.__validate_logical('oneof', definitions, field, value)
    if valids != 1:
        # Report the per-definition errors plus match count vs. total.
        self._error(field, errors.ONEOF, _errors,
                    valids, len(definitions))
constant[ {'type': 'list', 'logical': 'oneof'} ]
<ast.Tuple object at 0x7da1b1de2cb0> assign[=] call[name[self].__validate_logical, parameter[constant[oneof], name[definitions], name[field], name[value]]]
if compare[name[valids] not_equal[!=] constant[1]] begin[:]
call[name[self]._error, parameter[name[field], name[errors].ONEOF, name[_errors], name[valids], call[name[len], parameter[name[definitions]]]]] | keyword[def] identifier[_validate_oneof] ( identifier[self] , identifier[definitions] , identifier[field] , identifier[value] ):
literal[string]
identifier[valids] , identifier[_errors] = identifier[self] . identifier[__validate_logical] ( literal[string] , identifier[definitions] , identifier[field] , identifier[value] )
keyword[if] identifier[valids] != literal[int] :
identifier[self] . identifier[_error] ( identifier[field] , identifier[errors] . identifier[ONEOF] , identifier[_errors] ,
identifier[valids] , identifier[len] ( identifier[definitions] )) | def _validate_oneof(self, definitions, field, value):
""" {'type': 'list', 'logical': 'oneof'} """
(valids, _errors) = self.__validate_logical('oneof', definitions, field, value)
if valids != 1:
self._error(field, errors.ONEOF, _errors, valids, len(definitions)) # depends on [control=['if'], data=['valids']] |
def p_else_part_label(p):
    """ else_part : LABEL ELSE program_co endif
                  | LABEL ELSE statements_co endif
                  | LABEL ELSE co_statements_co endif
    """
    # PLY parser rule: the docstring above IS the grammar production and
    # is read by yacc when building the parse tables -- keep it verbatim.
    # p[1] is the label token, p[3] the ELSE body, p[4] the endif node.
    lbl = make_label(p[1], p.lineno(1))
    # Wrap the labelled block around the ELSE body; keep the endif node
    # alongside it.
    p[0] = [make_block(lbl, p[3]), p[4]]
constant[ else_part : LABEL ELSE program_co endif
| LABEL ELSE statements_co endif
| LABEL ELSE co_statements_co endif
]
variable[lbl] assign[=] call[name[make_label], parameter[call[name[p]][constant[1]], call[name[p].lineno, parameter[constant[1]]]]]
call[name[p]][constant[0]] assign[=] list[[<ast.Call object at 0x7da1b06e3640>, <ast.Subscript object at 0x7da1b06e3d60>]] | keyword[def] identifier[p_else_part_label] ( identifier[p] ):
literal[string]
identifier[lbl] = identifier[make_label] ( identifier[p] [ literal[int] ], identifier[p] . identifier[lineno] ( literal[int] ))
identifier[p] [ literal[int] ]=[ identifier[make_block] ( identifier[lbl] , identifier[p] [ literal[int] ]), identifier[p] [ literal[int] ]] | def p_else_part_label(p):
""" else_part : LABEL ELSE program_co endif
| LABEL ELSE statements_co endif
| LABEL ELSE co_statements_co endif
"""
lbl = make_label(p[1], p.lineno(1))
p[0] = [make_block(lbl, p[3]), p[4]] |
def generate(self, tree):
    '''
    Render ``tree`` to source code using the templates and gen
    functions defined in the <x> lang generator.
    '''
    # Let every middleware rewrite the tree (mutated in place).
    for middleware in DEFAULT_MIDDLEWARES + self.middlewares:
        tree = middleware.process(tree)
    rendered = self._generate_node(tree)
    if self.a and tree.type == 'module':
        # For modules, splice the accumulated extra code (self.a) in
        # right after the dependency lines at the top of the output.
        lines = rendered.split('\n')
        dep_count = len(tree.dependencies)
        parts = lines[:dep_count]
        if tree.dependencies:
            parts = parts + ['\n']
        parts = parts + self.a + ['\n'] + lines[dep_count:]
        result = '\n'.join(parts) + '\n'
    else:
        result = rendered
    # Normalize brace/endline artifacts left over from templating.
    result = re.sub(CLOSING_CURLY_ENDLINES, r'}\n\2}', result)
    result = re.sub(JS_BRACKET, r'}\1', result)
    return re.sub(TOO_MANY_ENDLINES, r'\n\n', result)
constant[
generates code based on templates and gen functions
defined in the <x> lang generator
]
for taget[name[middleware]] in starred[binary_operation[name[DEFAULT_MIDDLEWARES] + name[self].middlewares]] begin[:]
variable[tree] assign[=] call[name[middleware].process, parameter[name[tree]]]
variable[original] assign[=] call[name[self]._generate_node, parameter[name[tree]]]
if <ast.BoolOp object at 0x7da1b07cdea0> begin[:]
variable[p] assign[=] call[name[original].split, parameter[constant[
]]]
variable[r] assign[=] binary_operation[call[constant[
].join, parameter[binary_operation[binary_operation[binary_operation[binary_operation[call[name[p]][<ast.Slice object at 0x7da1b07cf0d0>] + <ast.IfExp object at 0x7da1b085bc70>] + name[self].a] + list[[<ast.Constant object at 0x7da1b07cdf30>]]] + call[name[p]][<ast.Slice object at 0x7da1b07cd5d0>]]]] + constant[
]]
variable[r] assign[=] call[name[re].sub, parameter[name[CLOSING_CURLY_ENDLINES], constant[}\n\2}], name[r]]]
variable[r] assign[=] call[name[re].sub, parameter[name[JS_BRACKET], constant[}\1], name[r]]]
return[call[name[re].sub, parameter[name[TOO_MANY_ENDLINES], constant[\n\n], name[r]]]] | keyword[def] identifier[generate] ( identifier[self] , identifier[tree] ):
literal[string]
keyword[for] identifier[middleware] keyword[in] identifier[DEFAULT_MIDDLEWARES] + identifier[self] . identifier[middlewares] :
identifier[tree] = identifier[middleware] . identifier[process] ( identifier[tree] )
identifier[original] = identifier[self] . identifier[_generate_node] ( identifier[tree] )
keyword[if] identifier[self] . identifier[a] keyword[and] identifier[tree] . identifier[type] == literal[string] :
identifier[p] = identifier[original] . identifier[split] ( literal[string] )
identifier[r] = literal[string] . identifier[join] ( identifier[p] [: identifier[len] ( identifier[tree] . identifier[dependencies] )]+([ literal[string] ] keyword[if] identifier[tree] . identifier[dependencies] keyword[else] [])+ identifier[self] . identifier[a] +[ literal[string] ]+ identifier[p] [ identifier[len] ( identifier[tree] . identifier[dependencies] ):])+ literal[string]
keyword[else] :
identifier[r] = identifier[original]
identifier[r] = identifier[re] . identifier[sub] ( identifier[CLOSING_CURLY_ENDLINES] , literal[string] , identifier[r] )
identifier[r] = identifier[re] . identifier[sub] ( identifier[JS_BRACKET] , literal[string] , identifier[r] )
keyword[return] identifier[re] . identifier[sub] ( identifier[TOO_MANY_ENDLINES] , literal[string] , identifier[r] ) | def generate(self, tree):
"""
generates code based on templates and gen functions
defined in the <x> lang generator
"""
for middleware in DEFAULT_MIDDLEWARES + self.middlewares:
tree = middleware.process(tree) # changed in place!! # depends on [control=['for'], data=['middleware']]
original = self._generate_node(tree)
# first n lines n dependencies
# after that additional code
if self.a and tree.type == 'module':
p = original.split('\n')
r = '\n'.join(p[:len(tree.dependencies)] + (['\n'] if tree.dependencies else []) + self.a + ['\n'] + p[len(tree.dependencies):]) + '\n' # depends on [control=['if'], data=[]]
else:
r = original
r = re.sub(CLOSING_CURLY_ENDLINES, '}\\n\\2}', r)
r = re.sub(JS_BRACKET, '}\\1', r)
return re.sub(TOO_MANY_ENDLINES, '\\n\\n', r) |
def iters(cls, batch_size=32, bptt_len=35, device=0, root='.data',
          vectors=None, **kwargs):
    """Build BPTT iterators over the WikiText-2 splits with sane defaults.

    Convenience wrapper: creates the text field, loads the
    train/validation/test splits, builds the vocabulary (optionally
    with pretrained ``vectors``), and returns BPTT iterators for all
    three splits.

    Arguments:
        batch_size: Batch size.
        bptt_len: Sequence length for backpropagation through time.
        device: Device to create batches on. Use -1 for CPU and None
            for the currently active GPU device.
        root: The root directory that the dataset's zip archive will be
            expanded into; the data files are stored in its wikitext-2
            subdirectory.
        vectors: Pretrained word vectors passed to ``build_vocab``; the
            result is accessible as
            train.dataset.fields['text'].vocab.vectors.
        Remaining keyword arguments: Passed to the splits method.
    """
    text_field = data.Field()
    train, val, test = cls.splits(text_field, root=root, **kwargs)
    text_field.build_vocab(train, vectors=vectors)
    return data.BPTTIterator.splits(
        (train, val, test),
        batch_size=batch_size,
        bptt_len=bptt_len,
        device=device)
constant[Create iterator objects for splits of the WikiText-2 dataset.
This is the simplest way to use the dataset, and assumes common
defaults for field, vocabulary, and iterator parameters.
Arguments:
batch_size: Batch size.
bptt_len: Length of sequences for backpropagation through time.
device: Device to create batches on. Use -1 for CPU and None for
the currently active GPU device.
root: The root directory that the dataset's zip archive will be
expanded into; therefore the directory in whose wikitext-2
subdirectory the data files will be stored.
wv_dir, wv_type, wv_dim: Passed to the Vocab constructor for the
text field. The word vectors are accessible as
train.dataset.fields['text'].vocab.vectors.
Remaining keyword arguments: Passed to the splits method.
]
variable[TEXT] assign[=] call[name[data].Field, parameter[]]
<ast.Tuple object at 0x7da1b1c29000> assign[=] call[name[cls].splits, parameter[name[TEXT]]]
call[name[TEXT].build_vocab, parameter[name[train]]]
return[call[name[data].BPTTIterator.splits, parameter[tuple[[<ast.Name object at 0x7da1b1c2b280>, <ast.Name object at 0x7da1b1c28ee0>, <ast.Name object at 0x7da1b1c2b670>]]]]] | keyword[def] identifier[iters] ( identifier[cls] , identifier[batch_size] = literal[int] , identifier[bptt_len] = literal[int] , identifier[device] = literal[int] , identifier[root] = literal[string] ,
identifier[vectors] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[TEXT] = identifier[data] . identifier[Field] ()
identifier[train] , identifier[val] , identifier[test] = identifier[cls] . identifier[splits] ( identifier[TEXT] , identifier[root] = identifier[root] ,** identifier[kwargs] )
identifier[TEXT] . identifier[build_vocab] ( identifier[train] , identifier[vectors] = identifier[vectors] )
keyword[return] identifier[data] . identifier[BPTTIterator] . identifier[splits] (
( identifier[train] , identifier[val] , identifier[test] ), identifier[batch_size] = identifier[batch_size] , identifier[bptt_len] = identifier[bptt_len] ,
identifier[device] = identifier[device] ) | def iters(cls, batch_size=32, bptt_len=35, device=0, root='.data', vectors=None, **kwargs):
"""Create iterator objects for splits of the WikiText-2 dataset.
This is the simplest way to use the dataset, and assumes common
defaults for field, vocabulary, and iterator parameters.
Arguments:
batch_size: Batch size.
bptt_len: Length of sequences for backpropagation through time.
device: Device to create batches on. Use -1 for CPU and None for
the currently active GPU device.
root: The root directory that the dataset's zip archive will be
expanded into; therefore the directory in whose wikitext-2
subdirectory the data files will be stored.
wv_dir, wv_type, wv_dim: Passed to the Vocab constructor for the
text field. The word vectors are accessible as
train.dataset.fields['text'].vocab.vectors.
Remaining keyword arguments: Passed to the splits method.
"""
TEXT = data.Field()
(train, val, test) = cls.splits(TEXT, root=root, **kwargs)
TEXT.build_vocab(train, vectors=vectors)
return data.BPTTIterator.splits((train, val, test), batch_size=batch_size, bptt_len=bptt_len, device=device) |
def volume_show(self, name):
    '''
    Show one volume.

    name
        The display name of the volume to look up.

    Returns the entry keyed by ``name`` from ``volume_list``. Raises
    ``SaltCloudSystemExit`` when no cinder endpoint is available, and
    ``KeyError`` when no volume with that display name exists.
    '''
    if self.volume_conn is None:
        raise SaltCloudSystemExit('No cinder endpoint available')
    volumes = self.volume_list(
        search_opts={'display_name': name},
    )
    # A KeyError here means no volume with that display name was found.
    return volumes[name]
constant[
Show one volume
]
if compare[name[self].volume_conn is constant[None]] begin[:]
<ast.Raise object at 0x7da1b21ee620>
variable[nt_ks] assign[=] name[self].volume_conn
variable[volumes] assign[=] call[name[self].volume_list, parameter[]]
variable[volume] assign[=] call[name[volumes]][name[name]]
return[name[volume]] | keyword[def] identifier[volume_show] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] identifier[self] . identifier[volume_conn] keyword[is] keyword[None] :
keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] )
identifier[nt_ks] = identifier[self] . identifier[volume_conn]
identifier[volumes] = identifier[self] . identifier[volume_list] (
identifier[search_opts] ={ literal[string] : identifier[name] },
)
identifier[volume] = identifier[volumes] [ identifier[name] ]
keyword[return] identifier[volume] | def volume_show(self, name):
"""
Show one volume
"""
if self.volume_conn is None:
raise SaltCloudSystemExit('No cinder endpoint available') # depends on [control=['if'], data=[]]
nt_ks = self.volume_conn
volumes = self.volume_list(search_opts={'display_name': name})
volume = volumes[name]
# except Exception as esc:
# # volume doesn't exist
# log.error(esc.strerror)
# return {'name': name, 'status': 'deleted'}
return volume |
def __yahoo_request(query):
    """Fetch Yahoo Finance data for a YQL query.

    Sends the query to the public YQL endpoint and returns the decoded
    ``results`` payload.
    `Check <http://goo.gl/8AROUD>`_ for more information on YQL.
    """
    encoded = quote(query)
    endpoint = ('https://query.yahooapis.com/v1/public/yql?q=' + encoded +
                '&format=json&env=store://datatables.org/alltableswithkeys')
    payload = urlopen(endpoint).read()
    parsed = json.loads(payload.decode('utf-8'))
    return parsed['query']['results']
constant[Request Yahoo Finance information.
Request information from YQL.
`Check <http://goo.gl/8AROUD>`_ for more information on YQL.
]
variable[query] assign[=] call[name[quote], parameter[name[query]]]
variable[url] assign[=] binary_operation[binary_operation[constant[https://query.yahooapis.com/v1/public/yql?q=] + name[query]] + constant[&format=json&env=store://datatables.org/alltableswithkeys]]
variable[response] assign[=] call[call[name[urlopen], parameter[name[url]]].read, parameter[]]
return[call[call[call[name[json].loads, parameter[call[name[response].decode, parameter[constant[utf-8]]]]]][constant[query]]][constant[results]]] | keyword[def] identifier[__yahoo_request] ( identifier[query] ):
literal[string]
identifier[query] = identifier[quote] ( identifier[query] )
identifier[url] = literal[string] + identifier[query] + literal[string]
identifier[response] = identifier[urlopen] ( identifier[url] ). identifier[read] ()
keyword[return] identifier[json] . identifier[loads] ( identifier[response] . identifier[decode] ( literal[string] ))[ literal[string] ][ literal[string] ] | def __yahoo_request(query):
"""Request Yahoo Finance information.
Request information from YQL.
`Check <http://goo.gl/8AROUD>`_ for more information on YQL.
"""
query = quote(query)
url = 'https://query.yahooapis.com/v1/public/yql?q=' + query + '&format=json&env=store://datatables.org/alltableswithkeys'
response = urlopen(url).read()
return json.loads(response.decode('utf-8'))['query']['results'] |
def mark_clean(self, entity):
    """
    Mark the given entity as CLEAN.

    Called when an entity has just been loaded fresh from the
    repository, or right after a commit.
    """
    entity_state = EntityState.get_state(entity)
    entity_state.status = ENTITY_STATUS.CLEAN
    entity_state.is_persisted = True
constant[
Marks the given entity as CLEAN.
This is done when an entity is loaded fresh from the repository or
after a commit.
]
variable[state] assign[=] call[name[EntityState].get_state, parameter[name[entity]]]
name[state].status assign[=] name[ENTITY_STATUS].CLEAN
name[state].is_persisted assign[=] constant[True] | keyword[def] identifier[mark_clean] ( identifier[self] , identifier[entity] ):
literal[string]
identifier[state] = identifier[EntityState] . identifier[get_state] ( identifier[entity] )
identifier[state] . identifier[status] = identifier[ENTITY_STATUS] . identifier[CLEAN]
identifier[state] . identifier[is_persisted] = keyword[True] | def mark_clean(self, entity):
"""
Marks the given entity as CLEAN.
This is done when an entity is loaded fresh from the repository or
after a commit.
"""
state = EntityState.get_state(entity)
state.status = ENTITY_STATUS.CLEAN
state.is_persisted = True |
def neg_log_perplexity(batch, model_predictions):
  """Calculate negative log perplexity."""
  _, targets = batch
  model_predictions, targets = _make_list(model_predictions, targets)
  # Per-position log-probability of the target class: project each
  # prediction onto the one-hot target along the last axis.
  xent = [
      np.sum(prediction * layers.one_hot(target, prediction.shape[-1]),
             axis=-1)
      for prediction, target in zip(model_predictions, targets)
  ]
  return masked_mean(xent, targets)
constant[Calculate negative log perplexity.]
<ast.Tuple object at 0x7da1b20e54b0> assign[=] name[batch]
<ast.Tuple object at 0x7da1b20e6d70> assign[=] call[name[_make_list], parameter[name[model_predictions], name[targets]]]
variable[xent] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b20620b0>, <ast.Name object at 0x7da1b2063f10>]]] in starred[call[name[zip], parameter[name[model_predictions], name[targets]]]] begin[:]
variable[hot_target] assign[=] call[name[layers].one_hot, parameter[name[target], call[name[prediction].shape][<ast.UnaryOp object at 0x7da1b201d270>]]]
call[name[xent].append, parameter[call[name[np].sum, parameter[binary_operation[name[prediction] * name[hot_target]]]]]]
return[call[name[masked_mean], parameter[name[xent], name[targets]]]] | keyword[def] identifier[neg_log_perplexity] ( identifier[batch] , identifier[model_predictions] ):
literal[string]
identifier[_] , identifier[targets] = identifier[batch]
identifier[model_predictions] , identifier[targets] = identifier[_make_list] ( identifier[model_predictions] , identifier[targets] )
identifier[xent] =[]
keyword[for] ( identifier[prediction] , identifier[target] ) keyword[in] identifier[zip] ( identifier[model_predictions] , identifier[targets] ):
identifier[hot_target] = identifier[layers] . identifier[one_hot] ( identifier[target] , identifier[prediction] . identifier[shape] [- literal[int] ])
identifier[xent] . identifier[append] ( identifier[np] . identifier[sum] ( identifier[prediction] * identifier[hot_target] , identifier[axis] =- literal[int] ))
keyword[return] identifier[masked_mean] ( identifier[xent] , identifier[targets] ) | def neg_log_perplexity(batch, model_predictions):
"""Calculate negative log perplexity."""
(_, targets) = batch
(model_predictions, targets) = _make_list(model_predictions, targets)
xent = []
for (prediction, target) in zip(model_predictions, targets):
hot_target = layers.one_hot(target, prediction.shape[-1])
xent.append(np.sum(prediction * hot_target, axis=-1)) # depends on [control=['for'], data=[]]
return masked_mean(xent, targets) |
def set_workflow_by_name(self, workflow_name):
    """Configure the workflow to run by the name of this one.
    Looks up ``workflow_name`` in the registry and makes the engine run
    the workflow found there.
    :param workflow_name: name of the workflow.
    :type workflow_name: str
    """
    from .proxies import workflows
    if workflow_name in workflows:
        self.workflow_definition = workflows[workflow_name]
        self.callbacks.replace(self.workflow_definition.workflow)
        return
    # No workflow registered under that name.
    raise WorkflowDefinitionError("Workflow '%s' does not exist"
                                  % (workflow_name,),
                                  workflow_name=workflow_name)
constant[Configure the workflow to run by the name of this one.
Allows the modification of the workflow that the engine will run
by looking in the registry the name passed in parameter.
:param workflow_name: name of the workflow.
:type workflow_name: str
]
from relative_module[proxies] import module[workflows]
if compare[name[workflow_name] <ast.NotIn object at 0x7da2590d7190> name[workflows]] begin[:]
<ast.Raise object at 0x7da20c7c8be0>
name[self].workflow_definition assign[=] call[name[workflows]][name[workflow_name]]
call[name[self].callbacks.replace, parameter[name[self].workflow_definition.workflow]] | keyword[def] identifier[set_workflow_by_name] ( identifier[self] , identifier[workflow_name] ):
literal[string]
keyword[from] . identifier[proxies] keyword[import] identifier[workflows]
keyword[if] identifier[workflow_name] keyword[not] keyword[in] identifier[workflows] :
keyword[raise] identifier[WorkflowDefinitionError] ( literal[string]
%( identifier[workflow_name] ,),
identifier[workflow_name] = identifier[workflow_name] )
identifier[self] . identifier[workflow_definition] = identifier[workflows] [ identifier[workflow_name] ]
identifier[self] . identifier[callbacks] . identifier[replace] ( identifier[self] . identifier[workflow_definition] . identifier[workflow] ) | def set_workflow_by_name(self, workflow_name):
"""Configure the workflow to run by the name of this one.
Allows the modification of the workflow that the engine will run
by looking in the registry the name passed in parameter.
:param workflow_name: name of the workflow.
:type workflow_name: str
"""
from .proxies import workflows
if workflow_name not in workflows:
# No workflow with that name exists
raise WorkflowDefinitionError("Workflow '%s' does not exist" % (workflow_name,), workflow_name=workflow_name) # depends on [control=['if'], data=['workflow_name']]
self.workflow_definition = workflows[workflow_name]
self.callbacks.replace(self.workflow_definition.workflow) |
def _recursive_update(d1, d2):
""" Little helper function that does what d1.update(d2) does,
but works nice and recursively with dicts of dicts of dicts.
It's not necessarily very efficient.
"""
for k in set(d1).intersection(d2):
if isinstance(d1[k], dict) and isinstance(d2[k], dict):
d1[k] = _recursive_update(d1[k], d2[k])
else:
d1[k] = d2[k]
for k in set(d2).difference(d1):
d1[k] = d2[k]
return d1 | def function[_recursive_update, parameter[d1, d2]]:
constant[ Little helper function that does what d1.update(d2) does,
but works nice and recursively with dicts of dicts of dicts.
It's not necessarily very efficient.
]
for taget[name[k]] in starred[call[call[name[set], parameter[name[d1]]].intersection, parameter[name[d2]]]] begin[:]
if <ast.BoolOp object at 0x7da18bc71e70> begin[:]
call[name[d1]][name[k]] assign[=] call[name[_recursive_update], parameter[call[name[d1]][name[k]], call[name[d2]][name[k]]]]
for taget[name[k]] in starred[call[call[name[set], parameter[name[d2]]].difference, parameter[name[d1]]]] begin[:]
call[name[d1]][name[k]] assign[=] call[name[d2]][name[k]]
return[name[d1]] | keyword[def] identifier[_recursive_update] ( identifier[d1] , identifier[d2] ):
literal[string]
keyword[for] identifier[k] keyword[in] identifier[set] ( identifier[d1] ). identifier[intersection] ( identifier[d2] ):
keyword[if] identifier[isinstance] ( identifier[d1] [ identifier[k] ], identifier[dict] ) keyword[and] identifier[isinstance] ( identifier[d2] [ identifier[k] ], identifier[dict] ):
identifier[d1] [ identifier[k] ]= identifier[_recursive_update] ( identifier[d1] [ identifier[k] ], identifier[d2] [ identifier[k] ])
keyword[else] :
identifier[d1] [ identifier[k] ]= identifier[d2] [ identifier[k] ]
keyword[for] identifier[k] keyword[in] identifier[set] ( identifier[d2] ). identifier[difference] ( identifier[d1] ):
identifier[d1] [ identifier[k] ]= identifier[d2] [ identifier[k] ]
keyword[return] identifier[d1] | def _recursive_update(d1, d2):
""" Little helper function that does what d1.update(d2) does,
but works nice and recursively with dicts of dicts of dicts.
It's not necessarily very efficient.
"""
for k in set(d1).intersection(d2):
if isinstance(d1[k], dict) and isinstance(d2[k], dict):
d1[k] = _recursive_update(d1[k], d2[k]) # depends on [control=['if'], data=[]]
else:
d1[k] = d2[k] # depends on [control=['for'], data=['k']]
for k in set(d2).difference(d1):
d1[k] = d2[k] # depends on [control=['for'], data=['k']]
return d1 |
def mine_patterns(self, threshold):
    """
    Mine the constructed FP tree for frequent patterns.

    Single-path trees are enumerated directly via
    ``generate_pattern_list``; otherwise patterns come from mining the
    conditional sub-trees at the given support ``threshold``.
    """
    if self.tree_has_single_path(self.root):
        return self.generate_pattern_list()
    return self.zip_patterns(self.mine_sub_trees(threshold))
constant[
Mine the constructed FP tree for frequent patterns.
]
if call[name[self].tree_has_single_path, parameter[name[self].root]] begin[:]
return[call[name[self].generate_pattern_list, parameter[]]] | keyword[def] identifier[mine_patterns] ( identifier[self] , identifier[threshold] ):
literal[string]
keyword[if] identifier[self] . identifier[tree_has_single_path] ( identifier[self] . identifier[root] ):
keyword[return] identifier[self] . identifier[generate_pattern_list] ()
keyword[else] :
keyword[return] identifier[self] . identifier[zip_patterns] ( identifier[self] . identifier[mine_sub_trees] ( identifier[threshold] )) | def mine_patterns(self, threshold):
"""
Mine the constructed FP tree for frequent patterns.
"""
if self.tree_has_single_path(self.root):
return self.generate_pattern_list() # depends on [control=['if'], data=[]]
else:
return self.zip_patterns(self.mine_sub_trees(threshold)) |
def list(self, full_properties=True, filter_args=None):
    """
    List the (one) :term:`Console` representing the HMC this client is
    connected to.
    Authorization requirements:
    * None
    Parameters:
      full_properties (bool):
        Controls whether the full set of resource properties should be
        retrieved, vs. only a short set consisting of 'object-uri'.
      filter_args (dict):
        Ignored; present only for signature consistency with the other
        list() methods.
    Returns:
      : A list with exactly one :class:`~zhmcclient.Console` object,
      representing the HMC this client is connected to.
    Raises:
      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    # There is exactly one console resource, at a fixed URI.
    console_uri = self._base_uri
    if full_properties:
        props = self.session.get(console_uri)
    else:
        # The Console's Object ID is not part of its URI, so the short
        # property set is just the URI itself.
        props = {self._uri_prop: console_uri}
    return [
        self.resource_class(
            manager=self,
            uri=props[self._uri_prop],
            name=props.get(self._name_prop, None),
            properties=props)
    ]
constant[
List the (one) :term:`Console` representing the HMC this client is
connected to.
Authorization requirements:
* None
Parameters:
full_properties (bool):
Controls whether the full set of resource properties should be
retrieved, vs. only a short set consisting of 'object-uri'.
filter_args (dict):
This parameter exists for consistency with other list() methods
and will be ignored.
Returns:
: A list of :class:`~zhmcclient.Console` objects, containing the one
:term:`Console` representing the HMC this client is connected to.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
]
variable[uri] assign[=] name[self]._base_uri
if name[full_properties] begin[:]
variable[props] assign[=] call[name[self].session.get, parameter[name[uri]]]
variable[resource_obj] assign[=] call[name[self].resource_class, parameter[]]
return[list[[<ast.Name object at 0x7da18fe935b0>]]] | keyword[def] identifier[list] ( identifier[self] , identifier[full_properties] = keyword[True] , identifier[filter_args] = keyword[None] ):
literal[string]
identifier[uri] = identifier[self] . identifier[_base_uri]
keyword[if] identifier[full_properties] :
identifier[props] = identifier[self] . identifier[session] . identifier[get] ( identifier[uri] )
keyword[else] :
identifier[props] ={
identifier[self] . identifier[_uri_prop] : identifier[uri] ,
}
identifier[resource_obj] = identifier[self] . identifier[resource_class] (
identifier[manager] = identifier[self] ,
identifier[uri] = identifier[props] [ identifier[self] . identifier[_uri_prop] ],
identifier[name] = identifier[props] . identifier[get] ( identifier[self] . identifier[_name_prop] , keyword[None] ),
identifier[properties] = identifier[props] )
keyword[return] [ identifier[resource_obj] ] | def list(self, full_properties=True, filter_args=None):
"""
List the (one) :term:`Console` representing the HMC this client is
connected to.
Authorization requirements:
* None
Parameters:
full_properties (bool):
Controls whether the full set of resource properties should be
retrieved, vs. only a short set consisting of 'object-uri'.
filter_args (dict):
This parameter exists for consistency with other list() methods
and will be ignored.
Returns:
: A list of :class:`~zhmcclient.Console` objects, containing the one
:term:`Console` representing the HMC this client is connected to.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError`
"""
uri = self._base_uri # There is only one console object.
if full_properties:
props = self.session.get(uri) # depends on [control=['if'], data=[]]
else:
# Note: The Console resource's Object ID is not part of its URI.
props = {self._uri_prop: uri}
resource_obj = self.resource_class(manager=self, uri=props[self._uri_prop], name=props.get(self._name_prop, None), properties=props)
return [resource_obj] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.