code | code_sememe | token_type | code_dependency |
---|---|---|---|
stringlengths 75–104k | stringlengths 47–309k | stringlengths 215–214k | stringlengths 75–155k |
def match(self, f, *args):
"""Match grammar function 'f' against next token and set 'self.matched'.
Arguments:
f: A grammar function - see efilter.parsers.common.grammar. Must
return TokenMatch or None.
args: Passed to 'f', if any.
Returns:
Instance of efilter.parsers.common.grammar.TokenMatch or None.
Comment:
If a match is returned, it will also be stored in self.matched.
"""
try:
match = f(self.tokenizer, *args)
except StopIteration:
# The grammar function might have tried to access more tokens than
# are available. That's not really an error, it just means it didn't
# match.
return
if match is None:
return
if not isinstance(match, grammar.TokenMatch):
raise TypeError("Invalid grammar function %r returned %r."
% (f, match))
self.matched = match
return match | def function[match, parameter[self, f]]:
constant[Match grammar function 'f' against next token and set 'self.matched'.
Arguments:
f: A grammar function - see efilter.parsers.common.grammar. Must
return TokenMatch or None.
args: Passed to 'f', if any.
Returns:
Instance of efilter.parsers.common.grammar.TokenMatch or None.
Comment:
If a match is returned, it will also be stored in self.matched.
]
<ast.Try object at 0x7da1b0e7ee90>
if compare[name[match] is constant[None]] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b0fd0b50> begin[:]
<ast.Raise object at 0x7da1b0fd0ac0>
name[self].matched assign[=] name[match]
return[name[match]] | keyword[def] identifier[match] ( identifier[self] , identifier[f] ,* identifier[args] ):
literal[string]
keyword[try] :
identifier[match] = identifier[f] ( identifier[self] . identifier[tokenizer] ,* identifier[args] )
keyword[except] identifier[StopIteration] :
keyword[return]
keyword[if] identifier[match] keyword[is] keyword[None] :
keyword[return]
keyword[if] keyword[not] identifier[isinstance] ( identifier[match] , identifier[grammar] . identifier[TokenMatch] ):
keyword[raise] identifier[TypeError] ( literal[string]
%( identifier[f] , identifier[match] ))
identifier[self] . identifier[matched] = identifier[match]
keyword[return] identifier[match] | def match(self, f, *args):
"""Match grammar function 'f' against next token and set 'self.matched'.
Arguments:
f: A grammar function - see efilter.parsers.common.grammar. Must
return TokenMatch or None.
args: Passed to 'f', if any.
Returns:
Instance of efilter.parsers.common.grammar.TokenMatch or None.
Comment:
If a match is returned, it will also be stored in self.matched.
"""
try:
match = f(self.tokenizer, *args) # depends on [control=['try'], data=[]]
except StopIteration:
# The grammar function might have tried to access more tokens than
# are available. That's not really an error, it just means it didn't
# match.
return # depends on [control=['except'], data=[]]
if match is None:
return # depends on [control=['if'], data=[]]
if not isinstance(match, grammar.TokenMatch):
raise TypeError('Invalid grammar function %r returned %r.' % (f, match)) # depends on [control=['if'], data=[]]
self.matched = match
return match |
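
A minimal sketch of how a grammar function plugs into this contract: it receives the tokenizer, returns a TokenMatch on success and None otherwise, and may run off the end of the token stream. Token, TokenMatch, and the Tokenizer below are simplified stand-ins, not the real efilter classes.

from collections import namedtuple

TokenMatch = namedtuple("TokenMatch", ["operator", "value", "tokens"])
Token = namedtuple("Token", ["name", "value"])

class Tokenizer(object):
    """Stand-in tokenizer with lookahead, as grammar functions expect."""
    def __init__(self, tokens):
        self.tokens = list(tokens)
    def peek(self, n):
        try:
            return self.tokens[n]
        except IndexError:
            # Running past the end mirrors the StopIteration that match() catches.
            raise StopIteration()

def keyword(tokenizer, expected):
    """Grammar function: match the next token against a literal keyword."""
    token = tokenizer.peek(0)
    if token.name == "symbol" and token.value == expected:
        return TokenMatch(None, token.value, [token])
    return None

tok = Tokenizer([Token("symbol", "select")])
print(keyword(tok, "select"))  # TokenMatch(...) -> would be stored in self.matched
print(keyword(tok, "where"))   # None -> match() returns without setting anything
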
def updateTrackerItem( self, point = None ):
"""
Updates the tracker item information.
"""
item = self.trackerItem()
if not item:
return
gridRect = self._buildData.get('grid_rect')
if ( not (gridRect and gridRect.isValid()) ):
item.setVisible(False)
return
if ( point is not None ):
item.setPos(point.x(), gridRect.top())
if ( not gridRect.contains(item.pos()) ):
item.setVisible(False)
return
if ( self.chartType() != self.Type.Line ):
item.setVisible(False)
return
if ( not self.isTrackingEnabled() ):
item.setVisible(False)
return
item.rebuild(gridRect) | def function[updateTrackerItem, parameter[self, point]]:
constant[
Updates the tracker item information.
]
variable[item] assign[=] call[name[self].trackerItem, parameter[]]
if <ast.UnaryOp object at 0x7da20cabf400> begin[:]
return[None]
variable[gridRect] assign[=] call[name[self]._buildData.get, parameter[constant[grid_rect]]]
if <ast.UnaryOp object at 0x7da20cabc880> begin[:]
call[name[item].setVisible, parameter[constant[False]]]
return[None]
if compare[name[point] is_not constant[None]] begin[:]
call[name[item].setPos, parameter[call[name[point].x, parameter[]], call[name[gridRect].top, parameter[]]]]
if <ast.UnaryOp object at 0x7da18f00c760> begin[:]
call[name[item].setVisible, parameter[constant[False]]]
return[None]
if compare[call[name[self].chartType, parameter[]] not_equal[!=] name[self].Type.Line] begin[:]
call[name[item].setVisible, parameter[constant[False]]]
return[None]
if <ast.UnaryOp object at 0x7da18f00cfa0> begin[:]
call[name[item].setVisible, parameter[constant[False]]]
return[None]
call[name[item].rebuild, parameter[name[gridRect]]] | keyword[def] identifier[updateTrackerItem] ( identifier[self] , identifier[point] = keyword[None] ):
literal[string]
identifier[item] = identifier[self] . identifier[trackerItem] ()
keyword[if] keyword[not] identifier[item] :
keyword[return]
identifier[gridRect] = identifier[self] . identifier[_buildData] . identifier[get] ( literal[string] )
keyword[if] ( keyword[not] ( identifier[gridRect] keyword[and] identifier[gridRect] . identifier[isValid] ())):
identifier[item] . identifier[setVisible] ( keyword[False] )
keyword[return]
keyword[if] ( identifier[point] keyword[is] keyword[not] keyword[None] ):
identifier[item] . identifier[setPos] ( identifier[point] . identifier[x] (), identifier[gridRect] . identifier[top] ())
keyword[if] ( keyword[not] identifier[gridRect] . identifier[contains] ( identifier[item] . identifier[pos] ())):
identifier[item] . identifier[setVisible] ( keyword[False] )
keyword[return]
keyword[if] ( identifier[self] . identifier[chartType] ()!= identifier[self] . identifier[Type] . identifier[Line] ):
identifier[item] . identifier[setVisible] ( keyword[False] )
keyword[return]
keyword[if] ( keyword[not] identifier[self] . identifier[isTrackingEnabled] ()):
identifier[item] . identifier[setVisible] ( keyword[False] )
keyword[return]
identifier[item] . identifier[rebuild] ( identifier[gridRect] ) | def updateTrackerItem(self, point=None):
"""
Updates the tracker item information.
"""
item = self.trackerItem()
if not item:
return # depends on [control=['if'], data=[]]
gridRect = self._buildData.get('grid_rect')
if not (gridRect and gridRect.isValid()):
item.setVisible(False)
return # depends on [control=['if'], data=[]]
if point is not None:
item.setPos(point.x(), gridRect.top()) # depends on [control=['if'], data=['point']]
if not gridRect.contains(item.pos()):
item.setVisible(False)
return # depends on [control=['if'], data=[]]
if self.chartType() != self.Type.Line:
item.setVisible(False)
return # depends on [control=['if'], data=[]]
if not self.isTrackingEnabled():
item.setVisible(False)
return # depends on [control=['if'], data=[]]
item.rebuild(gridRect) |
def icons(self, strip_ext=False):
'''Get all icons in this DAP, optionally strip extensions'''
result = [f for f in self._stripped_files if self._icons_pattern.match(f)]
if strip_ext:
result = [strip_suffix(f, r'\.({ext})'.format(ext=self._icons_ext), regex=True) for f in result]
return result | def function[icons, parameter[self, strip_ext]]:
constant[Get all icons in this DAP, optionally strip extensions]
variable[result] assign[=] <ast.ListComp object at 0x7da1b1025c60>
if name[strip_ext] begin[:]
variable[result] assign[=] <ast.ListComp object at 0x7da1b1152410>
return[name[result]] | keyword[def] identifier[icons] ( identifier[self] , identifier[strip_ext] = keyword[False] ):
literal[string]
identifier[result] =[ identifier[f] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[_stripped_files] keyword[if] identifier[self] . identifier[_icons_pattern] . identifier[match] ( identifier[f] )]
keyword[if] identifier[strip_ext] :
identifier[result] =[ identifier[strip_suffix] ( identifier[f] , literal[string] . identifier[format] ( identifier[ext] = identifier[self] . identifier[_icons_ext] ), identifier[regex] = keyword[True] ) keyword[for] identifier[f] keyword[in] identifier[result] ]
keyword[return] identifier[result] | def icons(self, strip_ext=False):
"""Get all icons in this DAP, optionally strip extensions"""
result = [f for f in self._stripped_files if self._icons_pattern.match(f)]
if strip_ext:
result = [strip_suffix(f, '\\.({ext})'.format(ext=self._icons_ext), regex=True) for f in result] # depends on [control=['if'], data=[]]
return result |
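
strip_suffix is imported from elsewhere in the package; a plausible regex-aware stand-in and a quick run, assuming self._icons_ext expands to an alternation such as 'png|svg' (both the implementation and that value are assumptions here):

import re

def strip_suffix(s, suffix, regex=False):
    """Drop a trailing suffix; with regex=True, 'suffix' is a pattern."""
    if regex:
        return re.sub(suffix + r'$', '', s)
    return s[:-len(suffix)] if s.endswith(suffix) else s

print(strip_suffix('icons/mydap.png', r'\.(png|svg)', regex=True))  # icons/mydap
print(strip_suffix('icons/mydap.svg', '.svg'))                      # icons/mydap
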
def get_visualizations():
"""Get the available visualizations from the request context. Put the
visualizations in the request context if they are not yet there.
Returns:
:obj:`list` of instances of :class:`.BaseVisualization` or
derived class
"""
if not hasattr(g, 'visualizations'):
g.visualizations = {}
for VisClass in _get_visualization_classes():
vis = VisClass(get_model())
g.visualizations[vis.__class__.__name__] = vis
return g.visualizations | def function[get_visualizations, parameter[]]:
constant[Get the available visualizations from the request context. Put the
visualizations in the request context if they are not yet there.
Returns:
:obj:`list` of instances of :class:`.BaseVisualization` or
derived class
]
if <ast.UnaryOp object at 0x7da20c7c9c00> begin[:]
name[g].visualizations assign[=] dictionary[[], []]
for taget[name[VisClass]] in starred[call[name[_get_visualization_classes], parameter[]]] begin[:]
variable[vis] assign[=] call[name[VisClass], parameter[call[name[get_model], parameter[]]]]
call[name[g].visualizations][name[vis].__class__.__name__] assign[=] name[vis]
return[name[g].visualizations] | keyword[def] identifier[get_visualizations] ():
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[g] , literal[string] ):
identifier[g] . identifier[visualizations] ={}
keyword[for] identifier[VisClass] keyword[in] identifier[_get_visualization_classes] ():
identifier[vis] = identifier[VisClass] ( identifier[get_model] ())
identifier[g] . identifier[visualizations] [ identifier[vis] . identifier[__class__] . identifier[__name__] ]= identifier[vis]
keyword[return] identifier[g] . identifier[visualizations] | def get_visualizations():
"""Get the available visualizations from the request context. Put the
visualizations in the request context if they are not yet there.
Returns:
:obj:`list` of instances of :class:`.BaseVisualization` or
derived class
"""
if not hasattr(g, 'visualizations'):
g.visualizations = {}
for VisClass in _get_visualization_classes():
vis = VisClass(get_model())
g.visualizations[vis.__class__.__name__] = vis # depends on [control=['for'], data=['VisClass']] # depends on [control=['if'], data=[]]
return g.visualizations |
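
The pattern here is request-scoped memoisation on Flask's g object: build once per request, reuse on every later call within it. A minimal standalone illustration of the same pattern (the route and payload are illustrative, not from the source):

from flask import Flask, g

app = Flask(__name__)

def get_expensive_thing():
    # Build once per request, then reuse for the rest of the request.
    if not hasattr(g, "expensive_thing"):
        g.expensive_thing = {"built": True}
    return g.expensive_thing

@app.route("/")
def index():
    first = get_expensive_thing()
    second = get_expensive_thing()
    return "same object: %s" % (first is second)  # "same object: True"
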
def get_limits(self, coord='data'):
"""Get the bounding box of the viewer extents.
Returns
-------
limits : tuple
Bounding box in coordinates of type `coord` in the form of
``(ll_pt, ur_pt)``.
"""
limits = self.t_['limits']
if limits is None:
# No user defined limits. If there is an image loaded
# use its dimensions as the limits
image = self.get_image()
if image is not None:
wd, ht = image.get_size()
limits = ((self.data_off, self.data_off),
(float(wd - 1 + self.data_off),
float(ht - 1 + self.data_off)))
else:
# Calculate limits based on plotted points, if any
canvas = self.get_canvas()
pts = canvas.get_points()
if len(pts) > 0:
limits = trcalc.get_bounds(pts)
else:
# No limits found, go to default
limits = ((0.0, 0.0), (0.0, 0.0))
# convert to desired coordinates
crdmap = self.get_coordmap(coord)
limits = crdmap.data_to(limits)
return limits | def function[get_limits, parameter[self, coord]]:
constant[Get the bounding box of the viewer extents.
Returns
-------
limits : tuple
Bounding box in coordinates of type `coord` in the form of
``(ll_pt, ur_pt)``.
]
variable[limits] assign[=] call[name[self].t_][constant[limits]]
if compare[name[limits] is constant[None]] begin[:]
variable[image] assign[=] call[name[self].get_image, parameter[]]
if compare[name[image] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da2041d8a30> assign[=] call[name[image].get_size, parameter[]]
variable[limits] assign[=] tuple[[<ast.Tuple object at 0x7da2041dbe80>, <ast.Tuple object at 0x7da2041dbb20>]]
variable[crdmap] assign[=] call[name[self].get_coordmap, parameter[name[coord]]]
variable[limits] assign[=] call[name[crdmap].data_to, parameter[name[limits]]]
return[name[limits]] | keyword[def] identifier[get_limits] ( identifier[self] , identifier[coord] = literal[string] ):
literal[string]
identifier[limits] = identifier[self] . identifier[t_] [ literal[string] ]
keyword[if] identifier[limits] keyword[is] keyword[None] :
identifier[image] = identifier[self] . identifier[get_image] ()
keyword[if] identifier[image] keyword[is] keyword[not] keyword[None] :
identifier[wd] , identifier[ht] = identifier[image] . identifier[get_size] ()
identifier[limits] =(( identifier[self] . identifier[data_off] , identifier[self] . identifier[data_off] ),
( identifier[float] ( identifier[wd] - literal[int] + identifier[self] . identifier[data_off] ),
identifier[float] ( identifier[ht] - literal[int] + identifier[self] . identifier[data_off] )))
keyword[else] :
identifier[canvas] = identifier[self] . identifier[get_canvas] ()
identifier[pts] = identifier[canvas] . identifier[get_points] ()
keyword[if] identifier[len] ( identifier[pts] )> literal[int] :
identifier[limits] = identifier[trcalc] . identifier[get_bounds] ( identifier[pts] )
keyword[else] :
identifier[limits] =(( literal[int] , literal[int] ),( literal[int] , literal[int] ))
identifier[crdmap] = identifier[self] . identifier[get_coordmap] ( identifier[coord] )
identifier[limits] = identifier[crdmap] . identifier[data_to] ( identifier[limits] )
keyword[return] identifier[limits] | def get_limits(self, coord='data'):
"""Get the bounding box of the viewer extents.
Returns
-------
limits : tuple
Bounding box in coordinates of type `coord` in the form of
``(ll_pt, ur_pt)``.
"""
limits = self.t_['limits']
if limits is None:
# No user defined limits. If there is an image loaded
# use its dimensions as the limits
image = self.get_image()
if image is not None:
(wd, ht) = image.get_size()
limits = ((self.data_off, self.data_off), (float(wd - 1 + self.data_off), float(ht - 1 + self.data_off))) # depends on [control=['if'], data=['image']]
else:
# Calculate limits based on plotted points, if any
canvas = self.get_canvas()
pts = canvas.get_points()
if len(pts) > 0:
limits = trcalc.get_bounds(pts) # depends on [control=['if'], data=[]]
else:
# No limits found, go to default
limits = ((0.0, 0.0), (0.0, 0.0)) # depends on [control=['if'], data=['limits']]
# convert to desired coordinates
crdmap = self.get_coordmap(coord)
limits = crdmap.data_to(limits)
return limits |
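
trcalc.get_bounds is external; what the fallback branch needs from it is just the ((x_min, y_min), (x_max, y_max)) corners of the plotted points. A stand-in with that assumed behaviour:

def get_bounds(pts):
    """Axis-aligned bounding box of a sequence of (x, y) points."""
    xs = [p[0] for p in pts]
    ys = [p[1] for p in pts]
    return ((min(xs), min(ys)), (max(xs), max(ys)))

print(get_bounds([(2.0, 5.0), (7.5, 1.0), (4.0, 9.0)]))  # ((2.0, 1.0), (7.5, 9.0))
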
def askretrycancel(title=None, message=None, **options):
"""Original doc: Ask if operation should be retried; return true if the answer is yes"""
return psidialogs.ask_ok_cancel(title=title, message=message, ok='Retry') | def function[askretrycancel, parameter[title, message]]:
constant[Original doc: Ask if operation should be retried; return true if the answer is yes]
return[call[name[psidialogs].ask_ok_cancel, parameter[]]] | keyword[def] identifier[askretrycancel] ( identifier[title] = keyword[None] , identifier[message] = keyword[None] ,** identifier[options] ):
literal[string]
keyword[return] identifier[psidialogs] . identifier[ask_ok_cancel] ( identifier[title] = identifier[title] , identifier[message] = identifier[message] , identifier[ok] = literal[string] ) | def askretrycancel(title=None, message=None, **options):
"""Original doc: Ask if operation should be retried; return true if the answer is yes"""
return psidialogs.ask_ok_cancel(title=title, message=message, ok='Retry') |
def _get_basic_logger(loggername, log_to_file, logpath):
"""
Get a logger with our basic configuration done.
:param loggername: Name of logger.
:param log_to_file: Boolean, True if this logger should write a file.
:return: Logger
"""
logger = logging.getLogger(loggername)
logger.propagate = False
remove_handlers(logger)
logger.setLevel(logging.DEBUG)
logger_config = LOGGING_CONFIG.get(loggername, DEFAULT_LOGGING_CONFIG)
if TRUNCATE_LOG or logger_config.get("truncate_logs").get("truncate"):
cfilter = ContextFilter()
trunc_logs = logger_config.get("truncate_logs")
# pylint: disable=invalid-name
cfilter.MAXIMUM_LENGTH = trunc_logs.get("max_len",
DEFAULT_LOGGING_CONFIG.get(
"truncate_logs").get("max_len"))
cfilter.REVEAL_LENGTH = trunc_logs.get("reveal_len",
DEFAULT_LOGGING_CONFIG.get(
"truncate_logs").get("reveal_len"))
logger.addFilter(cfilter)
# Filehandler for logger
if log_to_file:
_add_filehandler(logger, logpath, name=loggername)
return logger | def function[_get_basic_logger, parameter[loggername, log_to_file, logpath]]:
constant[
Get a logger with our basic configuration done.
:param loggername: Name of logger.
:param log_to_file: Boolean, True if this logger should write a file.
:return: Logger
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[loggername]]]
name[logger].propagate assign[=] constant[False]
call[name[remove_handlers], parameter[name[logger]]]
call[name[logger].setLevel, parameter[name[logging].DEBUG]]
variable[logger_config] assign[=] call[name[LOGGING_CONFIG].get, parameter[name[loggername], name[DEFAULT_LOGGING_CONFIG]]]
if <ast.BoolOp object at 0x7da1b0e15540> begin[:]
variable[cfilter] assign[=] call[name[ContextFilter], parameter[]]
variable[trunc_logs] assign[=] call[name[logger_config].get, parameter[constant[truncate_logs]]]
name[cfilter].MAXIMUM_LENGTH assign[=] call[name[trunc_logs].get, parameter[constant[max_len], call[call[name[DEFAULT_LOGGING_CONFIG].get, parameter[constant[truncate_logs]]].get, parameter[constant[max_len]]]]]
name[cfilter].REVEAL_LENGTH assign[=] call[name[trunc_logs].get, parameter[constant[reveal_len], call[call[name[DEFAULT_LOGGING_CONFIG].get, parameter[constant[truncate_logs]]].get, parameter[constant[reveal_len]]]]]
call[name[logger].addFilter, parameter[name[cfilter]]]
if name[log_to_file] begin[:]
call[name[_add_filehandler], parameter[name[logger], name[logpath]]]
return[name[logger]] | keyword[def] identifier[_get_basic_logger] ( identifier[loggername] , identifier[log_to_file] , identifier[logpath] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[loggername] )
identifier[logger] . identifier[propagate] = keyword[False]
identifier[remove_handlers] ( identifier[logger] )
identifier[logger] . identifier[setLevel] ( identifier[logging] . identifier[DEBUG] )
identifier[logger_config] = identifier[LOGGING_CONFIG] . identifier[get] ( identifier[loggername] , identifier[DEFAULT_LOGGING_CONFIG] )
keyword[if] identifier[TRUNCATE_LOG] keyword[or] identifier[logger_config] . identifier[get] ( literal[string] ). identifier[get] ( literal[string] ):
identifier[cfilter] = identifier[ContextFilter] ()
identifier[trunc_logs] = identifier[logger_config] . identifier[get] ( literal[string] )
identifier[cfilter] . identifier[MAXIMUM_LENGTH] = identifier[trunc_logs] . identifier[get] ( literal[string] ,
identifier[DEFAULT_LOGGING_CONFIG] . identifier[get] (
literal[string] ). identifier[get] ( literal[string] ))
identifier[cfilter] . identifier[REVEAL_LENGTH] = identifier[trunc_logs] . identifier[get] ( literal[string] ,
identifier[DEFAULT_LOGGING_CONFIG] . identifier[get] (
literal[string] ). identifier[get] ( literal[string] ))
identifier[logger] . identifier[addFilter] ( identifier[cfilter] )
keyword[if] identifier[log_to_file] :
identifier[_add_filehandler] ( identifier[logger] , identifier[logpath] , identifier[name] = identifier[loggername] )
keyword[return] identifier[logger] | def _get_basic_logger(loggername, log_to_file, logpath):
"""
Get a logger with our basic configuration done.
:param loggername: Name of logger.
:param log_to_file: Boolean, True if this logger should write a file.
:return: Logger
"""
logger = logging.getLogger(loggername)
logger.propagate = False
remove_handlers(logger)
logger.setLevel(logging.DEBUG)
logger_config = LOGGING_CONFIG.get(loggername, DEFAULT_LOGGING_CONFIG)
if TRUNCATE_LOG or logger_config.get('truncate_logs').get('truncate'):
cfilter = ContextFilter()
trunc_logs = logger_config.get('truncate_logs')
# pylint: disable=invalid-name
cfilter.MAXIMUM_LENGTH = trunc_logs.get('max_len', DEFAULT_LOGGING_CONFIG.get('truncate_logs').get('max_len'))
cfilter.REVEAL_LENGTH = trunc_logs.get('reveal_len', DEFAULT_LOGGING_CONFIG.get('truncate_logs').get('reveal_len'))
logger.addFilter(cfilter) # depends on [control=['if'], data=[]]
# Filehandler for logger
if log_to_file:
_add_filehandler(logger, logpath, name=loggername) # depends on [control=['if'], data=[]]
return logger |
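
ContextFilter is defined elsewhere; given the MAXIMUM_LENGTH / REVEAL_LENGTH knobs set above, a plausible sketch is a logging.Filter that rewrites overlong messages while keeping the head and tail visible (the marker text and exact behaviour are assumptions):

import logging

class ContextFilter(logging.Filter):
    MAXIMUM_LENGTH = 100
    REVEAL_LENGTH = 50

    def filter(self, record):
        msg = str(record.msg)
        if len(msg) > self.MAXIMUM_LENGTH:
            # Keep REVEAL_LENGTH chars from each end, elide the middle.
            record.msg = "%s ...[truncated]... %s" % (
                msg[:self.REVEAL_LENGTH], msg[-self.REVEAL_LENGTH:])
        return True  # never drop a record, only rewrite it

logging.basicConfig(level=logging.DEBUG)
demo = logging.getLogger("demo")
demo.addFilter(ContextFilter())
demo.debug("x" * 500)  # 50 x's, the marker, then the last 50 x's
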
def datetime(self):
"""
Returns a datetime object representing the date the game was played.
"""
date_string = '%s %s %s' % (self._day,
self._date,
self._year)
return datetime.strptime(date_string, '%a %B %d %Y') | def function[datetime, parameter[self]]:
constant[
Returns a datetime object representing the date the game was played.
]
variable[date_string] assign[=] binary_operation[constant[%s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0b37cd0>, <ast.Attribute object at 0x7da1b0b35e40>, <ast.Attribute object at 0x7da1b0b34f10>]]]
return[call[name[datetime].strptime, parameter[name[date_string], constant[%a %B %d %Y]]]] | keyword[def] identifier[datetime] ( identifier[self] ):
literal[string]
identifier[date_string] = literal[string] %( identifier[self] . identifier[_day] ,
identifier[self] . identifier[_date] ,
identifier[self] . identifier[_year] )
keyword[return] identifier[datetime] . identifier[strptime] ( identifier[date_string] , literal[string] ) | def datetime(self):
"""
Returns a datetime object representing the date the game was played.
"""
date_string = '%s %s %s' % (self._day, self._date, self._year)
return datetime.strptime(date_string, '%a %B %d %Y') |
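
The '%a %B %d %Y' format expects an abbreviated weekday name, a full month name, a day of month, and a four-digit year, so _day, _date, and _year presumably hold strings along these lines. A standalone check of the parse:

from datetime import datetime

date_string = '%s %s %s' % ('Sat', 'October 14', '2017')
print(datetime.strptime(date_string, '%a %B %d %Y'))  # 2017-10-14 00:00:00
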
def child_added(self, child):
""" Handle the child added event from the declaration.
This handler will unparent the child toolkit widget. Subclasses
which need more control should reimplement this method.
"""
super(UiKitView, self).child_added(child)
widget = self.widget
#: TODO: Should index be cached?
for i, child_widget in enumerate(self.child_widgets()):
if child_widget == child.widget:
widget.insertSubview(child_widget, atIndex=i) | def function[child_added, parameter[self, child]]:
constant[ Handle the child added event from the declaration.
This handler will unparent the child toolkit widget. Subclasses
which need more control should reimplement this method.
]
call[call[name[super], parameter[name[UiKitView], name[self]]].child_added, parameter[name[child]]]
variable[widget] assign[=] name[self].widget
for taget[tuple[[<ast.Name object at 0x7da1b1c60910>, <ast.Name object at 0x7da1b1c61180>]]] in starred[call[name[enumerate], parameter[call[name[self].child_widgets, parameter[]]]]] begin[:]
if compare[name[child_widget] equal[==] name[child].widget] begin[:]
call[name[widget].insertSubview, parameter[name[child_widget]]] | keyword[def] identifier[child_added] ( identifier[self] , identifier[child] ):
literal[string]
identifier[super] ( identifier[UiKitView] , identifier[self] ). identifier[child_added] ( identifier[child] )
identifier[widget] = identifier[self] . identifier[widget]
keyword[for] identifier[i] , identifier[child_widget] keyword[in] identifier[enumerate] ( identifier[self] . identifier[child_widgets] ()):
keyword[if] identifier[child_widget] == identifier[child] . identifier[widget] :
identifier[widget] . identifier[insertSubview] ( identifier[child_widget] , identifier[atIndex] = identifier[i] ) | def child_added(self, child):
""" Handle the child added event from the declaration.
This handler will unparent the child toolkit widget. Subclasses
which need more control should reimplement this method.
"""
super(UiKitView, self).child_added(child)
widget = self.widget
#: TODO: Should index be cached?
for (i, child_widget) in enumerate(self.child_widgets()):
if child_widget == child.widget:
widget.insertSubview(child_widget, atIndex=i) # depends on [control=['if'], data=['child_widget']] # depends on [control=['for'], data=[]] |
def create_information_tear_sheet(factor_data,
group_neutral=False,
by_group=False):
"""
Creates a tear sheet for information analysis of a factor.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
containing the values for a single alpha factor, forward returns for
each period, the factor quantile/bin that factor value belongs to, and
(optionally) the group the asset belongs to.
- See full explanation in utils.get_clean_factor_and_forward_returns
group_neutral : bool
Demean forward returns by group before computing IC.
by_group : bool
If True, display graphs separately for each group.
"""
ic = perf.factor_information_coefficient(factor_data, group_neutral)
plotting.plot_information_table(ic)
columns_wide = 2
fr_cols = len(ic.columns)
rows_when_wide = (((fr_cols - 1) // columns_wide) + 1)
vertical_sections = fr_cols + 3 * rows_when_wide + 2 * fr_cols
gf = GridFigure(rows=vertical_sections, cols=columns_wide)
ax_ic_ts = [gf.next_row() for _ in range(fr_cols)]
plotting.plot_ic_ts(ic, ax=ax_ic_ts)
ax_ic_hqq = [gf.next_cell() for _ in range(fr_cols * 2)]
plotting.plot_ic_hist(ic, ax=ax_ic_hqq[::2])
plotting.plot_ic_qq(ic, ax=ax_ic_hqq[1::2])
if not by_group:
mean_monthly_ic = \
perf.mean_information_coefficient(factor_data,
group_adjust=group_neutral,
by_group=False,
by_time="M")
ax_monthly_ic_heatmap = [gf.next_cell() for x in range(fr_cols)]
plotting.plot_monthly_ic_heatmap(mean_monthly_ic,
ax=ax_monthly_ic_heatmap)
if by_group:
mean_group_ic = \
perf.mean_information_coefficient(factor_data,
group_adjust=group_neutral,
by_group=True)
plotting.plot_ic_by_group(mean_group_ic, ax=gf.next_row())
plt.show()
gf.close() | def function[create_information_tear_sheet, parameter[factor_data, group_neutral, by_group]]:
constant[
Creates a tear sheet for information analysis of a factor.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
containing the values for a single alpha factor, forward returns for
each period, the factor quantile/bin that factor value belongs to, and
(optionally) the group the asset belongs to.
- See full explanation in utils.get_clean_factor_and_forward_returns
group_neutral : bool
Demean forward returns by group before computing IC.
by_group : bool
If True, display graphs separately for each group.
]
variable[ic] assign[=] call[name[perf].factor_information_coefficient, parameter[name[factor_data], name[group_neutral]]]
call[name[plotting].plot_information_table, parameter[name[ic]]]
variable[columns_wide] assign[=] constant[2]
variable[fr_cols] assign[=] call[name[len], parameter[name[ic].columns]]
variable[rows_when_wide] assign[=] binary_operation[binary_operation[binary_operation[name[fr_cols] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> name[columns_wide]] + constant[1]]
variable[vertical_sections] assign[=] binary_operation[binary_operation[name[fr_cols] + binary_operation[constant[3] * name[rows_when_wide]]] + binary_operation[constant[2] * name[fr_cols]]]
variable[gf] assign[=] call[name[GridFigure], parameter[]]
variable[ax_ic_ts] assign[=] <ast.ListComp object at 0x7da20c7940a0>
call[name[plotting].plot_ic_ts, parameter[name[ic]]]
variable[ax_ic_hqq] assign[=] <ast.ListComp object at 0x7da20c796a40>
call[name[plotting].plot_ic_hist, parameter[name[ic]]]
call[name[plotting].plot_ic_qq, parameter[name[ic]]]
if <ast.UnaryOp object at 0x7da20c796710> begin[:]
variable[mean_monthly_ic] assign[=] call[name[perf].mean_information_coefficient, parameter[name[factor_data]]]
variable[ax_monthly_ic_heatmap] assign[=] <ast.ListComp object at 0x7da2047e8490>
call[name[plotting].plot_monthly_ic_heatmap, parameter[name[mean_monthly_ic]]]
if name[by_group] begin[:]
variable[mean_group_ic] assign[=] call[name[perf].mean_information_coefficient, parameter[name[factor_data]]]
call[name[plotting].plot_ic_by_group, parameter[name[mean_group_ic]]]
call[name[plt].show, parameter[]]
call[name[gf].close, parameter[]] | keyword[def] identifier[create_information_tear_sheet] ( identifier[factor_data] ,
identifier[group_neutral] = keyword[False] ,
identifier[by_group] = keyword[False] ):
literal[string]
identifier[ic] = identifier[perf] . identifier[factor_information_coefficient] ( identifier[factor_data] , identifier[group_neutral] )
identifier[plotting] . identifier[plot_information_table] ( identifier[ic] )
identifier[columns_wide] = literal[int]
identifier[fr_cols] = identifier[len] ( identifier[ic] . identifier[columns] )
identifier[rows_when_wide] =((( identifier[fr_cols] - literal[int] )// identifier[columns_wide] )+ literal[int] )
identifier[vertical_sections] = identifier[fr_cols] + literal[int] * identifier[rows_when_wide] + literal[int] * identifier[fr_cols]
identifier[gf] = identifier[GridFigure] ( identifier[rows] = identifier[vertical_sections] , identifier[cols] = identifier[columns_wide] )
identifier[ax_ic_ts] =[ identifier[gf] . identifier[next_row] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[fr_cols] )]
identifier[plotting] . identifier[plot_ic_ts] ( identifier[ic] , identifier[ax] = identifier[ax_ic_ts] )
identifier[ax_ic_hqq] =[ identifier[gf] . identifier[next_cell] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[fr_cols] * literal[int] )]
identifier[plotting] . identifier[plot_ic_hist] ( identifier[ic] , identifier[ax] = identifier[ax_ic_hqq] [:: literal[int] ])
identifier[plotting] . identifier[plot_ic_qq] ( identifier[ic] , identifier[ax] = identifier[ax_ic_hqq] [ literal[int] :: literal[int] ])
keyword[if] keyword[not] identifier[by_group] :
identifier[mean_monthly_ic] = identifier[perf] . identifier[mean_information_coefficient] ( identifier[factor_data] ,
identifier[group_adjust] = identifier[group_neutral] ,
identifier[by_group] = keyword[False] ,
identifier[by_time] = literal[string] )
identifier[ax_monthly_ic_heatmap] =[ identifier[gf] . identifier[next_cell] () keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[fr_cols] )]
identifier[plotting] . identifier[plot_monthly_ic_heatmap] ( identifier[mean_monthly_ic] ,
identifier[ax] = identifier[ax_monthly_ic_heatmap] )
keyword[if] identifier[by_group] :
identifier[mean_group_ic] = identifier[perf] . identifier[mean_information_coefficient] ( identifier[factor_data] ,
identifier[group_adjust] = identifier[group_neutral] ,
identifier[by_group] = keyword[True] )
identifier[plotting] . identifier[plot_ic_by_group] ( identifier[mean_group_ic] , identifier[ax] = identifier[gf] . identifier[next_row] ())
identifier[plt] . identifier[show] ()
identifier[gf] . identifier[close] () | def create_information_tear_sheet(factor_data, group_neutral=False, by_group=False):
"""
Creates a tear sheet for information analysis of a factor.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
containing the values for a single alpha factor, forward returns for
each period, the factor quantile/bin that factor value belongs to, and
(optionally) the group the asset belongs to.
- See full explanation in utils.get_clean_factor_and_forward_returns
group_neutral : bool
Demean forward returns by group before computing IC.
by_group : bool
If True, display graphs separately for each group.
"""
ic = perf.factor_information_coefficient(factor_data, group_neutral)
plotting.plot_information_table(ic)
columns_wide = 2
fr_cols = len(ic.columns)
rows_when_wide = (fr_cols - 1) // columns_wide + 1
vertical_sections = fr_cols + 3 * rows_when_wide + 2 * fr_cols
gf = GridFigure(rows=vertical_sections, cols=columns_wide)
ax_ic_ts = [gf.next_row() for _ in range(fr_cols)]
plotting.plot_ic_ts(ic, ax=ax_ic_ts)
ax_ic_hqq = [gf.next_cell() for _ in range(fr_cols * 2)]
plotting.plot_ic_hist(ic, ax=ax_ic_hqq[::2])
plotting.plot_ic_qq(ic, ax=ax_ic_hqq[1::2])
if not by_group:
mean_monthly_ic = perf.mean_information_coefficient(factor_data, group_adjust=group_neutral, by_group=False, by_time='M')
ax_monthly_ic_heatmap = [gf.next_cell() for x in range(fr_cols)]
plotting.plot_monthly_ic_heatmap(mean_monthly_ic, ax=ax_monthly_ic_heatmap) # depends on [control=['if'], data=[]]
if by_group:
mean_group_ic = perf.mean_information_coefficient(factor_data, group_adjust=group_neutral, by_group=True)
plotting.plot_ic_by_group(mean_group_ic, ax=gf.next_row()) # depends on [control=['if'], data=[]]
plt.show()
gf.close() |
def make_router(self, rule, method=None, handler=None, cls=None,
name=None, **params):
'''Create a new :class:`.Router` from a ``rule`` and parameters.
This method is used during initialisation when building child
Routers from the :attr:`rule_methods`.
'''
cls = cls or Router
router = cls(rule, name=name, **params)
for r in self.routes:
if r._route == router._route:
if isinstance(r, cls):
router = r
router._set_params(params)
break
if method and handler:
if isinstance(method, tuple):
for m in method:
setattr(router, m, handler)
else:
setattr(router, method, handler)
return router | def function[make_router, parameter[self, rule, method, handler, cls, name]]:
constant[Create a new :class:`.Router` from a ``rule`` and parameters.
This method is used during initialisation when building child
Routers from the :attr:`rule_methods`.
]
variable[cls] assign[=] <ast.BoolOp object at 0x7da18eb578b0>
variable[router] assign[=] call[name[cls], parameter[name[rule]]]
for taget[name[r]] in starred[name[self].routes] begin[:]
if compare[name[r]._route equal[==] name[router]._route] begin[:]
if call[name[isinstance], parameter[name[r], name[cls]]] begin[:]
variable[router] assign[=] name[r]
call[name[router]._set_params, parameter[name[params]]]
break
if <ast.BoolOp object at 0x7da18eb55780> begin[:]
if call[name[isinstance], parameter[name[method], name[tuple]]] begin[:]
for taget[name[m]] in starred[name[method]] begin[:]
call[name[setattr], parameter[name[router], name[m], name[handler]]]
return[name[router]] | keyword[def] identifier[make_router] ( identifier[self] , identifier[rule] , identifier[method] = keyword[None] , identifier[handler] = keyword[None] , identifier[cls] = keyword[None] ,
identifier[name] = keyword[None] ,** identifier[params] ):
literal[string]
identifier[cls] = identifier[cls] keyword[or] identifier[Router]
identifier[router] = identifier[cls] ( identifier[rule] , identifier[name] = identifier[name] ,** identifier[params] )
keyword[for] identifier[r] keyword[in] identifier[self] . identifier[routes] :
keyword[if] identifier[r] . identifier[_route] == identifier[router] . identifier[_route] :
keyword[if] identifier[isinstance] ( identifier[r] , identifier[cls] ):
identifier[router] = identifier[r]
identifier[router] . identifier[_set_params] ( identifier[params] )
keyword[break]
keyword[if] identifier[method] keyword[and] identifier[handler] :
keyword[if] identifier[isinstance] ( identifier[method] , identifier[tuple] ):
keyword[for] identifier[m] keyword[in] identifier[method] :
identifier[setattr] ( identifier[router] , identifier[m] , identifier[handler] )
keyword[else] :
identifier[setattr] ( identifier[router] , identifier[method] , identifier[handler] )
keyword[return] identifier[router] | def make_router(self, rule, method=None, handler=None, cls=None, name=None, **params):
"""Create a new :class:`.Router` from a ``rule`` and parameters.
This method is used during initialisation when building child
Routers from the :attr:`rule_methods`.
"""
cls = cls or Router
router = cls(rule, name=name, **params)
for r in self.routes:
if r._route == router._route:
if isinstance(r, cls):
router = r
router._set_params(params)
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
if method and handler:
if isinstance(method, tuple):
for m in method:
setattr(router, m, handler) # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]]
else:
setattr(router, method, handler) # depends on [control=['if'], data=[]]
return router |
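
The tuple branch fans one handler out under several HTTP verbs via setattr; isolated from the surrounding Router class (Stub is a stand-in), the mechanics look like this:

class Stub(object):
    pass

router = Stub()
handler = lambda request: "ok"
method = ("get", "head")

if isinstance(method, tuple):
    for m in method:
        setattr(router, m, handler)  # bind the same handler under each verb
else:
    setattr(router, method, handler)

print(router.get is router.head)  # True: both verbs share one handler
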
def get(self, direction=NOMINAL, names=ALL, diff=False, factor=False):
""" get(direction=NOMINAL, names=ALL, diff=False, factor=False)
Returns different representations of the contained value(s). *direction* should be any of
*NOMINAL*, *UP* or *DOWN*. When not *NOMINAL*, *names* decides which uncertainties to take
into account for the combination. When *diff* is *True*, only the unsigned, combined
uncertainty is returned. When *False*, the nominal value plus or minus the uncertainty is
returned. When *factor* is *True*, the ratio w.r.t. the nominal value is returned.
"""
if direction == self.NOMINAL:
value = self.nominal
elif direction in (self.UP, self.DOWN):
# find uncertainties to take into account
if names == self.ALL:
names = self.uncertainties.keys()
else:
names = make_list(names)
if any(name not in self.uncertainties for name in names):
unknown = list(set(names) - set(self.uncertainties.keys()))
raise ValueError("unknown uncertainty name(s): {}".format(unknown))
# calculate the combined uncertainty without correlation
idx = int(direction == self.DOWN)
uncs = [self.uncertainties[name][idx] for name in names]
unc = sum(u**2. for u in uncs)**0.5
# determine the output value
if diff:
value = unc
elif direction == self.UP:
value = self.nominal + unc
else:
value = self.nominal - unc
else:
raise ValueError("unknown direction: {}".format(direction))
return value if not factor else value / self.nominal | def function[get, parameter[self, direction, names, diff, factor]]:
constant[ get(direction=NOMINAL, names=ALL, diff=False, factor=False)
Returns different representations of the contained value(s). *direction* should be any of
*NOMINAL*, *UP* or *DOWN*. When not *NOMINAL*, *names* decides which uncertainties to take
into account for the combination. When *diff* is *True*, only the unsigned, combined
uncertainty is returned. When *False*, the nominal value plus or minus the uncertainty is
returned. When *factor* is *True*, the ratio w.r.t. the nominal value is returned.
]
if compare[name[direction] equal[==] name[self].NOMINAL] begin[:]
variable[value] assign[=] name[self].nominal
return[<ast.IfExp object at 0x7da1b1fba2f0>] | keyword[def] identifier[get] ( identifier[self] , identifier[direction] = identifier[NOMINAL] , identifier[names] = identifier[ALL] , identifier[diff] = keyword[False] , identifier[factor] = keyword[False] ):
literal[string]
keyword[if] identifier[direction] == identifier[self] . identifier[NOMINAL] :
identifier[value] = identifier[self] . identifier[nominal]
keyword[elif] identifier[direction] keyword[in] ( identifier[self] . identifier[UP] , identifier[self] . identifier[DOWN] ):
keyword[if] identifier[names] == identifier[self] . identifier[ALL] :
identifier[names] = identifier[self] . identifier[uncertainties] . identifier[keys] ()
keyword[else] :
identifier[names] = identifier[make_list] ( identifier[names] )
keyword[if] identifier[any] ( identifier[name] keyword[not] keyword[in] identifier[self] . identifier[uncertainties] keyword[for] identifier[name] keyword[in] identifier[names] ):
identifier[unknown] = identifier[list] ( identifier[set] ( identifier[names] )- identifier[set] ( identifier[self] . identifier[uncertainties] . identifier[keys] ()))
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[unknown] ))
identifier[idx] = identifier[int] ( identifier[direction] == identifier[self] . identifier[DOWN] )
identifier[uncs] =[ identifier[self] . identifier[uncertainties] [ identifier[name] ][ identifier[idx] ] keyword[for] identifier[name] keyword[in] identifier[names] ]
identifier[unc] = identifier[sum] ( identifier[u] ** literal[int] keyword[for] identifier[u] keyword[in] identifier[uncs] )** literal[int]
keyword[if] identifier[diff] :
identifier[value] = identifier[unc]
keyword[elif] identifier[direction] == identifier[self] . identifier[UP] :
identifier[value] = identifier[self] . identifier[nominal] + identifier[unc]
keyword[else] :
identifier[value] = identifier[self] . identifier[nominal] - identifier[unc]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[direction] ))
keyword[return] identifier[value] keyword[if] keyword[not] identifier[factor] keyword[else] identifier[value] / identifier[self] . identifier[nominal] | def get(self, direction=NOMINAL, names=ALL, diff=False, factor=False):
""" get(direction=NOMINAL, names=ALL, diff=False, factor=False)
Returns different representations of the contained value(s). *direction* should be any of
*NOMINAL*, *UP* or *DOWN*. When not *NOMINAL*, *names* decides which uncertainties to take
into account for the combination. When *diff* is *True*, only the unsigned, combined
uncertainty is returned. When *False*, the nominal value plus or minus the uncertainty is
returned. When *factor* is *True*, the ratio w.r.t. the nominal value is returned.
"""
if direction == self.NOMINAL:
value = self.nominal # depends on [control=['if'], data=[]]
elif direction in (self.UP, self.DOWN):
# find uncertainties to take into account
if names == self.ALL:
names = self.uncertainties.keys() # depends on [control=['if'], data=['names']]
else:
names = make_list(names)
if any((name not in self.uncertainties for name in names)):
unknown = list(set(names) - set(self.uncertainties.keys()))
raise ValueError('unknown uncertainty name(s): {}'.format(unknown)) # depends on [control=['if'], data=[]]
# calculate the combined uncertainty without correlation
idx = int(direction == self.DOWN)
uncs = [self.uncertainties[name][idx] for name in names]
unc = sum((u ** 2.0 for u in uncs)) ** 0.5
# determine the output value
if diff:
value = unc # depends on [control=['if'], data=[]]
elif direction == self.UP:
value = self.nominal + unc # depends on [control=['if'], data=[]]
else:
value = self.nominal - unc # depends on [control=['if'], data=['direction']]
else:
raise ValueError('unknown direction: {}'.format(direction))
return value if not factor else value / self.nominal |
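
The combined uncertainty is the uncorrelated quadrature sum sqrt(sum of u_i squared) over the selected sources; a standalone check of the arithmetic used in the UP branch:

uncs = [0.3, 0.4]                        # per-source 'up' uncertainties
unc = sum(u ** 2.0 for u in uncs) ** 0.5
print(unc)                               # 0.5
nominal = 10.0
print(nominal + unc)                     # UP value: 10.5
print((nominal + unc) / nominal)         # factor=True form: 1.05
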
def registerevent(self, event_name, fn_name, *args):
"""
Register at-spi event
@param event_name: Event name in at-spi format.
@type event_name: string
@param fn_name: Callback function
@type fn_name: function
@param *args: arguments to be passed to the callback function
@type *args: var args
@return: 1 if registration was successful, 0 if not.
@rtype: integer
"""
if not isinstance(event_name, str):
raise ValueError("event_name should be string")
self._pollEvents._callback[event_name] = [event_name, fn_name, args]
return self._remote_registerevent(event_name) | def function[registerevent, parameter[self, event_name, fn_name]]:
constant[
Register at-spi event
@param event_name: Event name in at-spi format.
@type event_name: string
@param fn_name: Callback function
@type fn_name: function
@param *args: arguments to be passed to the callback function
@type *args: var args
@return: 1 if registration was successful, 0 if not.
@rtype: integer
]
if <ast.UnaryOp object at 0x7da1aff75210> begin[:]
<ast.Raise object at 0x7da1aff74610>
call[name[self]._pollEvents._callback][name[event_name]] assign[=] list[[<ast.Name object at 0x7da1aff77490>, <ast.Name object at 0x7da1aff74f10>, <ast.Name object at 0x7da1aff768c0>]]
return[call[name[self]._remote_registerevent, parameter[name[event_name]]]] | keyword[def] identifier[registerevent] ( identifier[self] , identifier[event_name] , identifier[fn_name] ,* identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[event_name] , identifier[str] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_pollEvents] . identifier[_callback] [ identifier[event_name] ]=[ identifier[event_name] , identifier[fn_name] , identifier[args] ]
keyword[return] identifier[self] . identifier[_remote_registerevent] ( identifier[event_name] ) | def registerevent(self, event_name, fn_name, *args):
"""
Register at-spi event
@param event_name: Event name in at-spi format.
@type event_name: string
@param fn_name: Callback function
@type fn_name: function
@param *args: arguments to be passed to the callback function
@type *args: var args
@return: 1 if registration was successful, 0 if not.
@rtype: integer
"""
if not isinstance(event_name, str):
raise ValueError('event_name should be string') # depends on [control=['if'], data=[]]
self._pollEvents._callback[event_name] = [event_name, fn_name, args]
return self._remote_registerevent(event_name) |
def make_user_list(self, emails, usernames):
"""
Given a list of emails and usernames fetch DukeDS user info.
Parameters that are None will be skipped.
:param emails: [str]: list of emails (can be null)
:param usernames: [str]: list of usernames(netid)
:return: [RemoteUser]: details about any users referenced by the two parameters
"""
to_users = []
remaining_emails = [] if not emails else list(emails)
remaining_usernames = [] if not usernames else list(usernames)
for user in self.remote_store.fetch_users():
if user.email in remaining_emails:
to_users.append(user)
remaining_emails.remove(user.email)
elif user.username in remaining_usernames:
to_users.append(user)
remaining_usernames.remove(user.username)
if remaining_emails or remaining_usernames:
unable_to_find_users = ','.join(remaining_emails + remaining_usernames)
msg = "Unable to find users for the following email/usernames: {}".format(unable_to_find_users)
raise ValueError(msg)
return to_users | def function[make_user_list, parameter[self, emails, usernames]]:
constant[
Given a list of emails and usernames fetch DukeDS user info.
Parameters that are None will be skipped.
:param emails: [str]: list of emails (can be null)
:param usernames: [str]: list of usernames(netid)
:return: [RemoteUser]: details about any users referenced by the two parameters
]
variable[to_users] assign[=] list[[]]
variable[remaining_emails] assign[=] <ast.IfExp object at 0x7da20c7cacb0>
variable[remaining_usernames] assign[=] <ast.IfExp object at 0x7da20c7c8910>
for taget[name[user]] in starred[call[name[self].remote_store.fetch_users, parameter[]]] begin[:]
if compare[name[user].email in name[remaining_emails]] begin[:]
call[name[to_users].append, parameter[name[user]]]
call[name[remaining_emails].remove, parameter[name[user].email]]
if <ast.BoolOp object at 0x7da204344460> begin[:]
variable[unable_to_find_users] assign[=] call[constant[,].join, parameter[binary_operation[name[remaining_emails] + name[remaining_usernames]]]]
variable[msg] assign[=] call[constant[Unable to find users for the following email/usernames: {}].format, parameter[name[unable_to_find_users]]]
<ast.Raise object at 0x7da2043452a0>
return[name[to_users]] | keyword[def] identifier[make_user_list] ( identifier[self] , identifier[emails] , identifier[usernames] ):
literal[string]
identifier[to_users] =[]
identifier[remaining_emails] =[] keyword[if] keyword[not] identifier[emails] keyword[else] identifier[list] ( identifier[emails] )
identifier[remaining_usernames] =[] keyword[if] keyword[not] identifier[usernames] keyword[else] identifier[list] ( identifier[usernames] )
keyword[for] identifier[user] keyword[in] identifier[self] . identifier[remote_store] . identifier[fetch_users] ():
keyword[if] identifier[user] . identifier[email] keyword[in] identifier[remaining_emails] :
identifier[to_users] . identifier[append] ( identifier[user] )
identifier[remaining_emails] . identifier[remove] ( identifier[user] . identifier[email] )
keyword[elif] identifier[user] . identifier[username] keyword[in] identifier[remaining_usernames] :
identifier[to_users] . identifier[append] ( identifier[user] )
identifier[remaining_usernames] . identifier[remove] ( identifier[user] . identifier[username] )
keyword[if] identifier[remaining_emails] keyword[or] identifier[remaining_usernames] :
identifier[unable_to_find_users] = literal[string] . identifier[join] ( identifier[remaining_emails] + identifier[remaining_usernames] )
identifier[msg] = literal[string] . identifier[format] ( identifier[unable_to_find_users] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[return] identifier[to_users] | def make_user_list(self, emails, usernames):
"""
Given a list of emails and usernames fetch DukeDS user info.
Parameters that are None will be skipped.
:param emails: [str]: list of emails (can be null)
:param usernames: [str]: list of usernames(netid)
:return: [RemoteUser]: details about any users referenced by the two parameters
"""
to_users = []
remaining_emails = [] if not emails else list(emails)
remaining_usernames = [] if not usernames else list(usernames)
for user in self.remote_store.fetch_users():
if user.email in remaining_emails:
to_users.append(user)
remaining_emails.remove(user.email) # depends on [control=['if'], data=['remaining_emails']]
elif user.username in remaining_usernames:
to_users.append(user)
remaining_usernames.remove(user.username) # depends on [control=['if'], data=['remaining_usernames']] # depends on [control=['for'], data=['user']]
if remaining_emails or remaining_usernames:
unable_to_find_users = ','.join(remaining_emails + remaining_usernames)
msg = 'Unable to find users for the following email/usernames: {}'.format(unable_to_find_users)
raise ValueError(msg) # depends on [control=['if'], data=[]]
return to_users |
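
A standalone run of the matching loop with stand-in records (RemoteUser and the fetched list are illustrative; the real ones come from the remote store):

from collections import namedtuple

RemoteUser = namedtuple("RemoteUser", ["email", "username"])
fetched = [RemoteUser("ada@duke.edu", "al123"),
           RemoteUser("bob@duke.edu", "bb42")]

remaining_emails = ["ada@duke.edu"]
remaining_usernames = ["bb42", "missing"]
to_users = []
for user in fetched:
    if user.email in remaining_emails:
        to_users.append(user)
        remaining_emails.remove(user.email)
    elif user.username in remaining_usernames:
        to_users.append(user)
        remaining_usernames.remove(user.username)

print(len(to_users))        # 2: one matched by email, one by username
print(remaining_usernames)  # ['missing'] -> would trigger the ValueError
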
def unicode_wrapper(self, property, default=ugettext('Untitled')):
"""
Wrapper to allow for easy unicode representation of an object by
the specified property. If this wrapper is not able to find the
right translation of the specified property, it will return the
default value instead.
Example::
def __unicode__(self):
return unicode_wrapper('name', default='Unnamed')
"""
# TODO: Test coverage!
try:
value = getattr(self, property)
except ValueError:
logger.warn(
u'ValueError rendering unicode for %s object.',
self._meta.object_name
)
value = None
if not value:
value = default
return value | def function[unicode_wrapper, parameter[self, property, default]]:
constant[
Wrapper to allow for easy unicode representation of an object by
the specified property. If this wrapper is not able to find the
right translation of the specified property, it will return the
default value instead.
Example::
def __unicode__(self):
return unicode_wrapper('name', default='Unnamed')
]
<ast.Try object at 0x7da2054a7dc0>
if <ast.UnaryOp object at 0x7da204565960> begin[:]
variable[value] assign[=] name[default]
return[name[value]] | keyword[def] identifier[unicode_wrapper] ( identifier[self] , identifier[property] , identifier[default] = identifier[ugettext] ( literal[string] )):
literal[string]
keyword[try] :
identifier[value] = identifier[getattr] ( identifier[self] , identifier[property] )
keyword[except] identifier[ValueError] :
identifier[logger] . identifier[warn] (
literal[string] ,
identifier[self] . identifier[_meta] . identifier[object_name]
)
identifier[value] = keyword[None]
keyword[if] keyword[not] identifier[value] :
identifier[value] = identifier[default]
keyword[return] identifier[value] | def unicode_wrapper(self, property, default=ugettext('Untitled')):
"""
Wrapper to allow for easy unicode representation of an object by
the specified property. If this wrapper is not able to find the
right translation of the specified property, it will return the
default value instead.
Example::
def __unicode__(self):
return unicode_wrapper('name', default='Unnamed')
"""
# TODO: Test coverage!
try:
value = getattr(self, property) # depends on [control=['try'], data=[]]
except ValueError:
logger.warn(u'ValueError rendering unicode for %s object.', self._meta.object_name)
value = None # depends on [control=['except'], data=[]]
if not value:
value = default # depends on [control=['if'], data=[]]
return value |
def print_results(distributions, list_all_files):
"""
Print the information from the installed distributions found.
"""
results_printed = False
for dist in distributions:
results_printed = True
logger.info("---")
logger.info("Metadata-Version: %s" % dist.get('metadata-version'))
logger.info("Name: %s" % dist['name'])
logger.info("Version: %s" % dist['version'])
logger.info("Summary: %s" % dist.get('summary'))
logger.info("Home-page: %s" % dist.get('home-page'))
logger.info("Author: %s" % dist.get('author'))
logger.info("Author-email: %s" % dist.get('author-email'))
logger.info("License: %s" % dist.get('license'))
logger.info("Location: %s" % dist['location'])
logger.info("Requires: %s" % ', '.join(dist['requires']))
if list_all_files:
logger.info("Files:")
if dist['files'] is not None:
for line in dist['files']:
logger.info(" %s" % line.strip())
else:
logger.info("Cannot locate installed-files.txt")
if 'entry_points' in dist:
logger.info("Entry-points:")
for line in dist['entry_points']:
logger.info(" %s" % line.strip())
return results_printed | def function[print_results, parameter[distributions, list_all_files]]:
constant[
Print the informations from installed distributions found.
]
variable[results_printed] assign[=] constant[False]
for taget[name[dist]] in starred[name[distributions]] begin[:]
variable[results_printed] assign[=] constant[True]
call[name[logger].info, parameter[constant[---]]]
call[name[logger].info, parameter[binary_operation[constant[Metadata-Version: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist].get, parameter[constant[metadata-version]]]]]]
call[name[logger].info, parameter[binary_operation[constant[Name: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist]][constant[name]]]]]
call[name[logger].info, parameter[binary_operation[constant[Version: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist]][constant[version]]]]]
call[name[logger].info, parameter[binary_operation[constant[Summary: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist].get, parameter[constant[summary]]]]]]
call[name[logger].info, parameter[binary_operation[constant[Home-page: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist].get, parameter[constant[home-page]]]]]]
call[name[logger].info, parameter[binary_operation[constant[Author: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist].get, parameter[constant[author]]]]]]
call[name[logger].info, parameter[binary_operation[constant[Author-email: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist].get, parameter[constant[author-email]]]]]]
call[name[logger].info, parameter[binary_operation[constant[License: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist].get, parameter[constant[license]]]]]]
call[name[logger].info, parameter[binary_operation[constant[Location: %s] <ast.Mod object at 0x7da2590d6920> call[name[dist]][constant[location]]]]]
call[name[logger].info, parameter[binary_operation[constant[Requires: %s] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[call[name[dist]][constant[requires]]]]]]]
if name[list_all_files] begin[:]
call[name[logger].info, parameter[constant[Files:]]]
if compare[call[name[dist]][constant[files]] is_not constant[None]] begin[:]
for taget[name[line]] in starred[call[name[dist]][constant[files]]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[ %s] <ast.Mod object at 0x7da2590d6920> call[name[line].strip, parameter[]]]]]
if compare[constant[entry_points] in name[dist]] begin[:]
call[name[logger].info, parameter[constant[Entry-points:]]]
for taget[name[line]] in starred[call[name[dist]][constant[entry_points]]] begin[:]
call[name[logger].info, parameter[binary_operation[constant[ %s] <ast.Mod object at 0x7da2590d6920> call[name[line].strip, parameter[]]]]]
return[name[results_printed]] | keyword[def] identifier[print_results] ( identifier[distributions] , identifier[list_all_files] ):
literal[string]
identifier[results_printed] = keyword[False]
keyword[for] identifier[dist] keyword[in] identifier[distributions] :
identifier[results_printed] = keyword[True]
identifier[logger] . identifier[info] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] . identifier[get] ( literal[string] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] [ literal[string] ])
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] [ literal[string] ])
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] . identifier[get] ( literal[string] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] . identifier[get] ( literal[string] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] . identifier[get] ( literal[string] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] . identifier[get] ( literal[string] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] . identifier[get] ( literal[string] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[dist] [ literal[string] ])
identifier[logger] . identifier[info] ( literal[string] % literal[string] . identifier[join] ( identifier[dist] [ literal[string] ]))
keyword[if] identifier[list_all_files] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[dist] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[line] keyword[in] identifier[dist] [ literal[string] ]:
identifier[logger] . identifier[info] ( literal[string] % identifier[line] . identifier[strip] ())
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[dist] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[for] identifier[line] keyword[in] identifier[dist] [ literal[string] ]:
identifier[logger] . identifier[info] ( literal[string] % identifier[line] . identifier[strip] ())
keyword[return] identifier[results_printed] | def print_results(distributions, list_all_files):
"""
    Print the information from the installed distributions found.
"""
results_printed = False
for dist in distributions:
results_printed = True
logger.info('---')
logger.info('Metadata-Version: %s' % dist.get('metadata-version'))
logger.info('Name: %s' % dist['name'])
logger.info('Version: %s' % dist['version'])
logger.info('Summary: %s' % dist.get('summary'))
logger.info('Home-page: %s' % dist.get('home-page'))
logger.info('Author: %s' % dist.get('author'))
logger.info('Author-email: %s' % dist.get('author-email'))
logger.info('License: %s' % dist.get('license'))
logger.info('Location: %s' % dist['location'])
logger.info('Requires: %s' % ', '.join(dist['requires']))
if list_all_files:
logger.info('Files:')
if dist['files'] is not None:
for line in dist['files']:
logger.info(' %s' % line.strip()) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=[]]
else:
logger.info('Cannot locate installed-files.txt') # depends on [control=['if'], data=[]]
if 'entry_points' in dist:
logger.info('Entry-points:')
for line in dist['entry_points']:
logger.info(' %s' % line.strip()) # depends on [control=['for'], data=['line']] # depends on [control=['if'], data=['dist']] # depends on [control=['for'], data=['dist']]
return results_printed |
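A minimal sketch of driving print_results above, assuming the standard logging package stands in for the module-level logger; the dict keys mirror what the function reads and every value is illustrative:

import logging

logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger(__name__)  # stand-in for the module-level logger

dists = [{
    'metadata-version': '2.1',            # read via dist.get(...)
    'name': 'example-pkg',
    'version': '1.0.0',
    'summary': 'An illustrative package',
    'home-page': 'https://example.invalid',
    'author': 'Jane Doe',
    'author-email': 'jane@example.invalid',
    'license': 'MIT',
    'location': '/usr/lib/python3/site-packages',
    'requires': ['requests', 'six'],      # joined with ', ' before logging
    'files': ['example/__init__.py\n'],   # each entry is .strip()-ed
}]

if not print_results(dists, list_all_files=True):
    logger.info('No matching distributions found.')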
def goes_requires(self, regs):
""" Returns whether any of the goes_to block requires any of
the given registers.
"""
if len(self) and self.mem[-1].inst == 'call' and self.mem[-1].condition_flag is None:
for block in self.calls:
if block.is_used(regs, 0):
return True
d = block.destroys()
if not len([x for x in regs if x not in d]):
return False # If all registers are destroyed then they're not used
for block in self.goes_to:
if block.is_used(regs, 0):
return True
return False | def function[goes_requires, parameter[self, regs]]:
    constant[ Returns whether any of the goes_to blocks requires any of
the given registers.
]
if <ast.BoolOp object at 0x7da1b26ad9c0> begin[:]
for taget[name[block]] in starred[name[self].calls] begin[:]
if call[name[block].is_used, parameter[name[regs], constant[0]]] begin[:]
return[constant[True]]
variable[d] assign[=] call[name[block].destroys, parameter[]]
if <ast.UnaryOp object at 0x7da1b05cd5d0> begin[:]
return[constant[False]]
for taget[name[block]] in starred[name[self].goes_to] begin[:]
if call[name[block].is_used, parameter[name[regs], constant[0]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[goes_requires] ( identifier[self] , identifier[regs] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] ) keyword[and] identifier[self] . identifier[mem] [- literal[int] ]. identifier[inst] == literal[string] keyword[and] identifier[self] . identifier[mem] [- literal[int] ]. identifier[condition_flag] keyword[is] keyword[None] :
keyword[for] identifier[block] keyword[in] identifier[self] . identifier[calls] :
keyword[if] identifier[block] . identifier[is_used] ( identifier[regs] , literal[int] ):
keyword[return] keyword[True]
identifier[d] = identifier[block] . identifier[destroys] ()
keyword[if] keyword[not] identifier[len] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[regs] keyword[if] identifier[x] keyword[not] keyword[in] identifier[d] ]):
keyword[return] keyword[False]
keyword[for] identifier[block] keyword[in] identifier[self] . identifier[goes_to] :
keyword[if] identifier[block] . identifier[is_used] ( identifier[regs] , literal[int] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def goes_requires(self, regs):
""" Returns whether any of the goes_to block requires any of
the given registers.
"""
if len(self) and self.mem[-1].inst == 'call' and (self.mem[-1].condition_flag is None):
for block in self.calls:
if block.is_used(regs, 0):
return True # depends on [control=['if'], data=[]]
d = block.destroys()
if not len([x for x in regs if x not in d]):
return False # If all registers are destroyed then they're not used # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['block']] # depends on [control=['if'], data=[]]
for block in self.goes_to:
if block.is_used(regs, 0):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['block']]
return False |
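Since goes_requires leans on basic-block objects exposing is_used and destroys, here is a self-contained toy restatement of the same decision order; FakeBlock and the register names are invented for illustration and make no claim about the real block class:

class FakeBlock:
    """ Stand-in for a basic block: which regs it reads and which it clobbers. """
    def __init__(self, used, destroyed):
        self.used = set(used)
        self.destroyed = set(destroyed)

    def is_used(self, regs, _from):
        return bool(self.used & set(regs))

    def destroys(self):
        return self.destroyed


def goes_requires_sketch(ends_in_plain_call, calls, goes_to, regs):
    # mirrors the decision order above: callees first, then fall-through successors
    if ends_in_plain_call:
        for block in calls:
            if block.is_used(regs, 0):
                return True
            if not [x for x in regs if x not in block.destroys()]:
                return False  # the callee clobbers every register before any read
    return any(block.is_used(regs, 0) for block in goes_to)


# The callee overwrites both registers, so the successor's read of 'a' is irrelevant:
callee = FakeBlock(used=(), destroyed=('a', 'b'))
successor = FakeBlock(used=('a',), destroyed=())
assert goes_requires_sketch(True, [callee], [successor], ('a', 'b')) is False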
def _config_getter(get_opt,
key,
value_regex=None,
cwd=None,
user=None,
password=None,
ignore_retcode=False,
output_encoding=None,
**kwargs):
'''
    Common code for the config.get_* functions; builds and runs the git CLI command
and returns the result dict for the calling function to parse.
'''
kwargs = salt.utils.args.clean_kwargs(**kwargs)
global_ = kwargs.pop('global', False)
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
if cwd is None:
if not global_:
raise SaltInvocationError(
'\'cwd\' argument required unless global=True'
)
else:
cwd = _expand_path(cwd, user)
if get_opt == '--get-regexp':
if value_regex is not None \
and not isinstance(value_regex, six.string_types):
value_regex = six.text_type(value_regex)
else:
# Ignore value_regex
value_regex = None
command = ['git', 'config']
command.extend(_which_git_config(global_, cwd, user, password,
output_encoding=output_encoding))
command.append(get_opt)
command.append(key)
if value_regex is not None:
command.append(value_regex)
return _git_run(command,
cwd=cwd,
user=user,
password=password,
ignore_retcode=ignore_retcode,
failhard=False,
output_encoding=output_encoding) | def function[_config_getter, parameter[get_opt, key, value_regex, cwd, user, password, ignore_retcode, output_encoding]]:
constant[
    Common code for the config.get_* functions; builds and runs the git CLI command
and returns the result dict for the calling function to parse.
]
variable[kwargs] assign[=] call[name[salt].utils.args.clean_kwargs, parameter[]]
variable[global_] assign[=] call[name[kwargs].pop, parameter[constant[global], constant[False]]]
if name[kwargs] begin[:]
call[name[salt].utils.args.invalid_kwargs, parameter[name[kwargs]]]
if compare[name[cwd] is constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da20c993b80> begin[:]
<ast.Raise object at 0x7da20c992c20>
if compare[name[get_opt] equal[==] constant[--get-regexp]] begin[:]
if <ast.BoolOp object at 0x7da20c991e70> begin[:]
variable[value_regex] assign[=] call[name[six].text_type, parameter[name[value_regex]]]
variable[command] assign[=] list[[<ast.Constant object at 0x7da20c992d10>, <ast.Constant object at 0x7da20c991120>]]
call[name[command].extend, parameter[call[name[_which_git_config], parameter[name[global_], name[cwd], name[user], name[password]]]]]
call[name[command].append, parameter[name[get_opt]]]
call[name[command].append, parameter[name[key]]]
if compare[name[value_regex] is_not constant[None]] begin[:]
call[name[command].append, parameter[name[value_regex]]]
return[call[name[_git_run], parameter[name[command]]]] | keyword[def] identifier[_config_getter] ( identifier[get_opt] ,
identifier[key] ,
identifier[value_regex] = keyword[None] ,
identifier[cwd] = keyword[None] ,
identifier[user] = keyword[None] ,
identifier[password] = keyword[None] ,
identifier[ignore_retcode] = keyword[False] ,
identifier[output_encoding] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[salt] . identifier[utils] . identifier[args] . identifier[clean_kwargs] (** identifier[kwargs] )
identifier[global_] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
keyword[if] identifier[kwargs] :
identifier[salt] . identifier[utils] . identifier[args] . identifier[invalid_kwargs] ( identifier[kwargs] )
keyword[if] identifier[cwd] keyword[is] keyword[None] :
keyword[if] keyword[not] identifier[global_] :
keyword[raise] identifier[SaltInvocationError] (
literal[string]
)
keyword[else] :
identifier[cwd] = identifier[_expand_path] ( identifier[cwd] , identifier[user] )
keyword[if] identifier[get_opt] == literal[string] :
keyword[if] identifier[value_regex] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[value_regex] , identifier[six] . identifier[string_types] ):
identifier[value_regex] = identifier[six] . identifier[text_type] ( identifier[value_regex] )
keyword[else] :
identifier[value_regex] = keyword[None]
identifier[command] =[ literal[string] , literal[string] ]
identifier[command] . identifier[extend] ( identifier[_which_git_config] ( identifier[global_] , identifier[cwd] , identifier[user] , identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] ))
identifier[command] . identifier[append] ( identifier[get_opt] )
identifier[command] . identifier[append] ( identifier[key] )
keyword[if] identifier[value_regex] keyword[is] keyword[not] keyword[None] :
identifier[command] . identifier[append] ( identifier[value_regex] )
keyword[return] identifier[_git_run] ( identifier[command] ,
identifier[cwd] = identifier[cwd] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[ignore_retcode] = identifier[ignore_retcode] ,
identifier[failhard] = keyword[False] ,
identifier[output_encoding] = identifier[output_encoding] ) | def _config_getter(get_opt, key, value_regex=None, cwd=None, user=None, password=None, ignore_retcode=False, output_encoding=None, **kwargs):
"""
    Common code for the config.get_* functions; builds and runs the git CLI command
and returns the result dict for the calling function to parse.
"""
kwargs = salt.utils.args.clean_kwargs(**kwargs)
global_ = kwargs.pop('global', False)
if kwargs:
salt.utils.args.invalid_kwargs(kwargs) # depends on [control=['if'], data=[]]
if cwd is None:
if not global_:
raise SaltInvocationError("'cwd' argument required unless global=True") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
cwd = _expand_path(cwd, user)
if get_opt == '--get-regexp':
if value_regex is not None and (not isinstance(value_regex, six.string_types)):
value_regex = six.text_type(value_regex) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Ignore value_regex
value_regex = None
command = ['git', 'config']
command.extend(_which_git_config(global_, cwd, user, password, output_encoding=output_encoding))
command.append(get_opt)
command.append(key)
if value_regex is not None:
command.append(value_regex) # depends on [control=['if'], data=['value_regex']]
return _git_run(command, cwd=cwd, user=user, password=password, ignore_retcode=ignore_retcode, failhard=False, output_encoding=output_encoding) |
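A hypothetical wrapper sketching how _config_getter might be consumed by a public config_get function; the 'retcode' and 'stdout' keys are assumptions based on the usual shape of Salt command-result dicts, and config_get_sketch is not part of the real module:

def config_get_sketch(key, cwd=None, **kwargs):
    # failhard=False above means a missing key surfaces as a nonzero retcode
    # instead of an exception, so the caller inspects it explicitly
    result = _config_getter('--get', key, cwd=cwd, **kwargs)
    if result.get('retcode', 0) != 0:
        return None  # key not set in the selected git config
    return result['stdout'].strip()

# e.g. config_get_sketch('user.name', cwd='/srv/repo')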
def present(name, params, static_host_list=True, **kwargs):
'''
    Creates a Zabbix Template object or, if it differs, updates it according to the defined parameters. See Zabbix API documentation.
Zabbix API version: >3.0
:param name: Zabbix Template name
:param params: Additional parameters according to Zabbix API documentation
    :param static_host_list: Whether hosts assigned to the template are controlled
                             only by this state or can also be assigned externally
:param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)
.. note::
        If there is a need to get a value from the current Zabbix instance (e.g. IDs of host groups you want the template
        to be associated with), put a dictionary with the two keys "query_object" and "query_name" instead of the value.
        In this example we want to create a template named "Testing Template", assign it to the hostgroup Templates,
        link it to two Ceph nodes and create a macro.
.. note::
IMPORTANT NOTE:
        Objects (except for the template name) are identified by name (or by another key in some exceptional cases),
        so changing the name of an object means deleting the old one and creating a new one with a new ID!!!
.. note::
NOT SUPPORTED FEATURES:
- linked templates
- trigger dependencies
- groups and group prototypes for host prototypes
SLS Example:
.. code-block:: yaml
zabbix-template-present:
zabbix_template.present:
- name: Testing Template
# Do not touch existing assigned hosts
            # True will detach all hosts other than those defined here
- static_host_list: False
- params:
description: Template for Ceph nodes
groups:
# groups must already exist
# template must be at least in one hostgroup
- groupid:
query_object: hostgroup
query_name: Templates
macros:
- macro: "{$CEPH_CLUSTER_NAME}"
value: ceph
hosts:
# hosts must already exist
- hostid:
query_object: host
query_name: ceph-osd-01
- hostid:
query_object: host
query_name: ceph-osd-02
# templates:
# Linked templates - not supported by state module but can be linked manually (will not be touched)
applications:
- name: Ceph OSD
items:
- name: Ceph OSD avg fill item
key_: ceph.osd_avg_fill
type: 2
value_type: 0
delay: 60
units: '%'
description: 'Average fill of OSD'
applications:
- applicationid:
query_object: application
query_name: Ceph OSD
triggers:
- description: "Ceph OSD filled more that 90%"
expression: "{{'{'}}Testing Template:ceph.osd_avg_fill.last(){{'}'}}>90"
priority: 4
discoveries:
- name: Mounted filesystem discovery
key_: vfs.fs.discovery
type: 0
delay: 60
itemprototypes:
- name: Free disk space on {{'{#'}}FSNAME}
key_: vfs.fs.size[{{'{#'}}FSNAME},free]
type: 0
value_type: 3
delay: 60
applications:
- applicationid:
query_object: application
query_name: Ceph OSD
triggerprototypes:
- description: "Free disk space is less than 20% on volume {{'{#'}}FSNAME{{'}'}}"
expression: "{{'{'}}Testing Template:vfs.fs.size[{{'{#'}}FSNAME},free].last(){{'}'}}<20"
graphs:
- name: Ceph OSD avg fill graph
width: 900
height: 200
graphtype: 0
gitems:
- color: F63100
itemid:
query_object: item
query_name: Ceph OSD avg fill item
screens:
- name: Ceph
hsize: 1
vsize: 1
screenitems:
- x: 0
y: 0
resourcetype: 0
resourceid:
query_object: graph
query_name: Ceph OSD avg fill graph
'''
zabbix_id_mapper = __salt__['zabbix.get_zabbix_id_mapper']()
dry_run = __opts__['test']
ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}
params['host'] = name
del CHANGE_STACK[:]
# Divide template yaml definition into parts
# - template definition itself
# - simple template components
# - components that have other sub-components
# (e.g. discoveries - where parent ID is needed in advance for sub-component manipulation)
template_definition = {}
template_components = {}
discovery_components = []
for attr in params:
if attr in TEMPLATE_COMPONENT_ORDER and six.text_type(attr) != 'discoveries':
template_components[attr] = params[attr]
elif six.text_type(attr) == 'discoveries':
d_rules = []
for d_rule in params[attr]:
d_rule_components = {'query_pid': {'component': attr,
'filter_val': d_rule[TEMPLATE_COMPONENT_DEF[attr]['filter']]}}
for proto_name in DISCOVERYRULE_COMPONENT_ORDER:
if proto_name in d_rule:
d_rule_components[proto_name] = d_rule[proto_name]
del d_rule[proto_name]
discovery_components.append(d_rule_components)
d_rules.append(d_rule)
template_components[attr] = d_rules
else:
template_definition[attr] = params[attr]
    # a component left undefined here means its existing items are removed during the update (empty list)
for attr in TEMPLATE_COMPONENT_ORDER:
if attr not in template_components:
template_components[attr] = []
    # a component left undefined here means its existing items are removed during the update (empty list)
for attr in TEMPLATE_RELATIONS:
template_definition[attr] = params[attr] if attr in params and params[attr] else []
defined_obj = __salt__['zabbix.substitute_params'](template_definition, **kwargs)
log.info('SUBSTITUTED template_definition: %s', six.text_type(json.dumps(defined_obj, indent=4)))
tmpl_get = __salt__['zabbix.run_query']('template.get',
{'output': 'extend', 'selectGroups': 'groupid', 'selectHosts': 'hostid',
'selectTemplates': 'templateid', 'selectMacros': 'extend',
'filter': {'host': name}},
**kwargs)
log.info('TEMPLATE get result: %s', six.text_type(json.dumps(tmpl_get, indent=4)))
existing_obj = __salt__['zabbix.substitute_params'](tmpl_get[0], **kwargs) \
if tmpl_get and len(tmpl_get) == 1 else False
if existing_obj:
template_id = existing_obj[zabbix_id_mapper['template']]
if not static_host_list:
# Prepare objects for comparison
defined_wo_hosts = defined_obj
if 'hosts' in defined_obj:
defined_hosts = defined_obj['hosts']
del defined_wo_hosts['hosts']
else:
defined_hosts = []
existing_wo_hosts = existing_obj
if 'hosts' in existing_obj:
existing_hosts = existing_obj['hosts']
del existing_wo_hosts['hosts']
else:
existing_hosts = []
# Compare host list separately from the rest of the object comparison since the merged list is needed for
# update
hosts_list = _diff_and_merge_host_list(defined_hosts, existing_hosts)
# Compare objects without hosts
diff_params = __salt__['zabbix.compare_params'](defined_wo_hosts, existing_wo_hosts, True)
# Merge comparison results together
if ('new' in diff_params and 'hosts' in diff_params['new']) or hosts_list:
diff_params['new']['hosts'] = hosts_list
else:
diff_params = __salt__['zabbix.compare_params'](defined_obj, existing_obj, True)
if diff_params['new']:
diff_params['new'][zabbix_id_mapper['template']] = template_id
diff_params['old'][zabbix_id_mapper['template']] = template_id
log.info('TEMPLATE: update params: %s', six.text_type(json.dumps(diff_params, indent=4)))
CHANGE_STACK.append({'component': 'template', 'action': 'update', 'params': diff_params['new']})
if not dry_run:
tmpl_update = __salt__['zabbix.run_query']('template.update', diff_params['new'], **kwargs)
log.info('TEMPLATE update result: %s', six.text_type(tmpl_update))
else:
CHANGE_STACK.append({'component': 'template', 'action': 'create', 'params': defined_obj})
if not dry_run:
tmpl_create = __salt__['zabbix.run_query']('template.create', defined_obj, **kwargs)
log.info('TEMPLATE create result: %s', tmpl_create)
if tmpl_create:
template_id = tmpl_create['templateids'][0]
log.info('\n\ntemplate_components: %s', json.dumps(template_components, indent=4))
log.info('\n\ndiscovery_components: %s', json.dumps(discovery_components, indent=4))
log.info('\n\nCurrent CHANGE_STACK: %s', six.text_type(json.dumps(CHANGE_STACK, indent=4)))
if existing_obj or not dry_run:
for component in TEMPLATE_COMPONENT_ORDER:
log.info('\n\n\n\n\nCOMPONENT: %s\n\n', six.text_type(json.dumps(component)))
            # 1) query for components which belong to the template
existing_c_list = _get_existing_template_c_list(component, template_id, **kwargs)
existing_c_list_subs = __salt__['zabbix.substitute_params'](existing_c_list, **kwargs) \
if existing_c_list else []
if component in template_components:
defined_c_list_subs = __salt__['zabbix.substitute_params'](
template_components[component],
extend_params={TEMPLATE_COMPONENT_DEF[component]['qselectpid']: template_id},
filter_key=TEMPLATE_COMPONENT_DEF[component]['filter'],
**kwargs)
else:
defined_c_list_subs = []
            # 2) take the lists for a particular component and compare -> do create, update and delete actions
_manage_component(component, template_id, defined_c_list_subs, existing_c_list_subs, **kwargs)
log.info('\n\nCurrent CHANGE_STACK: %s', six.text_type(json.dumps(CHANGE_STACK, indent=4)))
for d_rule_component in discovery_components:
# query for parent id -> "query_pid": {"filter_val": "vfs.fs.discovery", "component": "discoveries"}
q_def = d_rule_component['query_pid']
c_def = TEMPLATE_COMPONENT_DEF[q_def['component']]
q_object = c_def['qtype']
q_params = dict(c_def['output'])
q_params.update({c_def['qselectpid']: template_id})
q_params.update({'filter': {c_def['filter']: q_def['filter_val']}})
parent_id = __salt__['zabbix.get_object_id_by_params'](q_object, q_params, **kwargs)
for proto_name in DISCOVERYRULE_COMPONENT_ORDER:
log.info('\n\n\n\n\nPROTOTYPE_NAME: %s\n\n', six.text_type(json.dumps(proto_name)))
existing_p_list = _get_existing_template_c_list(proto_name, parent_id, **kwargs)
existing_p_list_subs = __salt__['zabbix.substitute_params'](existing_p_list, **kwargs)\
if existing_p_list else []
if proto_name in d_rule_component:
defined_p_list_subs = __salt__['zabbix.substitute_params'](
d_rule_component[proto_name],
extend_params={c_def['qselectpid']: template_id},
**kwargs)
else:
defined_p_list_subs = []
_manage_component(proto_name,
parent_id,
defined_p_list_subs,
existing_p_list_subs,
template_id=template_id,
**kwargs)
log.info('\n\nCurrent CHANGE_STACK: %s', six.text_type(json.dumps(CHANGE_STACK, indent=4)))
if not CHANGE_STACK:
ret['result'] = True
ret['comment'] = 'Zabbix Template "{0}" already exists and corresponds to a definition.'.format(name)
else:
tmpl_action = next((item for item in CHANGE_STACK
if item['component'] == 'template' and item['action'] == 'create'), None)
if tmpl_action:
ret['result'] = True
if dry_run:
ret['comment'] = 'Zabbix Template "{0}" would be created.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" does not exist.'.format(name),
'new': 'Zabbix Template "{0}" would be created '
                                                'according to definition.'.format(name)}}
else:
ret['comment'] = 'Zabbix Template "{0}" created.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" did not exist.'.format(name),
'new': 'Zabbix Template "{0}" created according definition.'.format(name)}}
else:
ret['result'] = True
if dry_run:
ret['comment'] = 'Zabbix Template "{0}" would be updated.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" differs.'.format(name),
'new': 'Zabbix Template "{0}" would be updated '
                                                'according to definition.'.format(name)}}
else:
ret['comment'] = 'Zabbix Template "{0}" updated.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" differed.'.format(name),
'new': 'Zabbix Template "{0}" updated according definition.'.format(name)}}
return ret | def function[present, parameter[name, params, static_host_list]]:
constant[
    Creates a Zabbix Template object or, if it differs, updates it according to the defined parameters. See Zabbix API documentation.
Zabbix API version: >3.0
:param name: Zabbix Template name
:param params: Additional parameters according to Zabbix API documentation
    :param static_host_list: Whether hosts assigned to the template are controlled
    only by this state or can also be assigned externally
:param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)
.. note::
    If there is a need to get a value from the current Zabbix instance (e.g. IDs of host groups you want the template
    to be associated with), put a dictionary with the two keys "query_object" and "query_name" instead of the value.
    In this example we want to create a template named "Testing Template", assign it to the hostgroup Templates,
    link it to two Ceph nodes and create a macro.
.. note::
IMPORTANT NOTE:
    Objects (except for the template name) are identified by name (or by another key in some exceptional cases),
    so changing the name of an object means deleting the old one and creating a new one with a new ID!!!
.. note::
NOT SUPPORTED FEATURES:
- linked templates
- trigger dependencies
- groups and group prototypes for host prototypes
SLS Example:
.. code-block:: yaml
zabbix-template-present:
zabbix_template.present:
- name: Testing Template
# Do not touch existing assigned hosts
            # True will detach all hosts other than those defined here
- static_host_list: False
- params:
description: Template for Ceph nodes
groups:
# groups must already exist
# template must be at least in one hostgroup
- groupid:
query_object: hostgroup
query_name: Templates
macros:
- macro: "{$CEPH_CLUSTER_NAME}"
value: ceph
hosts:
# hosts must already exist
- hostid:
query_object: host
query_name: ceph-osd-01
- hostid:
query_object: host
query_name: ceph-osd-02
# templates:
# Linked templates - not supported by state module but can be linked manually (will not be touched)
applications:
- name: Ceph OSD
items:
- name: Ceph OSD avg fill item
key_: ceph.osd_avg_fill
type: 2
value_type: 0
delay: 60
units: '%'
description: 'Average fill of OSD'
applications:
- applicationid:
query_object: application
query_name: Ceph OSD
triggers:
- description: "Ceph OSD filled more that 90%"
expression: "{{'{'}}Testing Template:ceph.osd_avg_fill.last(){{'}'}}>90"
priority: 4
discoveries:
- name: Mounted filesystem discovery
key_: vfs.fs.discovery
type: 0
delay: 60
itemprototypes:
- name: Free disk space on {{'{#'}}FSNAME}
key_: vfs.fs.size[{{'{#'}}FSNAME},free]
type: 0
value_type: 3
delay: 60
applications:
- applicationid:
query_object: application
query_name: Ceph OSD
triggerprototypes:
- description: "Free disk space is less than 20% on volume {{'{#'}}FSNAME{{'}'}}"
expression: "{{'{'}}Testing Template:vfs.fs.size[{{'{#'}}FSNAME},free].last(){{'}'}}<20"
graphs:
- name: Ceph OSD avg fill graph
width: 900
height: 200
graphtype: 0
gitems:
- color: F63100
itemid:
query_object: item
query_name: Ceph OSD avg fill item
screens:
- name: Ceph
hsize: 1
vsize: 1
screenitems:
- x: 0
y: 0
resourcetype: 0
resourceid:
query_object: graph
query_name: Ceph OSD avg fill graph
]
variable[zabbix_id_mapper] assign[=] call[call[name[__salt__]][constant[zabbix.get_zabbix_id_mapper]], parameter[]]
variable[dry_run] assign[=] call[name[__opts__]][constant[test]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b208dc90>, <ast.Constant object at 0x7da1b208dc60>, <ast.Constant object at 0x7da1b208dc30>, <ast.Constant object at 0x7da1b208dc00>], [<ast.Name object at 0x7da1b208dbd0>, <ast.Constant object at 0x7da1b208dba0>, <ast.Constant object at 0x7da1b208db70>, <ast.Dict object at 0x7da1b208db40>]]
call[name[params]][constant[host]] assign[=] name[name]
<ast.Delete object at 0x7da1b208da20>
variable[template_definition] assign[=] dictionary[[], []]
variable[template_components] assign[=] dictionary[[], []]
variable[discovery_components] assign[=] list[[]]
for taget[name[attr]] in starred[name[params]] begin[:]
if <ast.BoolOp object at 0x7da1b208d6c0> begin[:]
call[name[template_components]][name[attr]] assign[=] call[name[params]][name[attr]]
for taget[name[attr]] in starred[name[TEMPLATE_COMPONENT_ORDER]] begin[:]
if compare[name[attr] <ast.NotIn object at 0x7da2590d7190> name[template_components]] begin[:]
call[name[template_components]][name[attr]] assign[=] list[[]]
for taget[name[attr]] in starred[name[TEMPLATE_RELATIONS]] begin[:]
call[name[template_definition]][name[attr]] assign[=] <ast.IfExp object at 0x7da1b208c250>
variable[defined_obj] assign[=] call[call[name[__salt__]][constant[zabbix.substitute_params]], parameter[name[template_definition]]]
call[name[log].info, parameter[constant[SUBSTITUTED template_definition: %s], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[defined_obj]]]]]]]
variable[tmpl_get] assign[=] call[call[name[__salt__]][constant[zabbix.run_query]], parameter[constant[template.get], dictionary[[<ast.Constant object at 0x7da1b21a1060>, <ast.Constant object at 0x7da1b21a1090>, <ast.Constant object at 0x7da1b21a10c0>, <ast.Constant object at 0x7da1b21a10f0>, <ast.Constant object at 0x7da1b21a1120>, <ast.Constant object at 0x7da1b21a1150>], [<ast.Constant object at 0x7da1b21a11b0>, <ast.Constant object at 0x7da1b21a11e0>, <ast.Constant object at 0x7da1b21a1210>, <ast.Constant object at 0x7da1b21a1570>, <ast.Constant object at 0x7da1b21a1540>, <ast.Dict object at 0x7da1b21a1510>]]]]
call[name[log].info, parameter[constant[TEMPLATE get result: %s], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[tmpl_get]]]]]]]
variable[existing_obj] assign[=] <ast.IfExp object at 0x7da1b21a0310>
if name[existing_obj] begin[:]
variable[template_id] assign[=] call[name[existing_obj]][call[name[zabbix_id_mapper]][constant[template]]]
if <ast.UnaryOp object at 0x7da1b21a2230> begin[:]
variable[defined_wo_hosts] assign[=] name[defined_obj]
if compare[constant[hosts] in name[defined_obj]] begin[:]
variable[defined_hosts] assign[=] call[name[defined_obj]][constant[hosts]]
<ast.Delete object at 0x7da1b21a1d80>
variable[existing_wo_hosts] assign[=] name[existing_obj]
if compare[constant[hosts] in name[existing_obj]] begin[:]
variable[existing_hosts] assign[=] call[name[existing_obj]][constant[hosts]]
<ast.Delete object at 0x7da1b21a19f0>
variable[hosts_list] assign[=] call[name[_diff_and_merge_host_list], parameter[name[defined_hosts], name[existing_hosts]]]
variable[diff_params] assign[=] call[call[name[__salt__]][constant[zabbix.compare_params]], parameter[name[defined_wo_hosts], name[existing_wo_hosts], constant[True]]]
if <ast.BoolOp object at 0x7da1b21a2260> begin[:]
call[call[name[diff_params]][constant[new]]][constant[hosts]] assign[=] name[hosts_list]
if call[name[diff_params]][constant[new]] begin[:]
call[call[name[diff_params]][constant[new]]][call[name[zabbix_id_mapper]][constant[template]]] assign[=] name[template_id]
call[call[name[diff_params]][constant[old]]][call[name[zabbix_id_mapper]][constant[template]]] assign[=] name[template_id]
call[name[log].info, parameter[constant[TEMPLATE: update params: %s], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[diff_params]]]]]]]
call[name[CHANGE_STACK].append, parameter[dictionary[[<ast.Constant object at 0x7da1b21a3190>, <ast.Constant object at 0x7da1b21a3130>, <ast.Constant object at 0x7da1b21a3070>], [<ast.Constant object at 0x7da1b21a30a0>, <ast.Constant object at 0x7da1b21a30d0>, <ast.Subscript object at 0x7da1b21a3010>]]]]
if <ast.UnaryOp object at 0x7da1b21a2fb0> begin[:]
variable[tmpl_update] assign[=] call[call[name[__salt__]][constant[zabbix.run_query]], parameter[constant[template.update], call[name[diff_params]][constant[new]]]]
call[name[log].info, parameter[constant[TEMPLATE update result: %s], call[name[six].text_type, parameter[name[tmpl_update]]]]]
call[name[log].info, parameter[constant[
template_components: %s], call[name[json].dumps, parameter[name[template_components]]]]]
call[name[log].info, parameter[constant[
discovery_components: %s], call[name[json].dumps, parameter[name[discovery_components]]]]]
call[name[log].info, parameter[constant[
Current CHANGE_STACK: %s], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[CHANGE_STACK]]]]]]]
if <ast.BoolOp object at 0x7da1b20edc60> begin[:]
for taget[name[component]] in starred[name[TEMPLATE_COMPONENT_ORDER]] begin[:]
call[name[log].info, parameter[constant[
COMPONENT: %s
], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[component]]]]]]]
variable[existing_c_list] assign[=] call[name[_get_existing_template_c_list], parameter[name[component], name[template_id]]]
variable[existing_c_list_subs] assign[=] <ast.IfExp object at 0x7da1b20ed360>
if compare[name[component] in name[template_components]] begin[:]
variable[defined_c_list_subs] assign[=] call[call[name[__salt__]][constant[zabbix.substitute_params]], parameter[call[name[template_components]][name[component]]]]
call[name[_manage_component], parameter[name[component], name[template_id], name[defined_c_list_subs], name[existing_c_list_subs]]]
call[name[log].info, parameter[constant[
Current CHANGE_STACK: %s], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[CHANGE_STACK]]]]]]]
for taget[name[d_rule_component]] in starred[name[discovery_components]] begin[:]
variable[q_def] assign[=] call[name[d_rule_component]][constant[query_pid]]
variable[c_def] assign[=] call[name[TEMPLATE_COMPONENT_DEF]][call[name[q_def]][constant[component]]]
variable[q_object] assign[=] call[name[c_def]][constant[qtype]]
variable[q_params] assign[=] call[name[dict], parameter[call[name[c_def]][constant[output]]]]
call[name[q_params].update, parameter[dictionary[[<ast.Subscript object at 0x7da1b20ef0a0>], [<ast.Name object at 0x7da1b20ef730>]]]]
call[name[q_params].update, parameter[dictionary[[<ast.Constant object at 0x7da1b20ef6d0>], [<ast.Dict object at 0x7da1b20ef760>]]]]
variable[parent_id] assign[=] call[call[name[__salt__]][constant[zabbix.get_object_id_by_params]], parameter[name[q_object], name[q_params]]]
for taget[name[proto_name]] in starred[name[DISCOVERYRULE_COMPONENT_ORDER]] begin[:]
call[name[log].info, parameter[constant[
PROTOTYPE_NAME: %s
], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[proto_name]]]]]]]
variable[existing_p_list] assign[=] call[name[_get_existing_template_c_list], parameter[name[proto_name], name[parent_id]]]
variable[existing_p_list_subs] assign[=] <ast.IfExp object at 0x7da1b20ed2a0>
if compare[name[proto_name] in name[d_rule_component]] begin[:]
variable[defined_p_list_subs] assign[=] call[call[name[__salt__]][constant[zabbix.substitute_params]], parameter[call[name[d_rule_component]][name[proto_name]]]]
call[name[_manage_component], parameter[name[proto_name], name[parent_id], name[defined_p_list_subs], name[existing_p_list_subs]]]
call[name[log].info, parameter[constant[
Current CHANGE_STACK: %s], call[name[six].text_type, parameter[call[name[json].dumps, parameter[name[CHANGE_STACK]]]]]]]
if <ast.UnaryOp object at 0x7da207f01de0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[Zabbix Template "{0}" already exists and corresponds to a definition.].format, parameter[name[name]]]
return[name[ret]] | keyword[def] identifier[present] ( identifier[name] , identifier[params] , identifier[static_host_list] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[zabbix_id_mapper] = identifier[__salt__] [ literal[string] ]()
identifier[dry_run] = identifier[__opts__] [ literal[string] ]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] :{}}
identifier[params] [ literal[string] ]= identifier[name]
keyword[del] identifier[CHANGE_STACK] [:]
identifier[template_definition] ={}
identifier[template_components] ={}
identifier[discovery_components] =[]
keyword[for] identifier[attr] keyword[in] identifier[params] :
keyword[if] identifier[attr] keyword[in] identifier[TEMPLATE_COMPONENT_ORDER] keyword[and] identifier[six] . identifier[text_type] ( identifier[attr] )!= literal[string] :
identifier[template_components] [ identifier[attr] ]= identifier[params] [ identifier[attr] ]
keyword[elif] identifier[six] . identifier[text_type] ( identifier[attr] )== literal[string] :
identifier[d_rules] =[]
keyword[for] identifier[d_rule] keyword[in] identifier[params] [ identifier[attr] ]:
identifier[d_rule_components] ={ literal[string] :{ literal[string] : identifier[attr] ,
literal[string] : identifier[d_rule] [ identifier[TEMPLATE_COMPONENT_DEF] [ identifier[attr] ][ literal[string] ]]}}
keyword[for] identifier[proto_name] keyword[in] identifier[DISCOVERYRULE_COMPONENT_ORDER] :
keyword[if] identifier[proto_name] keyword[in] identifier[d_rule] :
identifier[d_rule_components] [ identifier[proto_name] ]= identifier[d_rule] [ identifier[proto_name] ]
keyword[del] identifier[d_rule] [ identifier[proto_name] ]
identifier[discovery_components] . identifier[append] ( identifier[d_rule_components] )
identifier[d_rules] . identifier[append] ( identifier[d_rule] )
identifier[template_components] [ identifier[attr] ]= identifier[d_rules]
keyword[else] :
identifier[template_definition] [ identifier[attr] ]= identifier[params] [ identifier[attr] ]
keyword[for] identifier[attr] keyword[in] identifier[TEMPLATE_COMPONENT_ORDER] :
keyword[if] identifier[attr] keyword[not] keyword[in] identifier[template_components] :
identifier[template_components] [ identifier[attr] ]=[]
keyword[for] identifier[attr] keyword[in] identifier[TEMPLATE_RELATIONS] :
identifier[template_definition] [ identifier[attr] ]= identifier[params] [ identifier[attr] ] keyword[if] identifier[attr] keyword[in] identifier[params] keyword[and] identifier[params] [ identifier[attr] ] keyword[else] []
identifier[defined_obj] = identifier[__salt__] [ literal[string] ]( identifier[template_definition] ,** identifier[kwargs] )
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[defined_obj] , identifier[indent] = literal[int] )))
identifier[tmpl_get] = identifier[__salt__] [ literal[string] ]( literal[string] ,
{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] :{ literal[string] : identifier[name] }},
** identifier[kwargs] )
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[tmpl_get] , identifier[indent] = literal[int] )))
identifier[existing_obj] = identifier[__salt__] [ literal[string] ]( identifier[tmpl_get] [ literal[int] ],** identifier[kwargs] ) keyword[if] identifier[tmpl_get] keyword[and] identifier[len] ( identifier[tmpl_get] )== literal[int] keyword[else] keyword[False]
keyword[if] identifier[existing_obj] :
identifier[template_id] = identifier[existing_obj] [ identifier[zabbix_id_mapper] [ literal[string] ]]
keyword[if] keyword[not] identifier[static_host_list] :
identifier[defined_wo_hosts] = identifier[defined_obj]
keyword[if] literal[string] keyword[in] identifier[defined_obj] :
identifier[defined_hosts] = identifier[defined_obj] [ literal[string] ]
keyword[del] identifier[defined_wo_hosts] [ literal[string] ]
keyword[else] :
identifier[defined_hosts] =[]
identifier[existing_wo_hosts] = identifier[existing_obj]
keyword[if] literal[string] keyword[in] identifier[existing_obj] :
identifier[existing_hosts] = identifier[existing_obj] [ literal[string] ]
keyword[del] identifier[existing_wo_hosts] [ literal[string] ]
keyword[else] :
identifier[existing_hosts] =[]
identifier[hosts_list] = identifier[_diff_and_merge_host_list] ( identifier[defined_hosts] , identifier[existing_hosts] )
identifier[diff_params] = identifier[__salt__] [ literal[string] ]( identifier[defined_wo_hosts] , identifier[existing_wo_hosts] , keyword[True] )
keyword[if] ( literal[string] keyword[in] identifier[diff_params] keyword[and] literal[string] keyword[in] identifier[diff_params] [ literal[string] ]) keyword[or] identifier[hosts_list] :
identifier[diff_params] [ literal[string] ][ literal[string] ]= identifier[hosts_list]
keyword[else] :
identifier[diff_params] = identifier[__salt__] [ literal[string] ]( identifier[defined_obj] , identifier[existing_obj] , keyword[True] )
keyword[if] identifier[diff_params] [ literal[string] ]:
identifier[diff_params] [ literal[string] ][ identifier[zabbix_id_mapper] [ literal[string] ]]= identifier[template_id]
identifier[diff_params] [ literal[string] ][ identifier[zabbix_id_mapper] [ literal[string] ]]= identifier[template_id]
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[diff_params] , identifier[indent] = literal[int] )))
identifier[CHANGE_STACK] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[diff_params] [ literal[string] ]})
keyword[if] keyword[not] identifier[dry_run] :
identifier[tmpl_update] = identifier[__salt__] [ literal[string] ]( literal[string] , identifier[diff_params] [ literal[string] ],** identifier[kwargs] )
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[tmpl_update] ))
keyword[else] :
identifier[CHANGE_STACK] . identifier[append] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[defined_obj] })
keyword[if] keyword[not] identifier[dry_run] :
identifier[tmpl_create] = identifier[__salt__] [ literal[string] ]( literal[string] , identifier[defined_obj] ,** identifier[kwargs] )
identifier[log] . identifier[info] ( literal[string] , identifier[tmpl_create] )
keyword[if] identifier[tmpl_create] :
identifier[template_id] = identifier[tmpl_create] [ literal[string] ][ literal[int] ]
identifier[log] . identifier[info] ( literal[string] , identifier[json] . identifier[dumps] ( identifier[template_components] , identifier[indent] = literal[int] ))
identifier[log] . identifier[info] ( literal[string] , identifier[json] . identifier[dumps] ( identifier[discovery_components] , identifier[indent] = literal[int] ))
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[CHANGE_STACK] , identifier[indent] = literal[int] )))
keyword[if] identifier[existing_obj] keyword[or] keyword[not] identifier[dry_run] :
keyword[for] identifier[component] keyword[in] identifier[TEMPLATE_COMPONENT_ORDER] :
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[component] )))
identifier[existing_c_list] = identifier[_get_existing_template_c_list] ( identifier[component] , identifier[template_id] ,** identifier[kwargs] )
identifier[existing_c_list_subs] = identifier[__salt__] [ literal[string] ]( identifier[existing_c_list] ,** identifier[kwargs] ) keyword[if] identifier[existing_c_list] keyword[else] []
keyword[if] identifier[component] keyword[in] identifier[template_components] :
identifier[defined_c_list_subs] = identifier[__salt__] [ literal[string] ](
identifier[template_components] [ identifier[component] ],
identifier[extend_params] ={ identifier[TEMPLATE_COMPONENT_DEF] [ identifier[component] ][ literal[string] ]: identifier[template_id] },
identifier[filter_key] = identifier[TEMPLATE_COMPONENT_DEF] [ identifier[component] ][ literal[string] ],
** identifier[kwargs] )
keyword[else] :
identifier[defined_c_list_subs] =[]
identifier[_manage_component] ( identifier[component] , identifier[template_id] , identifier[defined_c_list_subs] , identifier[existing_c_list_subs] ,** identifier[kwargs] )
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[CHANGE_STACK] , identifier[indent] = literal[int] )))
keyword[for] identifier[d_rule_component] keyword[in] identifier[discovery_components] :
identifier[q_def] = identifier[d_rule_component] [ literal[string] ]
identifier[c_def] = identifier[TEMPLATE_COMPONENT_DEF] [ identifier[q_def] [ literal[string] ]]
identifier[q_object] = identifier[c_def] [ literal[string] ]
identifier[q_params] = identifier[dict] ( identifier[c_def] [ literal[string] ])
identifier[q_params] . identifier[update] ({ identifier[c_def] [ literal[string] ]: identifier[template_id] })
identifier[q_params] . identifier[update] ({ literal[string] :{ identifier[c_def] [ literal[string] ]: identifier[q_def] [ literal[string] ]}})
identifier[parent_id] = identifier[__salt__] [ literal[string] ]( identifier[q_object] , identifier[q_params] ,** identifier[kwargs] )
keyword[for] identifier[proto_name] keyword[in] identifier[DISCOVERYRULE_COMPONENT_ORDER] :
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[proto_name] )))
identifier[existing_p_list] = identifier[_get_existing_template_c_list] ( identifier[proto_name] , identifier[parent_id] ,** identifier[kwargs] )
identifier[existing_p_list_subs] = identifier[__salt__] [ literal[string] ]( identifier[existing_p_list] ,** identifier[kwargs] ) keyword[if] identifier[existing_p_list] keyword[else] []
keyword[if] identifier[proto_name] keyword[in] identifier[d_rule_component] :
identifier[defined_p_list_subs] = identifier[__salt__] [ literal[string] ](
identifier[d_rule_component] [ identifier[proto_name] ],
identifier[extend_params] ={ identifier[c_def] [ literal[string] ]: identifier[template_id] },
** identifier[kwargs] )
keyword[else] :
identifier[defined_p_list_subs] =[]
identifier[_manage_component] ( identifier[proto_name] ,
identifier[parent_id] ,
identifier[defined_p_list_subs] ,
identifier[existing_p_list_subs] ,
identifier[template_id] = identifier[template_id] ,
** identifier[kwargs] )
identifier[log] . identifier[info] ( literal[string] , identifier[six] . identifier[text_type] ( identifier[json] . identifier[dumps] ( identifier[CHANGE_STACK] , identifier[indent] = literal[int] )))
keyword[if] keyword[not] identifier[CHANGE_STACK] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[else] :
identifier[tmpl_action] = identifier[next] (( identifier[item] keyword[for] identifier[item] keyword[in] identifier[CHANGE_STACK]
keyword[if] identifier[item] [ literal[string] ]== literal[string] keyword[and] identifier[item] [ literal[string] ]== literal[string] ), keyword[None] )
keyword[if] identifier[tmpl_action] :
identifier[ret] [ literal[string] ]= keyword[True]
keyword[if] identifier[dry_run] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ identifier[name] :{ literal[string] : literal[string] . identifier[format] ( identifier[name] ),
literal[string] : literal[string]
literal[string] . identifier[format] ( identifier[name] )}}
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ identifier[name] :{ literal[string] : literal[string] . identifier[format] ( identifier[name] ),
literal[string] : literal[string] . identifier[format] ( identifier[name] )}}
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[True]
keyword[if] identifier[dry_run] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ identifier[name] :{ literal[string] : literal[string] . identifier[format] ( identifier[name] ),
literal[string] : literal[string]
literal[string] . identifier[format] ( identifier[name] )}}
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ identifier[name] :{ literal[string] : literal[string] . identifier[format] ( identifier[name] ),
literal[string] : literal[string] . identifier[format] ( identifier[name] )}}
keyword[return] identifier[ret] | def present(name, params, static_host_list=True, **kwargs):
"""
    Creates a Zabbix Template object or, if it differs, updates it according to the defined parameters. See Zabbix API documentation.
Zabbix API version: >3.0
:param name: Zabbix Template name
:param params: Additional parameters according to Zabbix API documentation
    :param static_host_list: Whether hosts assigned to the template are controlled
    only by this state or can also be assigned externally
:param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)
.. note::
    If there is a need to get a value from the current Zabbix instance (e.g. IDs of host groups you want the template
    to be associated with), put a dictionary with the two keys "query_object" and "query_name" instead of the value.
    In this example we want to create a template named "Testing Template", assign it to the hostgroup Templates,
    link it to two Ceph nodes and create a macro.
.. note::
IMPORTANT NOTE:
    Objects (except for the template name) are identified by name (or by another key in some exceptional cases),
    so changing the name of an object means deleting the old one and creating a new one with a new ID!!!
.. note::
NOT SUPPORTED FEATURES:
- linked templates
- trigger dependencies
- groups and group prototypes for host prototypes
SLS Example:
.. code-block:: yaml
zabbix-template-present:
zabbix_template.present:
- name: Testing Template
# Do not touch existing assigned hosts
            # True will detach all hosts other than those defined here
- static_host_list: False
- params:
description: Template for Ceph nodes
groups:
# groups must already exist
# template must be at least in one hostgroup
- groupid:
query_object: hostgroup
query_name: Templates
macros:
- macro: "{$CEPH_CLUSTER_NAME}"
value: ceph
hosts:
# hosts must already exist
- hostid:
query_object: host
query_name: ceph-osd-01
- hostid:
query_object: host
query_name: ceph-osd-02
# templates:
# Linked templates - not supported by state module but can be linked manually (will not be touched)
applications:
- name: Ceph OSD
items:
- name: Ceph OSD avg fill item
key_: ceph.osd_avg_fill
type: 2
value_type: 0
delay: 60
units: '%'
description: 'Average fill of OSD'
applications:
- applicationid:
query_object: application
query_name: Ceph OSD
triggers:
- description: "Ceph OSD filled more that 90%"
expression: "{{'{'}}Testing Template:ceph.osd_avg_fill.last(){{'}'}}>90"
priority: 4
discoveries:
- name: Mounted filesystem discovery
key_: vfs.fs.discovery
type: 0
delay: 60
itemprototypes:
- name: Free disk space on {{'{#'}}FSNAME}
key_: vfs.fs.size[{{'{#'}}FSNAME},free]
type: 0
value_type: 3
delay: 60
applications:
- applicationid:
query_object: application
query_name: Ceph OSD
triggerprototypes:
- description: "Free disk space is less than 20% on volume {{'{#'}}FSNAME{{'}'}}"
expression: "{{'{'}}Testing Template:vfs.fs.size[{{'{#'}}FSNAME},free].last(){{'}'}}<20"
graphs:
- name: Ceph OSD avg fill graph
width: 900
height: 200
graphtype: 0
gitems:
- color: F63100
itemid:
query_object: item
query_name: Ceph OSD avg fill item
screens:
- name: Ceph
hsize: 1
vsize: 1
screenitems:
- x: 0
y: 0
resourcetype: 0
resourceid:
query_object: graph
query_name: Ceph OSD avg fill graph
"""
zabbix_id_mapper = __salt__['zabbix.get_zabbix_id_mapper']()
dry_run = __opts__['test']
ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}
params['host'] = name
del CHANGE_STACK[:]
# Divide template yaml definition into parts
# - template definition itself
# - simple template components
# - components that have other sub-components
# (e.g. discoveries - where parent ID is needed in advance for sub-component manipulation)
template_definition = {}
template_components = {}
discovery_components = []
for attr in params:
if attr in TEMPLATE_COMPONENT_ORDER and six.text_type(attr) != 'discoveries':
template_components[attr] = params[attr] # depends on [control=['if'], data=[]]
elif six.text_type(attr) == 'discoveries':
d_rules = []
for d_rule in params[attr]:
d_rule_components = {'query_pid': {'component': attr, 'filter_val': d_rule[TEMPLATE_COMPONENT_DEF[attr]['filter']]}}
for proto_name in DISCOVERYRULE_COMPONENT_ORDER:
if proto_name in d_rule:
d_rule_components[proto_name] = d_rule[proto_name]
del d_rule[proto_name] # depends on [control=['if'], data=['proto_name', 'd_rule']] # depends on [control=['for'], data=['proto_name']]
discovery_components.append(d_rule_components)
d_rules.append(d_rule) # depends on [control=['for'], data=['d_rule']]
template_components[attr] = d_rules # depends on [control=['if'], data=[]]
else:
template_definition[attr] = params[attr] # depends on [control=['for'], data=['attr']]
    # a component left undefined here means its existing items are removed during the update (empty list)
for attr in TEMPLATE_COMPONENT_ORDER:
if attr not in template_components:
template_components[attr] = [] # depends on [control=['if'], data=['attr', 'template_components']] # depends on [control=['for'], data=['attr']]
    # a component left undefined here means its existing items are removed during the update (empty list)
for attr in TEMPLATE_RELATIONS:
template_definition[attr] = params[attr] if attr in params and params[attr] else [] # depends on [control=['for'], data=['attr']]
defined_obj = __salt__['zabbix.substitute_params'](template_definition, **kwargs)
log.info('SUBSTITUTED template_definition: %s', six.text_type(json.dumps(defined_obj, indent=4)))
tmpl_get = __salt__['zabbix.run_query']('template.get', {'output': 'extend', 'selectGroups': 'groupid', 'selectHosts': 'hostid', 'selectTemplates': 'templateid', 'selectMacros': 'extend', 'filter': {'host': name}}, **kwargs)
log.info('TEMPLATE get result: %s', six.text_type(json.dumps(tmpl_get, indent=4)))
existing_obj = __salt__['zabbix.substitute_params'](tmpl_get[0], **kwargs) if tmpl_get and len(tmpl_get) == 1 else False
if existing_obj:
template_id = existing_obj[zabbix_id_mapper['template']]
if not static_host_list:
# Prepare objects for comparison
defined_wo_hosts = defined_obj
if 'hosts' in defined_obj:
defined_hosts = defined_obj['hosts']
del defined_wo_hosts['hosts'] # depends on [control=['if'], data=['defined_obj']]
else:
defined_hosts = []
existing_wo_hosts = existing_obj
if 'hosts' in existing_obj:
existing_hosts = existing_obj['hosts']
del existing_wo_hosts['hosts'] # depends on [control=['if'], data=['existing_obj']]
else:
existing_hosts = []
# Compare host list separately from the rest of the object comparison since the merged list is needed for
# update
hosts_list = _diff_and_merge_host_list(defined_hosts, existing_hosts)
# Compare objects without hosts
diff_params = __salt__['zabbix.compare_params'](defined_wo_hosts, existing_wo_hosts, True)
# Merge comparison results together
if 'new' in diff_params and 'hosts' in diff_params['new'] or hosts_list:
diff_params['new']['hosts'] = hosts_list # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
diff_params = __salt__['zabbix.compare_params'](defined_obj, existing_obj, True)
if diff_params['new']:
diff_params['new'][zabbix_id_mapper['template']] = template_id
diff_params['old'][zabbix_id_mapper['template']] = template_id
log.info('TEMPLATE: update params: %s', six.text_type(json.dumps(diff_params, indent=4)))
CHANGE_STACK.append({'component': 'template', 'action': 'update', 'params': diff_params['new']})
if not dry_run:
tmpl_update = __salt__['zabbix.run_query']('template.update', diff_params['new'], **kwargs)
log.info('TEMPLATE update result: %s', six.text_type(tmpl_update)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
CHANGE_STACK.append({'component': 'template', 'action': 'create', 'params': defined_obj})
if not dry_run:
tmpl_create = __salt__['zabbix.run_query']('template.create', defined_obj, **kwargs)
log.info('TEMPLATE create result: %s', tmpl_create)
if tmpl_create:
template_id = tmpl_create['templateids'][0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
log.info('\n\ntemplate_components: %s', json.dumps(template_components, indent=4))
log.info('\n\ndiscovery_components: %s', json.dumps(discovery_components, indent=4))
log.info('\n\nCurrent CHANGE_STACK: %s', six.text_type(json.dumps(CHANGE_STACK, indent=4)))
if existing_obj or not dry_run:
for component in TEMPLATE_COMPONENT_ORDER:
log.info('\n\n\n\n\nCOMPONENT: %s\n\n', six.text_type(json.dumps(component)))
            # 1) query for components which belong to the template
existing_c_list = _get_existing_template_c_list(component, template_id, **kwargs)
existing_c_list_subs = __salt__['zabbix.substitute_params'](existing_c_list, **kwargs) if existing_c_list else []
if component in template_components:
defined_c_list_subs = __salt__['zabbix.substitute_params'](template_components[component], extend_params={TEMPLATE_COMPONENT_DEF[component]['qselectpid']: template_id}, filter_key=TEMPLATE_COMPONENT_DEF[component]['filter'], **kwargs) # depends on [control=['if'], data=['component', 'template_components']]
else:
defined_c_list_subs = []
# 2) take lists of particular component and compare -> do create, update and delete actions
_manage_component(component, template_id, defined_c_list_subs, existing_c_list_subs, **kwargs) # depends on [control=['for'], data=['component']]
log.info('\n\nCurrent CHANGE_STACK: %s', six.text_type(json.dumps(CHANGE_STACK, indent=4)))
for d_rule_component in discovery_components:
# query for parent id -> "query_pid": {"filter_val": "vfs.fs.discovery", "component": "discoveries"}
q_def = d_rule_component['query_pid']
c_def = TEMPLATE_COMPONENT_DEF[q_def['component']]
q_object = c_def['qtype']
q_params = dict(c_def['output'])
q_params.update({c_def['qselectpid']: template_id})
q_params.update({'filter': {c_def['filter']: q_def['filter_val']}})
parent_id = __salt__['zabbix.get_object_id_by_params'](q_object, q_params, **kwargs)
for proto_name in DISCOVERYRULE_COMPONENT_ORDER:
log.info('\n\n\n\n\nPROTOTYPE_NAME: %s\n\n', six.text_type(json.dumps(proto_name)))
existing_p_list = _get_existing_template_c_list(proto_name, parent_id, **kwargs)
existing_p_list_subs = __salt__['zabbix.substitute_params'](existing_p_list, **kwargs) if existing_p_list else []
if proto_name in d_rule_component:
defined_p_list_subs = __salt__['zabbix.substitute_params'](d_rule_component[proto_name], extend_params={c_def['qselectpid']: template_id}, **kwargs) # depends on [control=['if'], data=['proto_name', 'd_rule_component']]
else:
defined_p_list_subs = []
_manage_component(proto_name, parent_id, defined_p_list_subs, existing_p_list_subs, template_id=template_id, **kwargs) # depends on [control=['for'], data=['proto_name']] # depends on [control=['for'], data=['d_rule_component']] # depends on [control=['if'], data=[]]
log.info('\n\nCurrent CHANGE_STACK: %s', six.text_type(json.dumps(CHANGE_STACK, indent=4)))
if not CHANGE_STACK:
ret['result'] = True
ret['comment'] = 'Zabbix Template "{0}" already exists and corresponds to a definition.'.format(name) # depends on [control=['if'], data=[]]
else:
tmpl_action = next((item for item in CHANGE_STACK if item['component'] == 'template' and item['action'] == 'create'), None)
if tmpl_action:
ret['result'] = True
if dry_run:
ret['comment'] = 'Zabbix Template "{0}" would be created.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" does not exist.'.format(name), 'new': 'Zabbix Template "{0}" would be created according definition.'.format(name)}} # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Zabbix Template "{0}" created.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" did not exist.'.format(name), 'new': 'Zabbix Template "{0}" created according definition.'.format(name)}} # depends on [control=['if'], data=[]]
else:
ret['result'] = True
if dry_run:
ret['comment'] = 'Zabbix Template "{0}" would be updated.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" differs.'.format(name), 'new': 'Zabbix Template "{0}" would be updated according definition.'.format(name)}} # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Zabbix Template "{0}" updated.'.format(name)
ret['changes'] = {name: {'old': 'Zabbix Template "{0}" differed.'.format(name), 'new': 'Zabbix Template "{0}" updated according definition.'.format(name)}}
return ret |
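A quick illustration of the host-list merge step used above. _diff_and_merge_host_list is internal to the Salt Zabbix module, so treating it as a union keyed by hostid is an assumption in this sketch:

def diff_and_merge_host_list(defined_hosts, existing_hosts):
    # Hypothetical stand-in: union both lists of {'hostid': ...} dicts so an
    # update keeps the existing template links while adding newly defined ones.
    merged = {h['hostid']: h for h in existing_hosts}
    merged.update({h['hostid']: h for h in defined_hosts})
    return list(merged.values())

print(diff_and_merge_host_list([{'hostid': '10084'}], [{'hostid': '10105'}]))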
def import_training_data(self,
positive_corpus_file=os.path.join(os.path.dirname(__file__),
"positive.txt"),
negative_corpus_file=os.path.join(os.path.dirname(__file__),
"negative.txt")
):
"""
This method imports the positive and negative training data from the
two corpus files and creates the training data list.
"""
positive_corpus = open(positive_corpus_file)
negative_corpus = open(negative_corpus_file)
# for line in positive_corpus:
# self.training_data.append((line, True))
# for line in negative_corpus:
# self.training_data.append((line, False))
# The following code works. Need to profile this to see if this is an
# improvement over the code above.
positive_training_data = list(map(lambda x: (x, True), positive_corpus))
negative_training_data = list(map(lambda x: (x, False), negative_corpus))
self.training_data = positive_training_data + negative_training_data | def function[import_training_data, parameter[self, positive_corpus_file, negative_corpus_file]]:
constant[
This method imports the positive and negative training data from the
two corpus files and creates the training data list.
]
variable[positive_corpus] assign[=] call[name[open], parameter[name[positive_corpus_file]]]
variable[negative_corpus] assign[=] call[name[open], parameter[name[negative_corpus_file]]]
variable[positive_training_data] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b236ba60>, name[positive_corpus]]]]]
variable[negative_training_data] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b236ba90>, name[negative_corpus]]]]]
name[self].training_data assign[=] binary_operation[name[positive_training_data] + name[negative_training_data]] | keyword[def] identifier[import_training_data] ( identifier[self] ,
identifier[positive_corpus_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ),
literal[string] ),
identifier[negative_corpus_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ),
literal[string] )
):
literal[string]
identifier[positive_corpus] = identifier[open] ( identifier[positive_corpus_file] )
identifier[negative_corpus] = identifier[open] ( identifier[negative_corpus_file] )
identifier[positive_training_data] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[x] :( identifier[x] , keyword[True] ), identifier[positive_corpus] ))
identifier[negative_training_data] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[x] :( identifier[x] , keyword[False] ), identifier[negative_corpus] ))
identifier[self] . identifier[training_data] = identifier[positive_training_data] + identifier[negative_training_data] | def import_training_data(self, positive_corpus_file=os.path.join(os.path.dirname(__file__), 'positive.txt'), negative_corpus_file=os.path.join(os.path.dirname(__file__), 'negative.txt')):
"""
This method imports the positive and negative training data from the
two corpus files and creates the training data list.
"""
positive_corpus = open(positive_corpus_file)
negative_corpus = open(negative_corpus_file) # for line in positive_corpus:
# self.training_data.append((line, True))
# for line in negative_corpus:
# self.training_data.append((line, False))
# The following code works. Need to profile this to see if this is an
# improvement over the code above.
positive_training_data = list(map(lambda x: (x, True), positive_corpus))
negative_training_data = list(map(lambda x: (x, False), negative_corpus))
self.training_data = positive_training_data + negative_training_data |
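A minimal sanity check of the labelling scheme above, using in-memory lists in place of the positive.txt/negative.txt corpus files:

# Each corpus line becomes a (line, label) pair; True marks the positive corpus.
positive_corpus = ['great movie\n', 'loved it\n']
negative_corpus = ['terrible\n']
training_data = list(map(lambda x: (x, True), positive_corpus)) + \
                list(map(lambda x: (x, False), negative_corpus))
assert training_data == [('great movie\n', True), ('loved it\n', True), ('terrible\n', False)]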
def _merge_perm(self, permission_name, view_menu_name):
"""
Add the new permission/view_menu pair to ab_permission_view_role if it does not exist.
It will also add the related entries to the ab_permission
and ab_view_menu meta tables.
:param permission_name: Name of the permission.
:type permission_name: str
:param view_menu_name: Name of the view-menu
:type view_menu_name: str
:return:
"""
permission = self.find_permission(permission_name)
view_menu = self.find_view_menu(view_menu_name)
pv = None
if permission and view_menu:
pv = self.get_session.query(self.permissionview_model).filter_by(
permission=permission, view_menu=view_menu).first()
if not pv and permission_name and view_menu_name:
self.add_permission_view_menu(permission_name, view_menu_name) | def function[_merge_perm, parameter[self, permission_name, view_menu_name]]:
constant[
Add the new permission/view_menu pair to ab_permission_view_role if it does not exist.
It will also add the related entries to the ab_permission
and ab_view_menu meta tables.
:param permission_name: Name of the permission.
:type permission_name: str
:param view_menu_name: Name of the view-menu
:type view_menu_name: str
:return:
]
variable[permission] assign[=] call[name[self].find_permission, parameter[name[permission_name]]]
variable[view_menu] assign[=] call[name[self].find_view_menu, parameter[name[view_menu_name]]]
variable[pv] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b033ef20> begin[:]
variable[pv] assign[=] call[call[call[name[self].get_session.query, parameter[name[self].permissionview_model]].filter_by, parameter[]].first, parameter[]]
if <ast.BoolOp object at 0x7da1b033cb50> begin[:]
call[name[self].add_permission_view_menu, parameter[name[permission_name], name[view_menu_name]]] | keyword[def] identifier[_merge_perm] ( identifier[self] , identifier[permission_name] , identifier[view_menu_name] ):
literal[string]
identifier[permission] = identifier[self] . identifier[find_permission] ( identifier[permission_name] )
identifier[view_menu] = identifier[self] . identifier[find_view_menu] ( identifier[view_menu_name] )
identifier[pv] = keyword[None]
keyword[if] identifier[permission] keyword[and] identifier[view_menu] :
identifier[pv] = identifier[self] . identifier[get_session] . identifier[query] ( identifier[self] . identifier[permissionview_model] ). identifier[filter_by] (
identifier[permission] = identifier[permission] , identifier[view_menu] = identifier[view_menu] ). identifier[first] ()
keyword[if] keyword[not] identifier[pv] keyword[and] identifier[permission_name] keyword[and] identifier[view_menu_name] :
identifier[self] . identifier[add_permission_view_menu] ( identifier[permission_name] , identifier[view_menu_name] ) | def _merge_perm(self, permission_name, view_menu_name):
"""
Add the new permission/view_menu pair to ab_permission_view_role if it does not exist.
It will also add the related entries to the ab_permission
and ab_view_menu meta tables.
:param permission_name: Name of the permission.
:type permission_name: str
:param view_menu_name: Name of the view-menu
:type view_menu_name: str
:return:
"""
permission = self.find_permission(permission_name)
view_menu = self.find_view_menu(view_menu_name)
pv = None
if permission and view_menu:
pv = self.get_session.query(self.permissionview_model).filter_by(permission=permission, view_menu=view_menu).first() # depends on [control=['if'], data=[]]
if not pv and permission_name and view_menu_name:
self.add_permission_view_menu(permission_name, view_menu_name) # depends on [control=['if'], data=[]] |
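A hedged usage sketch for the helper above. Assuming a Flask-AppBuilder style security manager instance sm, the call is idempotent:

# First call creates the permission, view menu and their pairing as needed;
# the second call finds the existing pair and adds nothing.
sm._merge_perm('can_read', 'MyModelView')
sm._merge_perm('can_read', 'MyModelView')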
def forward(self, x, mask):
"""Pass the input (and mask) through each layer in turn."""
all_layers = []
for layer in self.layers:
x = layer(x, mask)
if self.return_all_layers:
all_layers.append(x)
if self.return_all_layers:
all_layers[-1] = self.norm(all_layers[-1])
return all_layers
return self.norm(x) | def function[forward, parameter[self, x, mask]]:
constant[Pass the input (and mask) through each layer in turn.]
variable[all_layers] assign[=] list[[]]
for taget[name[layer]] in starred[name[self].layers] begin[:]
variable[x] assign[=] call[name[layer], parameter[name[x], name[mask]]]
if name[self].return_all_layers begin[:]
call[name[all_layers].append, parameter[name[x]]]
if name[self].return_all_layers begin[:]
call[name[all_layers]][<ast.UnaryOp object at 0x7da20c990700>] assign[=] call[name[self].norm, parameter[call[name[all_layers]][<ast.UnaryOp object at 0x7da20c9937f0>]]]
return[name[all_layers]]
return[call[name[self].norm, parameter[name[x]]]] | keyword[def] identifier[forward] ( identifier[self] , identifier[x] , identifier[mask] ):
literal[string]
identifier[all_layers] =[]
keyword[for] identifier[layer] keyword[in] identifier[self] . identifier[layers] :
identifier[x] = identifier[layer] ( identifier[x] , identifier[mask] )
keyword[if] identifier[self] . identifier[return_all_layers] :
identifier[all_layers] . identifier[append] ( identifier[x] )
keyword[if] identifier[self] . identifier[return_all_layers] :
identifier[all_layers] [- literal[int] ]= identifier[self] . identifier[norm] ( identifier[all_layers] [- literal[int] ])
keyword[return] identifier[all_layers]
keyword[return] identifier[self] . identifier[norm] ( identifier[x] ) | def forward(self, x, mask):
"""Pass the input (and mask) through each layer in turn."""
all_layers = []
for layer in self.layers:
x = layer(x, mask)
if self.return_all_layers:
all_layers.append(x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['layer']]
if self.return_all_layers:
all_layers[-1] = self.norm(all_layers[-1])
return all_layers # depends on [control=['if'], data=[]]
return self.norm(x) |
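A minimal encoder stack that this forward pass could live on, in the annotated-transformer style. Assumptions: PyTorch, a size attribute on the layer, and return_all_layers left off for brevity:

import copy
import torch.nn as nn

class Encoder(nn.Module):
    def __init__(self, layer, n):
        super().__init__()
        # n independent copies of the same encoder layer.
        self.layers = nn.ModuleList(copy.deepcopy(layer) for _ in range(n))
        self.norm = nn.LayerNorm(layer.size)  # assumes the layer exposes .size
        self.return_all_layers = False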
def sector_shift(self):
"""
Property with the current sector-size shift. The actual sector size is
2 ** sector_shift.
"""
header = self.source.header
return header.mini_sector_shift if self._is_mini \
else header.sector_shift | def function[sector_shift, parameter[self]]:
constant[
Property with the current sector-size shift. The actual sector size is
2 ** sector_shift.
]
variable[header] assign[=] name[self].source.header
return[<ast.IfExp object at 0x7da1b0a65180>] | keyword[def] identifier[sector_shift] ( identifier[self] ):
literal[string]
identifier[header] = identifier[self] . identifier[source] . identifier[header]
keyword[return] identifier[header] . identifier[mini_sector_shift] keyword[if] identifier[self] . identifier[_is_mini] keyword[else] identifier[header] . identifier[sector_shift] | def sector_shift(self):
"""
Property with the current sector-size shift. The actual sector size is
2 ** sector_shift.
"""
header = self.source.header
return header.mini_sector_shift if self._is_mini else header.sector_shift |
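The shift-to-size relationship the property above encodes, spelled out with the standard compound-file (CFB) values:

sector_shift = 9        # regular sectors: 2 ** 9 = 512 bytes
mini_sector_shift = 6   # mini-stream sectors: 2 ** 6 = 64 bytes
assert 2 ** sector_shift == 512
assert 2 ** mini_sector_shift == 64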
def find_templates(self):
"""
Look for templates and extract the nodes containing the SASS file.
"""
paths = set()
for loader in self.get_loaders():
try:
module = import_module(loader.__module__)
get_template_sources = getattr(
module, 'get_template_sources', loader.get_template_sources)
template_sources = get_template_sources('')
paths.update([t.name if isinstance(t, Origin) else t for t in template_sources])
except (ImportError, AttributeError):
pass
if not paths:
raise CommandError(
"No template paths found. None of the configured template loaders provided template paths")
templates = set()
for path in paths:
for root, _, files in os.walk(str(path)):
templates.update(os.path.join(root, name)
for name in files if not name.startswith('.') and
any(name.endswith(ext) for ext in self.template_exts))
if not templates:
raise CommandError(
"No templates found. Make sure your TEMPLATE_LOADERS and TEMPLATE_DIRS settings are correct.")
return templates | def function[find_templates, parameter[self]]:
constant[
Look for templates and extract the nodes containing the SASS file.
]
variable[paths] assign[=] call[name[set], parameter[]]
for taget[name[loader]] in starred[call[name[self].get_loaders, parameter[]]] begin[:]
<ast.Try object at 0x7da20c7c88e0>
if <ast.UnaryOp object at 0x7da20c7cba30> begin[:]
<ast.Raise object at 0x7da20c7cb640>
variable[templates] assign[=] call[name[set], parameter[]]
for taget[name[path]] in starred[name[paths]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c7cae30>, <ast.Name object at 0x7da20c7cb9d0>, <ast.Name object at 0x7da20c7c96f0>]]] in starred[call[name[os].walk, parameter[call[name[str], parameter[name[path]]]]]] begin[:]
call[name[templates].update, parameter[<ast.GeneratorExp object at 0x7da20c7ca470>]]
if <ast.UnaryOp object at 0x7da18f09e560> begin[:]
<ast.Raise object at 0x7da18f09c430>
return[name[templates]] | keyword[def] identifier[find_templates] ( identifier[self] ):
literal[string]
identifier[paths] = identifier[set] ()
keyword[for] identifier[loader] keyword[in] identifier[self] . identifier[get_loaders] ():
keyword[try] :
identifier[module] = identifier[import_module] ( identifier[loader] . identifier[__module__] )
identifier[get_template_sources] = identifier[getattr] (
identifier[module] , literal[string] , identifier[loader] . identifier[get_template_sources] )
identifier[template_sources] = identifier[get_template_sources] ( literal[string] )
identifier[paths] . identifier[update] ([ identifier[t] . identifier[name] keyword[if] identifier[isinstance] ( identifier[t] , identifier[Origin] ) keyword[else] identifier[t] keyword[for] identifier[t] keyword[in] identifier[template_sources] ])
keyword[except] ( identifier[ImportError] , identifier[AttributeError] ):
keyword[pass]
keyword[if] keyword[not] identifier[paths] :
keyword[raise] identifier[CommandError] (
literal[string] )
identifier[templates] = identifier[set] ()
keyword[for] identifier[path] keyword[in] identifier[paths] :
keyword[for] identifier[root] , identifier[_] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[str] ( identifier[path] )):
identifier[templates] . identifier[update] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[name] )
keyword[for] identifier[name] keyword[in] identifier[files] keyword[if] keyword[not] identifier[name] . identifier[startswith] ( literal[string] ) keyword[and]
identifier[any] ( identifier[name] . identifier[endswith] ( identifier[ext] ) keyword[for] identifier[ext] keyword[in] identifier[self] . identifier[template_exts] ))
keyword[if] keyword[not] identifier[templates] :
keyword[raise] identifier[CommandError] (
literal[string] )
keyword[return] identifier[templates] | def find_templates(self):
"""
Look for templates and extract the nodes containing the SASS file.
"""
paths = set()
for loader in self.get_loaders():
try:
module = import_module(loader.__module__)
get_template_sources = getattr(module, 'get_template_sources', loader.get_template_sources)
template_sources = get_template_sources('')
paths.update([t.name if isinstance(t, Origin) else t for t in template_sources]) # depends on [control=['try'], data=[]]
except (ImportError, AttributeError):
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['loader']]
if not paths:
raise CommandError('No template paths found. None of the configured template loaders provided template paths') # depends on [control=['if'], data=[]]
templates = set()
for path in paths:
for (root, _, files) in os.walk(str(path)):
templates.update((os.path.join(root, name) for name in files if not name.startswith('.') and any((name.endswith(ext) for ext in self.template_exts)))) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['path']]
if not templates:
raise CommandError('No templates found. Make sure your TEMPLATE_LOADERS and TEMPLATE_DIRS settings are correct.') # depends on [control=['if'], data=[]]
return templates |
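The directory walk at the heart of the method above, isolated into a standalone sketch. It uses plain template roots instead of Django loaders, and the extension set is an assumption:

import os

TEMPLATE_EXTS = ('.html', '.txt')  # assumed; the command reads self.template_exts

def walk_templates(roots):
    found = set()
    for root_dir in roots:
        for root, _, files in os.walk(str(root_dir)):
            found.update(os.path.join(root, name) for name in files
                         if not name.startswith('.') and name.endswith(TEMPLATE_EXTS))
    return found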
def escape(self, obj, mapping=None):
"""Escape whatever value you pass to it.
Non-standard, for internal use; do not use this in your applications.
"""
if isinstance(obj, str_type):
return "'" + self.escape_string(obj) + "'"
if isinstance(obj, (bytes, bytearray)):
ret = self._quote_bytes(obj)
if self._binary_prefix:
ret = "_binary" + ret
return ret
return converters.escape_item(obj, self.charset, mapping=mapping) | def function[escape, parameter[self, obj, mapping]]:
constant[Escape whatever value you pass to it.
Non-standard, for internal use; do not use this in your applications.
]
if call[name[isinstance], parameter[name[obj], name[str_type]]] begin[:]
return[binary_operation[binary_operation[constant['] + call[name[self].escape_string, parameter[name[obj]]]] + constant[']]]
if call[name[isinstance], parameter[name[obj], tuple[[<ast.Name object at 0x7da20c7cb280>, <ast.Name object at 0x7da20c7ca710>]]]] begin[:]
variable[ret] assign[=] call[name[self]._quote_bytes, parameter[name[obj]]]
if name[self]._binary_prefix begin[:]
variable[ret] assign[=] binary_operation[constant[_binary] + name[ret]]
return[name[ret]]
return[call[name[converters].escape_item, parameter[name[obj], name[self].charset]]] | keyword[def] identifier[escape] ( identifier[self] , identifier[obj] , identifier[mapping] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[str_type] ):
keyword[return] literal[string] + identifier[self] . identifier[escape_string] ( identifier[obj] )+ literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] ,( identifier[bytes] , identifier[bytearray] )):
identifier[ret] = identifier[self] . identifier[_quote_bytes] ( identifier[obj] )
keyword[if] identifier[self] . identifier[_binary_prefix] :
identifier[ret] = literal[string] + identifier[ret]
keyword[return] identifier[ret]
keyword[return] identifier[converters] . identifier[escape_item] ( identifier[obj] , identifier[self] . identifier[charset] , identifier[mapping] = identifier[mapping] ) | def escape(self, obj, mapping=None):
"""Escape whatever value you pass to it.
Non-standard, for internal use; do not use this in your applications.
"""
if isinstance(obj, str_type):
return "'" + self.escape_string(obj) + "'" # depends on [control=['if'], data=[]]
if isinstance(obj, (bytes, bytearray)):
ret = self._quote_bytes(obj)
if self._binary_prefix:
ret = '_binary' + ret # depends on [control=['if'], data=[]]
return ret # depends on [control=['if'], data=[]]
return converters.escape_item(obj, self.charset, mapping=mapping) |
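Expected behavior of the escaper above, checked against the PyMySQL helper it ultimately delegates to. A sketch only; exact quoting can vary with the connection charset:

from pymysql.converters import escape_string

# Strings get backslash-escaped, then the method wraps them in single quotes;
# bytes would additionally gain a _binary prefix when binary_prefix is on.
assert "'" + escape_string("it's") + "'" == "'it\\'s'"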
def get_fp_raw():
'''
Generate fp_raw_str.
'''
fp_file_path = os.path.expanduser('~/.xunleipy_fp')
fp_list = []
try:
with open(fp_file_path, 'r') as fp_file:
fp_str = fp_file.readline()
if len(fp_str) > 0:
fp_list = fp_str.split('###')
except IOError:
pass
if len(fp_list) < 14:
fp_list = _get_random_fp_raw()
fp_str = '###'.join(fp_list)
with open(fp_file_path, 'w') as fp_file:
fp_file.write(fp_str)
source = fp_str.strip()
if six.PY3:
source = source.encode('utf-8')
fp_raw = base64.b64encode(source)
return fp_raw | def function[get_fp_raw, parameter[]]:
constant[
Generate fp_raw_str.
]
variable[fp_file_path] assign[=] call[name[os].path.expanduser, parameter[constant[~/.xunleipy_fp]]]
variable[fp_list] assign[=] list[[]]
<ast.Try object at 0x7da18bc70670>
if compare[call[name[len], parameter[name[fp_list]]] less[<] constant[14]] begin[:]
variable[fp_list] assign[=] call[name[_get_random_fp_raw], parameter[]]
variable[fp_str] assign[=] call[constant[###].join, parameter[name[fp_list]]]
with call[name[open], parameter[name[fp_file_path], constant[w]]] begin[:]
call[name[fp_file].write, parameter[name[fp_str]]]
variable[source] assign[=] call[name[fp_str].strip, parameter[]]
if name[six].PY3 begin[:]
variable[source] assign[=] call[name[source].encode, parameter[constant[utf-8]]]
variable[fp_raw] assign[=] call[name[base64].b64encode, parameter[name[source]]]
return[name[fp_raw]] | keyword[def] identifier[get_fp_raw] ():
literal[string]
identifier[fp_file_path] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
identifier[fp_list] =[]
keyword[try] :
keyword[with] identifier[open] ( identifier[fp_file_path] , literal[string] ) keyword[as] identifier[fp_file] :
identifier[fp_str] = identifier[fp_file] . identifier[readline] ()
keyword[if] identifier[len] ( identifier[fp_str] )> literal[int] :
identifier[fp_list] = identifier[fp_str] . identifier[split] ( literal[string] )
keyword[except] identifier[IOError] :
keyword[pass]
keyword[if] identifier[len] ( identifier[fp_list] )< literal[int] :
identifier[fp_list] = identifier[_get_random_fp_raw] ()
identifier[fp_str] = literal[string] . identifier[join] ( identifier[fp_list] )
keyword[with] identifier[open] ( identifier[fp_file_path] , literal[string] ) keyword[as] identifier[fp_file] :
identifier[fp_file] . identifier[write] ( identifier[fp_str] )
identifier[source] = identifier[fp_str] . identifier[strip] ()
keyword[if] identifier[six] . identifier[PY3] :
identifier[source] = identifier[source] . identifier[encode] ( literal[string] )
identifier[fp_raw] = identifier[base64] . identifier[b64encode] ( identifier[source] )
keyword[return] identifier[fp_raw] | def get_fp_raw():
"""
Generate fp_raw_str.
"""
fp_file_path = os.path.expanduser('~/.xunleipy_fp')
fp_list = []
try:
with open(fp_file_path, 'r') as fp_file:
fp_str = fp_file.readline()
if len(fp_str) > 0:
fp_list = fp_str.split('###') # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fp_file']] # depends on [control=['try'], data=[]]
except IOError:
pass # depends on [control=['except'], data=[]]
if len(fp_list) < 14:
fp_list = _get_random_fp_raw()
fp_str = '###'.join(fp_list)
with open(fp_file_path, 'w') as fp_file:
fp_file.write(fp_str) # depends on [control=['with'], data=['fp_file']] # depends on [control=['if'], data=[]]
source = fp_str.strip()
if six.PY3:
source = source.encode('utf-8') # depends on [control=['if'], data=[]]
fp_raw = base64.b64encode(source)
return fp_raw |
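The persistence format used above, isolated: fourteen fingerprint fields joined with '###' and base64-encoded. The field values here are placeholders, not real fingerprint data:

import base64

fp_list = ['field%d' % i for i in range(14)]   # stand-in for _get_random_fp_raw()
fp_str = '###'.join(fp_list)
fp_raw = base64.b64encode(fp_str.strip().encode('utf-8'))
print(fp_raw[:20])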
def resolve_memory_access(self, tb, x86_mem_operand):
"""Return operand memory access translation.
"""
size = self.__get_memory_access_size(x86_mem_operand)
addr = None
if x86_mem_operand.base:
addr = ReilRegisterOperand(x86_mem_operand.base, size)
if x86_mem_operand.index and x86_mem_operand.scale != 0x0:
index = ReilRegisterOperand(x86_mem_operand.index, size)
scale = ReilImmediateOperand(x86_mem_operand.scale, size)
scaled_index = tb.temporal(size)
tb.add(tb._builder.gen_mul(index, scale, scaled_index))
if addr:
tmp = tb.temporal(size)
tb.add(tb._builder.gen_add(addr, scaled_index, tmp))
addr = tmp
else:
addr = scaled_index
if x86_mem_operand.displacement != 0x0:
disp = ReilImmediateOperand(x86_mem_operand.displacement, size)
if addr:
tmp = tb.temporal(size)
tb.add(tb._builder.gen_add(addr, disp, tmp))
addr = tmp
else:
addr = disp
else:
if not addr:
disp = ReilImmediateOperand(x86_mem_operand.displacement, size)
addr = disp
# TODO Improve this code and add support for the rest of the segments.
if x86_mem_operand.segment in ["gs", "fs"]:
seg_base_addr_map = {
"gs": "gs_base_addr",
"fs": "fs_base_addr",
}
seg_base = ReilRegisterOperand(seg_base_addr_map[x86_mem_operand.segment], size)
if addr:
tmp = tb.temporal(size)
tb.add(tb._builder.gen_add(addr, seg_base, tmp))
addr = tmp
else:
addr = seg_base
return addr | def function[resolve_memory_access, parameter[self, tb, x86_mem_operand]]:
constant[Return operand memory access translation.
]
variable[size] assign[=] call[name[self].__get_memory_access_size, parameter[name[x86_mem_operand]]]
variable[addr] assign[=] constant[None]
if name[x86_mem_operand].base begin[:]
variable[addr] assign[=] call[name[ReilRegisterOperand], parameter[name[x86_mem_operand].base, name[size]]]
if <ast.BoolOp object at 0x7da1b26ae110> begin[:]
variable[index] assign[=] call[name[ReilRegisterOperand], parameter[name[x86_mem_operand].index, name[size]]]
variable[scale] assign[=] call[name[ReilImmediateOperand], parameter[name[x86_mem_operand].scale, name[size]]]
variable[scaled_index] assign[=] call[name[tb].temporal, parameter[name[size]]]
call[name[tb].add, parameter[call[name[tb]._builder.gen_mul, parameter[name[index], name[scale], name[scaled_index]]]]]
if name[addr] begin[:]
variable[tmp] assign[=] call[name[tb].temporal, parameter[name[size]]]
call[name[tb].add, parameter[call[name[tb]._builder.gen_add, parameter[name[addr], name[scaled_index], name[tmp]]]]]
variable[addr] assign[=] name[tmp]
if compare[name[x86_mem_operand].displacement not_equal[!=] constant[0]] begin[:]
variable[disp] assign[=] call[name[ReilImmediateOperand], parameter[name[x86_mem_operand].displacement, name[size]]]
if name[addr] begin[:]
variable[tmp] assign[=] call[name[tb].temporal, parameter[name[size]]]
call[name[tb].add, parameter[call[name[tb]._builder.gen_add, parameter[name[addr], name[disp], name[tmp]]]]]
variable[addr] assign[=] name[tmp]
if compare[name[x86_mem_operand].segment in list[[<ast.Constant object at 0x7da1b0980dc0>, <ast.Constant object at 0x7da1b0981090>]]] begin[:]
variable[seg_base_addr_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b0983460>, <ast.Constant object at 0x7da1b0983970>], [<ast.Constant object at 0x7da1b0980580>, <ast.Constant object at 0x7da1b0981720>]]
variable[seg_base] assign[=] call[name[ReilRegisterOperand], parameter[call[name[seg_base_addr_map]][name[x86_mem_operand].segment], name[size]]]
if name[addr] begin[:]
variable[tmp] assign[=] call[name[tb].temporal, parameter[name[size]]]
call[name[tb].add, parameter[call[name[tb]._builder.gen_add, parameter[name[addr], name[seg_base], name[tmp]]]]]
variable[addr] assign[=] name[tmp]
return[name[addr]] | keyword[def] identifier[resolve_memory_access] ( identifier[self] , identifier[tb] , identifier[x86_mem_operand] ):
literal[string]
identifier[size] = identifier[self] . identifier[__get_memory_access_size] ( identifier[x86_mem_operand] )
identifier[addr] = keyword[None]
keyword[if] identifier[x86_mem_operand] . identifier[base] :
identifier[addr] = identifier[ReilRegisterOperand] ( identifier[x86_mem_operand] . identifier[base] , identifier[size] )
keyword[if] identifier[x86_mem_operand] . identifier[index] keyword[and] identifier[x86_mem_operand] . identifier[scale] != literal[int] :
identifier[index] = identifier[ReilRegisterOperand] ( identifier[x86_mem_operand] . identifier[index] , identifier[size] )
identifier[scale] = identifier[ReilImmediateOperand] ( identifier[x86_mem_operand] . identifier[scale] , identifier[size] )
identifier[scaled_index] = identifier[tb] . identifier[temporal] ( identifier[size] )
identifier[tb] . identifier[add] ( identifier[tb] . identifier[_builder] . identifier[gen_mul] ( identifier[index] , identifier[scale] , identifier[scaled_index] ))
keyword[if] identifier[addr] :
identifier[tmp] = identifier[tb] . identifier[temporal] ( identifier[size] )
identifier[tb] . identifier[add] ( identifier[tb] . identifier[_builder] . identifier[gen_add] ( identifier[addr] , identifier[scaled_index] , identifier[tmp] ))
identifier[addr] = identifier[tmp]
keyword[else] :
identifier[addr] = identifier[scaled_index]
keyword[if] identifier[x86_mem_operand] . identifier[displacement] != literal[int] :
identifier[disp] = identifier[ReilImmediateOperand] ( identifier[x86_mem_operand] . identifier[displacement] , identifier[size] )
keyword[if] identifier[addr] :
identifier[tmp] = identifier[tb] . identifier[temporal] ( identifier[size] )
identifier[tb] . identifier[add] ( identifier[tb] . identifier[_builder] . identifier[gen_add] ( identifier[addr] , identifier[disp] , identifier[tmp] ))
identifier[addr] = identifier[tmp]
keyword[else] :
identifier[addr] = identifier[disp]
keyword[else] :
keyword[if] keyword[not] identifier[addr] :
identifier[disp] = identifier[ReilImmediateOperand] ( identifier[x86_mem_operand] . identifier[displacement] , identifier[size] )
identifier[addr] = identifier[disp]
keyword[if] identifier[x86_mem_operand] . identifier[segment] keyword[in] [ literal[string] , literal[string] ]:
identifier[seg_base_addr_map] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[seg_base] = identifier[ReilRegisterOperand] ( identifier[seg_base_addr_map] [ identifier[x86_mem_operand] . identifier[segment] ], identifier[size] )
keyword[if] identifier[addr] :
identifier[tmp] = identifier[tb] . identifier[temporal] ( identifier[size] )
identifier[tb] . identifier[add] ( identifier[tb] . identifier[_builder] . identifier[gen_add] ( identifier[addr] , identifier[seg_base] , identifier[tmp] ))
identifier[addr] = identifier[tmp]
keyword[else] :
identifier[addr] = identifier[seg_base]
keyword[return] identifier[addr] | def resolve_memory_access(self, tb, x86_mem_operand):
"""Return operand memory access translation.
"""
size = self.__get_memory_access_size(x86_mem_operand)
addr = None
if x86_mem_operand.base:
addr = ReilRegisterOperand(x86_mem_operand.base, size) # depends on [control=['if'], data=[]]
if x86_mem_operand.index and x86_mem_operand.scale != 0:
index = ReilRegisterOperand(x86_mem_operand.index, size)
scale = ReilImmediateOperand(x86_mem_operand.scale, size)
scaled_index = tb.temporal(size)
tb.add(tb._builder.gen_mul(index, scale, scaled_index))
if addr:
tmp = tb.temporal(size)
tb.add(tb._builder.gen_add(addr, scaled_index, tmp))
addr = tmp # depends on [control=['if'], data=[]]
else:
addr = scaled_index # depends on [control=['if'], data=[]]
if x86_mem_operand.displacement != 0:
disp = ReilImmediateOperand(x86_mem_operand.displacement, size)
if addr:
tmp = tb.temporal(size)
tb.add(tb._builder.gen_add(addr, disp, tmp))
addr = tmp # depends on [control=['if'], data=[]]
else:
addr = disp # depends on [control=['if'], data=[]]
elif not addr:
disp = ReilImmediateOperand(x86_mem_operand.displacement, size)
addr = disp # depends on [control=['if'], data=[]]
# TODO Improve this code and add support for the rest of the segments.
if x86_mem_operand.segment in ['gs', 'fs']:
seg_base_addr_map = {'gs': 'gs_base_addr', 'fs': 'fs_base_addr'}
seg_base = ReilRegisterOperand(seg_base_addr_map[x86_mem_operand.segment], size)
if addr:
tmp = tb.temporal(size)
tb.add(tb._builder.gen_add(addr, seg_base, tmp))
addr = tmp # depends on [control=['if'], data=[]]
else:
addr = seg_base # depends on [control=['if'], data=[]]
return addr |
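Written as plain arithmetic rather than REIL instructions, the translator above computes the usual x86 effective address, plus an fs/gs segment base when present:

def effective_address(base=0, index=0, scale=0, displacement=0, seg_base=0):
    # addr = base + index * scale + displacement + segment_base
    return base + index * scale + displacement + seg_base

# e.g. mov eax, [ebx + 2*8 + 0x10] with ebx = 0x1000:
assert effective_address(base=0x1000, index=2, scale=8, displacement=0x10) == 0x1020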
def open(self) -> bool:
"""
This property is ``True`` when the connection is usable.
It may be used to detect disconnections but this is discouraged per
the EAFP_ principle. When ``open`` is ``False``, using the connection
raises a :exc:`~websockets.exceptions.ConnectionClosed` exception.
.. _EAFP: https://docs.python.org/3/glossary.html#term-eafp
"""
return self.state is State.OPEN and not self.transfer_data_task.done() | def function[open, parameter[self]]:
constant[
This property is ``True`` when the connection is usable.
It may be used to detect disconnections but this is discouraged per
the EAFP_ principle. When ``open`` is ``False``, using the connection
raises a :exc:`~websockets.exceptions.ConnectionClosed` exception.
.. _EAFP: https://docs.python.org/3/glossary.html#term-eafp
]
return[<ast.BoolOp object at 0x7da20c6a9de0>] | keyword[def] identifier[open] ( identifier[self] )-> identifier[bool] :
literal[string]
keyword[return] identifier[self] . identifier[state] keyword[is] identifier[State] . identifier[OPEN] keyword[and] keyword[not] identifier[self] . identifier[transfer_data_task] . identifier[done] () | def open(self) -> bool:
"""
This property is ``True`` when the connection is usable.
It may be used to detect disconnections but this is discouraged per
the EAFP_ principle. When ``open`` is ``False``, using the connection
raises a :exc:`~websockets.exceptions.ConnectionClosed` exception.
.. _EAFP: https://docs.python.org/3/glossary.html#term-eafp
"""
return self.state is State.OPEN and (not self.transfer_data_task.done()) |
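The EAFP style the docstring recommends: act on the connection and catch the closed-connection error rather than polling open first. A sketch that assumes a connected protocol object ws:

from websockets.exceptions import ConnectionClosed

async def send_safely(ws, message):
    try:
        await ws.send(message)      # just try it...
    except ConnectionClosed:
        pass                        # ...and handle the disconnect here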
def validate_password_strength(value):
"""Validates that a password is as least 7 characters long and has at least
1 digit and 1 letter.
"""
min_length = 7
if len(value) < min_length:
raise ValidationError(_('Password must be at least {0} characters '
'long.').format(min_length))
# check for digit
if not any(char.isdigit() for char in value):
raise ValidationError(_('Password must contain at least 1 digit.'))
# check for letter
if not any(char.isalpha() for char in value):
raise ValidationError(_('Password must contain at least 1 letter.')) | def function[validate_password_strength, parameter[value]]:
constant[Validates that a password is at least 7 characters long and has at least
1 digit and 1 letter.
]
variable[min_length] assign[=] constant[7]
if compare[call[name[len], parameter[name[value]]] less[<] name[min_length]] begin[:]
<ast.Raise object at 0x7da207f9b460>
if <ast.UnaryOp object at 0x7da207f98070> begin[:]
<ast.Raise object at 0x7da207f9a620>
if <ast.UnaryOp object at 0x7da18bcc9660> begin[:]
<ast.Raise object at 0x7da18bccb220> | keyword[def] identifier[validate_password_strength] ( identifier[value] ):
literal[string]
identifier[min_length] = literal[int]
keyword[if] identifier[len] ( identifier[value] )< identifier[min_length] :
keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string]
literal[string] ). identifier[format] ( identifier[min_length] ))
keyword[if] keyword[not] identifier[any] ( identifier[char] . identifier[isdigit] () keyword[for] identifier[char] keyword[in] identifier[value] ):
keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string] ))
keyword[if] keyword[not] identifier[any] ( identifier[char] . identifier[isalpha] () keyword[for] identifier[char] keyword[in] identifier[value] ):
keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string] )) | def validate_password_strength(value):
"""Validates that a password is as least 7 characters long and has at least
1 digit and 1 letter.
"""
min_length = 7
if len(value) < min_length:
raise ValidationError(_('Password must be at least {0} characters long.').format(min_length)) # depends on [control=['if'], data=['min_length']]
# check for digit
if not any((char.isdigit() for char in value)):
raise ValidationError(_('Password must contain at least 1 digit.')) # depends on [control=['if'], data=[]]
# check for letter
if not any((char.isalpha() for char in value)):
raise ValidationError(_('Password must contain at least 1 letter.')) # depends on [control=['if'], data=[]] |
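The three rules above exercised directly. This assumes the Django ValidationError and the gettext alias _ are imported, as in the validator's own module:

for pw in ('short1', 'nodigits', '1234567', 'abc1234'):
    try:
        validate_password_strength(pw)
        print(pw, '-> ok')          # only 'abc1234' passes all three checks
    except ValidationError as exc:
        print(pw, '->', exc.messages)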
def init_datamembers(self, rec):
"""Initialize current GOTerm with data members for storing optional attributes."""
# pylint: disable=multiple-statements
if 'synonym' in self.optional_attrs: rec.synonym = []
if 'xref' in self.optional_attrs: rec.xref = set()
if 'subset' in self.optional_attrs: rec.subset = set()
if 'comment' in self.optional_attrs: rec.comment = ""
if 'relationship' in self.optional_attrs:
rec.relationship = {}
rec.relationship_rev = {} | def function[init_datamembers, parameter[self, rec]]:
constant[Initialize current GOTerm with data members for storing optional attributes.]
if compare[constant[synonym] in name[self].optional_attrs] begin[:]
name[rec].synonym assign[=] list[[]]
if compare[constant[xref] in name[self].optional_attrs] begin[:]
name[rec].xref assign[=] call[name[set], parameter[]]
if compare[constant[subset] in name[self].optional_attrs] begin[:]
name[rec].subset assign[=] call[name[set], parameter[]]
if compare[constant[comment] in name[self].optional_attrs] begin[:]
name[rec].comment assign[=] constant[]
if compare[constant[relationship] in name[self].optional_attrs] begin[:]
name[rec].relationship assign[=] dictionary[[], []]
name[rec].relationship_rev assign[=] dictionary[[], []] | keyword[def] identifier[init_datamembers] ( identifier[self] , identifier[rec] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[optional_attrs] : identifier[rec] . identifier[synonym] =[]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[optional_attrs] : identifier[rec] . identifier[xref] = identifier[set] ()
keyword[if] literal[string] keyword[in] identifier[self] . identifier[optional_attrs] : identifier[rec] . identifier[subset] = identifier[set] ()
keyword[if] literal[string] keyword[in] identifier[self] . identifier[optional_attrs] : identifier[rec] . identifier[comment] = literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[optional_attrs] :
identifier[rec] . identifier[relationship] ={}
identifier[rec] . identifier[relationship_rev] ={} | def init_datamembers(self, rec):
"""Initialize current GOTerm with data members for storing optional attributes."""
# pylint: disable=multiple-statements
if 'synonym' in self.optional_attrs:
rec.synonym = [] # depends on [control=['if'], data=[]]
if 'xref' in self.optional_attrs:
rec.xref = set() # depends on [control=['if'], data=[]]
if 'subset' in self.optional_attrs:
rec.subset = set() # depends on [control=['if'], data=[]]
if 'comment' in self.optional_attrs:
rec.comment = '' # depends on [control=['if'], data=[]]
if 'relationship' in self.optional_attrs:
rec.relationship = {}
rec.relationship_rev = {} # depends on [control=['if'], data=[]] |
def _extract_lambda_function_code(resource_properties, code_property_key):
"""
Extracts the Lambda Function Code from the Resource Properties
Parameters
----------
resource_properties dict
Dictionary representing the Properties of the Resource
code_property_key str
Property Key of the code on the Resource
Returns
-------
str
Representing the local code path
"""
codeuri = resource_properties.get(code_property_key, SamFunctionProvider._DEFAULT_CODEURI)
if isinstance(codeuri, dict):
codeuri = SamFunctionProvider._DEFAULT_CODEURI
return codeuri | def function[_extract_lambda_function_code, parameter[resource_properties, code_property_key]]:
constant[
Extracts the Lambda Function Code from the Resource Properties
Parameters
----------
resource_properties dict
Dictionary representing the Properties of the Resource
code_property_key str
Property Key of the code on the Resource
Returns
-------
str
Representing the local code path
]
variable[codeuri] assign[=] call[name[resource_properties].get, parameter[name[code_property_key], name[SamFunctionProvider]._DEFAULT_CODEURI]]
if call[name[isinstance], parameter[name[codeuri], name[dict]]] begin[:]
variable[codeuri] assign[=] name[SamFunctionProvider]._DEFAULT_CODEURI
return[name[codeuri]] | keyword[def] identifier[_extract_lambda_function_code] ( identifier[resource_properties] , identifier[code_property_key] ):
literal[string]
identifier[codeuri] = identifier[resource_properties] . identifier[get] ( identifier[code_property_key] , identifier[SamFunctionProvider] . identifier[_DEFAULT_CODEURI] )
keyword[if] identifier[isinstance] ( identifier[codeuri] , identifier[dict] ):
identifier[codeuri] = identifier[SamFunctionProvider] . identifier[_DEFAULT_CODEURI]
keyword[return] identifier[codeuri] | def _extract_lambda_function_code(resource_properties, code_property_key):
"""
Extracts the Lambda Function Code from the Resource Properties
Parameters
----------
resource_properties dict
Dictionary representing the Properties of the Resource
code_property_key str
Property Key of the code on the Resource
Returns
-------
str
Representing the local code path
"""
codeuri = resource_properties.get(code_property_key, SamFunctionProvider._DEFAULT_CODEURI)
if isinstance(codeuri, dict):
codeuri = SamFunctionProvider._DEFAULT_CODEURI # depends on [control=['if'], data=[]]
return codeuri |
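The fallback above, checked directly: a dict-valued code property (an S3-style location) yields the default local path, while a plain string passes through. The default value here is a hypothetical stand-in for _DEFAULT_CODEURI:

_DEFAULT = '.'  # hypothetical stand-in for SamFunctionProvider._DEFAULT_CODEURI

def extract_code(properties, key='CodeUri'):
    code = properties.get(key, _DEFAULT)
    return _DEFAULT if isinstance(code, dict) else code

assert extract_code({'CodeUri': {'Bucket': 'b', 'Key': 'k'}}) == '.'
assert extract_code({'CodeUri': './hello_world'}) == './hello_world'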
def warn_startup_with_shell_off(platform, gdb_args):
"""return True if user may need to turn shell off
if mac OS version is 16 (sierra) or higher, may need to set shell off due
to os's security requirements
http://stackoverflow.com/questions/39702871/gdb-kind-of-doesnt-work-on-macos-sierra
"""
darwin_match = re.match("darwin-(\d+)\..*", platform)
on_darwin = darwin_match is not None and int(darwin_match.groups()[0]) >= 16
if on_darwin:
shell_is_off = "startup-with-shell off" in gdb_args
return not shell_is_off
return False | def function[warn_startup_with_shell_off, parameter[platform, gdb_args]]:
constant[return True if the user may need to turn startup-with-shell off
if the macOS version is darwin 16 (Sierra) or higher, the shell may need to be turned off due
to the OS's security requirements
http://stackoverflow.com/questions/39702871/gdb-kind-of-doesnt-work-on-macos-sierra
]
variable[darwin_match] assign[=] call[name[re].match, parameter[constant[darwin-(\d+)\..*], name[platform]]]
variable[on_darwin] assign[=] <ast.BoolOp object at 0x7da18c4cea10>
if name[on_darwin] begin[:]
variable[shell_is_off] assign[=] compare[constant[startup-with-shell off] in name[gdb_args]]
return[<ast.UnaryOp object at 0x7da18c4cef50>]
return[constant[False]] | keyword[def] identifier[warn_startup_with_shell_off] ( identifier[platform] , identifier[gdb_args] ):
literal[string]
identifier[darwin_match] = identifier[re] . identifier[match] ( literal[string] , identifier[platform] )
identifier[on_darwin] = identifier[darwin_match] keyword[is] keyword[not] keyword[None] keyword[and] identifier[int] ( identifier[darwin_match] . identifier[groups] ()[ literal[int] ])>= literal[int]
keyword[if] identifier[on_darwin] :
identifier[shell_is_off] = literal[string] keyword[in] identifier[gdb_args]
keyword[return] keyword[not] identifier[shell_is_off]
keyword[return] keyword[False] | def warn_startup_with_shell_off(platform, gdb_args):
"""return True if user may need to turn shell off
if mac OS version is 16 (sierra) or higher, may need to set shell off due
to os's security requirements
http://stackoverflow.com/questions/39702871/gdb-kind-of-doesnt-work-on-macos-sierra
"""
darwin_match = re.match('darwin-(\\d+)\\..*', platform)
on_darwin = darwin_match is not None and int(darwin_match.groups()[0]) >= 16
if on_darwin:
shell_is_off = 'startup-with-shell off' in gdb_args
return not shell_is_off # depends on [control=['if'], data=[]]
return False |
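The check above with sample platform strings; Sierra reports itself as darwin 16:

assert warn_startup_with_shell_off('darwin-16.7.0', '') is True
assert warn_startup_with_shell_off('darwin-16.7.0', 'set startup-with-shell off') is False
assert warn_startup_with_shell_off('linux', '') is False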
def physical_pin(self, function):
"""
Return the physical pin supporting the specified *function*. If no pins
support the desired *function*, this function raises :exc:`PinNoPins`.
If multiple pins support the desired *function*, :exc:`PinMultiplePins`
will be raised (use :func:`physical_pins` if you expect multiple pins
in the result, such as for electrical ground).
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9.
"""
result = self.physical_pins(function)
if len(result) > 1:
raise PinMultiplePins('multiple pins can be used for %s' % function)
elif result:
return result.pop()
else:
raise PinNoPins('no pins can be used for %s' % function) | def function[physical_pin, parameter[self, function]]:
constant[
Return the physical pin supporting the specified *function*. If no pins
support the desired *function*, this function raises :exc:`PinNoPins`.
If multiple pins support the desired *function*, :exc:`PinMultiplePins`
will be raised (use :func:`physical_pins` if you expect multiple pins
in the result, such as for electrical ground).
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9.
]
variable[result] assign[=] call[name[self].physical_pins, parameter[name[function]]]
if compare[call[name[len], parameter[name[result]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da18f09e0e0> | keyword[def] identifier[physical_pin] ( identifier[self] , identifier[function] ):
literal[string]
identifier[result] = identifier[self] . identifier[physical_pins] ( identifier[function] )
keyword[if] identifier[len] ( identifier[result] )> literal[int] :
keyword[raise] identifier[PinMultiplePins] ( literal[string] % identifier[function] )
keyword[elif] identifier[result] :
keyword[return] identifier[result] . identifier[pop] ()
keyword[else] :
keyword[raise] identifier[PinNoPins] ( literal[string] % identifier[function] ) | def physical_pin(self, function):
"""
Return the physical pin supporting the specified *function*. If no pins
support the desired *function*, this function raises :exc:`PinNoPins`.
If multiple pins support the desired *function*, :exc:`PinMultiplePins`
will be raised (use :func:`physical_pins` if you expect multiple pins
in the result, such as for electrical ground).
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9.
"""
result = self.physical_pins(function)
if len(result) > 1:
raise PinMultiplePins('multiple pins can be used for %s' % function) # depends on [control=['if'], data=[]]
elif result:
return result.pop() # depends on [control=['if'], data=[]]
else:
raise PinNoPins('no pins can be used for %s' % function) |
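How the singular and plural lookups differ in practice. A hedged sketch: info stands for a board-info object exposing both methods, and is not defined here:

print(info.physical_pin('GPIO9'))   # exactly one pin, else PinNoPins/PinMultiplePins is raised
print(info.physical_pins('GND'))    # ground maps to several pins -> use the plural form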
def open(self):
""" open: Opens zipfile to write to
Args: None
Returns: None
"""
self.zf = zipfile.ZipFile(self.write_to_path, self.mode) | def function[open, parameter[self]]:
constant[ open: Opens zipfile to write to
Args: None
Returns: None
]
name[self].zf assign[=] call[name[zipfile].ZipFile, parameter[name[self].write_to_path, name[self].mode]] | keyword[def] identifier[open] ( identifier[self] ):
literal[string]
identifier[self] . identifier[zf] = identifier[zipfile] . identifier[ZipFile] ( identifier[self] . identifier[write_to_path] , identifier[self] . identifier[mode] ) | def open(self):
""" open: Opens zipfile to write to
Args: None
Returns: None
"""
self.zf = zipfile.ZipFile(self.write_to_path, self.mode) |
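The same standard-library call the helper wraps, shown directly:

import zipfile

zf = zipfile.ZipFile('archive.zip', 'w')   # what open() does with write_to_path/mode
zf.writestr('hello.txt', 'hello')
zf.close()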
def in_simo_and_inner(self):
"""
Test if a node is simo: single input and multiple output
"""
return len(self.successor) > 1 and self.successor[0] is not None and not self.successor[0].in_or_out and \
len(self.precedence) == 1 and self.precedence[0] is not None and not self.successor[0].in_or_out | def function[in_simo_and_inner, parameter[self]]:
constant[
Test if a node is simo: single input and multiple output
]
return[<ast.BoolOp object at 0x7da1b1d9a470>] | keyword[def] identifier[in_simo_and_inner] ( identifier[self] ):
literal[string]
keyword[return] identifier[len] ( identifier[self] . identifier[successor] )> literal[int] keyword[and] identifier[self] . identifier[successor] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[successor] [ literal[int] ]. identifier[in_or_out] keyword[and] identifier[len] ( identifier[self] . identifier[precedence] )== literal[int] keyword[and] identifier[self] . identifier[precedence] [ literal[int] ] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[successor] [ literal[int] ]. identifier[in_or_out] | def in_simo_and_inner(self):
"""
Test if a node is simo: single input and multiple output
"""
return len(self.successor) > 1 and self.successor[0] is not None and (not self.successor[0].in_or_out) and (len(self.precedence) == 1) and (self.precedence[0] is not None) and (not self.successor[0].in_or_out) |
def embed_kernel(module=None, local_ns=None, **kwargs):
"""Embed and start an IPython kernel in a given scope.
Parameters
----------
module : ModuleType, optional
The module to load into IPython globals (default: caller)
local_ns : dict, optional
The namespace to load into IPython user namespace (default: caller)
kwargs : various, optional
Further keyword args are relayed to the KernelApp constructor,
allowing configuration of the Kernel. Will only have an effect
on the first embed_kernel call for a given process.
"""
# get the app if it exists, or set it up if it doesn't
if IPKernelApp.initialized():
app = IPKernelApp.instance()
else:
app = IPKernelApp.instance(**kwargs)
app.initialize([])
# Undo unnecessary sys module mangling from init_sys_modules.
# This would not be necessary if we could prevent it
# in the first place by using a different InteractiveShell
# subclass, as in the regular embed case.
main = app.kernel.shell._orig_sys_modules_main_mod
if main is not None:
sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main
# load the calling scope if not given
(caller_module, caller_locals) = extract_module_locals(1)
if module is None:
module = caller_module
if local_ns is None:
local_ns = caller_locals
app.kernel.user_module = module
app.kernel.user_ns = local_ns
app.shell.set_completer_frame()
app.start() | def function[embed_kernel, parameter[module, local_ns]]:
constant[Embed and start an IPython kernel in a given scope.
Parameters
----------
module : ModuleType, optional
The module to load into IPython globals (default: caller)
local_ns : dict, optional
The namespace to load into IPython user namespace (default: caller)
kwargs : various, optional
Further keyword args are relayed to the KernelApp constructor,
allowing configuration of the Kernel. Will only have an effect
on the first embed_kernel call for a given process.
]
if call[name[IPKernelApp].initialized, parameter[]] begin[:]
variable[app] assign[=] call[name[IPKernelApp].instance, parameter[]]
<ast.Tuple object at 0x7da1b021e350> assign[=] call[name[extract_module_locals], parameter[constant[1]]]
if compare[name[module] is constant[None]] begin[:]
variable[module] assign[=] name[caller_module]
if compare[name[local_ns] is constant[None]] begin[:]
variable[local_ns] assign[=] name[caller_locals]
name[app].kernel.user_module assign[=] name[module]
name[app].kernel.user_ns assign[=] name[local_ns]
call[name[app].shell.set_completer_frame, parameter[]]
call[name[app].start, parameter[]] | keyword[def] identifier[embed_kernel] ( identifier[module] = keyword[None] , identifier[local_ns] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[IPKernelApp] . identifier[initialized] ():
identifier[app] = identifier[IPKernelApp] . identifier[instance] ()
keyword[else] :
identifier[app] = identifier[IPKernelApp] . identifier[instance] (** identifier[kwargs] )
identifier[app] . identifier[initialize] ([])
identifier[main] = identifier[app] . identifier[kernel] . identifier[shell] . identifier[_orig_sys_modules_main_mod]
keyword[if] identifier[main] keyword[is] keyword[not] keyword[None] :
identifier[sys] . identifier[modules] [ identifier[app] . identifier[kernel] . identifier[shell] . identifier[_orig_sys_modules_main_name] ]= identifier[main]
( identifier[caller_module] , identifier[caller_locals] )= identifier[extract_module_locals] ( literal[int] )
keyword[if] identifier[module] keyword[is] keyword[None] :
identifier[module] = identifier[caller_module]
keyword[if] identifier[local_ns] keyword[is] keyword[None] :
identifier[local_ns] = identifier[caller_locals]
identifier[app] . identifier[kernel] . identifier[user_module] = identifier[module]
identifier[app] . identifier[kernel] . identifier[user_ns] = identifier[local_ns]
identifier[app] . identifier[shell] . identifier[set_completer_frame] ()
identifier[app] . identifier[start] () | def embed_kernel(module=None, local_ns=None, **kwargs):
"""Embed and start an IPython kernel in a given scope.
Parameters
----------
module : ModuleType, optional
The module to load into IPython globals (default: caller)
local_ns : dict, optional
The namespace to load into IPython user namespace (default: caller)
kwargs : various, optional
Further keyword args are relayed to the KernelApp constructor,
allowing configuration of the Kernel. Will only have an effect
on the first embed_kernel call for a given process.
"""
# get the app if it exists, or set it up if it doesn't
if IPKernelApp.initialized():
app = IPKernelApp.instance() # depends on [control=['if'], data=[]]
else:
app = IPKernelApp.instance(**kwargs)
app.initialize([])
# Undo unnecessary sys module mangling from init_sys_modules.
# This would not be necessary if we could prevent it
# in the first place by using a different InteractiveShell
# subclass, as in the regular embed case.
main = app.kernel.shell._orig_sys_modules_main_mod
if main is not None:
sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main # depends on [control=['if'], data=['main']]
# load the calling scope if not given
(caller_module, caller_locals) = extract_module_locals(1)
if module is None:
module = caller_module # depends on [control=['if'], data=['module']]
if local_ns is None:
local_ns = caller_locals # depends on [control=['if'], data=['local_ns']]
app.kernel.user_module = module
app.kernel.user_ns = local_ns
app.shell.set_completer_frame()
app.start() |
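Canonical use of embed_kernel: call it inside any function to expose that frame's namespace to a frontend, then connect with jupyter console --existing from another terminal:

def debug_here():
    x = 42
    # Starts an IPython kernel whose user namespace is this frame's locals,
    # so a connected frontend can inspect x.
    embed_kernel(local_ns=locals())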
def delete(obj, key=None):
"""
Delete a single key if specified, or the whole env if key is None
:param obj: settings object
:param key: key to delete from store location
:return: None
"""
client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
holder = obj.get("ENVVAR_PREFIX_FOR_DYNACONF")
if key:
client.hdel(holder.upper(), key.upper())
obj.unset(key)
else:
keys = client.hkeys(holder.upper())
client.delete(holder.upper())
obj.unset_all(keys) | def function[delete, parameter[obj, key]]:
constant[
Delete a single key if specified, or the whole env if key is None
:param obj: settings object
:param key: key to delete from store location
:return: None
]
variable[client] assign[=] call[name[StrictRedis], parameter[]]
variable[holder] assign[=] call[name[obj].get, parameter[constant[ENVVAR_PREFIX_FOR_DYNACONF]]]
if name[key] begin[:]
call[name[client].hdel, parameter[call[name[holder].upper, parameter[]], call[name[key].upper, parameter[]]]]
call[name[obj].unset, parameter[name[key]]] | keyword[def] identifier[delete] ( identifier[obj] , identifier[key] = keyword[None] ):
literal[string]
identifier[client] = identifier[StrictRedis] (** identifier[obj] . identifier[REDIS_FOR_DYNACONF] )
identifier[holder] = identifier[obj] . identifier[get] ( literal[string] )
keyword[if] identifier[key] :
identifier[client] . identifier[hdel] ( identifier[holder] . identifier[upper] (), identifier[key] . identifier[upper] ())
identifier[obj] . identifier[unset] ( identifier[key] )
keyword[else] :
identifier[keys] = identifier[client] . identifier[hkeys] ( identifier[holder] . identifier[upper] ())
identifier[client] . identifier[delete] ( identifier[holder] . identifier[upper] ())
identifier[obj] . identifier[unset_all] ( identifier[keys] ) | def delete(obj, key=None):
"""
Delete a single key if specified, or the whole env if key is None
:param obj: settings object
:param key: key to delete from store location
:return: None
"""
client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
holder = obj.get('ENVVAR_PREFIX_FOR_DYNACONF')
if key:
client.hdel(holder.upper(), key.upper())
obj.unset(key) # depends on [control=['if'], data=[]]
else:
keys = client.hkeys(holder.upper())
client.delete(holder.upper())
obj.unset_all(keys) |
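
A short usage sketch, assuming a dynaconf-style settings object; the stand-in below exposes only the attributes delete() actually touches, and a Redis server is assumed at the configured address.

class FakeSettings:
    REDIS_FOR_DYNACONF = {'host': 'localhost', 'port': 6379}
    def get(self, key):
        return 'DYNACONF'          # the holder (hash) name
    def unset(self, key):
        print('unset', key)
    def unset_all(self, keys):
        print('unset_all', keys)

delete(FakeSettings(), key='DEBUG')  # hdel DYNACONF DEBUG, then unset it
delete(FakeSettings())               # drop the whole DYNACONF hash
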
def roughpage(request, url):
"""
Public interface to the rough page view.
"""
if settings.APPEND_SLASH and not url.endswith('/'):
# redirect to the url which has a trailing slash
return redirect(url + '/', permanent=True)
# get base filename from url
filename = url_to_filename(url)
# try to find the template_filename with backends
template_filenames = get_backend().prepare_filenames(filename,
request=request)
# add extra prefix path
root = settings.ROUGHPAGES_TEMPLATE_DIR
template_filenames = [os.path.join(root, x) for x in template_filenames]
try:
t = loader.select_template(template_filenames)
return render_roughpage(request, t)
except TemplateDoesNotExist:
if settings.ROUGHPAGES_RAISE_TEMPLATE_DOES_NOT_EXISTS:
raise
raise Http404 | def function[roughpage, parameter[request, url]]:
constant[
Public interface to the rough page view.
]
if <ast.BoolOp object at 0x7da18eb56d10> begin[:]
return[call[name[redirect], parameter[binary_operation[name[url] + constant[/]]]]]
variable[filename] assign[=] call[name[url_to_filename], parameter[name[url]]]
variable[template_filenames] assign[=] call[call[name[get_backend], parameter[]].prepare_filenames, parameter[name[filename]]]
variable[root] assign[=] name[settings].ROUGHPAGES_TEMPLATE_DIR
variable[template_filenames] assign[=] <ast.ListComp object at 0x7da18eb54460>
<ast.Try object at 0x7da18eb56920> | keyword[def] identifier[roughpage] ( identifier[request] , identifier[url] ):
literal[string]
keyword[if] identifier[settings] . identifier[APPEND_SLASH] keyword[and] keyword[not] identifier[url] . identifier[endswith] ( literal[string] ):
keyword[return] identifier[redirect] ( identifier[url] + literal[string] , identifier[permanent] = keyword[True] )
identifier[filename] = identifier[url_to_filename] ( identifier[url] )
identifier[template_filenames] = identifier[get_backend] (). identifier[prepare_filenames] ( identifier[filename] ,
identifier[request] = identifier[request] )
identifier[root] = identifier[settings] . identifier[ROUGHPAGES_TEMPLATE_DIR]
identifier[template_filenames] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[template_filenames] ]
keyword[try] :
identifier[t] = identifier[loader] . identifier[select_template] ( identifier[template_filenames] )
keyword[return] identifier[render_roughpage] ( identifier[request] , identifier[t] )
keyword[except] identifier[TemplateDoesNotExist] :
keyword[if] identifier[settings] . identifier[ROUGHPAGES_RAISE_TEMPLATE_DOES_NOT_EXISTS] :
keyword[raise]
keyword[raise] identifier[Http404] | def roughpage(request, url):
"""
Public interface to the rough page view.
"""
if settings.APPEND_SLASH and (not url.endswith('/')):
# redirect to the url which has a trailing slash
return redirect(url + '/', permanent=True) # depends on [control=['if'], data=[]]
# get base filename from url
filename = url_to_filename(url)
# try to find the template_filename with backends
template_filenames = get_backend().prepare_filenames(filename, request=request)
# add extra prefix path
root = settings.ROUGHPAGES_TEMPLATE_DIR
template_filenames = [os.path.join(root, x) for x in template_filenames]
try:
t = loader.select_template(template_filenames)
return render_roughpage(request, t) # depends on [control=['try'], data=[]]
except TemplateDoesNotExist:
if settings.ROUGHPAGES_RAISE_TEMPLATE_DOES_NOT_EXISTS:
raise # depends on [control=['if'], data=[]]
raise Http404 # depends on [control=['except'], data=[]] |
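
A wiring sketch for the view: in a Django URLconf it is typically installed as a catch-all pattern after the more specific routes (url() is the Django 1.x spelling; re_path() on 2.0+).

from django.conf.urls import url

urlpatterns = [
    # ... more specific patterns first ...
    url(r'^(?P<url>.*)$', roughpage, name='roughpage'),
]
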
def build(ctx):
"""Build documentation as HTML.
The built HTML site is located in the ``doc/_build/html`` directory
of the package.
"""
return_code = run_sphinx(ctx.obj['root_dir'])
if return_code > 0:
sys.exit(return_code) | def function[build, parameter[ctx]]:
constant[Build documentation as HTML.
The built HTML site is located in the ``doc/_build/html`` directory
of the package.
]
variable[return_code] assign[=] call[name[run_sphinx], parameter[call[name[ctx].obj][constant[root_dir]]]]
if compare[name[return_code] greater[>] constant[0]] begin[:]
call[name[sys].exit, parameter[name[return_code]]] | keyword[def] identifier[build] ( identifier[ctx] ):
literal[string]
identifier[return_code] = identifier[run_sphinx] ( identifier[ctx] . identifier[obj] [ literal[string] ])
keyword[if] identifier[return_code] > literal[int] :
identifier[sys] . identifier[exit] ( identifier[return_code] ) | def build(ctx):
"""Build documentation as HTML.
The built HTML site is located in the ``doc/_build/html`` directory
of the package.
"""
return_code = run_sphinx(ctx.obj['root_dir'])
if return_code > 0:
sys.exit(return_code) # depends on [control=['if'], data=['return_code']] |
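
build reads root_dir from the click context, so the surrounding CLI group presumably seeds ctx.obj before dispatching; a sketch of that wiring (group and option names are assumptions, not the package's actual CLI):

import click

@click.group()
@click.option('--root-dir', default='.', help='Documentation root.')
@click.pass_context
def main(ctx, root_dir):
    ctx.obj = {'root_dir': root_dir}

# build would then be registered on the group, e.g.:
# @main.command()
# @click.pass_context
# def build(ctx): ...
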
def get_plugin(self, service_provider=None, auth_url=None, plugins=None,
**kwargs):
"""Authenticate using keystone to keystone federation.
This plugin uses other v3 plugins to authenticate a user to an
identity provider in order to authenticate the user to a service
provider.
:param service_provider: service provider ID
:param auth_url: Keystone auth url
:param plugins: list of openstack_auth plugins to check
:returns: Keystone2Keystone keystone auth plugin
"""
# Avoid mutable default arg for plugins
plugins = plugins or []
# service_provider being None prevents infinite recursion
if utils.get_keystone_version() < 3 or not service_provider:
return None
keystone_idp_id = getattr(settings, 'KEYSTONE_PROVIDER_IDP_ID',
'localkeystone')
if service_provider == keystone_idp_id:
return None
for plugin in plugins:
unscoped_idp_auth = plugin.get_plugin(plugins=plugins,
auth_url=auth_url, **kwargs)
if unscoped_idp_auth:
break
else:
LOG.debug('Could not find base authentication backend for '
'K2K plugin with the provided credentials.')
return None
idp_exception = None
scoped_idp_auth = None
unscoped_auth_ref = base.BasePlugin.get_access_info(
self, unscoped_idp_auth)
try:
scoped_idp_auth, __ = self.get_project_scoped_auth(
unscoped_idp_auth, unscoped_auth_ref,
recent_project=kwargs['recent_project'])
except exceptions.KeystoneAuthException as idp_excp:
idp_exception = idp_excp
if not scoped_idp_auth or idp_exception:
msg = _('Identity provider authentication failed.')
raise exceptions.KeystoneAuthException(msg)
session = utils.get_session()
if scoped_idp_auth.get_sp_auth_url(session, service_provider) is None:
msg = _('Could not find service provider ID on keystone.')
raise exceptions.KeystoneAuthException(msg)
unscoped_auth = v3_auth.Keystone2Keystone(
base_plugin=scoped_idp_auth,
service_provider=service_provider)
return unscoped_auth | def function[get_plugin, parameter[self, service_provider, auth_url, plugins]]:
constant[Authenticate using keystone to keystone federation.
This plugin uses other v3 plugins to authenticate a user to an
identity provider in order to authenticate the user to a service
provider.
:param service_provider: service provider ID
:param auth_url: Keystone auth url
:param plugins: list of openstack_auth plugins to check
:returns: Keystone2Keystone keystone auth plugin
]
variable[plugins] assign[=] <ast.BoolOp object at 0x7da1b1950f10>
if <ast.BoolOp object at 0x7da1b1950ee0> begin[:]
return[constant[None]]
variable[keystone_idp_id] assign[=] call[name[getattr], parameter[name[settings], constant[KEYSTONE_PROVIDER_IDP_ID], constant[localkeystone]]]
if compare[name[service_provider] equal[==] name[keystone_idp_id]] begin[:]
return[constant[None]]
for taget[name[plugin]] in starred[name[plugins]] begin[:]
variable[unscoped_idp_auth] assign[=] call[name[plugin].get_plugin, parameter[]]
if name[unscoped_idp_auth] begin[:]
break
variable[idp_exception] assign[=] constant[None]
variable[scoped_idp_auth] assign[=] constant[None]
variable[unscoped_auth_ref] assign[=] call[name[base].BasePlugin.get_access_info, parameter[name[self], name[unscoped_idp_auth]]]
<ast.Try object at 0x7da1b19507c0>
if <ast.BoolOp object at 0x7da1b19f03a0> begin[:]
variable[msg] assign[=] call[name[_], parameter[constant[Identity provider authentication failed.]]]
<ast.Raise object at 0x7da1b18a3ca0>
variable[session] assign[=] call[name[utils].get_session, parameter[]]
if compare[call[name[scoped_idp_auth].get_sp_auth_url, parameter[name[session], name[service_provider]]] is constant[None]] begin[:]
variable[msg] assign[=] call[name[_], parameter[constant[Could not find service provider ID on keystone.]]]
<ast.Raise object at 0x7da1b18a04c0>
variable[unscoped_auth] assign[=] call[name[v3_auth].Keystone2Keystone, parameter[]]
return[name[unscoped_auth]] | keyword[def] identifier[get_plugin] ( identifier[self] , identifier[service_provider] = keyword[None] , identifier[auth_url] = keyword[None] , identifier[plugins] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[plugins] = identifier[plugins] keyword[or] []
keyword[if] identifier[utils] . identifier[get_keystone_version] ()< literal[int] keyword[or] keyword[not] identifier[service_provider] :
keyword[return] keyword[None]
identifier[keystone_idp_id] = identifier[getattr] ( identifier[settings] , literal[string] ,
literal[string] )
keyword[if] identifier[service_provider] == identifier[keystone_idp_id] :
keyword[return] keyword[None]
keyword[for] identifier[plugin] keyword[in] identifier[plugins] :
identifier[unscoped_idp_auth] = identifier[plugin] . identifier[get_plugin] ( identifier[plugins] = identifier[plugins] ,
identifier[auth_url] = identifier[auth_url] ,** identifier[kwargs] )
keyword[if] identifier[unscoped_idp_auth] :
keyword[break]
keyword[else] :
identifier[LOG] . identifier[debug] ( literal[string]
literal[string] )
keyword[return] keyword[None]
identifier[idp_exception] = keyword[None]
identifier[scoped_idp_auth] = keyword[None]
identifier[unscoped_auth_ref] = identifier[base] . identifier[BasePlugin] . identifier[get_access_info] (
identifier[self] , identifier[unscoped_idp_auth] )
keyword[try] :
identifier[scoped_idp_auth] , identifier[__] = identifier[self] . identifier[get_project_scoped_auth] (
identifier[unscoped_idp_auth] , identifier[unscoped_auth_ref] ,
identifier[recent_project] = identifier[kwargs] [ literal[string] ])
keyword[except] identifier[exceptions] . identifier[KeystoneAuthException] keyword[as] identifier[idp_excp] :
identifier[idp_exception] = identifier[idp_excp]
keyword[if] keyword[not] identifier[scoped_idp_auth] keyword[or] identifier[idp_exception] :
identifier[msg] = identifier[_] ( literal[string] )
keyword[raise] identifier[exceptions] . identifier[KeystoneAuthException] ( identifier[msg] )
identifier[session] = identifier[utils] . identifier[get_session] ()
keyword[if] identifier[scoped_idp_auth] . identifier[get_sp_auth_url] ( identifier[session] , identifier[service_provider] ) keyword[is] keyword[None] :
identifier[msg] = identifier[_] ( literal[string] )
keyword[raise] identifier[exceptions] . identifier[KeystoneAuthException] ( identifier[msg] )
identifier[unscoped_auth] = identifier[v3_auth] . identifier[Keystone2Keystone] (
identifier[base_plugin] = identifier[scoped_idp_auth] ,
identifier[service_provider] = identifier[service_provider] )
keyword[return] identifier[unscoped_auth] | def get_plugin(self, service_provider=None, auth_url=None, plugins=None, **kwargs):
"""Authenticate using keystone to keystone federation.
This plugin uses other v3 plugins to authenticate a user to an
identity provider in order to authenticate the user to a service
provider.
:param service_provider: service provider ID
:param auth_url: Keystone auth url
:param plugins: list of openstack_auth plugins to check
:returns: Keystone2Keystone keystone auth plugin
"""
# Avoid mutable default arg for plugins
plugins = plugins or []
# service_provider being None prevents infinite recursion
if utils.get_keystone_version() < 3 or not service_provider:
return None # depends on [control=['if'], data=[]]
keystone_idp_id = getattr(settings, 'KEYSTONE_PROVIDER_IDP_ID', 'localkeystone')
if service_provider == keystone_idp_id:
return None # depends on [control=['if'], data=[]]
for plugin in plugins:
unscoped_idp_auth = plugin.get_plugin(plugins=plugins, auth_url=auth_url, **kwargs)
if unscoped_idp_auth:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plugin']]
else:
LOG.debug('Could not find base authentication backend for K2K plugin with the provided credentials.')
return None
idp_exception = None
scoped_idp_auth = None
unscoped_auth_ref = base.BasePlugin.get_access_info(self, unscoped_idp_auth)
try:
(scoped_idp_auth, __) = self.get_project_scoped_auth(unscoped_idp_auth, unscoped_auth_ref, recent_project=kwargs['recent_project']) # depends on [control=['try'], data=[]]
except exceptions.KeystoneAuthException as idp_excp:
idp_exception = idp_excp # depends on [control=['except'], data=['idp_excp']]
if not scoped_idp_auth or idp_exception:
msg = _('Identity provider authentication failed.')
raise exceptions.KeystoneAuthException(msg) # depends on [control=['if'], data=[]]
session = utils.get_session()
if scoped_idp_auth.get_sp_auth_url(session, service_provider) is None:
msg = _('Could not find service provider ID on keystone.')
raise exceptions.KeystoneAuthException(msg) # depends on [control=['if'], data=[]]
unscoped_auth = v3_auth.Keystone2Keystone(base_plugin=scoped_idp_auth, service_provider=service_provider)
return unscoped_auth |
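
The plugin probe above leans on Python's for/else: the else branch runs only when the loop completes without break, i.e. when no plugin produced an unscoped auth object. The same idiom in isolation:

def first_successful(plugins, **kwargs):
    for plugin in plugins:
        auth = plugin.get_plugin(plugins=plugins, **kwargs)
        if auth:
            break
    else:  # no break fired: every plugin returned a falsy value
        return None
    return auth
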
def shutdown(self):
"""shutdown connection"""
if self.verbose:
print(self.socket.getsockname(), 'xx', self.peername)
try:
self.socket.shutdown(socket.SHUT_RDWR)
except IOError as err:
assert err.errno is _ENOTCONN, "unexpected IOError: %s" % err
# remote peer has already closed the connection,
# just ignore the exception
pass | def function[shutdown, parameter[self]]:
constant[shutdown connection]
if name[self].verbose begin[:]
call[name[print], parameter[call[name[self].socket.getsockname, parameter[]], constant[xx], name[self].peername]]
<ast.Try object at 0x7da1b0f3e620> | keyword[def] identifier[shutdown] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( identifier[self] . identifier[socket] . identifier[getsockname] (), literal[string] , identifier[self] . identifier[peername] )
keyword[try] :
identifier[self] . identifier[socket] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] )
keyword[except] identifier[IOError] keyword[as] identifier[err] :
keyword[assert] identifier[err] . identifier[errno] keyword[is] identifier[_ENOTCONN] , literal[string] % identifier[err]
keyword[pass] | def shutdown(self):
"""shutdown connection"""
if self.verbose:
print(self.socket.getsockname(), 'xx', self.peername) # depends on [control=['if'], data=[]]
try:
self.socket.shutdown(socket.SHUT_RDWR) # depends on [control=['try'], data=[]]
except IOError as err:
assert err.errno is _ENOTCONN, 'unexpected IOError: %s' % err
# remote peer has already closed the connection,
# just ignore the exception
pass # depends on [control=['except'], data=['err']] |
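
A standalone version of the guarded shutdown, swapping the assert for an explicit errno check (a sketch, not the original author's API; errno and socket are the standard-library modules):

import errno
import socket

def safe_shutdown(sock):
    """Shut down both directions, tolerating an already-gone peer."""
    try:
        sock.shutdown(socket.SHUT_RDWR)
    except OSError as err:  # IOError is an alias of OSError on Python 3
        if err.errno != errno.ENOTCONN:
            raise  # anything other than "not connected" is a real error
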
def list(ctx, show_hidden, oath_type, period):
"""
List all credentials.
List all credentials stored on your YubiKey.
"""
ensure_validated(ctx)
controller = ctx.obj['controller']
creds = [cred
for cred in controller.list()
if show_hidden or not cred.is_hidden
]
creds.sort()
for cred in creds:
click.echo(cred.printable_key, nl=False)
if oath_type:
click.echo(u', {}'.format(cred.oath_type.name), nl=False)
if period:
click.echo(', {}'.format(cred.period), nl=False)
click.echo() | def function[list, parameter[ctx, show_hidden, oath_type, period]]:
constant[
List all credentials.
List all credentials stored on your YubiKey.
]
call[name[ensure_validated], parameter[name[ctx]]]
variable[controller] assign[=] call[name[ctx].obj][constant[controller]]
variable[creds] assign[=] <ast.ListComp object at 0x7da20e961f60>
call[name[creds].sort, parameter[]]
for taget[name[cred]] in starred[name[creds]] begin[:]
call[name[click].echo, parameter[name[cred].printable_key]]
if name[oath_type] begin[:]
call[name[click].echo, parameter[call[constant[, {}].format, parameter[name[cred].oath_type.name]]]]
if name[period] begin[:]
call[name[click].echo, parameter[call[constant[, {}].format, parameter[name[cred].period]]]]
call[name[click].echo, parameter[]] | keyword[def] identifier[list] ( identifier[ctx] , identifier[show_hidden] , identifier[oath_type] , identifier[period] ):
literal[string]
identifier[ensure_validated] ( identifier[ctx] )
identifier[controller] = identifier[ctx] . identifier[obj] [ literal[string] ]
identifier[creds] =[ identifier[cred]
keyword[for] identifier[cred] keyword[in] identifier[controller] . identifier[list] ()
keyword[if] identifier[show_hidden] keyword[or] keyword[not] identifier[cred] . identifier[is_hidden]
]
identifier[creds] . identifier[sort] ()
keyword[for] identifier[cred] keyword[in] identifier[creds] :
identifier[click] . identifier[echo] ( identifier[cred] . identifier[printable_key] , identifier[nl] = keyword[False] )
keyword[if] identifier[oath_type] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[cred] . identifier[oath_type] . identifier[name] ), identifier[nl] = keyword[False] )
keyword[if] identifier[period] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[cred] . identifier[period] ), identifier[nl] = keyword[False] )
identifier[click] . identifier[echo] () | def list(ctx, show_hidden, oath_type, period):
"""
List all credentials.
List all credentials stored on your YubiKey.
"""
ensure_validated(ctx)
controller = ctx.obj['controller']
creds = [cred for cred in controller.list() if show_hidden or not cred.is_hidden]
creds.sort()
for cred in creds:
click.echo(cred.printable_key, nl=False)
if oath_type:
click.echo(u', {}'.format(cred.oath_type.name), nl=False) # depends on [control=['if'], data=[]]
if period:
click.echo(', {}'.format(cred.period), nl=False) # depends on [control=['if'], data=[]]
click.echo() # depends on [control=['for'], data=['cred']] |
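
Since list is a click command it can be exercised with click's test runner; the flag spellings and the context object below are assumptions about how the surrounding CLI wires things up.

from click.testing import CliRunner

runner = CliRunner()
# obj is forwarded into ctx.obj; fake_controller must at least offer .list().
result = runner.invoke(list, ['--oath-type', '--period'],
                       obj={'controller': fake_controller})
print(result.output)
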
def _onsuccess(self, result):
""" To execute on execution success
:param kser.result.Result result: Execution result
:return: Execution result
:rtype: kser.result.Result
"""
if KSER_METRICS_ENABLED == "yes":
KSER_TASKS_STATUS.labels(
__hostname__, self.__class__.path, 'SUCCESS'
).inc()
if result:
result = self.result + result
else:
result = self.result
logger.info(
"{}.Success: {}[{}]: {}".format(
self.__class__.__name__, self.__class__.path, self.uuid, result
),
extra=dict(
kmsg=Message(
self.uuid, entrypoint=self.__class__.path,
params=self.params, metadata=self.metadata
).dump(),
kresult=ResultSchema().dump(result) if result else dict()
)
)
return self.onsuccess(result) | def function[_onsuccess, parameter[self, result]]:
constant[ To execute on execution success
:param kser.result.Result result: Execution result
:return: Execution result
:rtype: kser.result.Result
]
if compare[name[KSER_METRICS_ENABLED] equal[==] constant[yes]] begin[:]
call[call[name[KSER_TASKS_STATUS].labels, parameter[name[__hostname__], name[self].__class__.path, constant[SUCCESS]]].inc, parameter[]]
if name[result] begin[:]
variable[result] assign[=] binary_operation[name[self].result + name[result]]
call[name[logger].info, parameter[call[constant[{}.Success: {}[{}]: {}].format, parameter[name[self].__class__.__name__, name[self].__class__.path, name[self].uuid, name[result]]]]]
return[call[name[self].onsuccess, parameter[name[result]]]] | keyword[def] identifier[_onsuccess] ( identifier[self] , identifier[result] ):
literal[string]
keyword[if] identifier[KSER_METRICS_ENABLED] == literal[string] :
identifier[KSER_TASKS_STATUS] . identifier[labels] (
identifier[__hostname__] , identifier[self] . identifier[__class__] . identifier[path] , literal[string]
). identifier[inc] ()
keyword[if] identifier[result] :
identifier[result] = identifier[self] . identifier[result] + identifier[result]
keyword[else] :
identifier[result] = identifier[self] . identifier[result]
identifier[logger] . identifier[info] (
literal[string] . identifier[format] (
identifier[self] . identifier[__class__] . identifier[__name__] , identifier[self] . identifier[__class__] . identifier[path] , identifier[self] . identifier[uuid] , identifier[result]
),
identifier[extra] = identifier[dict] (
identifier[kmsg] = identifier[Message] (
identifier[self] . identifier[uuid] , identifier[entrypoint] = identifier[self] . identifier[__class__] . identifier[path] ,
identifier[params] = identifier[self] . identifier[params] , identifier[metadata] = identifier[self] . identifier[metadata]
). identifier[dump] (),
identifier[kresult] = identifier[ResultSchema] (). identifier[dump] ( identifier[result] ) keyword[if] identifier[result] keyword[else] identifier[dict] ()
)
)
keyword[return] identifier[self] . identifier[onsuccess] ( identifier[result] ) | def _onsuccess(self, result):
""" To execute on execution success
:param kser.result.Result result: Execution result
:return: Execution result
:rtype: kser.result.Result
"""
if KSER_METRICS_ENABLED == 'yes':
KSER_TASKS_STATUS.labels(__hostname__, self.__class__.path, 'SUCCESS').inc() # depends on [control=['if'], data=[]]
if result:
result = self.result + result # depends on [control=['if'], data=[]]
else:
result = self.result
logger.info('{}.Success: {}[{}]: {}'.format(self.__class__.__name__, self.__class__.path, self.uuid, result), extra=dict(kmsg=Message(self.uuid, entrypoint=self.__class__.path, params=self.params, metadata=self.metadata).dump(), kresult=ResultSchema().dump(result) if result else dict()))
return self.onsuccess(result) |
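
The metrics guard at the top maps onto a prometheus_client Counter with matching label names; a sketch of that counter (the metric name and help text are guesses at the real definitions):

from prometheus_client import Counter

KSER_TASKS_STATUS = Counter(
    'kser_tasks_status', 'Number of tasks by status',
    ['hostname', 'path', 'status'])

KSER_TASKS_STATUS.labels('worker-1', 'demo.Task', 'SUCCESS').inc()
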
def create_new_csv(samples, args):
"""create csv file that can be use with bcbio -w template"""
out_fn = os.path.splitext(args.csv)[0] + "-merged.csv"
logger.info("Preparing new csv: %s" % out_fn)
with file_transaction(out_fn) as tx_out:
with open(tx_out, 'w') as handle:
handle.write(_header(args.csv))
for s in samples:
sample_name = s['name'] if isinstance(s['out_file'], list) else os.path.basename(s['out_file'])
handle.write("%s,%s,%s\n" % (sample_name, s['name'], ",".join(s['anno']))) | def function[create_new_csv, parameter[samples, args]]:
constant[create csv file that can be used with bcbio -w template]
variable[out_fn] assign[=] binary_operation[call[call[name[os].path.splitext, parameter[name[args].csv]]][constant[0]] + constant[-merged.csv]]
call[name[logger].info, parameter[binary_operation[constant[Preparing new csv: %s] <ast.Mod object at 0x7da2590d6920> name[out_fn]]]]
with call[name[file_transaction], parameter[name[out_fn]]] begin[:]
with call[name[open], parameter[name[tx_out], constant[w]]] begin[:]
call[name[handle].write, parameter[call[name[_header], parameter[name[args].csv]]]]
for taget[name[s]] in starred[name[samples]] begin[:]
variable[sample_name] assign[=] <ast.IfExp object at 0x7da1b2344880>
call[name[handle].write, parameter[binary_operation[constant[%s,%s,%s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2346f20>, <ast.Subscript object at 0x7da1b2344eb0>, <ast.Call object at 0x7da1b2347b20>]]]]] | keyword[def] identifier[create_new_csv] ( identifier[samples] , identifier[args] ):
literal[string]
identifier[out_fn] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[args] . identifier[csv] )[ literal[int] ]+ literal[string]
identifier[logger] . identifier[info] ( literal[string] % identifier[out_fn] )
keyword[with] identifier[file_transaction] ( identifier[out_fn] ) keyword[as] identifier[tx_out] :
keyword[with] identifier[open] ( identifier[tx_out] , literal[string] ) keyword[as] identifier[handle] :
identifier[handle] . identifier[write] ( identifier[_header] ( identifier[args] . identifier[csv] ))
keyword[for] identifier[s] keyword[in] identifier[samples] :
identifier[sample_name] = identifier[s] [ literal[string] ] keyword[if] identifier[isinstance] ( identifier[s] [ literal[string] ], identifier[list] ) keyword[else] identifier[os] . identifier[path] . identifier[basename] ( identifier[s] [ literal[string] ])
identifier[handle] . identifier[write] ( literal[string] %( identifier[sample_name] , identifier[s] [ literal[string] ], literal[string] . identifier[join] ( identifier[s] [ literal[string] ]))) | def create_new_csv(samples, args):
"""create csv file that can be use with bcbio -w template"""
out_fn = os.path.splitext(args.csv)[0] + '-merged.csv'
logger.info('Preparing new csv: %s' % out_fn)
with file_transaction(out_fn) as tx_out:
with open(tx_out, 'w') as handle:
handle.write(_header(args.csv))
for s in samples:
sample_name = s['name'] if isinstance(s['out_file'], list) else os.path.basename(s['out_file'])
handle.write('%s,%s,%s\n' % (sample_name, s['name'], ','.join(s['anno']))) # depends on [control=['for'], data=['s']] # depends on [control=['with'], data=['handle']] # depends on [control=['with'], data=['tx_out']] |
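
The expected shape of samples, inferred from the attribute accesses above: out_file may be a single path or a list (in which case name is reused), and anno supplies the trailing csv columns. The paths are illustrative only.

samples = [
    {'name': 'S1', 'out_file': '/data/S1.fq.gz', 'anno': ['tumor', 'batch1']},
    {'name': 'S2', 'out_file': ['/data/S2a.fq', '/data/S2b.fq'],
     'anno': ['normal', 'batch1']},
]
# create_new_csv(samples, args) then writes <csv-stem>-merged.csv
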
def hide(self):
"""
Call `hide_all()` on all known top widgets.
"""
top = self.get_top_widget()
if isinstance(top, (list, tuple)):
for t in top:
if t is not None:
t.hide_all()
elif top is not None:
top.hide_all() | def function[hide, parameter[self]]:
constant[
Call `hide_all()` on all known top widgets.
]
variable[top] assign[=] call[name[self].get_top_widget, parameter[]]
if call[name[isinstance], parameter[name[top], tuple[[<ast.Name object at 0x7da1b14383d0>, <ast.Name object at 0x7da1b1438940>]]]] begin[:]
for taget[name[t]] in starred[name[top]] begin[:]
if compare[name[t] is_not constant[None]] begin[:]
call[name[t].hide_all, parameter[]] | keyword[def] identifier[hide] ( identifier[self] ):
literal[string]
identifier[top] = identifier[self] . identifier[get_top_widget] ()
keyword[if] identifier[isinstance] ( identifier[top] ,( identifier[list] , identifier[tuple] )):
keyword[for] identifier[t] keyword[in] identifier[top] :
keyword[if] identifier[t] keyword[is] keyword[not] keyword[None] :
identifier[t] . identifier[hide_all] ()
keyword[elif] identifier[top] keyword[is] keyword[not] keyword[None] :
identifier[top] . identifier[hide_all] () | def hide(self):
"""
Call `hide_all()` on all known top widgets.
"""
top = self.get_top_widget()
if isinstance(top, (list, tuple)):
for t in top:
if t is not None:
t.hide_all() # depends on [control=['if'], data=['t']] # depends on [control=['for'], data=['t']] # depends on [control=['if'], data=[]]
elif top is not None:
top.hide_all() # depends on [control=['if'], data=['top']] |
def import_model(self, source):
"""Import and return model instance."""
model = super(NonstrictImporter, self).import_model(source)
sbml.convert_sbml_model(model)
return model | def function[import_model, parameter[self, source]]:
constant[Import and return model instance.]
variable[model] assign[=] call[call[name[super], parameter[name[NonstrictImporter], name[self]]].import_model, parameter[name[source]]]
call[name[sbml].convert_sbml_model, parameter[name[model]]]
return[name[model]] | keyword[def] identifier[import_model] ( identifier[self] , identifier[source] ):
literal[string]
identifier[model] = identifier[super] ( identifier[NonstrictImporter] , identifier[self] ). identifier[import_model] ( identifier[source] )
identifier[sbml] . identifier[convert_sbml_model] ( identifier[model] )
keyword[return] identifier[model] | def import_model(self, source):
"""Import and return model instance."""
model = super(NonstrictImporter, self).import_model(source)
sbml.convert_sbml_model(model)
return model |
def template(self, resources):
""" Get the template from: YAML, hierarchy, or class """
template_name = self.acquire(resources, 'template')
if template_name:
return template_name
else:
# We're making an exception for "resource", the built-in
# rtype/directive. We want it to work out-of-the-box without
# requiring an _templates/resource.html in the docs project.
# Instead, use the page.html that ships with Sphinx.
if self.rtype == 'resource':
return 'page'
else:
return self.rtype | def function[template, parameter[self, resources]]:
constant[ Get the template from: YAML, hierarchy, or class ]
variable[template_name] assign[=] call[name[self].acquire, parameter[name[resources], constant[template]]]
if name[template_name] begin[:]
return[name[template_name]] | keyword[def] identifier[template] ( identifier[self] , identifier[resources] ):
literal[string]
identifier[template_name] = identifier[self] . identifier[acquire] ( identifier[resources] , literal[string] )
keyword[if] identifier[template_name] :
keyword[return] identifier[template_name]
keyword[else] :
keyword[if] identifier[self] . identifier[rtype] == literal[string] :
keyword[return] literal[string]
keyword[else] :
keyword[return] identifier[self] . identifier[rtype] | def template(self, resources):
""" Get the template from: YAML, hierarchy, or class """
template_name = self.acquire(resources, 'template')
if template_name:
return template_name # depends on [control=['if'], data=[]]
# We're making an exception for "resource", the built-in
# rtype/directive. We want it to work out-of-the-box without
# requiring an _templates/resource.html in the docs project.
# Instead, use the page.html that ships with Sphinx.
elif self.rtype == 'resource':
return 'page' # depends on [control=['if'], data=[]]
else:
return self.rtype |
def label_from_re(self, pat:str, full_path:bool=False, label_cls:Callable=None, **kwargs)->'LabelList':
"Apply the re in `pat` to determine the label of every filename. If `full_path`, search in the full name."
pat = re.compile(pat)
def _inner(o):
s = str((os.path.join(self.path,o) if full_path else o).as_posix())
res = pat.search(s)
assert res,f'Failed to find "{pat}" in "{s}"'
return res.group(1)
return self.label_from_func(_inner, label_cls=label_cls, **kwargs) | def function[label_from_re, parameter[self, pat, full_path, label_cls]]:
constant[Apply the re in `pat` to determine the label of every filename. If `full_path`, search in the full name.]
variable[pat] assign[=] call[name[re].compile, parameter[name[pat]]]
def function[_inner, parameter[o]]:
variable[s] assign[=] call[name[str], parameter[call[<ast.IfExp object at 0x7da1b1eb48b0>.as_posix, parameter[]]]]
variable[res] assign[=] call[name[pat].search, parameter[name[s]]]
assert[name[res]]
return[call[name[res].group, parameter[constant[1]]]]
return[call[name[self].label_from_func, parameter[name[_inner]]]] | keyword[def] identifier[label_from_re] ( identifier[self] , identifier[pat] : identifier[str] , identifier[full_path] : identifier[bool] = keyword[False] , identifier[label_cls] : identifier[Callable] = keyword[None] ,** identifier[kwargs] )-> literal[string] :
literal[string]
identifier[pat] = identifier[re] . identifier[compile] ( identifier[pat] )
keyword[def] identifier[_inner] ( identifier[o] ):
identifier[s] = identifier[str] (( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[o] ) keyword[if] identifier[full_path] keyword[else] identifier[o] ). identifier[as_posix] ())
identifier[res] = identifier[pat] . identifier[search] ( identifier[s] )
keyword[assert] identifier[res] , literal[string]
keyword[return] identifier[res] . identifier[group] ( literal[int] )
keyword[return] identifier[self] . identifier[label_from_func] ( identifier[_inner] , identifier[label_cls] = identifier[label_cls] ,** identifier[kwargs] ) | def label_from_re(self, pat: str, full_path: bool=False, label_cls: Callable=None, **kwargs) -> 'LabelList':
"""Apply the re in `pat` to determine the label of every filename. If `full_path`, search in the full name."""
pat = re.compile(pat)
def _inner(o):
s = str((os.path.join(self.path, o) if full_path else o).as_posix())
res = pat.search(s)
assert res, f'Failed to find "{pat}" in "{s}"'
return res.group(1)
return self.label_from_func(_inner, label_cls=label_cls, **kwargs) |
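
Typical usage in the fastai data-block style, assuming image files named like train/cat_001.jpg where the label precedes the trailing index (item_list stands in for an ItemList built over those files):

pat = r'/([^/]+)_\d+\.jpg$'
labelled = item_list.label_from_re(pat)
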
def filter(self, relation_id=None, duedate__lt=None, duedate__gte=None,
**kwargs):
"""
A common query would be duedate__lt=date(2015, 1, 1) to get all
Receivables that are due in 2014 and earlier.
"""
if relation_id is not None:
# Filter by (relation) account_id. There doesn't seem to be
# any reason to prefer
# 'read/financial/ReceivablesListByAccount?accountId=X' over
# this.
relation_id = self._remote_guid(relation_id)
self._filter_append(kwargs, u'AccountId eq %s' % (relation_id,))
if duedate__lt is not None:
# Not sure what the AgeGroup means in
# ReceivablesListByAgeGroup, but we can certainly do
# without.
duedate__lt = self._remote_datetime(duedate__lt)
self._filter_append(kwargs, u'DueDate lt %s' % (duedate__lt,))
if duedate__gte is not None:
# Not sure what the AgeGroup means in
# ReceivablesListByAgeGroup, but we can certainly do
# without.
duedate__gte = self._remote_datetime(duedate__gte)
self._filter_append(kwargs, u'DueDate ge %s' % (duedate__gte,))
return super(Receivables, self).filter(**kwargs) | def function[filter, parameter[self, relation_id, duedate__lt, duedate__gte]]:
constant[
A common query would be duedate__lt=date(2015, 1, 1) to get all
Receivables that are due in 2014 and earlier.
]
if compare[name[relation_id] is_not constant[None]] begin[:]
variable[relation_id] assign[=] call[name[self]._remote_guid, parameter[name[relation_id]]]
call[name[self]._filter_append, parameter[name[kwargs], binary_operation[constant[AccountId eq %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05bf1c0>]]]]]
if compare[name[duedate__lt] is_not constant[None]] begin[:]
variable[duedate__lt] assign[=] call[name[self]._remote_datetime, parameter[name[duedate__lt]]]
call[name[self]._filter_append, parameter[name[kwargs], binary_operation[constant[DueDate lt %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b05bd090>]]]]]
if compare[name[duedate__gte] is_not constant[None]] begin[:]
variable[duedate__gte] assign[=] call[name[self]._remote_datetime, parameter[name[duedate__gte]]]
call[name[self]._filter_append, parameter[name[kwargs], binary_operation[constant[DueDate ge %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b03950c0>]]]]]
return[call[call[name[super], parameter[name[Receivables], name[self]]].filter, parameter[]]] | keyword[def] identifier[filter] ( identifier[self] , identifier[relation_id] = keyword[None] , identifier[duedate__lt] = keyword[None] , identifier[duedate__gte] = keyword[None] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[relation_id] keyword[is] keyword[not] keyword[None] :
identifier[relation_id] = identifier[self] . identifier[_remote_guid] ( identifier[relation_id] )
identifier[self] . identifier[_filter_append] ( identifier[kwargs] , literal[string] %( identifier[relation_id] ,))
keyword[if] identifier[duedate__lt] keyword[is] keyword[not] keyword[None] :
identifier[duedate__lt] = identifier[self] . identifier[_remote_datetime] ( identifier[duedate__lt] )
identifier[self] . identifier[_filter_append] ( identifier[kwargs] , literal[string] %( identifier[duedate__lt] ,))
keyword[if] identifier[duedate__gte] keyword[is] keyword[not] keyword[None] :
identifier[duedate__gte] = identifier[self] . identifier[_remote_datetime] ( identifier[duedate__gte] )
identifier[self] . identifier[_filter_append] ( identifier[kwargs] , literal[string] %( identifier[duedate__gte] ,))
keyword[return] identifier[super] ( identifier[Receivables] , identifier[self] ). identifier[filter] (** identifier[kwargs] ) | def filter(self, relation_id=None, duedate__lt=None, duedate__gte=None, **kwargs):
"""
A common query would be duedate__lt=date(2015, 1, 1) to get all
Receivables that are due in 2014 and earlier.
"""
if relation_id is not None:
# Filter by (relation) account_id. There doesn't seem to be
# any reason to prefer
# 'read/financial/ReceivablesListByAccount?accountId=X' over
# this.
relation_id = self._remote_guid(relation_id)
self._filter_append(kwargs, u'AccountId eq %s' % (relation_id,)) # depends on [control=['if'], data=['relation_id']]
if duedate__lt is not None:
# Not sure what the AgeGroup means in
# ReceivablesListByAgeGroup, but we can certainly do
# without.
duedate__lt = self._remote_datetime(duedate__lt)
self._filter_append(kwargs, u'DueDate lt %s' % (duedate__lt,)) # depends on [control=['if'], data=['duedate__lt']]
if duedate__gte is not None:
# Not sure what the AgeGroup means in
# ReceivablesListByAgeGroup, but we can certainly do
# without.
duedate__gte = self._remote_datetime(duedate__gte)
self._filter_append(kwargs, u'DueDate ge %s' % (duedate__gte,)) # depends on [control=['if'], data=['duedate__gte']]
return super(Receivables, self).filter(**kwargs) |
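
The docstring's example query, spelled out; receivables stands in for an instance of this class and some_guid for a relation's GUID.

from datetime import date

overdue = receivables.filter(duedate__lt=date(2015, 1, 1))
# combined with a relation filter and a lower bound:
theirs = receivables.filter(relation_id=some_guid,
                            duedate__gte=date(2014, 1, 1))
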
def lineEdit(self):
"""
Returns the line editor associated with this combobox. The editor is
accessed through its stored reference, because the internal Qt process
can delete the underlying C/C++ object, in which case direct access
would raise a RuntimeError.
:return <XLineEdit> || None
"""
try:
edit = self._lineEdit()
except TypeError:
edit = None
if edit is None:
self._edit = None
return edit | def function[lineEdit, parameter[self]]:
constant[
Returns the line editor associated with this combobox. The editor is
accessed through its stored reference, because the internal Qt process
can delete the underlying C/C++ object, in which case direct access
would raise a RuntimeError.
:return <XLineEdit> || None
]
<ast.Try object at 0x7da2043444f0>
if compare[name[edit] is constant[None]] begin[:]
name[self]._edit assign[=] constant[None]
return[name[edit]] | keyword[def] identifier[lineEdit] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[edit] = identifier[self] . identifier[_lineEdit] ()
keyword[except] identifier[TypeError] :
identifier[edit] = keyword[None]
keyword[if] identifier[edit] keyword[is] keyword[None] :
identifier[self] . identifier[_edit] = keyword[None]
keyword[return] identifier[edit] | def lineEdit(self):
"""
Returns the line editor associated with this combobox. The editor is
accessed through its stored reference, because the internal Qt process
can delete the underlying C/C++ object, in which case direct access
would raise a RuntimeError.
:return <XLineEdit> || None
"""
try:
edit = self._lineEdit() # depends on [control=['try'], data=[]]
except TypeError:
edit = None # depends on [control=['except'], data=[]]
if edit is None:
self._edit = None # depends on [control=['if'], data=[]]
return edit |
def ftpparse(line):
"""Parse an FTP list line into a dictionary with attributes:
name - name of file (string)
trycwd - False if cwd is definitely pointless, True otherwise
tryretr - False if retr is definitely pointless, True otherwise
If the line has no file information, None is returned
"""
if len(line) < 2:
# an empty name in EPLF, with no info, could be 2 chars
return None
info = dict(name=None, trycwd=False, tryretr=False)
# EPLF format
# http://pobox.com/~djb/proto/eplf.html
# "+i8388621.29609,m824255902,/,\tdev"
# "+i8388621.44468,m839956783,r,s10376,\tRFCEPLF"
if line[0] == '+':
if '\t' in line:
flags, name = line.split('\t', 1)
info['name'] = name
flags = flags.split(',')
info['trycwd'] = '/' in flags
info['tryretr'] = 'r' in flags
return info
# UNIX-style listing, without inum and without blocks
# "-rw-r--r-- 1 root other 531 Jan 29 03:26 README"
# "dr-xr-xr-x 2 root other 512 Apr 8 1994 etc"
# "dr-xr-xr-x 2 root 512 Apr 8 1994 etc"
# "lrwxrwxrwx 1 root other 7 Jan 25 00:17 bin -> usr/bin"
# Also produced by Microsoft's FTP servers for Windows:
# "---------- 1 owner group 1803128 Jul 10 10:18 ls-lR.Z"
# "d--------- 1 owner group 0 May 9 19:45 Softlib"
# Also WFTPD for MSDOS:
# "-rwxrwxrwx 1 noone nogroup 322 Aug 19 1996 message.ftp"
# Also NetWare:
# "d [R----F--] supervisor 512 Jan 16 18:53 login"
# "- [R----F--] rhesus 214059 Oct 20 15:27 cx.exe"
# Also NetPresenz for the Mac:
# "-------r-- 326 1391972 1392298 Nov 22 1995 MegaPhone.sit"
# "drwxrwxr-x folder 2 May 10 1996 network"
if line[0] in 'bcdlps-':
if line[0] == 'd':
info['trycwd'] = True
if line[0] == '-':
info['tryretr'] = True
if line[0] == 'l':
info['trycwd'] = info['tryretr'] = True
parts = line.split()
if len(parts) < 7:
return None
del parts[0] # skip permissions
if parts[0] != 'folder':
del parts[0] # skip nlink
del parts[0] # skip uid
del parts[0] # skip gid or size
if not ismonth(parts[0]):
del parts[0] # skip size
if not ismonth(parts[0]):
return None
del parts[0] # skip month
del parts[0] # skip day
if not parts:
return None
del parts[0] # skip year or time
name = " ".join(parts)
# resolve links
if line[0] == 'l' and ' -> ' in name:
name = name.split(' -> ', 1)[1]
# eliminate extra NetWare spaces
if line[1] in ' [' and name.startswith(' '):
name = name[3:]
info["name"] = name
return info
# MultiNet (some spaces removed from examples)
# "00README.TXT;1 2 30-DEC-1996 17:44 [SYSTEM] (RWED,RWED,RE,RE)"
# "CORE.DIR;1 1 8-SEP-1996 16:09 [SYSTEM] (RWE,RWE,RE,RE)"
# and non-MultiNet VMS:
# "CII-MANUAL.TEX;1 213/216 29-JAN-1996 03:33:12 [ANONYMOU,ANONYMOUS] (RWED,RWED,,)"
i = line.find(';')
if i != -1:
name = line[:i]
if name.endswith(".DIR"):
name = name[:-4]
info["trycwd"] = True
else:
info["tryretr"] = True
info["name"] = name
return info
# MSDOS format
# 04-27-00 09:09PM <DIR> licensed
# 07-18-00 10:16AM <DIR> pub
# 04-14-00 03:47PM 589 readme.htm
if line[0].isdigit():
parts = line.split()
if len(parts) != 4:
return None
info['name'] = parts[3]
if parts[2][0] == '<':
info['trycwd'] = True
else:
info['tryretr'] = True
return info
# Some useless lines, safely ignored:
# "Total of 11 Files, 10966 Blocks." (VMS)
# "total 14786" (UNIX)
# "DISK$ANONFTP:[ANONYMOUS]" (VMS)
# "Directory DISK$PCSA:[ANONYM]" (VMS)
return None | def function[ftpparse, parameter[line]]:
constant[Parse an FTP list line into a dictionary with attributes:
name - name of file (string)
trycwd - False if cwd is definitely pointless, True otherwise
tryretr - False if retr is definitely pointless, True otherwise
If the line has no file information, None is returned
]
if compare[call[name[len], parameter[name[line]]] less[<] constant[2]] begin[:]
return[constant[None]]
variable[info] assign[=] call[name[dict], parameter[]]
if compare[call[name[line]][constant[0]] equal[==] constant[+]] begin[:]
if compare[constant[ ] in name[line]] begin[:]
<ast.Tuple object at 0x7da20e955840> assign[=] call[name[line].split, parameter[constant[ ], constant[1]]]
call[name[info]][constant[name]] assign[=] name[name]
variable[flags] assign[=] call[name[flags].split, parameter[constant[,]]]
call[name[info]][constant[trycwd]] assign[=] compare[constant[/] in name[flags]]
call[name[info]][constant[tryretr]] assign[=] compare[constant[r] in name[flags]]
return[name[info]]
if compare[call[name[line]][constant[0]] in constant[bcdlps-]] begin[:]
if compare[call[name[line]][constant[0]] equal[==] constant[d]] begin[:]
call[name[info]][constant[trycwd]] assign[=] constant[True]
if compare[call[name[line]][constant[0]] equal[==] constant[-]] begin[:]
call[name[info]][constant[tryretr]] assign[=] constant[True]
if compare[call[name[line]][constant[0]] equal[==] constant[l]] begin[:]
call[name[info]][constant[trycwd]] assign[=] constant[True]
variable[parts] assign[=] call[name[line].split, parameter[]]
if compare[call[name[len], parameter[name[parts]]] less[<] constant[7]] begin[:]
return[constant[None]]
<ast.Delete object at 0x7da1b0ab9240>
if compare[call[name[parts]][constant[0]] not_equal[!=] constant[folder]] begin[:]
<ast.Delete object at 0x7da1b0ab9450>
<ast.Delete object at 0x7da1b0ab8790>
<ast.Delete object at 0x7da1b0ab9540>
if <ast.UnaryOp object at 0x7da1b0ab8730> begin[:]
<ast.Delete object at 0x7da1b0aba410>
if <ast.UnaryOp object at 0x7da1b0ab9f60> begin[:]
return[constant[None]]
<ast.Delete object at 0x7da1b0abb310>
<ast.Delete object at 0x7da1b0ab8400>
if <ast.UnaryOp object at 0x7da1b0aba3e0> begin[:]
return[constant[None]]
<ast.Delete object at 0x7da1b0abaa10>
variable[name] assign[=] call[constant[ ].join, parameter[name[parts]]]
if <ast.BoolOp object at 0x7da1b0ab8370> begin[:]
variable[name] assign[=] call[call[name[name].split, parameter[constant[ -> ], constant[1]]]][constant[1]]
if <ast.BoolOp object at 0x7da20e954160> begin[:]
variable[name] assign[=] call[name[name]][<ast.Slice object at 0x7da20e954820>]
call[name[info]][constant[name]] assign[=] name[name]
return[name[info]]
variable[i] assign[=] call[name[line].find, parameter[constant[;]]]
if compare[name[i] not_equal[!=] <ast.UnaryOp object at 0x7da18eb56290>] begin[:]
variable[name] assign[=] call[name[line]][<ast.Slice object at 0x7da18eb57f70>]
if call[name[name].endswith, parameter[constant[.DIR]]] begin[:]
variable[name] assign[=] call[name[name]][<ast.Slice object at 0x7da18eb56ce0>]
call[name[info]][constant[trycwd]] assign[=] constant[True]
call[name[info]][constant[name]] assign[=] name[name]
return[name[info]]
if call[call[name[line]][constant[0]].isdigit, parameter[]] begin[:]
variable[parts] assign[=] call[name[line].split, parameter[]]
if compare[call[name[len], parameter[name[parts]]] not_equal[!=] constant[4]] begin[:]
return[constant[None]]
call[name[info]][constant[name]] assign[=] call[name[parts]][constant[3]]
if compare[call[call[name[parts]][constant[2]]][constant[0]] equal[==] constant[<]] begin[:]
call[name[info]][constant[trycwd]] assign[=] constant[True]
return[name[info]]
return[constant[None]] | keyword[def] identifier[ftpparse] ( identifier[line] ):
literal[string]
keyword[if] identifier[len] ( identifier[line] )< literal[int] :
keyword[return] keyword[None]
identifier[info] = identifier[dict] ( identifier[name] = keyword[None] , identifier[trycwd] = keyword[False] , identifier[tryretr] = keyword[False] )
keyword[if] identifier[line] [ literal[int] ]== literal[string] :
keyword[if] literal[string] keyword[in] identifier[line] :
identifier[flags] , identifier[name] = identifier[line] . identifier[split] ( literal[string] , literal[int] )
identifier[info] [ literal[string] ]= identifier[name]
identifier[flags] = identifier[flags] . identifier[split] ( literal[string] )
identifier[info] [ literal[string] ]= literal[string] keyword[in] identifier[flags]
identifier[info] [ literal[string] ]= literal[string] keyword[in] identifier[flags]
keyword[return] identifier[info]
keyword[if] identifier[line] [ literal[int] ] keyword[in] literal[string] :
keyword[if] identifier[line] [ literal[int] ]== literal[string] :
identifier[info] [ literal[string] ]= keyword[True]
keyword[if] identifier[line] [ literal[int] ]== literal[string] :
identifier[info] [ literal[string] ]= keyword[True]
keyword[if] identifier[line] [ literal[int] ]== literal[string] :
identifier[info] [ literal[string] ]= identifier[info] [ literal[string] ]= keyword[True]
identifier[parts] = identifier[line] . identifier[split] ()
keyword[if] identifier[len] ( identifier[parts] )< literal[int] :
keyword[return] keyword[None]
keyword[del] identifier[parts] [ literal[int] ]
keyword[if] identifier[parts] [ literal[int] ]!= literal[string] :
keyword[del] identifier[parts] [ literal[int] ]
keyword[del] identifier[parts] [ literal[int] ]
keyword[del] identifier[parts] [ literal[int] ]
keyword[if] keyword[not] identifier[ismonth] ( identifier[parts] [ literal[int] ]):
keyword[del] identifier[parts] [ literal[int] ]
keyword[if] keyword[not] identifier[ismonth] ( identifier[parts] [ literal[int] ]):
keyword[return] keyword[None]
keyword[del] identifier[parts] [ literal[int] ]
keyword[del] identifier[parts] [ literal[int] ]
keyword[if] keyword[not] identifier[parts] :
keyword[return] keyword[None]
keyword[del] identifier[parts] [ literal[int] ]
identifier[name] = literal[string] . identifier[join] ( identifier[parts] )
keyword[if] identifier[line] [ literal[int] ]== literal[string] keyword[and] literal[string] keyword[in] identifier[name] :
identifier[name] = identifier[name] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
keyword[if] identifier[line] [ literal[int] ] keyword[in] literal[string] keyword[and] identifier[name] . identifier[startswith] ( literal[string] ):
identifier[name] = identifier[name] [ literal[int] :]
identifier[info] [ literal[string] ]= identifier[name]
keyword[return] identifier[info]
identifier[i] = identifier[line] . identifier[find] ( literal[string] )
keyword[if] identifier[i] !=- literal[int] :
identifier[name] = identifier[line] [: identifier[i] ]
keyword[if] identifier[name] . identifier[endswith] ( literal[string] ):
identifier[name] = identifier[name] [:- literal[int] ]
identifier[info] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[info] [ literal[string] ]= keyword[True]
identifier[info] [ literal[string] ]= identifier[name]
keyword[return] identifier[info]
keyword[if] identifier[line] [ literal[int] ]. identifier[isdigit] ():
identifier[parts] = identifier[line] . identifier[split] ()
keyword[if] identifier[len] ( identifier[parts] )!= literal[int] :
keyword[return] keyword[None]
identifier[info] [ literal[string] ]= identifier[parts] [ literal[int] ]
keyword[if] identifier[parts] [ literal[int] ][ literal[int] ]== literal[string] :
identifier[info] [ literal[string] ]= keyword[True]
keyword[else] :
identifier[info] [ literal[string] ]= keyword[True]
keyword[return] identifier[info]
keyword[return] keyword[None] | def ftpparse(line):
"""Parse a FTP list line into a dictionary with attributes:
name - name of file (string)
trycwd - False if cwd is definitely pointless, True otherwise
tryretr - False if retr is definitely pointless, True otherwise
If the line has no file information, None is returned
"""
if len(line) < 2:
# an empty name in EPLF, with no info, could be 2 chars
return None # depends on [control=['if'], data=[]]
info = dict(name=None, trycwd=False, tryretr=False)
# EPLF format
# http://pobox.com/~djb/proto/eplf.html
# "+i8388621.29609,m824255902,/,\tdev"
# "+i8388621.44468,m839956783,r,s10376,\tRFCEPLF"
if line[0] == '+':
if '\t' in line:
(flags, name) = line.split('\t', 1)
info['name'] = name
flags = flags.split(',')
info['trycwd'] = '/' in flags
info['tryretr'] = 'r' in flags # depends on [control=['if'], data=['line']]
return info # depends on [control=['if'], data=[]]
# UNIX-style listing, without inum and without blocks
# "-rw-r--r-- 1 root other 531 Jan 29 03:26 README"
# "dr-xr-xr-x 2 root other 512 Apr 8 1994 etc"
# "dr-xr-xr-x 2 root 512 Apr 8 1994 etc"
# "lrwxrwxrwx 1 root other 7 Jan 25 00:17 bin -> usr/bin"
# Also produced by Microsoft's FTP servers for Windows:
# "---------- 1 owner group 1803128 Jul 10 10:18 ls-lR.Z"
# "d--------- 1 owner group 0 May 9 19:45 Softlib"
# Also WFTPD for MSDOS:
# "-rwxrwxrwx 1 noone nogroup 322 Aug 19 1996 message.ftp"
# Also NetWare:
# "d [R----F--] supervisor 512 Jan 16 18:53 login"
# "- [R----F--] rhesus 214059 Oct 20 15:27 cx.exe"
# Also NetPresenz for the Mac:
# "-------r-- 326 1391972 1392298 Nov 22 1995 MegaPhone.sit"
# "drwxrwxr-x folder 2 May 10 1996 network"
if line[0] in 'bcdlps-':
if line[0] == 'd':
info['trycwd'] = True # depends on [control=['if'], data=[]]
if line[0] == '-':
info['tryretr'] = True # depends on [control=['if'], data=[]]
if line[0] == 'l':
info['trycwd'] = info['tryretr'] = True # depends on [control=['if'], data=[]]
parts = line.split()
if len(parts) < 7:
return None # depends on [control=['if'], data=[]]
del parts[0] # skip permissions
if parts[0] != 'folder':
del parts[0] # skip nlink # depends on [control=['if'], data=[]]
del parts[0] # skip uid
del parts[0] # skip gid or size
if not ismonth(parts[0]):
del parts[0] # skip size # depends on [control=['if'], data=[]]
if not ismonth(parts[0]):
return None # depends on [control=['if'], data=[]]
del parts[0] # skip month
del parts[0] # skip day
if not parts:
return None # depends on [control=['if'], data=[]]
del parts[0] # skip year or time
name = ' '.join(parts)
# resolve links
if line[0] == 'l' and ' -> ' in name:
name = name.split(' -> ', 1)[1] # depends on [control=['if'], data=[]]
# eliminate extra NetWare spaces
if line[1] in ' [' and name.startswith(' '):
name = name[3:] # depends on [control=['if'], data=[]]
info['name'] = name
return info # depends on [control=['if'], data=[]]
# MultiNet (some spaces removed from examples)
# "00README.TXT;1 2 30-DEC-1996 17:44 [SYSTEM] (RWED,RWED,RE,RE)"
# "CORE.DIR;1 1 8-SEP-1996 16:09 [SYSTEM] (RWE,RWE,RE,RE)"
# and non-MultiNet VMS:
# "CII-MANUAL.TEX;1 213/216 29-JAN-1996 03:33:12 [ANONYMOU,ANONYMOUS] (RWED,RWED,,)"
i = line.find(';')
if i != -1:
name = line[:i]
if name.endswith('.DIR'):
name = name[:-4]
info['trycwd'] = True # depends on [control=['if'], data=[]]
else:
info['tryretr'] = True
info['name'] = name
return info # depends on [control=['if'], data=['i']]
# MSDOS format
# 04-27-00 09:09PM <DIR> licensed
# 07-18-00 10:16AM <DIR> pub
# 04-14-00 03:47PM 589 readme.htm
if line[0].isdigit():
parts = line.split()
if len(parts) != 4:
return None # depends on [control=['if'], data=[]]
info['name'] = parts[3]
if parts[2][0] == '<':
info['trycwd'] = True # depends on [control=['if'], data=[]]
else:
info['tryretr'] = True
return info # depends on [control=['if'], data=[]]
# Some useless lines, safely ignored:
# "Total of 11 Files, 10966 Blocks." (VMS)
# "total 14786" (UNIX)
# "DISK$ANONFTP:[ANONYMOUS]" (VMS)
# "Directory DISK$PCSA:[ANONYM]" (VMS)
return None |
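
Driving the parser with one line per supported listing style; each call returns a dict with name/trycwd/tryretr, or None for noise lines (the UNIX case also exercises the module's ismonth helper):

for line in (
    '+i8388621.29609,m824255902,/,\tdev',                    # EPLF directory
    '-rw-r--r--   1 root  other  531 Jan 29 03:26 README',   # UNIX file
    '04-27-00  09:09PM       <DIR>          licensed',       # MSDOS directory
    'total 14786',                                           # noise -> None
):
    print(ftpparse(line))
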
def load_graphml(filename, folder=None, node_type=int):
"""
Load a GraphML file from disk and convert the node/edge attributes to
correct data types.
Parameters
----------
filename : string
the name of the graphml file (including file extension)
folder : string
the folder containing the file, if None, use default data folder
node_type : type
(Python type (default: int)) - Convert node ids to this type
Returns
-------
networkx multidigraph
"""
start_time = time.time()
# read the graph from disk
if folder is None:
folder = settings.data_folder
path = os.path.join(folder, filename)
G = nx.MultiDiGraph(nx.read_graphml(path, node_type=node_type))
# convert graph crs attribute from saved string to correct dict data type
G.graph['crs'] = ast.literal_eval(G.graph['crs'])
if 'streets_per_node' in G.graph:
G.graph['streets_per_node'] = ast.literal_eval(G.graph['streets_per_node'])
# convert numeric node tags from string to numeric data types
log('Converting node and edge attribute data types')
for _, data in G.nodes(data=True):
data['osmid'] = node_type(data['osmid'])
data['x'] = float(data['x'])
data['y'] = float(data['y'])
# convert numeric, bool, and list edge tags from string to correct data types
for _, _, data in G.edges(data=True, keys=False):
# first parse oneway to bool and length to float - they should always
# have only 1 value each
data['oneway'] = ast.literal_eval(data['oneway'])
data['length'] = float(data['length'])
# these attributes might have a single value, or a list if edge's
# topology was simplified
for attr in ['highway', 'name', 'bridge', 'tunnel', 'lanes', 'ref', 'maxspeed', 'service', 'access', 'area', 'landuse', 'width', 'est_width']:
# if this edge has this attribute, and it starts with '[' and ends
# with ']', then it's a list to be parsed
if attr in data and data[attr][0] == '[' and data[attr][-1] == ']':
# try to convert the string list to a list type, else leave as
# single-value string (and leave as string if error)
try:
data[attr] = ast.literal_eval(data[attr])
except:
pass
# osmid might have a single value or a list
if 'osmid' in data:
if data['osmid'][0] == '[' and data['osmid'][-1] == ']':
# if it's a list, eval the list then convert each element to node_type
data['osmid'] = [node_type(i) for i in ast.literal_eval(data['osmid'])]
else:
# if it's not a list, convert it to the node_type
data['osmid'] = node_type(data['osmid'])
# if geometry attribute exists, load the string as well-known text to
# shapely LineString
if 'geometry' in data:
data['geometry'] = wkt.loads(data['geometry'])
# remove node_default and edge_default metadata keys if they exist
if 'node_default' in G.graph:
del G.graph['node_default']
if 'edge_default' in G.graph:
del G.graph['edge_default']
log('Loaded graph with {:,} nodes and {:,} edges in {:,.2f} seconds from "{}"'.format(len(list(G.nodes())),
len(list(G.edges())),
time.time()-start_time,
path))
return G | def function[load_graphml, parameter[filename, folder, node_type]]:
constant[
Load a GraphML file from disk and convert the node/edge attributes to
correct data types.
Parameters
----------
filename : string
the name of the graphml file (including file extension)
folder : string
the folder containing the file, if None, use default data folder
node_type : type
(Python type (default: int)) - Convert node ids to this type
Returns
-------
networkx multidigraph
]
variable[start_time] assign[=] call[name[time].time, parameter[]]
if compare[name[folder] is constant[None]] begin[:]
variable[folder] assign[=] name[settings].data_folder
variable[path] assign[=] call[name[os].path.join, parameter[name[folder], name[filename]]]
variable[G] assign[=] call[name[nx].MultiDiGraph, parameter[call[name[nx].read_graphml, parameter[name[path]]]]]
call[name[G].graph][constant[crs]] assign[=] call[name[ast].literal_eval, parameter[call[name[G].graph][constant[crs]]]]
if compare[constant[streets_per_node] in name[G].graph] begin[:]
call[name[G].graph][constant[streets_per_node]] assign[=] call[name[ast].literal_eval, parameter[call[name[G].graph][constant[streets_per_node]]]]
call[name[log], parameter[constant[Converting node and edge attribute data types]]]
for taget[tuple[[<ast.Name object at 0x7da1b1c58580>, <ast.Name object at 0x7da1b1c58820>]]] in starred[call[name[G].nodes, parameter[]]] begin[:]
call[name[data]][constant[osmid]] assign[=] call[name[node_type], parameter[call[name[data]][constant[osmid]]]]
call[name[data]][constant[x]] assign[=] call[name[float], parameter[call[name[data]][constant[x]]]]
call[name[data]][constant[y]] assign[=] call[name[float], parameter[call[name[data]][constant[y]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1b64c40>, <ast.Name object at 0x7da1b1b647f0>, <ast.Name object at 0x7da1b1b64670>]]] in starred[call[name[G].edges, parameter[]]] begin[:]
call[name[data]][constant[oneway]] assign[=] call[name[ast].literal_eval, parameter[call[name[data]][constant[oneway]]]]
call[name[data]][constant[length]] assign[=] call[name[float], parameter[call[name[data]][constant[length]]]]
for taget[name[attr]] in starred[list[[<ast.Constant object at 0x7da1b1c32b90>, <ast.Constant object at 0x7da1b1c329b0>, <ast.Constant object at 0x7da1b1c337c0>, <ast.Constant object at 0x7da1b1c305e0>, <ast.Constant object at 0x7da1b1c328f0>, <ast.Constant object at 0x7da1b1c32ad0>, <ast.Constant object at 0x7da1b1c31cc0>, <ast.Constant object at 0x7da1b1c30be0>, <ast.Constant object at 0x7da1b1c33eb0>, <ast.Constant object at 0x7da1b1c31e40>, <ast.Constant object at 0x7da1b1c32920>, <ast.Constant object at 0x7da1b1c31420>, <ast.Constant object at 0x7da1b1c33df0>]]] begin[:]
if <ast.BoolOp object at 0x7da1b1c32c50> begin[:]
<ast.Try object at 0x7da1b1c310c0>
if compare[constant[osmid] in name[data]] begin[:]
if <ast.BoolOp object at 0x7da1b1c324a0> begin[:]
call[name[data]][constant[osmid]] assign[=] <ast.ListComp object at 0x7da1b1c30610>
if compare[constant[geometry] in name[data]] begin[:]
call[name[data]][constant[geometry]] assign[=] call[name[wkt].loads, parameter[call[name[data]][constant[geometry]]]]
if compare[constant[node_default] in name[G].graph] begin[:]
<ast.Delete object at 0x7da1b1c33550>
if compare[constant[edge_default] in name[G].graph] begin[:]
<ast.Delete object at 0x7da1b1c31570>
call[name[log], parameter[call[constant[Loaded graph with {:,} nodes and {:,} edges in {:,.2f} seconds from "{}"].format, parameter[call[name[len], parameter[call[name[list], parameter[call[name[G].nodes, parameter[]]]]]], call[name[len], parameter[call[name[list], parameter[call[name[G].edges, parameter[]]]]]], binary_operation[call[name[time].time, parameter[]] - name[start_time]], name[path]]]]]
return[name[G]] | keyword[def] identifier[load_graphml] ( identifier[filename] , identifier[folder] = keyword[None] , identifier[node_type] = identifier[int] ):
literal[string]
identifier[start_time] = identifier[time] . identifier[time] ()
keyword[if] identifier[folder] keyword[is] keyword[None] :
identifier[folder] = identifier[settings] . identifier[data_folder]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , identifier[filename] )
identifier[G] = identifier[nx] . identifier[MultiDiGraph] ( identifier[nx] . identifier[read_graphml] ( identifier[path] , identifier[node_type] = identifier[node_type] ))
identifier[G] . identifier[graph] [ literal[string] ]= identifier[ast] . identifier[literal_eval] ( identifier[G] . identifier[graph] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[G] . identifier[graph] :
identifier[G] . identifier[graph] [ literal[string] ]= identifier[ast] . identifier[literal_eval] ( identifier[G] . identifier[graph] [ literal[string] ])
identifier[log] ( literal[string] )
keyword[for] identifier[_] , identifier[data] keyword[in] identifier[G] . identifier[nodes] ( identifier[data] = keyword[True] ):
identifier[data] [ literal[string] ]= identifier[node_type] ( identifier[data] [ literal[string] ])
identifier[data] [ literal[string] ]= identifier[float] ( identifier[data] [ literal[string] ])
identifier[data] [ literal[string] ]= identifier[float] ( identifier[data] [ literal[string] ])
keyword[for] identifier[_] , identifier[_] , identifier[data] keyword[in] identifier[G] . identifier[edges] ( identifier[data] = keyword[True] , identifier[keys] = keyword[False] ):
identifier[data] [ literal[string] ]= identifier[ast] . identifier[literal_eval] ( identifier[data] [ literal[string] ])
identifier[data] [ literal[string] ]= identifier[float] ( identifier[data] [ literal[string] ])
keyword[for] identifier[attr] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[attr] keyword[in] identifier[data] keyword[and] identifier[data] [ identifier[attr] ][ literal[int] ]== literal[string] keyword[and] identifier[data] [ identifier[attr] ][- literal[int] ]== literal[string] :
keyword[try] :
identifier[data] [ identifier[attr] ]= identifier[ast] . identifier[literal_eval] ( identifier[data] [ identifier[attr] ])
keyword[except] :
keyword[pass]
keyword[if] literal[string] keyword[in] identifier[data] :
keyword[if] identifier[data] [ literal[string] ][ literal[int] ]== literal[string] keyword[and] identifier[data] [ literal[string] ][- literal[int] ]== literal[string] :
identifier[data] [ literal[string] ]=[ identifier[node_type] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[ast] . identifier[literal_eval] ( identifier[data] [ literal[string] ])]
keyword[else] :
identifier[data] [ literal[string] ]= identifier[node_type] ( identifier[data] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[data] [ literal[string] ]= identifier[wkt] . identifier[loads] ( identifier[data] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[G] . identifier[graph] :
keyword[del] identifier[G] . identifier[graph] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[G] . identifier[graph] :
keyword[del] identifier[G] . identifier[graph] [ literal[string] ]
identifier[log] ( literal[string] . identifier[format] ( identifier[len] ( identifier[list] ( identifier[G] . identifier[nodes] ())),
identifier[len] ( identifier[list] ( identifier[G] . identifier[edges] ())),
identifier[time] . identifier[time] ()- identifier[start_time] ,
identifier[path] ))
keyword[return] identifier[G] | def load_graphml(filename, folder=None, node_type=int):
"""
Load a GraphML file from disk and convert the node/edge attributes to
correct data types.
Parameters
----------
filename : string
the name of the graphml file (including file extension)
folder : string
the folder containing the file, if None, use default data folder
node_type : type
(Python type (default: int)) - Convert node ids to this type
Returns
-------
networkx multidigraph
"""
start_time = time.time()
# read the graph from disk
if folder is None:
folder = settings.data_folder # depends on [control=['if'], data=['folder']]
path = os.path.join(folder, filename)
G = nx.MultiDiGraph(nx.read_graphml(path, node_type=node_type))
# convert graph crs attribute from saved string to correct dict data type
G.graph['crs'] = ast.literal_eval(G.graph['crs'])
if 'streets_per_node' in G.graph:
G.graph['streets_per_node'] = ast.literal_eval(G.graph['streets_per_node']) # depends on [control=['if'], data=[]]
# convert numeric node tags from string to numeric data types
log('Converting node and edge attribute data types')
for (_, data) in G.nodes(data=True):
data['osmid'] = node_type(data['osmid'])
data['x'] = float(data['x'])
data['y'] = float(data['y']) # depends on [control=['for'], data=[]]
    # convert numeric, bool, and list edge attributes from string to correct data types
for (_, _, data) in G.edges(data=True, keys=False):
# first parse oneway to bool and length to float - they should always
# have only 1 value each
data['oneway'] = ast.literal_eval(data['oneway'])
data['length'] = float(data['length'])
# these attributes might have a single value, or a list if edge's
# topology was simplified
for attr in ['highway', 'name', 'bridge', 'tunnel', 'lanes', 'ref', 'maxspeed', 'service', 'access', 'area', 'landuse', 'width', 'est_width']:
# if this edge has this attribute, and it starts with '[' and ends
# with ']', then it's a list to be parsed
if attr in data and data[attr][0] == '[' and (data[attr][-1] == ']'):
# try to convert the string list to a list type, else leave as
# single-value string (and leave as string if error)
try:
data[attr] = ast.literal_eval(data[attr]) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']]
# osmid might have a single value or a list
if 'osmid' in data:
if data['osmid'][0] == '[' and data['osmid'][-1] == ']':
# if it's a list, eval the list then convert each element to node_type
data['osmid'] = [node_type(i) for i in ast.literal_eval(data['osmid'])] # depends on [control=['if'], data=[]]
else:
# if it's not a list, convert it to the node_type
data['osmid'] = node_type(data['osmid']) # depends on [control=['if'], data=['data']]
# if geometry attribute exists, load the string as well-known text to
# shapely LineString
if 'geometry' in data:
data['geometry'] = wkt.loads(data['geometry']) # depends on [control=['if'], data=['data']] # depends on [control=['for'], data=[]]
# remove node_default and edge_default metadata keys if they exist
if 'node_default' in G.graph:
del G.graph['node_default'] # depends on [control=['if'], data=[]]
if 'edge_default' in G.graph:
del G.graph['edge_default'] # depends on [control=['if'], data=[]]
log('Loaded graph with {:,} nodes and {:,} edges in {:,.2f} seconds from "{}"'.format(len(list(G.nodes())), len(list(G.edges())), time.time() - start_time, path))
return G |
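A minimal usage sketch for the load_graphml function above; the filename, folder, and inspected attributes are placeholders, and a graph saved by the matching save routine is assumed.

# Hypothetical usage: reload a previously saved street network and check
# that the string-encoded attributes were converted back to native types.
G = load_graphml('network.graphml', folder='data')   # placeholder names
print(isinstance(G.graph['crs'], dict))              # crs parsed via ast.literal_eval
u, v, edge = list(G.edges(data=True))[0]
print(type(edge['length']), type(edge['oneway']))    # <class 'float'> <class 'bool'>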
def run(self, cmd):
"""Similar to profile.Profile.run ."""
import __main__
dikt = __main__.__dict__
return self.runctx(cmd, dikt, dikt) | def function[run, parameter[self, cmd]]:
constant[Similar to profile.Profile.run .]
import module[__main__]
variable[dikt] assign[=] name[__main__].__dict__
return[call[name[self].runctx, parameter[name[cmd], name[dikt], name[dikt]]]] | keyword[def] identifier[run] ( identifier[self] , identifier[cmd] ):
literal[string]
keyword[import] identifier[__main__]
identifier[dikt] = identifier[__main__] . identifier[__dict__]
keyword[return] identifier[self] . identifier[runctx] ( identifier[cmd] , identifier[dikt] , identifier[dikt] ) | def run(self, cmd):
"""Similar to profile.Profile.run ."""
import __main__
dikt = __main__.__dict__
return self.runctx(cmd, dikt, dikt) |
def run_command(self,
args: List[str],
max_num_processes: int=None,
max_stack_size: int=None,
max_virtual_memory: int=None,
as_root: bool=False,
stdin: FileIO=None,
timeout: int=None,
check: bool=False,
truncate_stdout: int=None,
truncate_stderr: int=None) -> 'CompletedCommand':
"""
Runs a command inside the sandbox and returns the results.
:param args: A list of strings that specify which command should
be run inside the sandbox.
:param max_num_processes: The maximum number of processes the
command is allowed to spawn.
:param max_stack_size: The maximum stack size, in bytes, allowed
for the command.
:param max_virtual_memory: The maximum amount of memory, in
bytes, allowed for the command.
:param as_root: Whether to run the command as a root user.
:param stdin: A file object to be redirected as input to the
command's stdin. If this is None, /dev/null is sent to the
command's stdin.
:param timeout: The time limit for the command.
:param check: Causes CalledProcessError to be raised if the
command exits nonzero or times out.
:param truncate_stdout: When not None, stdout from the command
will be truncated after this many bytes.
:param truncate_stderr: When not None, stderr from the command
will be truncated after this many bytes.
"""
cmd = ['docker', 'exec', '-i', self.name, 'cmd_runner.py']
if stdin is None:
cmd.append('--stdin_devnull')
if max_num_processes is not None:
cmd += ['--max_num_processes', str(max_num_processes)]
if max_stack_size is not None:
cmd += ['--max_stack_size', str(max_stack_size)]
if max_virtual_memory is not None:
cmd += ['--max_virtual_memory', str(max_virtual_memory)]
if timeout is not None:
cmd += ['--timeout', str(timeout)]
if truncate_stdout is not None:
cmd += ['--truncate_stdout', str(truncate_stdout)]
if truncate_stderr is not None:
cmd += ['--truncate_stderr', str(truncate_stderr)]
if not as_root:
cmd += ['--linux_user_id', str(self._linux_uid)]
cmd += args
if self.debug:
print('running: {}'.format(cmd), flush=True)
with tempfile.TemporaryFile() as f:
try:
subprocess.run(cmd, stdin=stdin, stdout=f, stderr=subprocess.PIPE, check=True)
f.seek(0)
json_len = int(f.readline().decode().rstrip())
results_json = json.loads(f.read(json_len).decode())
stdout_len = int(f.readline().decode().rstrip())
stdout = tempfile.NamedTemporaryFile()
stdout.write(f.read(stdout_len))
stdout.seek(0)
stderr_len = int(f.readline().decode().rstrip())
stderr = tempfile.NamedTemporaryFile()
stderr.write(f.read(stderr_len))
stderr.seek(0)
result = CompletedCommand(return_code=results_json['return_code'],
timed_out=results_json['timed_out'],
stdout=stdout,
stderr=stderr,
stdout_truncated=results_json['stdout_truncated'],
stderr_truncated=results_json['stderr_truncated'])
if (result.return_code != 0 or results_json['timed_out']) and check:
raise subprocess.CalledProcessError(
result.return_code, cmd,
output=result.stdout, stderr=result.stderr)
return result
except subprocess.CalledProcessError as e:
f.seek(0)
print(f.read())
print(e.stderr)
raise | def function[run_command, parameter[self, args, max_num_processes, max_stack_size, max_virtual_memory, as_root, stdin, timeout, check, truncate_stdout, truncate_stderr]]:
constant[
Runs a command inside the sandbox and returns the results.
:param args: A list of strings that specify which command should
be run inside the sandbox.
:param max_num_processes: The maximum number of processes the
command is allowed to spawn.
:param max_stack_size: The maximum stack size, in bytes, allowed
for the command.
:param max_virtual_memory: The maximum amount of memory, in
bytes, allowed for the command.
:param as_root: Whether to run the command as a root user.
:param stdin: A file object to be redirected as input to the
command's stdin. If this is None, /dev/null is sent to the
command's stdin.
:param timeout: The time limit for the command.
:param check: Causes CalledProcessError to be raised if the
command exits nonzero or times out.
:param truncate_stdout: When not None, stdout from the command
will be truncated after this many bytes.
:param truncate_stderr: When not None, stderr from the command
will be truncated after this many bytes.
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b032f760>, <ast.Constant object at 0x7da1b032ff70>, <ast.Constant object at 0x7da1b032d9c0>, <ast.Attribute object at 0x7da1b032d600>, <ast.Constant object at 0x7da1b032e8f0>]]
if compare[name[stdin] is constant[None]] begin[:]
call[name[cmd].append, parameter[constant[--stdin_devnull]]]
if compare[name[max_num_processes] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b032fc40>
if compare[name[max_stack_size] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b032c940>
if compare[name[max_virtual_memory] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b032c160>
if compare[name[timeout] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b032dc60>
if compare[name[truncate_stdout] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b032ea40>
if compare[name[truncate_stderr] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b032ff40>
if <ast.UnaryOp object at 0x7da1b032feb0> begin[:]
<ast.AugAssign object at 0x7da1b032d930>
<ast.AugAssign object at 0x7da1b032e050>
if name[self].debug begin[:]
call[name[print], parameter[call[constant[running: {}].format, parameter[name[cmd]]]]]
with call[name[tempfile].TemporaryFile, parameter[]] begin[:]
<ast.Try object at 0x7da1b03880a0> | keyword[def] identifier[run_command] ( identifier[self] ,
identifier[args] : identifier[List] [ identifier[str] ],
identifier[max_num_processes] : identifier[int] = keyword[None] ,
identifier[max_stack_size] : identifier[int] = keyword[None] ,
identifier[max_virtual_memory] : identifier[int] = keyword[None] ,
identifier[as_root] : identifier[bool] = keyword[False] ,
identifier[stdin] : identifier[FileIO] = keyword[None] ,
identifier[timeout] : identifier[int] = keyword[None] ,
identifier[check] : identifier[bool] = keyword[False] ,
identifier[truncate_stdout] : identifier[int] = keyword[None] ,
identifier[truncate_stderr] : identifier[int] = keyword[None] )-> literal[string] :
literal[string]
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , identifier[self] . identifier[name] , literal[string] ]
keyword[if] identifier[stdin] keyword[is] keyword[None] :
identifier[cmd] . identifier[append] ( literal[string] )
keyword[if] identifier[max_num_processes] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[max_num_processes] )]
keyword[if] identifier[max_stack_size] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[max_stack_size] )]
keyword[if] identifier[max_virtual_memory] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[max_virtual_memory] )]
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[timeout] )]
keyword[if] identifier[truncate_stdout] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[truncate_stdout] )]
keyword[if] identifier[truncate_stderr] keyword[is] keyword[not] keyword[None] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[truncate_stderr] )]
keyword[if] keyword[not] identifier[as_root] :
identifier[cmd] +=[ literal[string] , identifier[str] ( identifier[self] . identifier[_linux_uid] )]
identifier[cmd] += identifier[args]
keyword[if] identifier[self] . identifier[debug] :
identifier[print] ( literal[string] . identifier[format] ( identifier[cmd] ), identifier[flush] = keyword[True] )
keyword[with] identifier[tempfile] . identifier[TemporaryFile] () keyword[as] identifier[f] :
keyword[try] :
identifier[subprocess] . identifier[run] ( identifier[cmd] , identifier[stdin] = identifier[stdin] , identifier[stdout] = identifier[f] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] , identifier[check] = keyword[True] )
identifier[f] . identifier[seek] ( literal[int] )
identifier[json_len] = identifier[int] ( identifier[f] . identifier[readline] (). identifier[decode] (). identifier[rstrip] ())
identifier[results_json] = identifier[json] . identifier[loads] ( identifier[f] . identifier[read] ( identifier[json_len] ). identifier[decode] ())
identifier[stdout_len] = identifier[int] ( identifier[f] . identifier[readline] (). identifier[decode] (). identifier[rstrip] ())
identifier[stdout] = identifier[tempfile] . identifier[NamedTemporaryFile] ()
identifier[stdout] . identifier[write] ( identifier[f] . identifier[read] ( identifier[stdout_len] ))
identifier[stdout] . identifier[seek] ( literal[int] )
identifier[stderr_len] = identifier[int] ( identifier[f] . identifier[readline] (). identifier[decode] (). identifier[rstrip] ())
identifier[stderr] = identifier[tempfile] . identifier[NamedTemporaryFile] ()
identifier[stderr] . identifier[write] ( identifier[f] . identifier[read] ( identifier[stderr_len] ))
identifier[stderr] . identifier[seek] ( literal[int] )
identifier[result] = identifier[CompletedCommand] ( identifier[return_code] = identifier[results_json] [ literal[string] ],
identifier[timed_out] = identifier[results_json] [ literal[string] ],
identifier[stdout] = identifier[stdout] ,
identifier[stderr] = identifier[stderr] ,
identifier[stdout_truncated] = identifier[results_json] [ literal[string] ],
identifier[stderr_truncated] = identifier[results_json] [ literal[string] ])
keyword[if] ( identifier[result] . identifier[return_code] != literal[int] keyword[or] identifier[results_json] [ literal[string] ]) keyword[and] identifier[check] :
keyword[raise] identifier[subprocess] . identifier[CalledProcessError] (
identifier[result] . identifier[return_code] , identifier[cmd] ,
identifier[output] = identifier[result] . identifier[stdout] , identifier[stderr] = identifier[result] . identifier[stderr] )
keyword[return] identifier[result]
keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[e] :
identifier[f] . identifier[seek] ( literal[int] )
identifier[print] ( identifier[f] . identifier[read] ())
identifier[print] ( identifier[e] . identifier[stderr] )
keyword[raise] | def run_command(self, args: List[str], max_num_processes: int=None, max_stack_size: int=None, max_virtual_memory: int=None, as_root: bool=False, stdin: FileIO=None, timeout: int=None, check: bool=False, truncate_stdout: int=None, truncate_stderr: int=None) -> 'CompletedCommand':
"""
Runs a command inside the sandbox and returns the results.
:param args: A list of strings that specify which command should
be run inside the sandbox.
:param max_num_processes: The maximum number of processes the
command is allowed to spawn.
:param max_stack_size: The maximum stack size, in bytes, allowed
for the command.
:param max_virtual_memory: The maximum amount of memory, in
bytes, allowed for the command.
:param as_root: Whether to run the command as a root user.
:param stdin: A file object to be redirected as input to the
command's stdin. If this is None, /dev/null is sent to the
command's stdin.
:param timeout: The time limit for the command.
:param check: Causes CalledProcessError to be raised if the
command exits nonzero or times out.
:param truncate_stdout: When not None, stdout from the command
will be truncated after this many bytes.
:param truncate_stderr: When not None, stderr from the command
will be truncated after this many bytes.
"""
cmd = ['docker', 'exec', '-i', self.name, 'cmd_runner.py']
if stdin is None:
cmd.append('--stdin_devnull') # depends on [control=['if'], data=[]]
if max_num_processes is not None:
cmd += ['--max_num_processes', str(max_num_processes)] # depends on [control=['if'], data=['max_num_processes']]
if max_stack_size is not None:
cmd += ['--max_stack_size', str(max_stack_size)] # depends on [control=['if'], data=['max_stack_size']]
if max_virtual_memory is not None:
cmd += ['--max_virtual_memory', str(max_virtual_memory)] # depends on [control=['if'], data=['max_virtual_memory']]
if timeout is not None:
cmd += ['--timeout', str(timeout)] # depends on [control=['if'], data=['timeout']]
if truncate_stdout is not None:
cmd += ['--truncate_stdout', str(truncate_stdout)] # depends on [control=['if'], data=['truncate_stdout']]
if truncate_stderr is not None:
cmd += ['--truncate_stderr', str(truncate_stderr)] # depends on [control=['if'], data=['truncate_stderr']]
if not as_root:
cmd += ['--linux_user_id', str(self._linux_uid)] # depends on [control=['if'], data=[]]
cmd += args
if self.debug:
print('running: {}'.format(cmd), flush=True) # depends on [control=['if'], data=[]]
with tempfile.TemporaryFile() as f:
try:
subprocess.run(cmd, stdin=stdin, stdout=f, stderr=subprocess.PIPE, check=True)
f.seek(0)
json_len = int(f.readline().decode().rstrip())
results_json = json.loads(f.read(json_len).decode())
stdout_len = int(f.readline().decode().rstrip())
stdout = tempfile.NamedTemporaryFile()
stdout.write(f.read(stdout_len))
stdout.seek(0)
stderr_len = int(f.readline().decode().rstrip())
stderr = tempfile.NamedTemporaryFile()
stderr.write(f.read(stderr_len))
stderr.seek(0)
result = CompletedCommand(return_code=results_json['return_code'], timed_out=results_json['timed_out'], stdout=stdout, stderr=stderr, stdout_truncated=results_json['stdout_truncated'], stderr_truncated=results_json['stderr_truncated'])
if (result.return_code != 0 or results_json['timed_out']) and check:
raise subprocess.CalledProcessError(result.return_code, cmd, output=result.stdout, stderr=result.stderr) # depends on [control=['if'], data=[]]
return result # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError as e:
f.seek(0)
print(f.read())
print(e.stderr)
raise # depends on [control=['except'], data=['e']] # depends on [control=['with'], data=['f']] |
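A hypothetical call against an already-started sandbox container; the command, limits, and result handling below are illustrative rather than taken from the source.

# Hypothetical usage; 'sandbox' is assumed to be a started instance of the
# class defining run_command above.
result = sandbox.run_command(
    ['python3', '-c', 'print("hello")'],
    timeout=10,
    max_virtual_memory=500 * 10**6,   # 500 MB
    truncate_stdout=4096,
    check=True,                       # raise CalledProcessError on nonzero exit
)
print(result.return_code, result.timed_out)
print(result.stdout.read().decode())  # stdout/stderr are NamedTemporaryFile objects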
def keys(self, desc = None):
'''numpy asarray does not copy data'''
res = asarray(self.rc('index'))
if desc == True:
return reversed(res)
else:
return res | def function[keys, parameter[self, desc]]:
constant[numpy asarray does not copy data]
variable[res] assign[=] call[name[asarray], parameter[call[name[self].rc, parameter[constant[index]]]]]
if compare[name[desc] equal[==] constant[True]] begin[:]
return[call[name[reversed], parameter[name[res]]]] | keyword[def] identifier[keys] ( identifier[self] , identifier[desc] = keyword[None] ):
literal[string]
identifier[res] = identifier[asarray] ( identifier[self] . identifier[rc] ( literal[string] ))
keyword[if] identifier[desc] == keyword[True] :
keyword[return] identifier[reversed] ( identifier[res] )
keyword[else] :
keyword[return] identifier[res] | def keys(self, desc=None):
"""numpy asarray does not copy data"""
res = asarray(self.rc('index'))
if desc == True:
return reversed(res) # depends on [control=['if'], data=[]]
else:
return res |
def generate_matrices(dim = 40):
"""
Generates the matrices that positive and negative samples are multiplied
with. The matrix for positive samples is randomly drawn from a uniform
distribution, with elements in [-1, 1]. The matrix for negative examples
is the sum of the positive matrix with a matrix drawn from a normal
distribution with mean 0 variance 1.
"""
positive = numpy.random.uniform(-1, 1, (dim, dim))
negative = positive + numpy.random.normal(0, 1, (dim, dim))
return positive, negative | def function[generate_matrices, parameter[dim]]:
constant[
Generates the matrices that positive and negative samples are multiplied
with. The matrix for positive samples is randomly drawn from a uniform
distribution, with elements in [-1, 1]. The matrix for negative examples
is the sum of the positive matrix with a matrix drawn from a normal
distribution with mean 0 variance 1.
]
variable[positive] assign[=] call[name[numpy].random.uniform, parameter[<ast.UnaryOp object at 0x7da1b083d930>, constant[1], tuple[[<ast.Name object at 0x7da1b083d1e0>, <ast.Name object at 0x7da1b083d210>]]]]
variable[negative] assign[=] binary_operation[name[positive] + call[name[numpy].random.normal, parameter[constant[0], constant[1], tuple[[<ast.Name object at 0x7da1b08bd8a0>, <ast.Name object at 0x7da1b08bd870>]]]]]
return[tuple[[<ast.Name object at 0x7da1b08bd7e0>, <ast.Name object at 0x7da1b08bf310>]]] | keyword[def] identifier[generate_matrices] ( identifier[dim] = literal[int] ):
literal[string]
identifier[positive] = identifier[numpy] . identifier[random] . identifier[uniform] (- literal[int] , literal[int] ,( identifier[dim] , identifier[dim] ))
identifier[negative] = identifier[positive] + identifier[numpy] . identifier[random] . identifier[normal] ( literal[int] , literal[int] ,( identifier[dim] , identifier[dim] ))
keyword[return] identifier[positive] , identifier[negative] | def generate_matrices(dim=40):
"""
Generates the matrices that positive and negative samples are multiplied
with. The matrix for positive samples is randomly drawn from a uniform
distribution, with elements in [-1, 1]. The matrix for negative examples
is the sum of the positive matrix with a matrix drawn from a normal
distribution with mean 0 variance 1.
"""
positive = numpy.random.uniform(-1, 1, (dim, dim))
negative = positive + numpy.random.normal(0, 1, (dim, dim))
return (positive, negative) |
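A short sanity-check sketch for generate_matrices; it relies only on what the docstring states about the two returned arrays.

import numpy

positive, negative = generate_matrices(dim=40)
assert positive.shape == negative.shape == (40, 40)
# negative = positive + N(0, 1) noise, so the difference is the noise itself
noise = negative - positive
print(round(noise.mean(), 2), round(noise.std(), 2))  # roughly 0.0 and 1.0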
def save_config(self, cmd="write memory", confirm=False, confirm_response=""):
"""Saves configuration."""
return super(UbiquitiEdgeSSH, self).save_config(
cmd=cmd, confirm=confirm, confirm_response=confirm_response
) | def function[save_config, parameter[self, cmd, confirm, confirm_response]]:
constant[Saves configuration.]
return[call[call[name[super], parameter[name[UbiquitiEdgeSSH], name[self]]].save_config, parameter[]]] | keyword[def] identifier[save_config] ( identifier[self] , identifier[cmd] = literal[string] , identifier[confirm] = keyword[False] , identifier[confirm_response] = literal[string] ):
literal[string]
keyword[return] identifier[super] ( identifier[UbiquitiEdgeSSH] , identifier[self] ). identifier[save_config] (
identifier[cmd] = identifier[cmd] , identifier[confirm] = identifier[confirm] , identifier[confirm_response] = identifier[confirm_response]
) | def save_config(self, cmd='write memory', confirm=False, confirm_response=''):
"""Saves configuration."""
return super(UbiquitiEdgeSSH, self).save_config(cmd=cmd, confirm=confirm, confirm_response=confirm_response) |
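A hypothetical Netmiko-style session using the override above; the host, credentials, and the 'ubiquiti_edge' device type string are placeholders to be checked against the installed Netmiko version.

# Hypothetical connection setup; all values are placeholders.
from netmiko import ConnectHandler

conn = ConnectHandler(device_type='ubiquiti_edge', host='192.0.2.1',
                      username='admin', password='secret')
output = conn.save_config()   # sends 'write memory' with no confirmation step
conn.disconnect()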
def Scripts(unicode_dir=_UNICODE_DIR):
"""Returns dict mapping script names to code lists.
Args:
unicode_dir: Unicode data directory
Returns:
dict mapping script names to code lists
"""
scripts = {}
def DoLine(codes, fields):
"""Process single Scripts.txt line, updating scripts."""
(_, name) = fields
scripts.setdefault(name, []).extend(codes)
ReadUnicodeTable(unicode_dir+"/Scripts.txt", 2, DoLine)
return scripts | def function[Scripts, parameter[unicode_dir]]:
constant[Returns dict mapping script names to code lists.
Args:
unicode_dir: Unicode data directory
Returns:
dict mapping script names to code lists
]
variable[scripts] assign[=] dictionary[[], []]
def function[DoLine, parameter[codes, fields]]:
constant[Process single Scripts.txt line, updating scripts.]
<ast.Tuple object at 0x7da1b26ac040> assign[=] name[fields]
call[call[name[scripts].setdefault, parameter[name[name], list[[]]]].extend, parameter[name[codes]]]
call[name[ReadUnicodeTable], parameter[binary_operation[name[unicode_dir] + constant[/Scripts.txt]], constant[2], name[DoLine]]]
return[name[scripts]] | keyword[def] identifier[Scripts] ( identifier[unicode_dir] = identifier[_UNICODE_DIR] ):
literal[string]
identifier[scripts] ={}
keyword[def] identifier[DoLine] ( identifier[codes] , identifier[fields] ):
literal[string]
( identifier[_] , identifier[name] )= identifier[fields]
identifier[scripts] . identifier[setdefault] ( identifier[name] ,[]). identifier[extend] ( identifier[codes] )
identifier[ReadUnicodeTable] ( identifier[unicode_dir] + literal[string] , literal[int] , identifier[DoLine] )
keyword[return] identifier[scripts] | def Scripts(unicode_dir=_UNICODE_DIR):
"""Returns dict mapping script names to code lists.
Args:
unicode_dir: Unicode data directory
Returns:
dict mapping script names to code lists
"""
scripts = {}
def DoLine(codes, fields):
"""Process single Scripts.txt line, updating scripts."""
(_, name) = fields
scripts.setdefault(name, []).extend(codes)
ReadUnicodeTable(unicode_dir + '/Scripts.txt', 2, DoLine)
return scripts |
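A minimal sketch of using Scripts(); the directory is a placeholder for a local copy of the Unicode data files.

# Hypothetical usage; './ucd' is a placeholder data directory.
scripts = Scripts(unicode_dir='./ucd')
latin = scripts.get('Latin', [])
print(len(latin))                            # number of Latin code points
print(hex(latin[0]) if latin else 'no data')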
def reset(c7n_async=None):
"""Delete all persistent cluster state.
"""
click.echo('Delete db? Are you Sure? [yn] ', nl=False)
c = click.getchar()
click.echo()
if c == 'y':
click.echo('Wiping database')
worker.connection.flushdb()
elif c == 'n':
click.echo('Abort!')
else:
click.echo('Invalid input :(') | def function[reset, parameter[c7n_async]]:
constant[Delete all persistent cluster state.
]
call[name[click].echo, parameter[constant[Delete db? Are you Sure? [yn] ]]]
variable[c] assign[=] call[name[click].getchar, parameter[]]
call[name[click].echo, parameter[]]
if compare[name[c] equal[==] constant[y]] begin[:]
call[name[click].echo, parameter[constant[Wiping database]]]
call[name[worker].connection.flushdb, parameter[]] | keyword[def] identifier[reset] ( identifier[c7n_async] = keyword[None] ):
literal[string]
identifier[click] . identifier[echo] ( literal[string] , identifier[nl] = keyword[False] )
identifier[c] = identifier[click] . identifier[getchar] ()
identifier[click] . identifier[echo] ()
keyword[if] identifier[c] == literal[string] :
identifier[click] . identifier[echo] ( literal[string] )
identifier[worker] . identifier[connection] . identifier[flushdb] ()
keyword[elif] identifier[c] == literal[string] :
identifier[click] . identifier[echo] ( literal[string] )
keyword[else] :
identifier[click] . identifier[echo] ( literal[string] ) | def reset(c7n_async=None):
"""Delete all persistent cluster state.
"""
click.echo('Delete db? Are you Sure? [yn] ', nl=False)
c = click.getchar()
click.echo()
if c == 'y':
click.echo('Wiping database')
worker.connection.flushdb() # depends on [control=['if'], data=[]]
elif c == 'n':
click.echo('Abort!') # depends on [control=['if'], data=[]]
else:
click.echo('Invalid input :(') |
def remove(self, key):
"""
Transactional implementation of :func:`Map.remove(key) <hazelcast.proxy.map.Map.remove>`
The object to be removed will be removed from only the current transaction context until the transaction is
committed.
:param key: (object), key of the mapping to be deleted.
:return: (object), the previous value associated with key, or ``None`` if there was no mapping for key.
"""
check_not_none(key, "key can't be none")
return self._encode_invoke(transactional_map_remove_codec, key=self._to_data(key)) | def function[remove, parameter[self, key]]:
constant[
Transactional implementation of :func:`Map.remove(key) <hazelcast.proxy.map.Map.remove>`
The object to be removed will be removed from only the current transaction context until the transaction is
committed.
:param key: (object), key of the mapping to be deleted.
:return: (object), the previous value associated with key, or ``None`` if there was no mapping for key.
]
call[name[check_not_none], parameter[name[key], constant[key can't be none]]]
return[call[name[self]._encode_invoke, parameter[name[transactional_map_remove_codec]]]] | keyword[def] identifier[remove] ( identifier[self] , identifier[key] ):
literal[string]
identifier[check_not_none] ( identifier[key] , literal[string] )
keyword[return] identifier[self] . identifier[_encode_invoke] ( identifier[transactional_map_remove_codec] , identifier[key] = identifier[self] . identifier[_to_data] ( identifier[key] )) | def remove(self, key):
"""
Transactional implementation of :func:`Map.remove(key) <hazelcast.proxy.map.Map.remove>`
The object to be removed will be removed from only the current transaction context until the transaction is
committed.
:param key: (object), key of the mapping to be deleted.
:return: (object), the previous value associated with key, or ``None`` if there was no mapping for key.
"""
check_not_none(key, "key can't be none")
return self._encode_invoke(transactional_map_remove_codec, key=self._to_data(key)) |
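A hypothetical Hazelcast client sketch showing where the transactional remove runs; the exact client setup varies between hazelcast-python-client versions, so treat the names below as illustrative.

# Hypothetical transaction flow; API details may differ by client version.
import hazelcast

client = hazelcast.HazelcastClient()
transaction = client.new_transaction()
transaction.begin()
txn_map = transaction.get_map('my-map')
previous = txn_map.remove('some-key')   # staged until commit, None if key absent
transaction.commit()
client.shutdown()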
def sense(self):
"""
Launches a few "sensing" commands such as 'ls', or 'pwd'
and updates the current bait state.
"""
cmd_name = random.choice(self.senses)
command = getattr(self, cmd_name)
self.state['last_command'] = cmd_name
command() | def function[sense, parameter[self]]:
constant[
Launches a few "sensing" commands such as 'ls', or 'pwd'
and updates the current bait state.
]
variable[cmd_name] assign[=] call[name[random].choice, parameter[name[self].senses]]
variable[command] assign[=] call[name[getattr], parameter[name[self], name[cmd_name]]]
call[name[self].state][constant[last_command]] assign[=] name[cmd_name]
call[name[command], parameter[]] | keyword[def] identifier[sense] ( identifier[self] ):
literal[string]
identifier[cmd_name] = identifier[random] . identifier[choice] ( identifier[self] . identifier[senses] )
identifier[command] = identifier[getattr] ( identifier[self] , identifier[cmd_name] )
identifier[self] . identifier[state] [ literal[string] ]= identifier[cmd_name]
identifier[command] () | def sense(self):
"""
Launches a few "sensing" commands such as 'ls', or 'pwd'
and updates the current bait state.
"""
cmd_name = random.choice(self.senses)
command = getattr(self, cmd_name)
self.state['last_command'] = cmd_name
command() |
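The getattr-based dispatch in sense() reduces to the following pattern; the class and command names below are illustrative.

import random

class Bait:
    senses = ['ls', 'pwd']            # method names to choose from

    def __init__(self):
        self.state = {}

    def ls(self):
        print('file_a  file_b')

    def pwd(self):
        print('/home/user')

    def sense(self):
        cmd_name = random.choice(self.senses)
        self.state['last_command'] = cmd_name
        getattr(self, cmd_name)()     # resolve the bound method by name

Bait().sense()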
def show_vcs_output_vcs_nodes_vcs_node_info_node_switch_subtype(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_vcs = ET.Element("show_vcs")
config = show_vcs
output = ET.SubElement(show_vcs, "output")
vcs_nodes = ET.SubElement(output, "vcs-nodes")
vcs_node_info = ET.SubElement(vcs_nodes, "vcs-node-info")
node_switch_subtype = ET.SubElement(vcs_node_info, "node-switch-subtype")
node_switch_subtype.text = kwargs.pop('node_switch_subtype')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[show_vcs_output_vcs_nodes_vcs_node_info_node_switch_subtype, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[show_vcs] assign[=] call[name[ET].Element, parameter[constant[show_vcs]]]
variable[config] assign[=] name[show_vcs]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[show_vcs], constant[output]]]
variable[vcs_nodes] assign[=] call[name[ET].SubElement, parameter[name[output], constant[vcs-nodes]]]
variable[vcs_node_info] assign[=] call[name[ET].SubElement, parameter[name[vcs_nodes], constant[vcs-node-info]]]
variable[node_switch_subtype] assign[=] call[name[ET].SubElement, parameter[name[vcs_node_info], constant[node-switch-subtype]]]
name[node_switch_subtype].text assign[=] call[name[kwargs].pop, parameter[constant[node_switch_subtype]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[show_vcs_output_vcs_nodes_vcs_node_info_node_switch_subtype] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[show_vcs] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[show_vcs]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[show_vcs] , literal[string] )
identifier[vcs_nodes] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[vcs_node_info] = identifier[ET] . identifier[SubElement] ( identifier[vcs_nodes] , literal[string] )
identifier[node_switch_subtype] = identifier[ET] . identifier[SubElement] ( identifier[vcs_node_info] , literal[string] )
identifier[node_switch_subtype] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def show_vcs_output_vcs_nodes_vcs_node_info_node_switch_subtype(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
show_vcs = ET.Element('show_vcs')
config = show_vcs
output = ET.SubElement(show_vcs, 'output')
vcs_nodes = ET.SubElement(output, 'vcs-nodes')
vcs_node_info = ET.SubElement(vcs_nodes, 'vcs-node-info')
node_switch_subtype = ET.SubElement(vcs_node_info, 'node-switch-subtype')
node_switch_subtype.text = kwargs.pop('node_switch_subtype')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
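A hypothetical call to the auto-generated helper above; 'device' stands in for whatever wrapper object exposes these generated methods, and the default callback is assumed to send the assembled XML to the switch.

# Hypothetical usage; 'device' and the subtype value are placeholders.
response = device.show_vcs_output_vcs_nodes_vcs_node_info_node_switch_subtype(
    node_switch_subtype='unknown')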
def generate_payload(self, config, context):
"""
Generate payload by checking Django request object.
:param context: current context.
:param config: honeybadger configuration.
:return: a dict with the generated payload.
"""
request = current_request()
payload = {
'url': request.build_absolute_uri(),
'component': request.resolver_match.app_name,
'action': request.resolver_match.func.__name__,
'params': {},
'session': {},
'cgi_data': dict(request.META),
'context': context
}
if hasattr(request, 'session'):
payload['session'] = filter_dict(dict(request.session), config.params_filters)
payload['params'] = filter_dict(dict(getattr(request, request.method)), config.params_filters)
return payload | def function[generate_payload, parameter[self, config, context]]:
constant[
Generate payload by checking Django request object.
:param context: current context.
:param config: honeybadger configuration.
:return: a dict with the generated payload.
]
variable[request] assign[=] call[name[current_request], parameter[]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b2345db0>, <ast.Constant object at 0x7da1b2347580>, <ast.Constant object at 0x7da1b23452d0>, <ast.Constant object at 0x7da1b2345090>, <ast.Constant object at 0x7da1b2345d20>, <ast.Constant object at 0x7da1b2347280>, <ast.Constant object at 0x7da1b2347430>], [<ast.Call object at 0x7da1b2344640>, <ast.Attribute object at 0x7da1b23470d0>, <ast.Attribute object at 0x7da1b23469b0>, <ast.Dict object at 0x7da1b2346470>, <ast.Dict object at 0x7da1b2345de0>, <ast.Call object at 0x7da1b2346dd0>, <ast.Name object at 0x7da1b2347910>]]
if call[name[hasattr], parameter[name[request], constant[session]]] begin[:]
call[name[payload]][constant[session]] assign[=] call[name[filter_dict], parameter[call[name[dict], parameter[name[request].session]], name[config].params_filters]]
call[name[payload]][constant[params]] assign[=] call[name[filter_dict], parameter[call[name[dict], parameter[call[name[getattr], parameter[name[request], name[request].method]]]], name[config].params_filters]]
return[name[payload]] | keyword[def] identifier[generate_payload] ( identifier[self] , identifier[config] , identifier[context] ):
literal[string]
identifier[request] = identifier[current_request] ()
identifier[payload] ={
literal[string] : identifier[request] . identifier[build_absolute_uri] (),
literal[string] : identifier[request] . identifier[resolver_match] . identifier[app_name] ,
literal[string] : identifier[request] . identifier[resolver_match] . identifier[func] . identifier[__name__] ,
literal[string] :{},
literal[string] :{},
literal[string] : identifier[dict] ( identifier[request] . identifier[META] ),
literal[string] : identifier[context]
}
keyword[if] identifier[hasattr] ( identifier[request] , literal[string] ):
identifier[payload] [ literal[string] ]= identifier[filter_dict] ( identifier[dict] ( identifier[request] . identifier[session] ), identifier[config] . identifier[params_filters] )
identifier[payload] [ literal[string] ]= identifier[filter_dict] ( identifier[dict] ( identifier[getattr] ( identifier[request] , identifier[request] . identifier[method] )), identifier[config] . identifier[params_filters] )
keyword[return] identifier[payload] | def generate_payload(self, config, context):
"""
Generate payload by checking Django request object.
:param context: current context.
:param config: honeybadger configuration.
:return: a dict with the generated payload.
"""
request = current_request()
payload = {'url': request.build_absolute_uri(), 'component': request.resolver_match.app_name, 'action': request.resolver_match.func.__name__, 'params': {}, 'session': {}, 'cgi_data': dict(request.META), 'context': context}
if hasattr(request, 'session'):
payload['session'] = filter_dict(dict(request.session), config.params_filters) # depends on [control=['if'], data=[]]
payload['params'] = filter_dict(dict(getattr(request, request.method)), config.params_filters)
return payload |
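An illustrative shape of the returned payload for a hypothetical POST to a Django view, assuming 'password' appears in config.params_filters; every value below is a placeholder.

# Illustrative result only, not produced by the source code.
payload = {
    'url': 'https://example.com/articles/42/edit',
    'component': 'blog',                      # resolver_match.app_name
    'action': 'edit_article',                 # resolved view function name
    'params': {'title': 'Hi', 'password': '[FILTERED]'},
    'session': {},
    'cgi_data': {'REQUEST_METHOD': 'POST'},   # full request.META in practice
    'context': {},
}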
def sg_max(tensor, opt):
r"""Computes the maximum of elements across axis of a tensor.
See `tf.reduce_max()` in tensorflow.
Args:
tensor: A `Tensor` (automatically given by chain).
opt:
axis : A tuple/list of integers or an integer. The axis to reduce.
keep_dims: If true, retains reduced dimensions with length 1.
name: If provided, replace current tensor's name.
Returns:
A `Tensor`.
"""
return tf.reduce_max(tensor, axis=opt.axis, keep_dims=opt.keep_dims, name=opt.name) | def function[sg_max, parameter[tensor, opt]]:
constant[Computes the maximum of elements across axis of a tensor.
See `tf.reduce_max()` in tensorflow.
Args:
tensor: A `Tensor` (automatically given by chain).
opt:
axis : A tuple/list of integers or an integer. The axis to reduce.
keep_dims: If true, retains reduced dimensions with length 1.
name: If provided, replace current tensor's name.
Returns:
A `Tensor`.
]
return[call[name[tf].reduce_max, parameter[name[tensor]]]] | keyword[def] identifier[sg_max] ( identifier[tensor] , identifier[opt] ):
literal[string]
keyword[return] identifier[tf] . identifier[reduce_max] ( identifier[tensor] , identifier[axis] = identifier[opt] . identifier[axis] , identifier[keep_dims] = identifier[opt] . identifier[keep_dims] , identifier[name] = identifier[opt] . identifier[name] ) | def sg_max(tensor, opt):
"""Computes the maximum of elements across axis of a tensor.
See `tf.reduce_max()` in tensorflow.
Args:
tensor: A `Tensor` (automatically given by chain).
opt:
axis : A tuple/list of integers or an integer. The axis to reduce.
keep_dims: If true, retains reduced dimensions with length 1.
name: If provided, replace current tensor's name.
Returns:
A `Tensor`.
"""
return tf.reduce_max(tensor, axis=opt.axis, keep_dims=opt.keep_dims, name=opt.name) |
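A minimal sketch assuming sugartensor's method-injection style, where sg_max becomes a tensor method and its keyword arguments populate opt.

# Sketch under the assumption that sugartensor patches tensor objects.
import sugartensor as tf

x = tf.constant([[1., 5.], [3., 2.]])
row_max = x.sg_max(axis=1, keep_dims=True)   # -> [[5.], [3.]], shape (2, 1)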
def clear(self):
"""Remove all nodes and edges from the graph.
Unlike the regular networkx implementation, this does *not*
remove the graph's name. But all the other graph, node, and
edge attributes go away.
"""
self.adj.clear()
self.node.clear()
self.graph.clear() | def function[clear, parameter[self]]:
constant[Remove all nodes and edges from the graph.
Unlike the regular networkx implementation, this does *not*
remove the graph's name. But all the other graph, node, and
edge attributes go away.
]
call[name[self].adj.clear, parameter[]]
call[name[self].node.clear, parameter[]]
call[name[self].graph.clear, parameter[]] | keyword[def] identifier[clear] ( identifier[self] ):
literal[string]
identifier[self] . identifier[adj] . identifier[clear] ()
identifier[self] . identifier[node] . identifier[clear] ()
identifier[self] . identifier[graph] . identifier[clear] () | def clear(self):
"""Remove all nodes and edges from the graph.
Unlike the regular networkx implementation, this does *not*
remove the graph's name. But all the other graph, node, and
edge attributes go away.
"""
self.adj.clear()
self.node.clear()
self.graph.clear() |
def post_public(self, path, data, is_json=True):
'''Make a post request requiring no auth.'''
return self._post(path, data, is_json) | def function[post_public, parameter[self, path, data, is_json]]:
constant[Make a post request requiring no auth.]
return[call[name[self]._post, parameter[name[path], name[data], name[is_json]]]] | keyword[def] identifier[post_public] ( identifier[self] , identifier[path] , identifier[data] , identifier[is_json] = keyword[True] ):
literal[string]
keyword[return] identifier[self] . identifier[_post] ( identifier[path] , identifier[data] , identifier[is_json] ) | def post_public(self, path, data, is_json=True):
"""Make a post request requiring no auth."""
return self._post(path, data, is_json) |
def _check_pool_attr(self, attr, req_attr=None):
""" Check pool attributes.
"""
if req_attr is None:
req_attr = []
# check attribute names
self._check_attr(attr, req_attr, _pool_attrs)
# validate IPv4 prefix length
if attr.get('ipv4_default_prefix_length') is not None:
try:
attr['ipv4_default_prefix_length'] = \
int(attr['ipv4_default_prefix_length'])
if (attr['ipv4_default_prefix_length'] > 32 or
attr['ipv4_default_prefix_length'] < 1):
raise ValueError()
except ValueError:
raise NipapValueError('Default IPv4 prefix length must be an integer between 1 and 32.')
# validate IPv6 prefix length
if attr.get('ipv6_default_prefix_length'):
try:
attr['ipv6_default_prefix_length'] = \
int(attr['ipv6_default_prefix_length'])
if (attr['ipv6_default_prefix_length'] > 128 or
attr['ipv6_default_prefix_length'] < 1):
raise ValueError()
except ValueError:
raise NipapValueError('Default IPv6 prefix length must be an integer between 1 and 128.') | def function[_check_pool_attr, parameter[self, attr, req_attr]]:
constant[ Check pool attributes.
]
if compare[name[req_attr] is constant[None]] begin[:]
variable[req_attr] assign[=] list[[]]
call[name[self]._check_attr, parameter[name[attr], name[req_attr], name[_pool_attrs]]]
if compare[call[name[attr].get, parameter[constant[ipv4_default_prefix_length]]] is_not constant[None]] begin[:]
<ast.Try object at 0x7da20e9b0c10>
if call[name[attr].get, parameter[constant[ipv6_default_prefix_length]]] begin[:]
<ast.Try object at 0x7da18dc06fb0> | keyword[def] identifier[_check_pool_attr] ( identifier[self] , identifier[attr] , identifier[req_attr] = keyword[None] ):
literal[string]
keyword[if] identifier[req_attr] keyword[is] keyword[None] :
identifier[req_attr] =[]
identifier[self] . identifier[_check_attr] ( identifier[attr] , identifier[req_attr] , identifier[_pool_attrs] )
keyword[if] identifier[attr] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[attr] [ literal[string] ]= identifier[int] ( identifier[attr] [ literal[string] ])
keyword[if] ( identifier[attr] [ literal[string] ]> literal[int] keyword[or]
identifier[attr] [ literal[string] ]< literal[int] ):
keyword[raise] identifier[ValueError] ()
keyword[except] identifier[ValueError] :
keyword[raise] identifier[NipapValueError] ( literal[string] )
keyword[if] identifier[attr] . identifier[get] ( literal[string] ):
keyword[try] :
identifier[attr] [ literal[string] ]= identifier[int] ( identifier[attr] [ literal[string] ])
keyword[if] ( identifier[attr] [ literal[string] ]> literal[int] keyword[or]
identifier[attr] [ literal[string] ]< literal[int] ):
keyword[raise] identifier[ValueError] ()
keyword[except] identifier[ValueError] :
keyword[raise] identifier[NipapValueError] ( literal[string] ) | def _check_pool_attr(self, attr, req_attr=None):
""" Check pool attributes.
"""
if req_attr is None:
req_attr = [] # depends on [control=['if'], data=['req_attr']]
# check attribute names
self._check_attr(attr, req_attr, _pool_attrs)
# validate IPv4 prefix length
if attr.get('ipv4_default_prefix_length') is not None:
try:
attr['ipv4_default_prefix_length'] = int(attr['ipv4_default_prefix_length'])
if attr['ipv4_default_prefix_length'] > 32 or attr['ipv4_default_prefix_length'] < 1:
raise ValueError() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
raise NipapValueError('Default IPv4 prefix length must be an integer between 1 and 32.') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# validate IPv6 prefix length
if attr.get('ipv6_default_prefix_length'):
try:
attr['ipv6_default_prefix_length'] = int(attr['ipv6_default_prefix_length'])
if attr['ipv6_default_prefix_length'] > 128 or attr['ipv6_default_prefix_length'] < 1:
raise ValueError() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
raise NipapValueError('Default IPv6 prefix length must be an integer between 1 and 128.') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
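An illustrative sketch of the validation behavior; 'backend' is a placeholder for the object exposing _check_pool_attr.

# Hypothetical calls; 'backend' is a placeholder instance.
attr = {'ipv4_default_prefix_length': '24'}
backend._check_pool_attr(attr)
assert attr['ipv4_default_prefix_length'] == 24   # coerced to int in place

try:
    backend._check_pool_attr({'ipv6_default_prefix_length': 129})
except NipapValueError as exc:
    print(exc)   # Default IPv6 prefix length must be an integer between 1 and 128.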
def cache():
"""Returns a 304 if an If-Modified-Since header or If-None-Match is present. Returns the same as a GET otherwise.
---
tags:
- Response inspection
parameters:
- in: header
name: If-Modified-Since
- in: header
name: If-None-Match
produces:
- application/json
responses:
200:
description: Cached response
304:
description: Modified
"""
is_conditional = request.headers.get("If-Modified-Since") or request.headers.get(
"If-None-Match"
)
if is_conditional is None:
response = view_get()
response.headers["Last-Modified"] = http_date()
response.headers["ETag"] = uuid.uuid4().hex
return response
else:
return status_code(304) | def function[cache, parameter[]]:
constant[Returns a 304 if an If-Modified-Since header or If-None-Match is present. Returns the same as a GET otherwise.
---
tags:
- Response inspection
parameters:
- in: header
name: If-Modified-Since
- in: header
name: If-None-Match
produces:
- application/json
responses:
200:
description: Cached response
304:
description: Modified
]
variable[is_conditional] assign[=] <ast.BoolOp object at 0x7da1b2179390>
if compare[name[is_conditional] is constant[None]] begin[:]
variable[response] assign[=] call[name[view_get], parameter[]]
call[name[response].headers][constant[Last-Modified]] assign[=] call[name[http_date], parameter[]]
call[name[response].headers][constant[ETag]] assign[=] call[name[uuid].uuid4, parameter[]].hex
return[name[response]] | keyword[def] identifier[cache] ():
literal[string]
identifier[is_conditional] = identifier[request] . identifier[headers] . identifier[get] ( literal[string] ) keyword[or] identifier[request] . identifier[headers] . identifier[get] (
literal[string]
)
keyword[if] identifier[is_conditional] keyword[is] keyword[None] :
identifier[response] = identifier[view_get] ()
identifier[response] . identifier[headers] [ literal[string] ]= identifier[http_date] ()
identifier[response] . identifier[headers] [ literal[string] ]= identifier[uuid] . identifier[uuid4] (). identifier[hex]
keyword[return] identifier[response]
keyword[else] :
keyword[return] identifier[status_code] ( literal[int] ) | def cache():
"""Returns a 304 if an If-Modified-Since header or If-None-Match is present. Returns the same as a GET otherwise.
---
tags:
- Response inspection
parameters:
- in: header
name: If-Modified-Since
- in: header
name: If-None-Match
produces:
- application/json
responses:
200:
description: Cached response
304:
description: Modified
"""
is_conditional = request.headers.get('If-Modified-Since') or request.headers.get('If-None-Match')
if is_conditional is None:
response = view_get()
response.headers['Last-Modified'] = http_date()
response.headers['ETag'] = uuid.uuid4().hex
return response # depends on [control=['if'], data=[]]
else:
return status_code(304) |
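A client-side sketch of the conditional-request flow against this endpoint; the base URL is a placeholder, and note that any If-None-Match value triggers the 304 branch here.

# Hypothetical client for the /cache endpoint above.
import requests

first = requests.get('http://localhost:5000/cache')
etag = first.headers['ETag']
second = requests.get('http://localhost:5000/cache',
                      headers={'If-None-Match': etag})
print(first.status_code, second.status_code)   # 200 304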
def info(self, category_id, store_view=None, attributes=None):
"""
Retrieve Category details
:param category_id: ID of category to retrieve
:param store_view: Store view ID or code
:param attributes: Return the fields specified
:return: Dictionary of data
"""
return self.call(
'catalog_category.info', [category_id, store_view, attributes]
) | def function[info, parameter[self, category_id, store_view, attributes]]:
constant[
Retrieve Category details
:param category_id: ID of category to retrieve
:param store_view: Store view ID or code
:param attributes: Return the fields specified
:return: Dictionary of data
]
return[call[name[self].call, parameter[constant[catalog_category.info], list[[<ast.Name object at 0x7da1b04f61d0>, <ast.Name object at 0x7da1b04f4940>, <ast.Name object at 0x7da1b04f5c00>]]]]] | keyword[def] identifier[info] ( identifier[self] , identifier[category_id] , identifier[store_view] = keyword[None] , identifier[attributes] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[call] (
literal[string] ,[ identifier[category_id] , identifier[store_view] , identifier[attributes] ]
) | def info(self, category_id, store_view=None, attributes=None):
"""
Retrieve Category details
:param category_id: ID of category to retrieve
:param store_view: Store view ID or code
:param attributes: Return the fields specified
:return: Dictionary of data
"""
return self.call('catalog_category.info', [category_id, store_view, attributes]) |
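A hypothetical call through a magento-style API wrapper; the category id, store view, and attribute names are placeholders.

# Hypothetical usage; 'category_api' is a connected API client instance.
details = category_api.info(42, store_view='default',
                            attributes=['name', 'url_key'])
print(details.get('name'))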
def from_rectilinear(cls, x, y, z, formatter=numpy_formatter):
"""Construct a contour generator from a rectilinear grid.
Parameters
----------
x : array_like
x coordinates of each column of `z`. Must be the same length as
the number of columns in `z`. (len(x) == z.shape[1])
y : array_like
y coordinates of each row of `z`. Must be the same length as the
        number of rows in `z`. (len(y) == z.shape[0])
z : array_like
The 2-dimensional rectilinear grid of data to compute contours for.
Masked arrays are supported.
formatter : callable
A conversion function to convert from the internal `Matplotlib`_
contour format to an external format. See :ref:`formatters` for
more information.
Returns
-------
: :class:`QuadContourGenerator`
Initialized contour generator.
"""
x = np.asarray(x, dtype=np.float64)
y = np.asarray(y, dtype=np.float64)
z = np.ma.asarray(z, dtype=np.float64)
# Check arguments.
if x.ndim != 1:
raise TypeError(
"'x' must be a 1D array but is a {:d}D array".format(x.ndim))
if y.ndim != 1:
raise TypeError(
"'y' must be a 1D array but is a {:d}D array".format(y.ndim))
if z.ndim != 2:
raise TypeError(
"'z' must be a 2D array but it a {:d}D array".format(z.ndim))
if x.size != z.shape[1]:
raise TypeError(
("the length of 'x' must be equal to the number of columns in "
"'z' but the length of 'x' is {:d} and 'z' has {:d} "
"columns").format(x.size, z.shape[1]))
if y.size != z.shape[0]:
raise TypeError(
("the length of 'y' must be equal to the number of rows in "
"'z' but the length of 'y' is {:d} and 'z' has {:d} "
"rows").format(y.size, z.shape[0]))
# Convert to curvilinear format and call constructor.
y, x = np.meshgrid(y, x, indexing='ij')
return cls(x, y, z, formatter) | def function[from_rectilinear, parameter[cls, x, y, z, formatter]]:
constant[Construct a contour generator from a rectilinear grid.
Parameters
----------
x : array_like
x coordinates of each column of `z`. Must be the same length as
the number of columns in `z`. (len(x) == z.shape[1])
y : array_like
y coordinates of each row of `z`. Must be the same length as the
number of rows in `z`. (len(y) == z.shape[0])
z : array_like
The 2-dimensional rectilinear grid of data to compute contours for.
Masked arrays are supported.
formatter : callable
A conversion function to convert from the internal `Matplotlib`_
contour format to an external format. See :ref:`formatters` for
more information.
Returns
-------
: :class:`QuadContourGenerator`
Initialized contour generator.
]
variable[x] assign[=] call[name[np].asarray, parameter[name[x]]]
variable[y] assign[=] call[name[np].asarray, parameter[name[y]]]
variable[z] assign[=] call[name[np].ma.asarray, parameter[name[z]]]
if compare[name[x].ndim not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b2347e20>
if compare[name[y].ndim not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da1b2347af0>
if compare[name[z].ndim not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da1b2346ec0>
if compare[name[x].size not_equal[!=] call[name[z].shape][constant[1]]] begin[:]
<ast.Raise object at 0x7da1b23453f0>
if compare[name[y].size not_equal[!=] call[name[z].shape][constant[0]]] begin[:]
<ast.Raise object at 0x7da1b2344f70>
<ast.Tuple object at 0x7da1b23454b0> assign[=] call[name[np].meshgrid, parameter[name[y], name[x]]]
return[call[name[cls], parameter[name[x], name[y], name[z], name[formatter]]]] | keyword[def] identifier[from_rectilinear] ( identifier[cls] , identifier[x] , identifier[y] , identifier[z] , identifier[formatter] = identifier[numpy_formatter] ):
literal[string]
identifier[x] = identifier[np] . identifier[asarray] ( identifier[x] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[y] = identifier[np] . identifier[asarray] ( identifier[y] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[z] = identifier[np] . identifier[ma] . identifier[asarray] ( identifier[z] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[x] . identifier[ndim] != literal[int] :
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[x] . identifier[ndim] ))
keyword[if] identifier[y] . identifier[ndim] != literal[int] :
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[y] . identifier[ndim] ))
keyword[if] identifier[z] . identifier[ndim] != literal[int] :
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[z] . identifier[ndim] ))
keyword[if] identifier[x] . identifier[size] != identifier[z] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[TypeError] (
( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[x] . identifier[size] , identifier[z] . identifier[shape] [ literal[int] ]))
keyword[if] identifier[y] . identifier[size] != identifier[z] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[TypeError] (
( literal[string]
literal[string]
literal[string] ). identifier[format] ( identifier[y] . identifier[size] , identifier[z] . identifier[shape] [ literal[int] ]))
identifier[y] , identifier[x] = identifier[np] . identifier[meshgrid] ( identifier[y] , identifier[x] , identifier[indexing] = literal[string] )
keyword[return] identifier[cls] ( identifier[x] , identifier[y] , identifier[z] , identifier[formatter] ) | def from_rectilinear(cls, x, y, z, formatter=numpy_formatter):
"""Construct a contour generator from a rectilinear grid.
Parameters
----------
x : array_like
x coordinates of each column of `z`. Must be the same length as
the number of columns in `z`. (len(x) == z.shape[1])
y : array_like
y coordinates of each row of `z`. Must be the same length as the
number of columns in `z`. (len(y) == z.shape[0])
z : array_like
The 2-dimensional rectilinear grid of data to compute contours for.
Masked arrays are supported.
formatter : callable
A conversion function to convert from the internal `Matplotlib`_
contour format to an external format. See :ref:`formatters` for
more information.
Returns
-------
: :class:`QuadContourGenerator`
Initialized contour generator.
"""
x = np.asarray(x, dtype=np.float64)
y = np.asarray(y, dtype=np.float64)
z = np.ma.asarray(z, dtype=np.float64)
# Check arguments.
if x.ndim != 1:
raise TypeError("'x' must be a 1D array but is a {:d}D array".format(x.ndim)) # depends on [control=['if'], data=[]]
if y.ndim != 1:
raise TypeError("'y' must be a 1D array but is a {:d}D array".format(y.ndim)) # depends on [control=['if'], data=[]]
if z.ndim != 2:
raise TypeError("'z' must be a 2D array but it a {:d}D array".format(z.ndim)) # depends on [control=['if'], data=[]]
if x.size != z.shape[1]:
raise TypeError("the length of 'x' must be equal to the number of columns in 'z' but the length of 'x' is {:d} and 'z' has {:d} columns".format(x.size, z.shape[1])) # depends on [control=['if'], data=[]]
if y.size != z.shape[0]:
raise TypeError("the length of 'y' must be equal to the number of rows in 'z' but the length of 'y' is {:d} and 'z' has {:d} rows".format(y.size, z.shape[0])) # depends on [control=['if'], data=[]]
# Convert to curvilinear format and call constructor.
(y, x) = np.meshgrid(y, x, indexing='ij')
return cls(x, y, z, formatter) |
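A short usage sketch, assuming the classmethod is invoked on the `QuadContourGenerator` class named in the Returns section:

import numpy as np

x = np.linspace(0.0, 1.0, 50)   # 50 columns
y = np.linspace(0.0, 2.0, 80)   # 80 rows
z = np.sin(np.pi * y)[:, None] * np.cos(np.pi * x)[None, :]  # shape (80, 50)
gen = QuadContourGenerator.from_rectilinear(x, y, z)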
def get_service_health(service_id: str) -> str:
"""Get the health of a service using service_id.
Args:
service_id (str): identifier of the service to check
Returns:
str, health status
"""
# Check if the current and actual replica levels are the same
if DC.get_replicas(service_id) != DC.get_actual_replica(service_id):
health_status = "Unhealthy"
else:
health_status = "Healthy"
return health_status | def function[get_service_health, parameter[service_id]]:
constant[Get the health of a service using service_id.
Args:
service_id (str): identifier of the service to check
Returns:
str, health status
]
if compare[call[name[DC].get_replicas, parameter[name[service_id]]] not_equal[!=] call[name[DC].get_actual_replica, parameter[name[service_id]]]] begin[:]
variable[health_status] assign[=] constant[Unhealthy]
return[name[health_status]] | keyword[def] identifier[get_service_health] ( identifier[service_id] : identifier[str] )-> identifier[str] :
literal[string]
keyword[if] identifier[DC] . identifier[get_replicas] ( identifier[service_id] )!= identifier[DC] . identifier[get_actual_replica] ( identifier[service_id] ):
identifier[health_status] = literal[string]
keyword[else] :
identifier[health_status] = literal[string]
keyword[return] identifier[health_status] | def get_service_health(service_id: str) -> str:
"""Get the health of a service using service_id.
Args:
service_id (str): identifier of the service to check
Returns:
str, health status
"""
# Check if the current and actual replica levels are the same
if DC.get_replicas(service_id) != DC.get_actual_replica(service_id):
health_status = 'Unhealthy' # depends on [control=['if'], data=[]]
else:
health_status = 'Healthy'
return health_status |
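To make the replica comparison concrete, a toy stand-in for the module-level `DC` client; the stub class and service name are assumptions, not part of the source:

class StubDC:
    """Stand-in for the client the module binds to DC."""
    def get_replicas(self, service_id):        # desired replica count
        return 3
    def get_actual_replica(self, service_id):  # replicas actually running
        return 2

DC = StubDC()
print(get_service_health("sip-db"))  # -> "Unhealthy", since 3 != 2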
def loginfo(logger, msg, *args, **kwargs):
'''
Logs messages as INFO,
unless esgfpid.defaults.LOG_INFO_TO_DEBUG is set
(then it logs messages as DEBUG).
'''
if esgfpid.defaults.LOG_INFO_TO_DEBUG:
logger.debug(msg, *args, **kwargs)
else:
logger.info(msg, *args, **kwargs) | def function[loginfo, parameter[logger, msg]]:
constant[
Logs messages as INFO,
unless esgfpid.defaults.LOG_INFO_TO_DEBUG is set
(then it logs messages as DEBUG).
]
if name[esgfpid].defaults.LOG_INFO_TO_DEBUG begin[:]
call[name[logger].debug, parameter[name[msg], <ast.Starred object at 0x7da1b1fa8280>]] | keyword[def] identifier[loginfo] ( identifier[logger] , identifier[msg] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[esgfpid] . identifier[defaults] . identifier[LOG_INFO_TO_DEBUG] :
identifier[logger] . identifier[debug] ( identifier[msg] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[logger] . identifier[info] ( identifier[msg] ,* identifier[args] ,** identifier[kwargs] ) | def loginfo(logger, msg, *args, **kwargs):
"""
Logs messages as INFO,
unless esgfpid.defaults.LOG_INFO_TO_DEBUG is set
(then it logs messages as DEBUG).
"""
if esgfpid.defaults.LOG_INFO_TO_DEBUG:
logger.debug(msg, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
logger.info(msg, *args, **kwargs) |
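A usage sketch, assuming the `esgfpid` package is importable; the message and argument are illustrative:

import logging
import esgfpid.defaults

logger = logging.getLogger(__name__)
esgfpid.defaults.LOG_INFO_TO_DEBUG = True  # demote INFO messages to DEBUG
loginfo(logger, 'Connected to %s', 'handle.server.example')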
def upload_model(self, ndex_cred=None, private=True, style='default'):
"""Creates a new NDEx network of the assembled CX model.
To upload the assembled CX model to NDEx, you need to have
a registered account on NDEx (http://ndexbio.org/) and have
the `ndex` python package installed. The uploaded network
is private by default.
Parameters
----------
ndex_cred : Optional[dict]
A dictionary with the following entries:
'user': NDEx user name
'password': NDEx password
private : Optional[bool]
Whether or not the created network will be private on NDEx.
style : Optional[str]
This optional parameter can either be (1)
The UUID of an existing NDEx network whose style should be applied
to the new network. (2) Unspecified or 'default' to use
the default INDRA-assembled network style. (3) None to
not set a network style.
Returns
-------
network_id : str
The UUID of the NDEx network that was created by uploading
the assembled CX model.
"""
cx_str = self.print_cx()
if not ndex_cred:
username, password = ndex_client.get_default_ndex_cred({})
ndex_cred = {'user': username,
'password': password}
network_id = ndex_client.create_network(cx_str, ndex_cred, private)
if network_id and style:
template_id = None if style == 'default' else style
ndex_client.set_style(network_id, ndex_cred, template_id)
return network_id | def function[upload_model, parameter[self, ndex_cred, private, style]]:
constant[Creates a new NDEx network of the assembled CX model.
To upload the assembled CX model to NDEx, you need to have
a registered account on NDEx (http://ndexbio.org/) and have
the `ndex` python package installed. The uploaded network
is private by default.
Parameters
----------
ndex_cred : Optional[dict]
A dictionary with the following entries:
'user': NDEx user name
'password': NDEx password
private : Optional[bool]
Whether or not the created network will be private on NDEx.
style : Optional[str]
This optional parameter can either be (1)
The UUID of an existing NDEx network whose style should be applied
to the new network. (2) Unspecified or 'default' to use
the default INDRA-assembled network style. (3) None to
not set a network style.
Returns
-------
network_id : str
The UUID of the NDEx network that was created by uploading
the assembled CX model.
]
variable[cx_str] assign[=] call[name[self].print_cx, parameter[]]
if <ast.UnaryOp object at 0x7da18bccaec0> begin[:]
<ast.Tuple object at 0x7da18bcca830> assign[=] call[name[ndex_client].get_default_ndex_cred, parameter[dictionary[[], []]]]
variable[ndex_cred] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8e20>, <ast.Constant object at 0x7da18bccb670>], [<ast.Name object at 0x7da18bcca890>, <ast.Name object at 0x7da18bcca740>]]
variable[network_id] assign[=] call[name[ndex_client].create_network, parameter[name[cx_str], name[ndex_cred], name[private]]]
if <ast.BoolOp object at 0x7da18bccbc10> begin[:]
variable[template_id] assign[=] <ast.IfExp object at 0x7da18bccab90>
call[name[ndex_client].set_style, parameter[name[network_id], name[ndex_cred], name[template_id]]]
return[name[network_id]] | keyword[def] identifier[upload_model] ( identifier[self] , identifier[ndex_cred] = keyword[None] , identifier[private] = keyword[True] , identifier[style] = literal[string] ):
literal[string]
identifier[cx_str] = identifier[self] . identifier[print_cx] ()
keyword[if] keyword[not] identifier[ndex_cred] :
identifier[username] , identifier[password] = identifier[ndex_client] . identifier[get_default_ndex_cred] ({})
identifier[ndex_cred] ={ literal[string] : identifier[username] ,
literal[string] : identifier[password] }
identifier[network_id] = identifier[ndex_client] . identifier[create_network] ( identifier[cx_str] , identifier[ndex_cred] , identifier[private] )
keyword[if] identifier[network_id] keyword[and] identifier[style] :
identifier[template_id] = keyword[None] keyword[if] identifier[style] == literal[string] keyword[else] identifier[style]
identifier[ndex_client] . identifier[set_style] ( identifier[network_id] , identifier[ndex_cred] , identifier[template_id] )
keyword[return] identifier[network_id] | def upload_model(self, ndex_cred=None, private=True, style='default'):
"""Creates a new NDEx network of the assembled CX model.
To upload the assembled CX model to NDEx, you need to have
a registered account on NDEx (http://ndexbio.org/) and have
the `ndex` python package installed. The uploaded network
is private by default.
Parameters
----------
ndex_cred : Optional[dict]
A dictionary with the following entries:
'user': NDEx user name
'password': NDEx password
private : Optional[bool]
Whether or not the created network will be private on NDEx.
style : Optional[str]
This optional parameter can either be (1)
The UUID of an existing NDEx network whose style should be applied
to the new network. (2) Unspecified or 'default' to use
the default INDRA-assembled network style. (3) None to
not set a network style.
Returns
-------
network_id : str
The UUID of the NDEx network that was created by uploading
the assembled CX model.
"""
cx_str = self.print_cx()
if not ndex_cred:
(username, password) = ndex_client.get_default_ndex_cred({})
ndex_cred = {'user': username, 'password': password} # depends on [control=['if'], data=[]]
network_id = ndex_client.create_network(cx_str, ndex_cred, private)
if network_id and style:
template_id = None if style == 'default' else style
ndex_client.set_style(network_id, ndex_cred, template_id) # depends on [control=['if'], data=[]]
return network_id |
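A hypothetical call site; `cx_assembler` stands in for the assembler instance this method belongs to, and the credentials are placeholders for a real NDEx account:

creds = {'user': 'alice', 'password': 's3cret'}  # placeholder credentials
network_id = cx_assembler.upload_model(ndex_cred=creds, private=True, style='default')
print('NDEx network UUID:', network_id)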
def _build_endpoint_url(self, url, name=None):
"""
Method that constructs a full url with the given url and the
snapshot name.
Example:
full_url = _build_endpoint_url('/users', '1')
full_url => 'http://firebase.localhost/users/1.json'
"""
if not url.endswith(self.URL_SEPERATOR):
url = url + self.URL_SEPERATOR
if name is None:
name = ''
return '%s%s%s' % (urlparse.urljoin(self.dsn, url), name,
self.NAME_EXTENSION) | def function[_build_endpoint_url, parameter[self, url, name]]:
constant[
Method that constructs a full url with the given url and the
snapshot name.
Example:
full_url = _build_endpoint_url('/users', '1')
full_url => 'http://firebase.localhost/users/1.json'
]
if <ast.UnaryOp object at 0x7da20c6e58d0> begin[:]
variable[url] assign[=] binary_operation[name[url] + name[self].URL_SEPERATOR]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] constant[]
return[binary_operation[constant[%s%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18fe906d0>, <ast.Name object at 0x7da18fe914e0>, <ast.Attribute object at 0x7da18fe90760>]]]] | keyword[def] identifier[_build_endpoint_url] ( identifier[self] , identifier[url] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[url] . identifier[endswith] ( identifier[self] . identifier[URL_SEPERATOR] ):
identifier[url] = identifier[url] + identifier[self] . identifier[URL_SEPERATOR]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = literal[string]
keyword[return] literal[string] %( identifier[urlparse] . identifier[urljoin] ( identifier[self] . identifier[dsn] , identifier[url] ), identifier[name] ,
identifier[self] . identifier[NAME_EXTENSION] ) | def _build_endpoint_url(self, url, name=None):
"""
Method that constructs a full url with the given url and the
snapshot name.
Example:
full_url = _build_endpoint_url('/users', '1')
full_url => 'http://firebase.localhost/users/1.json'
"""
if not url.endswith(self.URL_SEPERATOR):
url = url + self.URL_SEPERATOR # depends on [control=['if'], data=[]]
if name is None:
name = '' # depends on [control=['if'], data=['name']]
return '%s%s%s' % (urlparse.urljoin(self.dsn, url), name, self.NAME_EXTENSION) |
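Tracing the docstring example step by step, assuming URL_SEPERATOR is '/', NAME_EXTENSION is '.json', and the instance's dsn is 'http://firebase.localhost' (consistent with the docstring, though the constants are not shown in this snippet):

# conn is a hypothetical instance of the class defining this method.
full_url = conn._build_endpoint_url('/users', '1')
# 1. '/users' lacks a trailing '/', so url becomes '/users/'
# 2. urljoin('http://firebase.localhost', '/users/') -> 'http://firebase.localhost/users/'
# 3. appending name and extension gives 'http://firebase.localhost/users/1.json'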
def set_permissions_in_context(self, context={}):
""" Provides permissions for mongoadmin for use in the context"""
context['has_view_permission'] = self.mongoadmin.has_view_permission(self.request)
context['has_edit_permission'] = self.mongoadmin.has_edit_permission(self.request)
context['has_add_permission'] = self.mongoadmin.has_add_permission(self.request)
context['has_delete_permission'] = self.mongoadmin.has_delete_permission(self.request)
return context | def function[set_permissions_in_context, parameter[self, context]]:
constant[ Provides permissions for mongoadmin for use in the context]
call[name[context]][constant[has_view_permission]] assign[=] call[name[self].mongoadmin.has_view_permission, parameter[name[self].request]]
call[name[context]][constant[has_edit_permission]] assign[=] call[name[self].mongoadmin.has_edit_permission, parameter[name[self].request]]
call[name[context]][constant[has_add_permission]] assign[=] call[name[self].mongoadmin.has_add_permission, parameter[name[self].request]]
call[name[context]][constant[has_delete_permission]] assign[=] call[name[self].mongoadmin.has_delete_permission, parameter[name[self].request]]
return[name[context]] | keyword[def] identifier[set_permissions_in_context] ( identifier[self] , identifier[context] ={}):
literal[string]
identifier[context] [ literal[string] ]= identifier[self] . identifier[mongoadmin] . identifier[has_view_permission] ( identifier[self] . identifier[request] )
identifier[context] [ literal[string] ]= identifier[self] . identifier[mongoadmin] . identifier[has_edit_permission] ( identifier[self] . identifier[request] )
identifier[context] [ literal[string] ]= identifier[self] . identifier[mongoadmin] . identifier[has_add_permission] ( identifier[self] . identifier[request] )
identifier[context] [ literal[string] ]= identifier[self] . identifier[mongoadmin] . identifier[has_delete_permission] ( identifier[self] . identifier[request] )
keyword[return] identifier[context] | def set_permissions_in_context(self, context={}):
""" Provides permissions for mongoadmin for use in the context"""
context['has_view_permission'] = self.mongoadmin.has_view_permission(self.request)
context['has_edit_permission'] = self.mongoadmin.has_edit_permission(self.request)
context['has_add_permission'] = self.mongoadmin.has_add_permission(self.request)
context['has_delete_permission'] = self.mongoadmin.has_delete_permission(self.request)
return context |
def remove_account_alias(self, account, alias):
"""
:param account: an account object to be used as a selector
:param alias: email alias address
:returns: None (the API itself returns nothing)
"""
self.request('RemoveAccountAlias', {
'id': self._get_or_fetch_id(account, self.get_account),
'alias': alias,
}) | def function[remove_account_alias, parameter[self, account, alias]]:
constant[
:param account: an account object to be used as a selector
:param alias: email alias address
:returns: None (the API itself returns nothing)
]
call[name[self].request, parameter[constant[RemoveAccountAlias], dictionary[[<ast.Constant object at 0x7da18dc04670>, <ast.Constant object at 0x7da18dc05180>], [<ast.Call object at 0x7da18dc05840>, <ast.Name object at 0x7da18dc052d0>]]]] | keyword[def] identifier[remove_account_alias] ( identifier[self] , identifier[account] , identifier[alias] ):
literal[string]
identifier[self] . identifier[request] ( literal[string] ,{
literal[string] : identifier[self] . identifier[_get_or_fetch_id] ( identifier[account] , identifier[self] . identifier[get_account] ),
literal[string] : identifier[alias] ,
}) | def remove_account_alias(self, account, alias):
"""
:param account: an account object to be used as a selector
:param alias: email alias address
:returns: None (the API itself returns nothing)
"""
self.request('RemoveAccountAlias', {'id': self._get_or_fetch_id(account, self.get_account), 'alias': alias}) |
def state():
'''Get the playback state: 'playing', 'paused', or 'stopped'.
If PLAYING or PAUSED, show information on current track.
Calls PlaybackController.get_state(), and if state is PLAYING or PAUSED, gets
PlaybackController.get_current_track() and
PlaybackController.get_time_position()'''
server = getServer()
state = server.core.playback.get_state()
logging.debug('Got playback state: %r', state)
if state.upper() == 'STOPPED':
print('Playback is currently stopped')
else:
track = server.core.playback.get_current_track()
logging.debug('Track is %r', track)
logging.debug('Track loaded is %r', jsonrpclib.jsonclass.load(track))
pos = server.core.playback.get_time_position()
logging.debug('Pos is %r', pos)
print('{} track: "{}", by {} (at {})'.format(state.title(),
track['name'],
','.join([a['name'] for a in track['artists']]),
formatTimeposition(pos))
) | def function[state, parameter[]]:
constant[Get the playback state: 'playing', 'paused', or 'stopped'.
If PLAYING or PAUSED, show information on current track.
Calls PlaybackController.get_state(), and if state is PLAYING or PAUSED, gets
PlaybackController.get_current_track() and
PlaybackController.get_time_position()]
variable[server] assign[=] call[name[getServer], parameter[]]
variable[state] assign[=] call[name[server].core.playback.get_state, parameter[]]
call[name[logging].debug, parameter[constant[Got playback state: %r], name[state]]]
if compare[call[name[state].upper, parameter[]] equal[==] constant[STOPPED]] begin[:]
call[name[print], parameter[constant[Playback is currently stopped]]] | keyword[def] identifier[state] ():
literal[string]
identifier[server] = identifier[getServer] ()
identifier[state] = identifier[server] . identifier[core] . identifier[playback] . identifier[get_state] ()
identifier[logging] . identifier[debug] ( literal[string] , identifier[state] )
keyword[if] identifier[state] . identifier[upper] ()== literal[string] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[track] = identifier[server] . identifier[core] . identifier[playback] . identifier[get_current_track] ()
identifier[logging] . identifier[debug] ( literal[string] , identifier[track] )
identifier[logging] . identifier[debug] ( literal[string] , identifier[jsonrpclib] . identifier[jsonclass] . identifier[load] ( identifier[track] ))
identifier[pos] = identifier[server] . identifier[core] . identifier[playback] . identifier[get_time_position] ()
identifier[logging] . identifier[debug] ( literal[string] , identifier[pos] )
identifier[print] ( literal[string] . identifier[format] ( identifier[state] . identifier[title] (),
identifier[track] [ literal[string] ],
literal[string] . identifier[join] ([ identifier[a] [ literal[string] ] keyword[for] identifier[a] keyword[in] identifier[track] [ literal[string] ]]),
identifier[formatTimeposition] ( identifier[pos] ))
) | def state():
"""Get The playback state: 'playing', 'paused', or 'stopped'.
If PLAYING or PAUSED, show information on current track.
Calls PlaybackController.get_state(), and if state is PLAYING or PAUSED, get
PlaybackController.get_current_track() and
PlaybackController.get_time_position()"""
server = getServer()
state = server.core.playback.get_state()
logging.debug('Got playback state: %r', state)
if state.upper() == 'STOPPED':
print('Playback is currently stopped') # depends on [control=['if'], data=[]]
else:
track = server.core.playback.get_current_track()
logging.debug('Track is %r', track)
logging.debug('Track loaded is %r', jsonrpclib.jsonclass.load(track))
pos = server.core.playback.get_time_position()
logging.debug('Pos is %r', pos)
print('{} track: "{}", by {} (at {})'.format(state.title(), track['name'], ','.join([a['name'] for a in track['artists']]), formatTimeposition(pos))) |
def _spill(self):
"""
Dump already-partitioned data to disk.
"""
global MemoryBytesSpilled, DiskBytesSpilled
path = self._get_spill_dir(self.spills)
if not os.path.exists(path):
os.makedirs(path)
used_memory = get_used_memory()
if not self.pdata:
# The data has not been partitioned yet: iterate over it once,
# writing the items into different files, using no additional
# memory. This branch is only taken the first time memory goes
# above the limit.
# open all the files for writing
streams = [open(os.path.join(path, str(i)), 'wb')
for i in range(self.partitions)]
# If the number of keys is small, the overhead of sorting is small;
# sort them before dumping to disk
self._sorted = len(self.data) < self.SORT_KEY_LIMIT
if self._sorted:
self.serializer = self.flattened_serializer()
for k in sorted(self.data.keys()):
h = self._partition(k)
self.serializer.dump_stream([(k, self.data[k])], streams[h])
else:
for k, v in self.data.items():
h = self._partition(k)
self.serializer.dump_stream([(k, v)], streams[h])
for s in streams:
DiskBytesSpilled += s.tell()
s.close()
self.data.clear()
# self.pdata is cached in `mergeValues` and `mergeCombiners`
self.pdata.extend([{} for i in range(self.partitions)])
else:
for i in range(self.partitions):
p = os.path.join(path, str(i))
with open(p, "wb") as f:
# dump items in batch
if self._sorted:
# sort by key only (stable)
sorted_items = sorted(self.pdata[i].items(), key=operator.itemgetter(0))
self.serializer.dump_stream(sorted_items, f)
else:
self.serializer.dump_stream(self.pdata[i].items(), f)
self.pdata[i].clear()
DiskBytesSpilled += os.path.getsize(p)
self.spills += 1
gc.collect() # release the memory as much as possible
MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20 | def function[_spill, parameter[self]]:
constant[
dump already partitioned data into disks.
]
<ast.Global object at 0x7da20c795f00>
variable[path] assign[=] call[name[self]._get_spill_dir, parameter[name[self].spills]]
if <ast.UnaryOp object at 0x7da20c796a40> begin[:]
call[name[os].makedirs, parameter[name[path]]]
variable[used_memory] assign[=] call[name[get_used_memory], parameter[]]
if <ast.UnaryOp object at 0x7da20c7950f0> begin[:]
variable[streams] assign[=] <ast.ListComp object at 0x7da20c7961a0>
name[self]._sorted assign[=] compare[call[name[len], parameter[name[self].data]] less[<] name[self].SORT_KEY_LIMIT]
if name[self]._sorted begin[:]
name[self].serializer assign[=] call[name[self].flattened_serializer, parameter[]]
for taget[name[k]] in starred[call[name[sorted], parameter[call[name[self].data.keys, parameter[]]]]] begin[:]
variable[h] assign[=] call[name[self]._partition, parameter[name[k]]]
call[name[self].serializer.dump_stream, parameter[list[[<ast.Tuple object at 0x7da1b2068bb0>]], call[name[streams]][name[h]]]]
for taget[name[s]] in starred[name[streams]] begin[:]
<ast.AugAssign object at 0x7da1b206b100>
call[name[s].close, parameter[]]
call[name[self].data.clear, parameter[]]
call[name[self].pdata.extend, parameter[<ast.ListComp object at 0x7da1b206bfa0>]]
<ast.AugAssign object at 0x7da20c9933d0>
call[name[gc].collect, parameter[]]
<ast.AugAssign object at 0x7da20c991ae0> | keyword[def] identifier[_spill] ( identifier[self] ):
literal[string]
keyword[global] identifier[MemoryBytesSpilled] , identifier[DiskBytesSpilled]
identifier[path] = identifier[self] . identifier[_get_spill_dir] ( identifier[self] . identifier[spills] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
identifier[os] . identifier[makedirs] ( identifier[path] )
identifier[used_memory] = identifier[get_used_memory] ()
keyword[if] keyword[not] identifier[self] . identifier[pdata] :
identifier[streams] =[ identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[str] ( identifier[i] )), literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[partitions] )]
identifier[self] . identifier[_sorted] = identifier[len] ( identifier[self] . identifier[data] )< identifier[self] . identifier[SORT_KEY_LIMIT]
keyword[if] identifier[self] . identifier[_sorted] :
identifier[self] . identifier[serializer] = identifier[self] . identifier[flattened_serializer] ()
keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[self] . identifier[data] . identifier[keys] ()):
identifier[h] = identifier[self] . identifier[_partition] ( identifier[k] )
identifier[self] . identifier[serializer] . identifier[dump_stream] ([( identifier[k] , identifier[self] . identifier[data] [ identifier[k] ])], identifier[streams] [ identifier[h] ])
keyword[else] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[data] . identifier[items] ():
identifier[h] = identifier[self] . identifier[_partition] ( identifier[k] )
identifier[self] . identifier[serializer] . identifier[dump_stream] ([( identifier[k] , identifier[v] )], identifier[streams] [ identifier[h] ])
keyword[for] identifier[s] keyword[in] identifier[streams] :
identifier[DiskBytesSpilled] += identifier[s] . identifier[tell] ()
identifier[s] . identifier[close] ()
identifier[self] . identifier[data] . identifier[clear] ()
identifier[self] . identifier[pdata] . identifier[extend] ([{} keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[partitions] )])
keyword[else] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[partitions] ):
identifier[p] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[str] ( identifier[i] ))
keyword[with] identifier[open] ( identifier[p] , literal[string] ) keyword[as] identifier[f] :
keyword[if] identifier[self] . identifier[_sorted] :
identifier[sorted_items] = identifier[sorted] ( identifier[self] . identifier[pdata] [ identifier[i] ]. identifier[items] (), identifier[key] = identifier[operator] . identifier[itemgetter] ( literal[int] ))
identifier[self] . identifier[serializer] . identifier[dump_stream] ( identifier[sorted_items] , identifier[f] )
keyword[else] :
identifier[self] . identifier[serializer] . identifier[dump_stream] ( identifier[self] . identifier[pdata] [ identifier[i] ]. identifier[items] (), identifier[f] )
identifier[self] . identifier[pdata] [ identifier[i] ]. identifier[clear] ()
identifier[DiskBytesSpilled] += identifier[os] . identifier[path] . identifier[getsize] ( identifier[p] )
identifier[self] . identifier[spills] += literal[int]
identifier[gc] . identifier[collect] ()
identifier[MemoryBytesSpilled] += identifier[max] ( identifier[used_memory] - identifier[get_used_memory] (), literal[int] )<< literal[int] | def _spill(self):
"""
Dump already-partitioned data to disk.
"""
global MemoryBytesSpilled, DiskBytesSpilled
path = self._get_spill_dir(self.spills)
if not os.path.exists(path):
os.makedirs(path) # depends on [control=['if'], data=[]]
used_memory = get_used_memory()
if not self.pdata:
# The data has not been partitioned yet: iterate over it once,
# writing the items into different files, using no additional
# memory. This branch is only taken the first time memory goes
# above the limit.
# open all the files for writing
streams = [open(os.path.join(path, str(i)), 'wb') for i in range(self.partitions)]
# If the number of keys is small, the overhead of sorting is small;
# sort them before dumping to disk
self._sorted = len(self.data) < self.SORT_KEY_LIMIT
if self._sorted:
self.serializer = self.flattened_serializer()
for k in sorted(self.data.keys()):
h = self._partition(k)
self.serializer.dump_stream([(k, self.data[k])], streams[h]) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
else:
for (k, v) in self.data.items():
h = self._partition(k)
self.serializer.dump_stream([(k, v)], streams[h]) # depends on [control=['for'], data=[]]
for s in streams:
DiskBytesSpilled += s.tell()
s.close() # depends on [control=['for'], data=['s']]
self.data.clear()
# self.pdata is cached in `mergeValues` and `mergeCombiners`
self.pdata.extend([{} for i in range(self.partitions)]) # depends on [control=['if'], data=[]]
else:
for i in range(self.partitions):
p = os.path.join(path, str(i))
with open(p, 'wb') as f:
# dump items in batch
if self._sorted:
# sort by key only (stable)
sorted_items = sorted(self.pdata[i].items(), key=operator.itemgetter(0))
self.serializer.dump_stream(sorted_items, f) # depends on [control=['if'], data=[]]
else:
self.serializer.dump_stream(self.pdata[i].items(), f) # depends on [control=['with'], data=['f']]
self.pdata[i].clear()
DiskBytesSpilled += os.path.getsize(p) # depends on [control=['for'], data=['i']]
self.spills += 1
gc.collect() # release the memory as much as possible
MemoryBytesSpilled += max(used_memory - get_used_memory(), 0) << 20 |
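A toy illustration of the bucketing invariant `_spill` relies on: assuming `_partition` reduces to `hash(key) % self.partitions`, a given key is routed to the same spill file on every pass, so later merges can stream one bucket at a time:

partitions = 4
data = {'apple': 1, 'pear': 2, 'plum': 3, 'fig': 4}
buckets = [{} for _ in range(partitions)]
for key, value in data.items():
    buckets[hash(key) % partitions][key] = value  # deterministic routing per key
# Merging bucket i across all spill files therefore sees each key
# in exactly one bucket, never split across buckets.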
def _get_course_content(course_id, course_url, sailthru_client, site_code, config):
"""Get course information using the Sailthru content api or from cache.
If there is an error, just return with an empty response.
Arguments:
course_id (str): course key of the course
course_url (str): LMS url for course info page.
sailthru_client (object): SailthruClient
site_code (str): site code
config (dict): config options
Returns:
course information from Sailthru
"""
# check cache first
cache_key = "{}:{}".format(site_code, course_url)
response = cache.get(cache_key)
if not response:
try:
sailthru_response = sailthru_client.api_get("content", {"id": course_url})
if not sailthru_response.is_ok():
response = {}
else:
response = sailthru_response.json
cache.set(cache_key, response, config.get('SAILTHRU_CACHE_TTL_SECONDS'))
except SailthruClientError:
response = {}
if not response:
logger.error('Could not get course data from Sailthru on enroll/purchase event. '
'Calling Ecommerce Course API to get course info for enrollment confirmation email')
response = _get_course_content_from_ecommerce(course_id, site_code=site_code)
if response:
cache.set(cache_key, response, config.get('SAILTHRU_CACHE_TTL_SECONDS'))
return response | def function[_get_course_content, parameter[course_id, course_url, sailthru_client, site_code, config]]:
constant[Get course information using the Sailthru content api or from cache.
If there is an error, just return with an empty response.
Arguments:
course_id (str): course key of the course
course_url (str): LMS url for course info page.
sailthru_client (object): SailthruClient
site_code (str): site code
config (dict): config options
Returns:
course information from Sailthru
]
variable[cache_key] assign[=] call[constant[{}:{}].format, parameter[name[site_code], name[course_url]]]
variable[response] assign[=] call[name[cache].get, parameter[name[cache_key]]]
if <ast.UnaryOp object at 0x7da20c6c7cd0> begin[:]
<ast.Try object at 0x7da20c6c55a0>
if <ast.UnaryOp object at 0x7da20c6c6e60> begin[:]
call[name[logger].error, parameter[constant[Could not get course data from Sailthru on enroll/purchase event. Calling Ecommerce Course API to get course info for enrollment confirmation email]]]
variable[response] assign[=] call[name[_get_course_content_from_ecommerce], parameter[name[course_id]]]
if name[response] begin[:]
call[name[cache].set, parameter[name[cache_key], name[response], call[name[config].get, parameter[constant[SAILTHRU_CACHE_TTL_SECONDS]]]]]
return[name[response]] | keyword[def] identifier[_get_course_content] ( identifier[course_id] , identifier[course_url] , identifier[sailthru_client] , identifier[site_code] , identifier[config] ):
literal[string]
identifier[cache_key] = literal[string] . identifier[format] ( identifier[site_code] , identifier[course_url] )
identifier[response] = identifier[cache] . identifier[get] ( identifier[cache_key] )
keyword[if] keyword[not] identifier[response] :
keyword[try] :
identifier[sailthru_response] = identifier[sailthru_client] . identifier[api_get] ( literal[string] ,{ literal[string] : identifier[course_url] })
keyword[if] keyword[not] identifier[sailthru_response] . identifier[is_ok] ():
identifier[response] ={}
keyword[else] :
identifier[response] = identifier[sailthru_response] . identifier[json]
identifier[cache] . identifier[set] ( identifier[cache_key] , identifier[response] , identifier[config] . identifier[get] ( literal[string] ))
keyword[except] identifier[SailthruClientError] :
identifier[response] ={}
keyword[if] keyword[not] identifier[response] :
identifier[logger] . identifier[error] ( literal[string]
literal[string] )
identifier[response] = identifier[_get_course_content_from_ecommerce] ( identifier[course_id] , identifier[site_code] = identifier[site_code] )
keyword[if] identifier[response] :
identifier[cache] . identifier[set] ( identifier[cache_key] , identifier[response] , identifier[config] . identifier[get] ( literal[string] ))
keyword[return] identifier[response] | def _get_course_content(course_id, course_url, sailthru_client, site_code, config):
"""Get course information using the Sailthru content api or from cache.
If there is an error, just return with an empty response.
Arguments:
course_id (str): course key of the course
course_url (str): LMS url for course info page.
sailthru_client (object): SailthruClient
site_code (str): site code
config (dict): config options
Returns:
course information from Sailthru
"""
# check cache first
cache_key = '{}:{}'.format(site_code, course_url)
response = cache.get(cache_key)
if not response:
try:
sailthru_response = sailthru_client.api_get('content', {'id': course_url})
if not sailthru_response.is_ok():
response = {} # depends on [control=['if'], data=[]]
else:
response = sailthru_response.json
cache.set(cache_key, response, config.get('SAILTHRU_CACHE_TTL_SECONDS')) # depends on [control=['try'], data=[]]
except SailthruClientError:
response = {} # depends on [control=['except'], data=[]]
if not response:
logger.error('Could not get course data from Sailthru on enroll/purchase event. Calling Ecommerce Course API to get course info for enrollment confirmation email')
response = _get_course_content_from_ecommerce(course_id, site_code=site_code)
if response:
cache.set(cache_key, response, config.get('SAILTHRU_CACHE_TTL_SECONDS')) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return response |
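The control flow above is a cache-aside lookup with a secondary origin; a stripped-down sketch of the same shape (all names here are hypothetical):

def get_with_fallback(cache, key, ttl, primary_fetch, fallback_fetch):
    value = cache.get(key)
    if value:
        return value                             # cache hit
    value = primary_fetch() or fallback_fetch()  # origin, then backup origin
    if value:
        cache.set(key, value, ttl)               # repopulate for the TTL window
    return value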
def _inherit_from(context, uri, calling_uri):
"""called by the _inherit method in template modules to set
up the inheritance chain at the start of a template's
execution."""
if uri is None:
return None
template = _lookup_template(context, uri, calling_uri)
self_ns = context['self']
ih = self_ns
while ih.inherits is not None:
ih = ih.inherits
lclcontext = context._locals({'next': ih})
ih.inherits = TemplateNamespace("self:%s" % template.uri,
lclcontext,
template=template,
populate_self=False)
context._data['parent'] = lclcontext._data['local'] = ih.inherits
callable_ = getattr(template.module, '_mako_inherit', None)
if callable_ is not None:
ret = callable_(template, lclcontext)
if ret:
return ret
gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
if gen_ns is not None:
gen_ns(context)
return (template.callable_, lclcontext) | def function[_inherit_from, parameter[context, uri, calling_uri]]:
constant[called by the _inherit method in template modules to set
up the inheritance chain at the start of a template's
execution.]
if compare[name[uri] is constant[None]] begin[:]
return[constant[None]]
variable[template] assign[=] call[name[_lookup_template], parameter[name[context], name[uri], name[calling_uri]]]
variable[self_ns] assign[=] call[name[context]][constant[self]]
variable[ih] assign[=] name[self_ns]
while compare[name[ih].inherits is_not constant[None]] begin[:]
variable[ih] assign[=] name[ih].inherits
variable[lclcontext] assign[=] call[name[context]._locals, parameter[dictionary[[<ast.Constant object at 0x7da1b1d22740>], [<ast.Name object at 0x7da1b1d20f40>]]]]
name[ih].inherits assign[=] call[name[TemplateNamespace], parameter[binary_operation[constant[self:%s] <ast.Mod object at 0x7da2590d6920> name[template].uri], name[lclcontext]]]
call[name[context]._data][constant[parent]] assign[=] name[ih].inherits
variable[callable_] assign[=] call[name[getattr], parameter[name[template].module, constant[_mako_inherit], constant[None]]]
if compare[name[callable_] is_not constant[None]] begin[:]
variable[ret] assign[=] call[name[callable_], parameter[name[template], name[lclcontext]]]
if name[ret] begin[:]
return[name[ret]]
variable[gen_ns] assign[=] call[name[getattr], parameter[name[template].module, constant[_mako_generate_namespaces], constant[None]]]
if compare[name[gen_ns] is_not constant[None]] begin[:]
call[name[gen_ns], parameter[name[context]]]
return[tuple[[<ast.Attribute object at 0x7da1b1d22a70>, <ast.Name object at 0x7da1b1d23730>]]] | keyword[def] identifier[_inherit_from] ( identifier[context] , identifier[uri] , identifier[calling_uri] ):
literal[string]
keyword[if] identifier[uri] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[template] = identifier[_lookup_template] ( identifier[context] , identifier[uri] , identifier[calling_uri] )
identifier[self_ns] = identifier[context] [ literal[string] ]
identifier[ih] = identifier[self_ns]
keyword[while] identifier[ih] . identifier[inherits] keyword[is] keyword[not] keyword[None] :
identifier[ih] = identifier[ih] . identifier[inherits]
identifier[lclcontext] = identifier[context] . identifier[_locals] ({ literal[string] : identifier[ih] })
identifier[ih] . identifier[inherits] = identifier[TemplateNamespace] ( literal[string] % identifier[template] . identifier[uri] ,
identifier[lclcontext] ,
identifier[template] = identifier[template] ,
identifier[populate_self] = keyword[False] )
identifier[context] . identifier[_data] [ literal[string] ]= identifier[lclcontext] . identifier[_data] [ literal[string] ]= identifier[ih] . identifier[inherits]
identifier[callable_] = identifier[getattr] ( identifier[template] . identifier[module] , literal[string] , keyword[None] )
keyword[if] identifier[callable_] keyword[is] keyword[not] keyword[None] :
identifier[ret] = identifier[callable_] ( identifier[template] , identifier[lclcontext] )
keyword[if] identifier[ret] :
keyword[return] identifier[ret]
identifier[gen_ns] = identifier[getattr] ( identifier[template] . identifier[module] , literal[string] , keyword[None] )
keyword[if] identifier[gen_ns] keyword[is] keyword[not] keyword[None] :
identifier[gen_ns] ( identifier[context] )
keyword[return] ( identifier[template] . identifier[callable_] , identifier[lclcontext] ) | def _inherit_from(context, uri, calling_uri):
"""called by the _inherit method in template modules to set
up the inheritance chain at the start of a template's
execution."""
if uri is None:
return None # depends on [control=['if'], data=[]]
template = _lookup_template(context, uri, calling_uri)
self_ns = context['self']
ih = self_ns
while ih.inherits is not None:
ih = ih.inherits # depends on [control=['while'], data=[]]
lclcontext = context._locals({'next': ih})
ih.inherits = TemplateNamespace('self:%s' % template.uri, lclcontext, template=template, populate_self=False)
context._data['parent'] = lclcontext._data['local'] = ih.inherits
callable_ = getattr(template.module, '_mako_inherit', None)
if callable_ is not None:
ret = callable_(template, lclcontext)
if ret:
return ret # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['callable_']]
gen_ns = getattr(template.module, '_mako_generate_namespaces', None)
if gen_ns is not None:
gen_ns(context) # depends on [control=['if'], data=['gen_ns']]
return (template.callable_, lclcontext) |
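For context, this runtime hook is what Mako's <%inherit/> tag exercises; a minimal template pair that would walk the chain (file contents shown in comments, paths illustrative):

from mako.lookup import TemplateLookup

# base.html : <html><body>${self.body()}</body></html>
# child.html: <%inherit file="base.html"/>Hello
lookup = TemplateLookup(directories=['.'])
print(lookup.get_template('child.html').render())  # -> <html><body>Hello</body></html>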
def EQ127(T, A, B, C, D, E, F, G, order=0):
r'''DIPPR Equation #127. Rarely used, and then only in calculating
ideal-gas heat capacity. All 7 parameters are required.
.. math::
Y = A+B\left[\frac{\left(\frac{C}{T}\right)^2\exp\left(\frac{C}{T}
\right)}{\left(\exp\frac{C}{T}-1 \right)^2}\right]
+D\left[\frac{\left(\frac{E}{T}\right)^2\exp\left(\frac{E}{T}\right)}
{\left(\exp\frac{E}{T}-1 \right)^2}\right]
+F\left[\frac{\left(\frac{G}{T}\right)^2\exp\left(\frac{G}{T}\right)}
{\left(\exp\frac{G}{T}-1 \right)^2}\right]
Parameters
----------
T : float
Temperature, [K]
A-G : float
Parameter for the equation; chemical and property specific [-]
order : int, optional
Order of the calculation. 0 for the calculation of the result itself;
for 1, the first derivative of the property is returned, for
-1, the indefinite integral of the property with respect to temperature
is returned; and for -1j, the indefinite integral of the property
divided by temperature with respect to temperature is returned. No
other integrals or derivatives are implemented, and an exception will
be raised if any other order is given.
Returns
-------
Y : float
Property [constant-specific; if order == 1, property/K; if order == -1,
property*K; if order == -1j, unchanged from default]
Notes
-----
The derivative with respect to T, integral with respect to T, and integral
over T with respect to T are computed as follows. All expressions can be
obtained with SymPy readily.
.. math::
\frac{d Y}{dT} = - \frac{B C^{3} e^{\frac{C}{T}}}{T^{4}
\left(e^{\frac{C}{T}} - 1\right)^{2}} + \frac{2 B C^{3}
e^{\frac{2 C}{T}}}{T^{4} \left(e^{\frac{C}{T}} - 1\right)^{3}}
- \frac{2 B C^{2} e^{\frac{C}{T}}}{T^{3} \left(e^{\frac{C}{T}}
- 1\right)^{2}} - \frac{D E^{3} e^{\frac{E}{T}}}{T^{4}
\left(e^{\frac{E}{T}} - 1\right)^{2}} + \frac{2 D E^{3}
e^{\frac{2 E}{T}}}{T^{4} \left(e^{\frac{E}{T}} - 1\right)^{3}}
- \frac{2 D E^{2} e^{\frac{E}{T}}}{T^{3} \left(e^{\frac{E}{T}}
- 1\right)^{2}} - \frac{F G^{3} e^{\frac{G}{T}}}{T^{4}
\left(e^{\frac{G}{T}} - 1\right)^{2}} + \frac{2 F G^{3}
e^{\frac{2 G}{T}}}{T^{4} \left(e^{\frac{G}{T}} - 1\right)^{3}}
- \frac{2 F G^{2} e^{\frac{G}{T}}}{T^{3} \left(e^{\frac{G}{T}}
- 1\right)^{2}}
.. math::
\int Y dT = A T + \frac{B C^{2}}{C e^{\frac{C}{T}} - C}
+ \frac{D E^{2}}{E e^{\frac{E}{T}} - E}
+ \frac{F G^{2}}{G e^{\frac{G}{T}} - G}
.. math::
\int \frac{Y}{T} dT = A \log{\left (T \right )} + B C^{2} \left(
\frac{1}{C T e^{\frac{C}{T}} - C T} + \frac{1}{C T} - \frac{1}{C^{2}}
\log{\left (e^{\frac{C}{T}} - 1 \right )}\right) + D E^{2} \left(
\frac{1}{E T e^{\frac{E}{T}} - E T} + \frac{1}{E T} - \frac{1}{E^{2}}
\log{\left (e^{\frac{E}{T}} - 1 \right )}\right) + F G^{2} \left(
\frac{1}{G T e^{\frac{G}{T}} - G T} + \frac{1}{G T} - \frac{1}{G^{2}}
\log{\left (e^{\frac{G}{T}} - 1 \right )}\right)
Examples
--------
Ideal gas heat capacity of methanol; DIPPR coefficients normally in
J/kmol/K
>>> EQ127(20., 3.3258E4, 3.6199E4, 1.2057E3, 1.5373E7, 3.2122E3, -1.5318E7, 3.2122E3)
33258.0
References
----------
.. [1] Design Institute for Physical Properties, 1996. DIPPR Project 801
DIPPR/AIChE
'''
if order == 0:
return (A+B*((C/T)**2*exp(C/T)/(exp(C/T) - 1)**2) +
D*((E/T)**2*exp(E/T)/(exp(E/T)-1)**2) +
F*((G/T)**2*exp(G/T)/(exp(G/T)-1)**2))
elif order == 1:
return (-B*C**3*exp(C/T)/(T**4*(exp(C/T) - 1)**2)
+ 2*B*C**3*exp(2*C/T)/(T**4*(exp(C/T) - 1)**3)
- 2*B*C**2*exp(C/T)/(T**3*(exp(C/T) - 1)**2)
- D*E**3*exp(E/T)/(T**4*(exp(E/T) - 1)**2)
+ 2*D*E**3*exp(2*E/T)/(T**4*(exp(E/T) - 1)**3)
- 2*D*E**2*exp(E/T)/(T**3*(exp(E/T) - 1)**2)
- F*G**3*exp(G/T)/(T**4*(exp(G/T) - 1)**2)
+ 2*F*G**3*exp(2*G/T)/(T**4*(exp(G/T) - 1)**3)
- 2*F*G**2*exp(G/T)/(T**3*(exp(G/T) - 1)**2))
elif order == -1:
return (A*T + B*C**2/(C*exp(C/T) - C) + D*E**2/(E*exp(E/T) - E)
+ F*G**2/(G*exp(G/T) - G))
elif order == -1j:
return (A*log(T) + B*C**2*(1/(C*T*exp(C/T) - C*T) + 1/(C*T)
- log(exp(C/T) - 1)/C**2) + D*E**2*(1/(E*T*exp(E/T) - E*T)
+ 1/(E*T) - log(exp(E/T) - 1)/E**2)
+ F*G**2*(1/(G*T*exp(G/T) - G*T) + 1/(G*T) - log(exp(G/T)
- 1)/G**2))
else:
raise Exception(order_not_found_msg) | def function[EQ127, parameter[T, A, B, C, D, E, F, G, order]]:
constant[DIPPR Equation #127. Rarely used, and then only in calculating
ideal-gas heat capacity. All 7 parameters are required.
.. math::
Y = A+B\left[\frac{\left(\frac{C}{T}\right)^2\exp\left(\frac{C}{T}
\right)}{\left(\exp\frac{C}{T}-1 \right)^2}\right]
+D\left[\frac{\left(\frac{E}{T}\right)^2\exp\left(\frac{E}{T}\right)}
{\left(\exp\frac{E}{T}-1 \right)^2}\right]
+F\left[\frac{\left(\frac{G}{T}\right)^2\exp\left(\frac{G}{T}\right)}
{\left(\exp\frac{G}{T}-1 \right)^2}\right]
Parameters
----------
T : float
Temperature, [K]
A-G : float
Parameter for the equation; chemical and property specific [-]
order : int, optional
Order of the calculation. 0 for the calculation of the result itself;
for 1, the first derivative of the property is returned, for
-1, the indefinite integral of the property with respect to temperature
is returned; and for -1j, the indefinite integral of the property
divided by temperature with respect to temperature is returned. No
other integrals or derivatives are implemented, and an exception will
be raised if any other order is given.
Returns
-------
Y : float
Property [constant-specific; if order == 1, property/K; if order == -1,
property*K; if order == -1j, unchanged from default]
Notes
-----
The derivative with respect to T, integral with respect to T, and integral
over T with respect to T are computed as follows. All expressions can be
obtained with SymPy readily.
.. math::
\frac{d Y}{dT} = - \frac{B C^{3} e^{\frac{C}{T}}}{T^{4}
\left(e^{\frac{C}{T}} - 1\right)^{2}} + \frac{2 B C^{3}
e^{\frac{2 C}{T}}}{T^{4} \left(e^{\frac{C}{T}} - 1\right)^{3}}
- \frac{2 B C^{2} e^{\frac{C}{T}}}{T^{3} \left(e^{\frac{C}{T}}
- 1\right)^{2}} - \frac{D E^{3} e^{\frac{E}{T}}}{T^{4}
\left(e^{\frac{E}{T}} - 1\right)^{2}} + \frac{2 D E^{3}
e^{\frac{2 E}{T}}}{T^{4} \left(e^{\frac{E}{T}} - 1\right)^{3}}
- \frac{2 D E^{2} e^{\frac{E}{T}}}{T^{3} \left(e^{\frac{E}{T}}
- 1\right)^{2}} - \frac{F G^{3} e^{\frac{G}{T}}}{T^{4}
\left(e^{\frac{G}{T}} - 1\right)^{2}} + \frac{2 F G^{3}
e^{\frac{2 G}{T}}}{T^{4} \left(e^{\frac{G}{T}} - 1\right)^{3}}
- \frac{2 F G^{2} e^{\frac{G}{T}}}{T^{3} \left(e^{\frac{G}{T}}
- 1\right)^{2}}
.. math::
\int Y dT = A T + \frac{B C^{2}}{C e^{\frac{C}{T}} - C}
+ \frac{D E^{2}}{E e^{\frac{E}{T}} - E}
+ \frac{F G^{2}}{G e^{\frac{G}{T}} - G}
.. math::
\int \frac{Y}{T} dT = A \log{\left (T \right )} + B C^{2} \left(
\frac{1}{C T e^{\frac{C}{T}} - C T} + \frac{1}{C T} - \frac{1}{C^{2}}
\log{\left (e^{\frac{C}{T}} - 1 \right )}\right) + D E^{2} \left(
\frac{1}{E T e^{\frac{E}{T}} - E T} + \frac{1}{E T} - \frac{1}{E^{2}}
\log{\left (e^{\frac{E}{T}} - 1 \right )}\right) + F G^{2} \left(
\frac{1}{G T e^{\frac{G}{T}} - G T} + \frac{1}{G T} - \frac{1}{G^{2}}
\log{\left (e^{\frac{G}{T}} - 1 \right )}\right)
Examples
--------
Ideal gas heat capacity of methanol; DIPPR coefficients normally in
J/kmol/K
>>> EQ127(20., 3.3258E4, 3.6199E4, 1.2057E3, 1.5373E7, 3.2122E3, -1.5318E7, 3.2122E3)
33258.0
References
----------
.. [1] Design Institute for Physical Properties, 1996. DIPPR Project 801
DIPPR/AIChE
]
if compare[name[order] equal[==] constant[0]] begin[:]
return[binary_operation[binary_operation[binary_operation[name[A] + binary_operation[name[B] * binary_operation[binary_operation[binary_operation[binary_operation[name[C] / name[T]] ** constant[2]] * call[name[exp], parameter[binary_operation[name[C] / name[T]]]]] / binary_operation[binary_operation[call[name[exp], parameter[binary_operation[name[C] / name[T]]]] - constant[1]] ** constant[2]]]]] + binary_operation[name[D] * binary_operation[binary_operation[binary_operation[binary_operation[name[E] / name[T]] ** constant[2]] * call[name[exp], parameter[binary_operation[name[E] / name[T]]]]] / binary_operation[binary_operation[call[name[exp], parameter[binary_operation[name[E] / name[T]]]] - constant[1]] ** constant[2]]]]] + binary_operation[name[F] * binary_operation[binary_operation[binary_operation[binary_operation[name[G] / name[T]] ** constant[2]] * call[name[exp], parameter[binary_operation[name[G] / name[T]]]]] / binary_operation[binary_operation[call[name[exp], parameter[binary_operation[name[G] / name[T]]]] - constant[1]] ** constant[2]]]]]] | keyword[def] identifier[EQ127] ( identifier[T] , identifier[A] , identifier[B] , identifier[C] , identifier[D] , identifier[E] , identifier[F] , identifier[G] , identifier[order] = literal[int] ):
literal[string]
keyword[if] identifier[order] == literal[int] :
keyword[return] ( identifier[A] + identifier[B] *(( identifier[C] / identifier[T] )** literal[int] * identifier[exp] ( identifier[C] / identifier[T] )/( identifier[exp] ( identifier[C] / identifier[T] )- literal[int] )** literal[int] )+
identifier[D] *(( identifier[E] / identifier[T] )** literal[int] * identifier[exp] ( identifier[E] / identifier[T] )/( identifier[exp] ( identifier[E] / identifier[T] )- literal[int] )** literal[int] )+
identifier[F] *(( identifier[G] / identifier[T] )** literal[int] * identifier[exp] ( identifier[G] / identifier[T] )/( identifier[exp] ( identifier[G] / identifier[T] )- literal[int] )** literal[int] ))
keyword[elif] identifier[order] == literal[int] :
keyword[return] (- identifier[B] * identifier[C] ** literal[int] * identifier[exp] ( identifier[C] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[C] / identifier[T] )- literal[int] )** literal[int] )
+ literal[int] * identifier[B] * identifier[C] ** literal[int] * identifier[exp] ( literal[int] * identifier[C] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[C] / identifier[T] )- literal[int] )** literal[int] )
- literal[int] * identifier[B] * identifier[C] ** literal[int] * identifier[exp] ( identifier[C] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[C] / identifier[T] )- literal[int] )** literal[int] )
- identifier[D] * identifier[E] ** literal[int] * identifier[exp] ( identifier[E] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[E] / identifier[T] )- literal[int] )** literal[int] )
+ literal[int] * identifier[D] * identifier[E] ** literal[int] * identifier[exp] ( literal[int] * identifier[E] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[E] / identifier[T] )- literal[int] )** literal[int] )
- literal[int] * identifier[D] * identifier[E] ** literal[int] * identifier[exp] ( identifier[E] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[E] / identifier[T] )- literal[int] )** literal[int] )
- identifier[F] * identifier[G] ** literal[int] * identifier[exp] ( identifier[G] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[G] / identifier[T] )- literal[int] )** literal[int] )
+ literal[int] * identifier[F] * identifier[G] ** literal[int] * identifier[exp] ( literal[int] * identifier[G] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[G] / identifier[T] )- literal[int] )** literal[int] )
- literal[int] * identifier[F] * identifier[G] ** literal[int] * identifier[exp] ( identifier[G] / identifier[T] )/( identifier[T] ** literal[int] *( identifier[exp] ( identifier[G] / identifier[T] )- literal[int] )** literal[int] ))
keyword[elif] identifier[order] ==- literal[int] :
keyword[return] ( identifier[A] * identifier[T] + identifier[B] * identifier[C] ** literal[int] /( identifier[C] * identifier[exp] ( identifier[C] / identifier[T] )- identifier[C] )+ identifier[D] * identifier[E] ** literal[int] /( identifier[E] * identifier[exp] ( identifier[E] / identifier[T] )- identifier[E] )
+ identifier[F] * identifier[G] ** literal[int] /( identifier[G] * identifier[exp] ( identifier[G] / identifier[T] )- identifier[G] ))
keyword[elif] identifier[order] ==- literal[int] :
keyword[return] ( identifier[A] * identifier[log] ( identifier[T] )+ identifier[B] * identifier[C] ** literal[int] *( literal[int] /( identifier[C] * identifier[T] * identifier[exp] ( identifier[C] / identifier[T] )- identifier[C] * identifier[T] )+ literal[int] /( identifier[C] * identifier[T] )
- identifier[log] ( identifier[exp] ( identifier[C] / identifier[T] )- literal[int] )/ identifier[C] ** literal[int] )+ identifier[D] * identifier[E] ** literal[int] *( literal[int] /( identifier[E] * identifier[T] * identifier[exp] ( identifier[E] / identifier[T] )- identifier[E] * identifier[T] )
+ literal[int] /( identifier[E] * identifier[T] )- identifier[log] ( identifier[exp] ( identifier[E] / identifier[T] )- literal[int] )/ identifier[E] ** literal[int] )
+ identifier[F] * identifier[G] ** literal[int] *( literal[int] /( identifier[G] * identifier[T] * identifier[exp] ( identifier[G] / identifier[T] )- identifier[G] * identifier[T] )+ literal[int] /( identifier[G] * identifier[T] )- identifier[log] ( identifier[exp] ( identifier[G] / identifier[T] )
- literal[int] )/ identifier[G] ** literal[int] ))
keyword[else] :
keyword[raise] identifier[Exception] ( identifier[order_not_found_msg] ) | def EQ127(T, A, B, C, D, E, F, G, order=0):
"""DIPPR Equation #127. Rarely used, and then only in calculating
ideal-gas heat capacity. All 7 parameters are required.
.. math::
Y = A+B\\left[\\frac{\\left(\\frac{C}{T}\\right)^2\\exp\\left(\\frac{C}{T}
\\right)}{\\left(\\exp\\frac{C}{T}-1 \\right)^2}\\right]
+D\\left[\\frac{\\left(\\frac{E}{T}\\right)^2\\exp\\left(\\frac{E}{T}\\right)}
{\\left(\\exp\\frac{E}{T}-1 \\right)^2}\\right]
+F\\left[\\frac{\\left(\\frac{G}{T}\\right)^2\\exp\\left(\\frac{G}{T}\\right)}
{\\left(\\exp\\frac{G}{T}-1 \\right)^2}\\right]
Parameters
----------
T : float
Temperature, [K]
A-G : float
Parameter for the equation; chemical and property specific [-]
order : int, optional
Order of the calculation. 0 for the calculation of the result itself;
        for 1, the first derivative of the property is returned; for -1,
        the indefinite integral of the property with respect to temperature
        is returned; and for -1j, the indefinite integral of the property
divided by temperature with respect to temperature is returned. No
other integrals or derivatives are implemented, and an exception will
be raised if any other order is given.
Returns
-------
Y : float
Property [constant-specific; if order == 1, property/K; if order == -1,
property*K; if order == -1j, unchanged from default]
Notes
-----
The derivative with respect to T, integral with respect to T, and integral
over T with respect to T are computed as follows. All expressions can be
obtained with SymPy readily.
.. math::
\\frac{d Y}{dT} = - \\frac{B C^{3} e^{\\frac{C}{T}}}{T^{4}
\\left(e^{\\frac{C}{T}} - 1\\right)^{2}} + \\frac{2 B C^{3}
e^{\\frac{2 C}{T}}}{T^{4} \\left(e^{\\frac{C}{T}} - 1\\right)^{3}}
- \\frac{2 B C^{2} e^{\\frac{C}{T}}}{T^{3} \\left(e^{\\frac{C}{T}}
- 1\\right)^{2}} - \\frac{D E^{3} e^{\\frac{E}{T}}}{T^{4}
\\left(e^{\\frac{E}{T}} - 1\\right)^{2}} + \\frac{2 D E^{3}
e^{\\frac{2 E}{T}}}{T^{4} \\left(e^{\\frac{E}{T}} - 1\\right)^{3}}
- \\frac{2 D E^{2} e^{\\frac{E}{T}}}{T^{3} \\left(e^{\\frac{E}{T}}
- 1\\right)^{2}} - \\frac{F G^{3} e^{\\frac{G}{T}}}{T^{4}
\\left(e^{\\frac{G}{T}} - 1\\right)^{2}} + \\frac{2 F G^{3}
e^{\\frac{2 G}{T}}}{T^{4} \\left(e^{\\frac{G}{T}} - 1\\right)^{3}}
- \\frac{2 F G^{2} e^{\\frac{G}{T}}}{T^{3} \\left(e^{\\frac{G}{T}}
- 1\\right)^{2}}
.. math::
\\int Y dT = A T + \\frac{B C^{2}}{C e^{\\frac{C}{T}} - C}
+ \\frac{D E^{2}}{E e^{\\frac{E}{T}} - E}
+ \\frac{F G^{2}}{G e^{\\frac{G}{T}} - G}
.. math::
\\int \\frac{Y}{T} dT = A \\log{\\left (T \\right )} + B C^{2} \\left(
\\frac{1}{C T e^{\\frac{C}{T}} - C T} + \\frac{1}{C T} - \\frac{1}{C^{2}}
\\log{\\left (e^{\\frac{C}{T}} - 1 \\right )}\\right) + D E^{2} \\left(
\\frac{1}{E T e^{\\frac{E}{T}} - E T} + \\frac{1}{E T} - \\frac{1}{E^{2}}
\\log{\\left (e^{\\frac{E}{T}} - 1 \\right )}\\right) + F G^{2} \\left(
\\frac{1}{G T e^{\\frac{G}{T}} - G T} + \\frac{1}{G T} - \\frac{1}{G^{2}}
\\log{\\left (e^{\\frac{G}{T}} - 1 \\right )}\\right)
Examples
--------
Ideal gas heat capacity of methanol; DIPPR coefficients normally in
J/kmol/K
>>> EQ127(20., 3.3258E4, 3.6199E4, 1.2057E3, 1.5373E7, 3.2122E3, -1.5318E7, 3.2122E3)
33258.0
References
----------
.. [1] Design Institute for Physical Properties, 1996. DIPPR Project 801
DIPPR/AIChE
"""
if order == 0:
return A + B * ((C / T) ** 2 * exp(C / T) / (exp(C / T) - 1) ** 2) + D * ((E / T) ** 2 * exp(E / T) / (exp(E / T) - 1) ** 2) + F * ((G / T) ** 2 * exp(G / T) / (exp(G / T) - 1) ** 2) # depends on [control=['if'], data=[]]
elif order == 1:
return -B * C ** 3 * exp(C / T) / (T ** 4 * (exp(C / T) - 1) ** 2) + 2 * B * C ** 3 * exp(2 * C / T) / (T ** 4 * (exp(C / T) - 1) ** 3) - 2 * B * C ** 2 * exp(C / T) / (T ** 3 * (exp(C / T) - 1) ** 2) - D * E ** 3 * exp(E / T) / (T ** 4 * (exp(E / T) - 1) ** 2) + 2 * D * E ** 3 * exp(2 * E / T) / (T ** 4 * (exp(E / T) - 1) ** 3) - 2 * D * E ** 2 * exp(E / T) / (T ** 3 * (exp(E / T) - 1) ** 2) - F * G ** 3 * exp(G / T) / (T ** 4 * (exp(G / T) - 1) ** 2) + 2 * F * G ** 3 * exp(2 * G / T) / (T ** 4 * (exp(G / T) - 1) ** 3) - 2 * F * G ** 2 * exp(G / T) / (T ** 3 * (exp(G / T) - 1) ** 2) # depends on [control=['if'], data=[]]
elif order == -1:
return A * T + B * C ** 2 / (C * exp(C / T) - C) + D * E ** 2 / (E * exp(E / T) - E) + F * G ** 2 / (G * exp(G / T) - G) # depends on [control=['if'], data=[]]
elif order == -1j:
return A * log(T) + B * C ** 2 * (1 / (C * T * exp(C / T) - C * T) + 1 / (C * T) - log(exp(C / T) - 1) / C ** 2) + D * E ** 2 * (1 / (E * T * exp(E / T) - E * T) + 1 / (E * T) - log(exp(E / T) - 1) / E ** 2) + F * G ** 2 * (1 / (G * T * exp(G / T) - G * T) + 1 / (G * T) - log(exp(G / T) - 1) / G ** 2) # depends on [control=['if'], data=[]]
else:
raise Exception(order_not_found_msg) |
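
As a quick sanity check on the order arguments documented above, a minimal
sketch that evaluates EQ127 and compares its analytic order=1 derivative
against a central finite difference; it assumes EQ127 is in scope with exp
imported from math, and reuses the methanol coefficients from the docstring:

from math import exp

coeffs = (3.3258E4, 3.6199E4, 1.2057E3, 1.5373E7, 3.2122E3, -1.5318E7, 3.2122E3)
T, h = 500.0, 1e-3
value = EQ127(T, *coeffs)              # order=0: the property itself
analytic = EQ127(T, *coeffs, order=1)  # analytic dY/dT
numeric = (EQ127(T + h, *coeffs) - EQ127(T - h, *coeffs)) / (2 * h)
print(value, analytic, numeric)        # analytic and numeric slopes should agree closely
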
def segment(text: str) -> list:
    """
    Enhanced Thai Character Cluster (ETCC)
    :param str text: input text
    :return: list of ETCC-segmented character clusters
    """
    if not text or not isinstance(text, str):
        return []
if re.search(r"[เแ]" + _C + r"[" + "".join(_UV) + r"]" + r"\w", text):
search = re.findall(r"[เแ]" + _C + r"[" + "".join(_UV) + r"]" + r"\w", text)
for i in search:
text = re.sub(i, "/" + i + "/", text)
if re.search(_C + r"[" + "".join(_UV1) + r"]" + _C + _C + r"ุ" + r"์", text):
search = re.findall(
_C + r"[" + "".join(_UV1) + r"]" + _C + _C + r"ุ" + r"์", text
)
for i in search:
text = re.sub(i, "//" + i + "/", text)
if re.search(_C + _UV2 + _C, text):
search = re.findall(_C + _UV2 + _C, text)
for i in search:
text = re.sub(i, "/" + i + "/", text)
re.sub("//", "/", text)
if re.search("เ" + _C + "า" + "ะ", text):
search = re.findall("เ" + _C + "า" + "ะ", text)
for i in search:
text = re.sub(i, "/" + i + "/", text)
if re.search("เ" + r"\w\w" + "า" + "ะ", text):
search = re.findall("เ" + r"\w\w" + "า" + "ะ", text)
for i in search:
text = re.sub(i, "/" + i + "/", text)
text = re.sub("//", "/", text)
if re.search(_C + "[" + "".join(_UV1) + "]" + _C + _C + "์", text):
search = re.findall(_C + "[" + "".join(_UV1) + "]" + _C + _C + "์", text)
for i in search:
text = re.sub(i, "/" + i + "/", text)
if re.search("/" + _C + "".join(["ุ", "์"]) + "/", text):
# แก้ไขในกรณี พัน/ธุ์
search = re.findall("/" + _C + "".join(["ุ", "์"]) + "/", text)
for i in search:
ii = re.sub("/", "", i)
text = re.sub(i, ii + "/", text)
text = re.sub("//", "/", text)
return text.split("/") | def function[segment, parameter[text]]:
constant[
Enhanced Thai Character Cluster (ETCC)
:param str text: input text
:return: list of ETCC-segmented character clusters
]
if <ast.BoolOp object at 0x7da1b1797e20> begin[:]
return[list[[]]]
if call[name[re].search, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[[เแ]] + name[_C]] + constant[[]] + call[constant[].join, parameter[name[_UV]]]] + constant[]]] + constant[\w]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[[เแ]] + name[_C]] + constant[[]] + call[constant[].join, parameter[name[_UV]]]] + constant[]]] + constant[\w]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[binary_operation[constant[/] + name[i]] + constant[/]], name[text]]]
if call[name[re].search, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[_C] + constant[[]] + call[constant[].join, parameter[name[_UV1]]]] + constant[]]] + name[_C]] + name[_C]] + constant[ุ]] + constant[์]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[_C] + constant[[]] + call[constant[].join, parameter[name[_UV1]]]] + constant[]]] + name[_C]] + name[_C]] + constant[ุ]] + constant[์]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[binary_operation[constant[//] + name[i]] + constant[/]], name[text]]]
if call[name[re].search, parameter[binary_operation[binary_operation[name[_C] + name[_UV2]] + name[_C]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[name[_C] + name[_UV2]] + name[_C]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[binary_operation[constant[/] + name[i]] + constant[/]], name[text]]]
variable[text] assign[=] call[name[re].sub, parameter[constant[//], constant[/], name[text]]]
if call[name[re].search, parameter[binary_operation[binary_operation[binary_operation[constant[เ] + name[_C]] + constant[า]] + constant[ะ]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[binary_operation[constant[เ] + name[_C]] + constant[า]] + constant[ะ]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[binary_operation[constant[/] + name[i]] + constant[/]], name[text]]]
if call[name[re].search, parameter[binary_operation[binary_operation[binary_operation[constant[เ] + constant[\w\w]] + constant[า]] + constant[ะ]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[binary_operation[constant[เ] + constant[\w\w]] + constant[า]] + constant[ะ]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[binary_operation[constant[/] + name[i]] + constant[/]], name[text]]]
variable[text] assign[=] call[name[re].sub, parameter[constant[//], constant[/], name[text]]]
if call[name[re].search, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[_C] + constant[[]] + call[constant[].join, parameter[name[_UV1]]]] + constant[]]] + name[_C]] + name[_C]] + constant[์]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[_C] + constant[[]] + call[constant[].join, parameter[name[_UV1]]]] + constant[]]] + name[_C]] + name[_C]] + constant[์]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[binary_operation[constant[/] + name[i]] + constant[/]], name[text]]]
if call[name[re].search, parameter[binary_operation[binary_operation[binary_operation[constant[/] + name[_C]] + call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b178ffd0>, <ast.Constant object at 0x7da1b178ce50>]]]]] + constant[/]], name[text]]] begin[:]
variable[search] assign[=] call[name[re].findall, parameter[binary_operation[binary_operation[binary_operation[constant[/] + name[_C]] + call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b178f280>, <ast.Constant object at 0x7da1b178f250>]]]]] + constant[/]], name[text]]]
for taget[name[i]] in starred[name[search]] begin[:]
variable[ii] assign[=] call[name[re].sub, parameter[constant[/], constant[], name[i]]]
variable[text] assign[=] call[name[re].sub, parameter[name[i], binary_operation[name[ii] + constant[/]], name[text]]]
variable[text] assign[=] call[name[re].sub, parameter[constant[//], constant[/], name[text]]]
return[call[name[text].split, parameter[constant[/]]]] | keyword[def] identifier[segment] ( identifier[text] : identifier[str] )-> identifier[list] :
literal[string]
keyword[if] keyword[not] identifier[text] keyword[or] keyword[not] identifier[isinstance] ( identifier[text] , identifier[str] ):
keyword[return] []
keyword[if] identifier[re] . identifier[search] ( literal[string] + identifier[_C] + literal[string] + literal[string] . identifier[join] ( identifier[_UV] )+ literal[string] + literal[string] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] ( literal[string] + identifier[_C] + literal[string] + literal[string] . identifier[join] ( identifier[_UV] )+ literal[string] + literal[string] , identifier[text] )
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , literal[string] + identifier[i] + literal[string] , identifier[text] )
keyword[if] identifier[re] . identifier[search] ( identifier[_C] + literal[string] + literal[string] . identifier[join] ( identifier[_UV1] )+ literal[string] + identifier[_C] + identifier[_C] + literal[string] + literal[string] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] (
identifier[_C] + literal[string] + literal[string] . identifier[join] ( identifier[_UV1] )+ literal[string] + identifier[_C] + identifier[_C] + literal[string] + literal[string] , identifier[text]
)
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , literal[string] + identifier[i] + literal[string] , identifier[text] )
keyword[if] identifier[re] . identifier[search] ( identifier[_C] + identifier[_UV2] + identifier[_C] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] ( identifier[_C] + identifier[_UV2] + identifier[_C] , identifier[text] )
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , literal[string] + identifier[i] + literal[string] , identifier[text] )
identifier[text] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[text] )
keyword[if] identifier[re] . identifier[search] ( literal[string] + identifier[_C] + literal[string] + literal[string] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] ( literal[string] + identifier[_C] + literal[string] + literal[string] , identifier[text] )
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , literal[string] + identifier[i] + literal[string] , identifier[text] )
keyword[if] identifier[re] . identifier[search] ( literal[string] + literal[string] + literal[string] + literal[string] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] ( literal[string] + literal[string] + literal[string] + literal[string] , identifier[text] )
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , literal[string] + identifier[i] + literal[string] , identifier[text] )
identifier[text] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[text] )
keyword[if] identifier[re] . identifier[search] ( identifier[_C] + literal[string] + literal[string] . identifier[join] ( identifier[_UV1] )+ literal[string] + identifier[_C] + identifier[_C] + literal[string] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] ( identifier[_C] + literal[string] + literal[string] . identifier[join] ( identifier[_UV1] )+ literal[string] + identifier[_C] + identifier[_C] + literal[string] , identifier[text] )
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , literal[string] + identifier[i] + literal[string] , identifier[text] )
keyword[if] identifier[re] . identifier[search] ( literal[string] + identifier[_C] + literal[string] . identifier[join] ([ literal[string] , literal[string] ])+ literal[string] , identifier[text] ):
identifier[search] = identifier[re] . identifier[findall] ( literal[string] + identifier[_C] + literal[string] . identifier[join] ([ literal[string] , literal[string] ])+ literal[string] , identifier[text] )
keyword[for] identifier[i] keyword[in] identifier[search] :
identifier[ii] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[i] )
identifier[text] = identifier[re] . identifier[sub] ( identifier[i] , identifier[ii] + literal[string] , identifier[text] )
identifier[text] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[text] )
keyword[return] identifier[text] . identifier[split] ( literal[string] ) | def segment(text: str) -> str:
"""
Enhanced Thai Character Cluster (ETCC)
:param string text: word input
:return: etcc
"""
if not text or not isinstance(text, str):
return '' # depends on [control=['if'], data=[]]
if re.search('[เแ]' + _C + '[' + ''.join(_UV) + ']' + '\\w', text):
search = re.findall('[เแ]' + _C + '[' + ''.join(_UV) + ']' + '\\w', text)
for i in search:
text = re.sub(i, '/' + i + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if re.search(_C + '[' + ''.join(_UV1) + ']' + _C + _C + 'ุ' + '์', text):
search = re.findall(_C + '[' + ''.join(_UV1) + ']' + _C + _C + 'ุ' + '์', text)
for i in search:
text = re.sub(i, '//' + i + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if re.search(_C + _UV2 + _C, text):
search = re.findall(_C + _UV2 + _C, text)
for i in search:
text = re.sub(i, '/' + i + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
    text = re.sub('//', '/', text)  # assign the result; re.sub does not modify in place
if re.search('เ' + _C + 'า' + 'ะ', text):
search = re.findall('เ' + _C + 'า' + 'ะ', text)
for i in search:
text = re.sub(i, '/' + i + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if re.search('เ' + '\\w\\w' + 'า' + 'ะ', text):
search = re.findall('เ' + '\\w\\w' + 'า' + 'ะ', text)
for i in search:
text = re.sub(i, '/' + i + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
text = re.sub('//', '/', text)
if re.search(_C + '[' + ''.join(_UV1) + ']' + _C + _C + '์', text):
search = re.findall(_C + '[' + ''.join(_UV1) + ']' + _C + _C + '์', text)
for i in search:
text = re.sub(i, '/' + i + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if re.search('/' + _C + ''.join(['ุ', '์']) + '/', text):
# แก้ไขในกรณี พัน/ธุ์
search = re.findall('/' + _C + ''.join(['ุ', '์']) + '/', text)
for i in search:
ii = re.sub('/', '', i)
text = re.sub(i, ii + '/', text) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
text = re.sub('//', '/', text)
return text.split('/') |
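
The mark-then-split pattern that segment applies repeatedly — wrap each
regex match in "/" markers, collapse doubled markers, then split on "/" —
can be exercised in isolation. The pattern below is a stand-in, since the
real _C, _UV, _UV1 and _UV2 character classes are module-level constants
not shown in this snippet; note the sketch also escapes the matched text,
which the original relies on being regex-safe:

import re

def mark_and_split(text, pattern):
    # Wrap every distinct match in '/' so cluster boundaries become explicit.
    for m in set(re.findall(pattern, text)):
        text = re.sub(re.escape(m), "/" + m + "/", text)
    text = re.sub("//", "/", text)  # collapse doubled boundary markers
    return [s for s in text.split("/") if s]

print(mark_and_split("abcabd", "ab."))  # ['abc', 'abd']
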
def remove_nonancestors_of(self, node):
"""Remove all of the non-ancestors operation nodes of node."""
if isinstance(node, int):
warnings.warn('Calling remove_nonancestors_of() with a node id is deprecated,'
' use a DAGNode instead',
DeprecationWarning, 2)
node = self._id_to_node[node]
anc = nx.ancestors(self._multi_graph, node)
comp = list(set(self._multi_graph.nodes()) - set(anc))
for n in comp:
if n.type == "op":
self.remove_op_node(n) | def function[remove_nonancestors_of, parameter[self, node]]:
constant[Remove all of the non-ancestors operation nodes of node.]
if call[name[isinstance], parameter[name[node], name[int]]] begin[:]
call[name[warnings].warn, parameter[constant[Calling remove_nonancestors_of() with a node id is deprecated, use a DAGNode instead], name[DeprecationWarning], constant[2]]]
variable[node] assign[=] call[name[self]._id_to_node][name[node]]
variable[anc] assign[=] call[name[nx].ancestors, parameter[name[self]._multi_graph, name[node]]]
variable[comp] assign[=] call[name[list], parameter[binary_operation[call[name[set], parameter[call[name[self]._multi_graph.nodes, parameter[]]]] - call[name[set], parameter[name[anc]]]]]]
for taget[name[n]] in starred[name[comp]] begin[:]
if compare[name[n].type equal[==] constant[op]] begin[:]
call[name[self].remove_op_node, parameter[name[n]]] | keyword[def] identifier[remove_nonancestors_of] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[node] , identifier[int] ):
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] ,
identifier[DeprecationWarning] , literal[int] )
identifier[node] = identifier[self] . identifier[_id_to_node] [ identifier[node] ]
identifier[anc] = identifier[nx] . identifier[ancestors] ( identifier[self] . identifier[_multi_graph] , identifier[node] )
identifier[comp] = identifier[list] ( identifier[set] ( identifier[self] . identifier[_multi_graph] . identifier[nodes] ())- identifier[set] ( identifier[anc] ))
keyword[for] identifier[n] keyword[in] identifier[comp] :
keyword[if] identifier[n] . identifier[type] == literal[string] :
identifier[self] . identifier[remove_op_node] ( identifier[n] ) | def remove_nonancestors_of(self, node):
"""Remove all of the non-ancestors operation nodes of node."""
if isinstance(node, int):
warnings.warn('Calling remove_nonancestors_of() with a node id is deprecated, use a DAGNode instead', DeprecationWarning, 2)
node = self._id_to_node[node] # depends on [control=['if'], data=[]]
anc = nx.ancestors(self._multi_graph, node)
comp = list(set(self._multi_graph.nodes()) - set(anc))
for n in comp:
if n.type == 'op':
self.remove_op_node(n) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']] |
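
The pruning above boils down to two networkx calls: collect the ancestors,
complement against the full node set, and drop the rest. A standalone
sketch on a toy DAG (here the target node itself is kept, and the op-type
filtering is omitted):

import networkx as nx

g = nx.DiGraph([(1, 2), (2, 4), (3, 4), (5, 6)])
node = 4
keep = nx.ancestors(g, node) | {node}  # ancestors of 4 are {1, 2, 3}
drop = set(g.nodes()) - keep           # non-ancestors: {5, 6}
g.remove_nodes_from(drop)
print(sorted(g.nodes()))               # [1, 2, 3, 4]
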
def get_child_objective_bank_ids(self, objective_bank_id):
"""Gets the child ``Ids`` of the given objective bank.
arg: objective_bank_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the objective bank
raise: NotFound - ``objective_bank_id`` is not found
raise: NullArgument - ``objective_bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_child_bin_ids
if self._catalog_session is not None:
return self._catalog_session.get_child_catalog_ids(catalog_id=objective_bank_id)
return self._hierarchy_session.get_children(id_=objective_bank_id) | def function[get_child_objective_bank_ids, parameter[self, objective_bank_id]]:
constant[Gets the child ``Ids`` of the given objective bank.
arg: objective_bank_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the objective bank
raise: NotFound - ``objective_bank_id`` is not found
raise: NullArgument - ``objective_bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.get_child_catalog_ids, parameter[]]]
return[call[name[self]._hierarchy_session.get_children, parameter[]]] | keyword[def] identifier[get_child_objective_bank_ids] ( identifier[self] , identifier[objective_bank_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[get_child_catalog_ids] ( identifier[catalog_id] = identifier[objective_bank_id] )
keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[get_children] ( identifier[id_] = identifier[objective_bank_id] ) | def get_child_objective_bank_ids(self, objective_bank_id):
"""Gets the child ``Ids`` of the given objective bank.
arg: objective_bank_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the objective bank
raise: NotFound - ``objective_bank_id`` is not found
raise: NullArgument - ``objective_bank_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_child_bin_ids
if self._catalog_session is not None:
return self._catalog_session.get_child_catalog_ids(catalog_id=objective_bank_id) # depends on [control=['if'], data=[]]
return self._hierarchy_session.get_children(id_=objective_bank_id) |
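
The body above is a small delegation pattern: use the catalog session when
one was configured, otherwise fall back to the hierarchy session. A
stripped-down sketch with stand-in session classes (the real osid sessions
expose far richer interfaces):

class CatalogSession:
    def get_child_catalog_ids(self, catalog_id):
        return ["child-of-" + catalog_id]

class HierarchySession:
    def get_children(self, id_):
        return ["hier-child-of-" + id_]

def get_child_ids(catalog_session, hierarchy_session, bank_id):
    if catalog_session is not None:
        return catalog_session.get_child_catalog_ids(catalog_id=bank_id)
    return hierarchy_session.get_children(id_=bank_id)

print(get_child_ids(None, HierarchySession(), "bank-1"))  # ['hier-child-of-bank-1']
print(get_child_ids(CatalogSession(), None, "bank-1"))    # ['child-of-bank-1']
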
def reboot(env, identifier, hard):
"""Reboot an active virtual server."""
virtual_guest = env.client['Virtual_Guest']
mgr = SoftLayer.HardwareManager(env.client)
vs_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'VS')
if not (env.skip_confirmations or
formatting.confirm('This will reboot the VS with id %s. '
'Continue?' % vs_id)):
raise exceptions.CLIAbort('Aborted.')
if hard is True:
virtual_guest.rebootHard(id=vs_id)
elif hard is False:
virtual_guest.rebootSoft(id=vs_id)
else:
virtual_guest.rebootDefault(id=vs_id) | def function[reboot, parameter[env, identifier, hard]]:
constant[Reboot an active virtual server.]
variable[virtual_guest] assign[=] call[name[env].client][constant[Virtual_Guest]]
variable[mgr] assign[=] call[name[SoftLayer].HardwareManager, parameter[name[env].client]]
variable[vs_id] assign[=] call[name[helpers].resolve_id, parameter[name[mgr].resolve_ids, name[identifier], constant[VS]]]
if <ast.UnaryOp object at 0x7da18dc06740> begin[:]
<ast.Raise object at 0x7da18dc075e0>
if compare[name[hard] is constant[True]] begin[:]
call[name[virtual_guest].rebootHard, parameter[]] | keyword[def] identifier[reboot] ( identifier[env] , identifier[identifier] , identifier[hard] ):
literal[string]
identifier[virtual_guest] = identifier[env] . identifier[client] [ literal[string] ]
identifier[mgr] = identifier[SoftLayer] . identifier[HardwareManager] ( identifier[env] . identifier[client] )
identifier[vs_id] = identifier[helpers] . identifier[resolve_id] ( identifier[mgr] . identifier[resolve_ids] , identifier[identifier] , literal[string] )
keyword[if] keyword[not] ( identifier[env] . identifier[skip_confirmations] keyword[or]
identifier[formatting] . identifier[confirm] ( literal[string]
literal[string] % identifier[vs_id] )):
keyword[raise] identifier[exceptions] . identifier[CLIAbort] ( literal[string] )
keyword[if] identifier[hard] keyword[is] keyword[True] :
identifier[virtual_guest] . identifier[rebootHard] ( identifier[id] = identifier[vs_id] )
keyword[elif] identifier[hard] keyword[is] keyword[False] :
identifier[virtual_guest] . identifier[rebootSoft] ( identifier[id] = identifier[vs_id] )
keyword[else] :
identifier[virtual_guest] . identifier[rebootDefault] ( identifier[id] = identifier[vs_id] ) | def reboot(env, identifier, hard):
"""Reboot an active virtual server."""
virtual_guest = env.client['Virtual_Guest']
mgr = SoftLayer.HardwareManager(env.client)
vs_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'VS')
if not (env.skip_confirmations or formatting.confirm('This will reboot the VS with id %s. Continue?' % vs_id)):
raise exceptions.CLIAbort('Aborted.') # depends on [control=['if'], data=[]]
if hard is True:
virtual_guest.rebootHard(id=vs_id) # depends on [control=['if'], data=[]]
elif hard is False:
virtual_guest.rebootSoft(id=vs_id) # depends on [control=['if'], data=[]]
else:
virtual_guest.rebootDefault(id=vs_id) |
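
The hard flag above is deliberately three-valued: True forces a hard
reboot, False a soft one, and None (or anything else) lets the API choose.
The dispatch in isolation, with a stand-in guest object since the real
SoftLayer service needs credentials:

class FakeGuest:
    def rebootHard(self, id): print("hard reboot of", id)
    def rebootSoft(self, id): print("soft reboot of", id)
    def rebootDefault(self, id): print("default reboot of", id)

def dispatch_reboot(guest, vs_id, hard=None):
    if hard is True:
        guest.rebootHard(id=vs_id)
    elif hard is False:
        guest.rebootSoft(id=vs_id)
    else:
        guest.rebootDefault(id=vs_id)

dispatch_reboot(FakeGuest(), 123)             # default reboot of 123
dispatch_reboot(FakeGuest(), 123, hard=True)  # hard reboot of 123
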
def moving_patterson_f3(acc, aca, acb, size, start=0, stop=None, step=None,
normed=True):
"""Estimate F3(C; A, B) in moving windows.
Parameters
----------
acc : array_like, int, shape (n_variants, 2)
Allele counts for the test population (C).
aca : array_like, int, shape (n_variants, 2)
Allele counts for the first source population (A).
acb : array_like, int, shape (n_variants, 2)
Allele counts for the second source population (B).
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
normed : bool, optional
If False, use un-normalised f3 values.
Returns
-------
f3 : ndarray, float, shape (n_windows,)
Estimated value of the statistic in each window.
"""
# calculate per-variant values
T, B = patterson_f3(acc, aca, acb)
    # calculate value of statistic within each window
if normed:
T_bsum = moving_statistic(T, statistic=np.nansum, size=size,
start=start, stop=stop, step=step)
B_bsum = moving_statistic(B, statistic=np.nansum, size=size,
start=start, stop=stop, step=step)
f3 = T_bsum / B_bsum
else:
f3 = moving_statistic(T, statistic=np.nanmean, size=size,
start=start, stop=stop, step=step)
return f3 | def function[moving_patterson_f3, parameter[acc, aca, acb, size, start, stop, step, normed]]:
constant[Estimate F3(C; A, B) in moving windows.
Parameters
----------
acc : array_like, int, shape (n_variants, 2)
Allele counts for the test population (C).
aca : array_like, int, shape (n_variants, 2)
Allele counts for the first source population (A).
acb : array_like, int, shape (n_variants, 2)
Allele counts for the second source population (B).
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
normed : bool, optional
If False, use un-normalised f3 values.
Returns
-------
f3 : ndarray, float, shape (n_windows,)
Estimated value of the statistic in each window.
]
<ast.Tuple object at 0x7da20e9b3e20> assign[=] call[name[patterson_f3], parameter[name[acc], name[aca], name[acb]]]
if name[normed] begin[:]
variable[T_bsum] assign[=] call[name[moving_statistic], parameter[name[T]]]
variable[B_bsum] assign[=] call[name[moving_statistic], parameter[name[B]]]
variable[f3] assign[=] binary_operation[name[T_bsum] / name[B_bsum]]
return[name[f3]] | keyword[def] identifier[moving_patterson_f3] ( identifier[acc] , identifier[aca] , identifier[acb] , identifier[size] , identifier[start] = literal[int] , identifier[stop] = keyword[None] , identifier[step] = keyword[None] ,
identifier[normed] = keyword[True] ):
literal[string]
identifier[T] , identifier[B] = identifier[patterson_f3] ( identifier[acc] , identifier[aca] , identifier[acb] )
keyword[if] identifier[normed] :
identifier[T_bsum] = identifier[moving_statistic] ( identifier[T] , identifier[statistic] = identifier[np] . identifier[nansum] , identifier[size] = identifier[size] ,
identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[step] = identifier[step] )
identifier[B_bsum] = identifier[moving_statistic] ( identifier[B] , identifier[statistic] = identifier[np] . identifier[nansum] , identifier[size] = identifier[size] ,
identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[step] = identifier[step] )
identifier[f3] = identifier[T_bsum] / identifier[B_bsum]
keyword[else] :
identifier[f3] = identifier[moving_statistic] ( identifier[T] , identifier[statistic] = identifier[np] . identifier[nanmean] , identifier[size] = identifier[size] ,
identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[step] = identifier[step] )
keyword[return] identifier[f3] | def moving_patterson_f3(acc, aca, acb, size, start=0, stop=None, step=None, normed=True):
"""Estimate F3(C; A, B) in moving windows.
Parameters
----------
acc : array_like, int, shape (n_variants, 2)
Allele counts for the test population (C).
aca : array_like, int, shape (n_variants, 2)
Allele counts for the first source population (A).
acb : array_like, int, shape (n_variants, 2)
Allele counts for the second source population (B).
size : int
The window size (number of variants).
start : int, optional
The index at which to start.
stop : int, optional
The index at which to stop.
step : int, optional
The number of variants between start positions of windows. If not
given, defaults to the window size, i.e., non-overlapping windows.
normed : bool, optional
If False, use un-normalised f3 values.
Returns
-------
f3 : ndarray, float, shape (n_windows,)
Estimated value of the statistic in each window.
"""
# calculate per-variant values
(T, B) = patterson_f3(acc, aca, acb)
    # calculate value of statistic within each window
if normed:
T_bsum = moving_statistic(T, statistic=np.nansum, size=size, start=start, stop=stop, step=step)
B_bsum = moving_statistic(B, statistic=np.nansum, size=size, start=start, stop=stop, step=step)
f3 = T_bsum / B_bsum # depends on [control=['if'], data=[]]
else:
f3 = moving_statistic(T, statistic=np.nanmean, size=size, start=start, stop=stop, step=step)
return f3 |
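
Note that the normed branch is a ratio of windowed sums, not a windowed
mean of per-variant ratios, which keeps the estimator stable when single
denominators are near zero. A self-contained numpy sketch of that
reduction, with a toy windowed nansum standing in for moving_statistic:

import numpy as np

def moving_nansum(x, size, step=None):
    step = step or size
    starts = range(0, len(x) - size + 1, step)
    return np.array([np.nansum(x[i:i + size]) for i in starts])

T = np.array([1.0, 2.0, np.nan, 4.0, 5.0, 6.0])  # per-variant numerators
B = np.full(6, 2.0)                              # per-variant denominators
f3 = moving_nansum(T, size=3) / moving_nansum(B, size=3)
print(f3)  # [0.5 2.5] -> (1+2)/6 and (4+5+6)/6
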
def get_data_times_for_job_workflow(self, num_job):
""" Get the data that this job will need to read in. """
        # small factor of 0.0001 to avoid float round-off causing us to
        # miss a second at the end of segments
        shift_dur = self.curr_seg[0] + int(self.job_time_shift * num_job + 0.0001)
job_data_seg = self.data_chunk.shift(shift_dur)
return job_data_seg | def function[get_data_times_for_job_workflow, parameter[self, num_job]]:
constant[ Get the data that this job will need to read in. ]
variable[shift_dur] assign[=] binary_operation[call[name[self].curr_seg][constant[0]] + call[name[int], parameter[binary_operation[binary_operation[name[self].job_time_shift * name[num_job]] + constant[0.0001]]]]]
variable[job_data_seg] assign[=] call[name[self].data_chunk.shift, parameter[name[shift_dur]]]
return[name[job_data_seg]] | keyword[def] identifier[get_data_times_for_job_workflow] ( identifier[self] , identifier[num_job] ):
literal[string]
identifier[shift_dur] = identifier[self] . identifier[curr_seg] [ literal[int] ]+ identifier[int] ( identifier[self] . identifier[job_time_shift] * identifier[num_job] + literal[int] )
identifier[job_data_seg] = identifier[self] . identifier[data_chunk] . identifier[shift] ( identifier[shift_dur] )
keyword[return] identifier[job_data_seg] | def get_data_times_for_job_workflow(self, num_job):
""" Get the data that this job will need to read in. """
        # small factor of 0.0001 to avoid float round-off causing us to
        # miss a second at the end of segments
shift_dur = self.curr_seg[0] + int(self.job_time_shift * num_job + 0.0001)
job_data_seg = self.data_chunk.shift(shift_dur)
return job_data_seg |
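
The 0.0001 guard matters because job_time_shift * num_job can land a hair
below the intended integer, and int() truncates toward zero. A two-line
illustration of the failure mode being avoided (the values are arbitrary):

shift = 0.29 * 100          # mathematically 29.0, but floats give 28.999999999999996
print(int(shift))           # 28 -- truncation would lose a second
print(int(shift + 0.0001))  # 29 -- the epsilon recovers the intended value
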
def run(self, n_steps=None):
r"""
Perform the algorithm
Parameters
----------
n_steps : int
            The number of throats to invade during this step
"""
if n_steps is None:
n_steps = sp.inf
queue = self.queue
if len(queue) == 0:
logger.warn('queue is empty, this network is fully invaded')
return
t_sorted = self['throat.sorted']
t_order = self['throat.order']
t_inv = self['throat.invasion_sequence']
p_inv = self['pore.invasion_sequence']
count = 0
while (len(queue) > 0) and (count < n_steps):
# Find throat at the top of the queue
t = hq.heappop(queue)
# Extract actual throat number
t_next = t_sorted[t]
t_inv[t_next] = self._tcount
# If throat is duplicated
while len(queue) > 0 and queue[0] == t:
# Note: Preventing duplicate entries below might save some time
t = hq.heappop(queue)
# Find pores connected to newly invaded throat
Ps = self.project.network['throat.conns'][t_next]
# Remove already invaded pores from Ps
Ps = Ps[p_inv[Ps] < 0]
if len(Ps) > 0:
p_inv[Ps] = self._tcount
Ts = self.project.network.find_neighbor_throats(pores=Ps)
Ts = Ts[t_inv[Ts] < 0] # Remove invaded throats from Ts
[hq.heappush(queue, T) for T in t_order[Ts]]
count += 1
self._tcount += 1
self['throat.invasion_sequence'] = t_inv
self['pore.invasion_sequence'] = p_inv | def function[run, parameter[self, n_steps]]:
constant[
Perform the algorithm
Parameters
----------
n_steps : int
The number of throats to invade during this step
]
if compare[name[n_steps] is constant[None]] begin[:]
variable[n_steps] assign[=] name[sp].inf
variable[queue] assign[=] name[self].queue
if compare[call[name[len], parameter[name[queue]]] equal[==] constant[0]] begin[:]
call[name[logger].warn, parameter[constant[queue is empty, this network is fully invaded]]]
return[None]
variable[t_sorted] assign[=] call[name[self]][constant[throat.sorted]]
variable[t_order] assign[=] call[name[self]][constant[throat.order]]
variable[t_inv] assign[=] call[name[self]][constant[throat.invasion_sequence]]
variable[p_inv] assign[=] call[name[self]][constant[pore.invasion_sequence]]
variable[count] assign[=] constant[0]
while <ast.BoolOp object at 0x7da20c795f90> begin[:]
variable[t] assign[=] call[name[hq].heappop, parameter[name[queue]]]
variable[t_next] assign[=] call[name[t_sorted]][name[t]]
call[name[t_inv]][name[t_next]] assign[=] name[self]._tcount
while <ast.BoolOp object at 0x7da20c7959f0> begin[:]
variable[t] assign[=] call[name[hq].heappop, parameter[name[queue]]]
variable[Ps] assign[=] call[call[name[self].project.network][constant[throat.conns]]][name[t_next]]
variable[Ps] assign[=] call[name[Ps]][compare[call[name[p_inv]][name[Ps]] less[<] constant[0]]]
if compare[call[name[len], parameter[name[Ps]]] greater[>] constant[0]] begin[:]
call[name[p_inv]][name[Ps]] assign[=] name[self]._tcount
variable[Ts] assign[=] call[name[self].project.network.find_neighbor_throats, parameter[]]
variable[Ts] assign[=] call[name[Ts]][compare[call[name[t_inv]][name[Ts]] less[<] constant[0]]]
<ast.ListComp object at 0x7da20c795fc0>
<ast.AugAssign object at 0x7da20c794b50>
<ast.AugAssign object at 0x7da20c795960>
call[name[self]][constant[throat.invasion_sequence]] assign[=] name[t_inv]
call[name[self]][constant[pore.invasion_sequence]] assign[=] name[p_inv] | keyword[def] identifier[run] ( identifier[self] , identifier[n_steps] = keyword[None] ):
literal[string]
keyword[if] identifier[n_steps] keyword[is] keyword[None] :
identifier[n_steps] = identifier[sp] . identifier[inf]
identifier[queue] = identifier[self] . identifier[queue]
keyword[if] identifier[len] ( identifier[queue] )== literal[int] :
identifier[logger] . identifier[warn] ( literal[string] )
keyword[return]
identifier[t_sorted] = identifier[self] [ literal[string] ]
identifier[t_order] = identifier[self] [ literal[string] ]
identifier[t_inv] = identifier[self] [ literal[string] ]
identifier[p_inv] = identifier[self] [ literal[string] ]
identifier[count] = literal[int]
keyword[while] ( identifier[len] ( identifier[queue] )> literal[int] ) keyword[and] ( identifier[count] < identifier[n_steps] ):
identifier[t] = identifier[hq] . identifier[heappop] ( identifier[queue] )
identifier[t_next] = identifier[t_sorted] [ identifier[t] ]
identifier[t_inv] [ identifier[t_next] ]= identifier[self] . identifier[_tcount]
keyword[while] identifier[len] ( identifier[queue] )> literal[int] keyword[and] identifier[queue] [ literal[int] ]== identifier[t] :
identifier[t] = identifier[hq] . identifier[heappop] ( identifier[queue] )
identifier[Ps] = identifier[self] . identifier[project] . identifier[network] [ literal[string] ][ identifier[t_next] ]
identifier[Ps] = identifier[Ps] [ identifier[p_inv] [ identifier[Ps] ]< literal[int] ]
keyword[if] identifier[len] ( identifier[Ps] )> literal[int] :
identifier[p_inv] [ identifier[Ps] ]= identifier[self] . identifier[_tcount]
identifier[Ts] = identifier[self] . identifier[project] . identifier[network] . identifier[find_neighbor_throats] ( identifier[pores] = identifier[Ps] )
identifier[Ts] = identifier[Ts] [ identifier[t_inv] [ identifier[Ts] ]< literal[int] ]
[ identifier[hq] . identifier[heappush] ( identifier[queue] , identifier[T] ) keyword[for] identifier[T] keyword[in] identifier[t_order] [ identifier[Ts] ]]
identifier[count] += literal[int]
identifier[self] . identifier[_tcount] += literal[int]
identifier[self] [ literal[string] ]= identifier[t_inv]
identifier[self] [ literal[string] ]= identifier[p_inv] | def run(self, n_steps=None):
"""
Perform the algorithm
Parameters
----------
n_steps : int
            The number of throats to invade during this step
"""
if n_steps is None:
n_steps = sp.inf # depends on [control=['if'], data=['n_steps']]
queue = self.queue
if len(queue) == 0:
logger.warn('queue is empty, this network is fully invaded')
return # depends on [control=['if'], data=[]]
t_sorted = self['throat.sorted']
t_order = self['throat.order']
t_inv = self['throat.invasion_sequence']
p_inv = self['pore.invasion_sequence']
count = 0
while len(queue) > 0 and count < n_steps:
# Find throat at the top of the queue
t = hq.heappop(queue)
# Extract actual throat number
t_next = t_sorted[t]
t_inv[t_next] = self._tcount
# If throat is duplicated
while len(queue) > 0 and queue[0] == t:
# Note: Preventing duplicate entries below might save some time
t = hq.heappop(queue) # depends on [control=['while'], data=[]]
# Find pores connected to newly invaded throat
Ps = self.project.network['throat.conns'][t_next]
# Remove already invaded pores from Ps
Ps = Ps[p_inv[Ps] < 0]
if len(Ps) > 0:
p_inv[Ps] = self._tcount
Ts = self.project.network.find_neighbor_throats(pores=Ps)
Ts = Ts[t_inv[Ts] < 0] # Remove invaded throats from Ts
[hq.heappush(queue, T) for T in t_order[Ts]] # depends on [control=['if'], data=[]]
count += 1
self._tcount += 1 # depends on [control=['while'], data=[]]
self['throat.invasion_sequence'] = t_inv
self['pore.invasion_sequence'] = p_inv |
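
The loop above is a heap-driven invasion: pop the lowest-ordered throat,
record its invasion step, mark its uninvaded pores, and push the newly
reachable throats. A miniature version on a hand-built adjacency map,
independent of the OpenPNM arrays:

import heapq as hq

conns = {0: [1], 1: [0, 2], 2: [1]}  # throat -> neighbouring throats
order = {0: 0, 1: 1, 2: 2}           # invasion priority per throat
lookup = {v: k for k, v in order.items()}
queue, invaded, sequence = [order[0]], set(), []
while queue:
    t = lookup[hq.heappop(queue)]
    if t in invaded:
        continue  # skip duplicate heap entries, as the inner while does above
    invaded.add(t)
    sequence.append(t)
    for n in conns[t]:
        if n not in invaded:
            hq.heappush(queue, order[n])
print(sequence)  # [0, 1, 2]
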
def close(self):
"""Close the zip file.
        Note: the underlying tempfile is removed when the archive is garbage collected.
"""
self._closed = True
self._zip_file.close()
log.debug(
"Created custodian serverless archive size: %0.2fmb",
(os.path.getsize(self._temp_archive_file.name) / (
1024.0 * 1024.0)))
return self | def function[close, parameter[self]]:
constant[Close the zip file.
Note: the underlying tempfile is removed when the archive is garbage collected.
]
name[self]._closed assign[=] constant[True]
call[name[self]._zip_file.close, parameter[]]
call[name[log].debug, parameter[constant[Created custodian serverless archive size: %0.2fmb], binary_operation[call[name[os].path.getsize, parameter[name[self]._temp_archive_file.name]] / binary_operation[constant[1024.0] * constant[1024.0]]]]]
return[name[self]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_closed] = keyword[True]
identifier[self] . identifier[_zip_file] . identifier[close] ()
identifier[log] . identifier[debug] (
literal[string] ,
( identifier[os] . identifier[path] . identifier[getsize] ( identifier[self] . identifier[_temp_archive_file] . identifier[name] )/(
literal[int] * literal[int] )))
keyword[return] identifier[self] | def close(self):
"""Close the zip file.
        Note: the underlying tempfile is removed when the archive is garbage collected.
"""
self._closed = True
self._zip_file.close()
log.debug('Created custodian serverless archive size: %0.2fmb', os.path.getsize(self._temp_archive_file.name) / (1024.0 * 1024.0))
return self |
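
The logged figure is just the byte size of the backing tempfile scaled to
mebibytes. The same conversion in isolation, against a throwaway file:

import os, tempfile

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(b"x" * (3 * 1024 * 1024))  # 3 MiB of payload
size_mb = os.path.getsize(f.name) / (1024.0 * 1024.0)
print("%0.2fmb" % size_mb)  # 3.00mb
os.remove(f.name)
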
def stripext (cmd, archive, verbosity, extension=""):
"""Print the name without suffix."""
if verbosity >= 0:
print(util.stripext(archive)+extension)
return None | def function[stripext, parameter[cmd, archive, verbosity, extension]]:
constant[Print the name without suffix.]
if compare[name[verbosity] greater_or_equal[>=] constant[0]] begin[:]
call[name[print], parameter[binary_operation[call[name[util].stripext, parameter[name[archive]]] + name[extension]]]]
return[constant[None]] | keyword[def] identifier[stripext] ( identifier[cmd] , identifier[archive] , identifier[verbosity] , identifier[extension] = literal[string] ):
literal[string]
keyword[if] identifier[verbosity] >= literal[int] :
identifier[print] ( identifier[util] . identifier[stripext] ( identifier[archive] )+ identifier[extension] )
keyword[return] keyword[None] | def stripext(cmd, archive, verbosity, extension=''):
"""Print the name without suffix."""
if verbosity >= 0:
print(util.stripext(archive) + extension) # depends on [control=['if'], data=[]]
return None |
def pop(self):
"""
Pops the latest additional context.
If the additional context was pushed by a different additional manager,
a ``RuntimeError`` is raised.
"""
rv = _additional_ctx_stack.pop()
if rv is None or rv[0] is not self:
raise RuntimeError(
"popped wrong additional context ({} instead of {})".format(rv, self)
) | def function[pop, parameter[self]]:
constant[
Pops the latest additional context.
If the additional context was pushed by a different additional manager,
a ``RuntimeError`` is raised.
]
variable[rv] assign[=] call[name[_additional_ctx_stack].pop, parameter[]]
if <ast.BoolOp object at 0x7da20c6a97e0> begin[:]
<ast.Raise object at 0x7da20c6a8d60> | keyword[def] identifier[pop] ( identifier[self] ):
literal[string]
identifier[rv] = identifier[_additional_ctx_stack] . identifier[pop] ()
keyword[if] identifier[rv] keyword[is] keyword[None] keyword[or] identifier[rv] [ literal[int] ] keyword[is] keyword[not] identifier[self] :
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] ( identifier[rv] , identifier[self] )
) | def pop(self):
"""
Pops the latest additional context.
If the additional context was pushed by a different additional manager,
a ``RuntimeError`` is raised.
"""
rv = _additional_ctx_stack.pop()
if rv is None or rv[0] is not self:
raise RuntimeError('popped wrong additional context ({} instead of {})'.format(rv, self)) # depends on [control=['if'], data=[]] |
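
The ownership check in pop guards against mismatched push/pop pairs, the
same invariant Flask-style context stacks enforce. A minimal stack with
that check, where each entry remembers the manager that pushed it:

_stack = []

class Ctx:
    def push(self):
        _stack.append((self, None))
    def pop(self):
        rv = _stack.pop() if _stack else None
        if rv is None or rv[0] is not self:
            raise RuntimeError(
                "popped wrong additional context ({} instead of {})".format(rv, self))

a, b = Ctx(), Ctx()
a.push(); b.push()
b.pop()  # fine: b pushed last
a.pop()  # fine: a is now on top
# calling a.pop() again here would raise RuntimeError (empty stack)
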
def make_roi_plots(self, gta, mcube_tot, **kwargs):
"""Make various diagnostic plots for the 1D and 2D
counts/model distributions.
Parameters
----------
prefix : str
            Prefix that will be prepended to all filenames.
"""
fmt = kwargs.get('format', self.config['format'])
figsize = kwargs.get('figsize', self.config['figsize'])
prefix = kwargs.get('prefix', '')
loge_bounds = kwargs.get('loge_bounds', None)
weighted = kwargs.get('weighted', False)
roi_kwargs = {}
roi_kwargs.setdefault('loge_bounds', loge_bounds)
roi_kwargs.setdefault(
'graticule_radii', self.config['graticule_radii'])
roi_kwargs.setdefault('label_ts_threshold',
self.config['label_ts_threshold'])
roi_kwargs.setdefault('cmap', self.config['cmap'])
roi_kwargs.setdefault('catalogs', self._catalogs)
if loge_bounds is None:
loge_bounds = (gta.log_energies[0], gta.log_energies[-1])
esuffix = '_%.3f_%.3f' % (loge_bounds[0], loge_bounds[1])
mcube_diffuse = gta.model_counts_map('diffuse')
counts_map = gta.counts_map()
if weighted:
wmap = gta.weight_map()
counts_map = copy.deepcopy(counts_map)
mcube_tot = copy.deepcopy(mcube_tot)
counts_map.data *= wmap.data
mcube_tot.data *= wmap.data
mcube_diffuse.data *= wmap.data
# colors = ['k', 'b', 'g', 'r']
data_style = {'marker': 's', 'linestyle': 'None'}
fig = plt.figure(figsize=figsize)
if gta.projtype == "WCS":
xmin = -1
xmax = 1
elif gta.projtype == "HPX":
hpx2wcs = counts_map.make_wcs_mapping(proj='CAR', oversample=2)
counts_map = counts_map.to_wcs(hpx2wcs=hpx2wcs)
mcube_tot = mcube_tot.to_wcs(hpx2wcs=hpx2wcs)
mcube_diffuse = mcube_diffuse.to_wcs(hpx2wcs=hpx2wcs)
xmin = None
xmax = None
fig = plt.figure(figsize=figsize)
rp = ROIPlotter(mcube_tot, roi=gta.roi, **roi_kwargs)
rp.plot(cb_label='Counts', zscale='pow', gamma=1. / 3.)
plt.savefig(os.path.join(gta.config['fileio']['workdir'],
'%s_model_map%s.%s' % (
prefix, esuffix, fmt)))
plt.close(fig)
rp = ROIPlotter(counts_map, roi=gta.roi, **roi_kwargs)
rp.plot(cb_label='Counts', zscale='sqrt')
plt.savefig(os.path.join(gta.config['fileio']['workdir'],
'%s_counts_map%s.%s' % (
prefix, esuffix, fmt)))
plt.close(fig)
for iaxis, xlabel, psuffix in zip([0, 1],
['LON Offset [deg]', 'LAT Offset [deg]'],
['xproj', 'yproj']):
fig = plt.figure(figsize=figsize)
rp.plot_projection(iaxis, label='Data', color='k',
xmin=xmin, xmax=xmax, **data_style)
rp.plot_projection(iaxis, data=mcube_tot, label='Model', xmin=xmin, xmax=xmax,
noerror=True)
rp.plot_projection(iaxis, data=mcube_diffuse, label='Diffuse', xmin=xmin, xmax=xmax,
noerror=True)
plt.gca().set_ylabel('Counts')
plt.gca().set_xlabel(xlabel)
plt.gca().legend(frameon=False)
annotate(loge_bounds=loge_bounds)
plt.savefig(os.path.join(gta.config['fileio']['workdir'],
'%s_counts_map_%s%s.%s' % (prefix, psuffix,
esuffix, fmt)))
plt.close(fig) | def function[make_roi_plots, parameter[self, gta, mcube_tot]]:
constant[Make various diagnostic plots for the 1D and 2D
counts/model distributions.
Parameters
----------
prefix : str
Prefix that will be prepended to all filenames.
]
variable[fmt] assign[=] call[name[kwargs].get, parameter[constant[format], call[name[self].config][constant[format]]]]
variable[figsize] assign[=] call[name[kwargs].get, parameter[constant[figsize], call[name[self].config][constant[figsize]]]]
variable[prefix] assign[=] call[name[kwargs].get, parameter[constant[prefix], constant[]]]
variable[loge_bounds] assign[=] call[name[kwargs].get, parameter[constant[loge_bounds], constant[None]]]
variable[weighted] assign[=] call[name[kwargs].get, parameter[constant[weighted], constant[False]]]
variable[roi_kwargs] assign[=] dictionary[[], []]
call[name[roi_kwargs].setdefault, parameter[constant[loge_bounds], name[loge_bounds]]]
call[name[roi_kwargs].setdefault, parameter[constant[graticule_radii], call[name[self].config][constant[graticule_radii]]]]
call[name[roi_kwargs].setdefault, parameter[constant[label_ts_threshold], call[name[self].config][constant[label_ts_threshold]]]]
call[name[roi_kwargs].setdefault, parameter[constant[cmap], call[name[self].config][constant[cmap]]]]
call[name[roi_kwargs].setdefault, parameter[constant[catalogs], name[self]._catalogs]]
if compare[name[loge_bounds] is constant[None]] begin[:]
variable[loge_bounds] assign[=] tuple[[<ast.Subscript object at 0x7da20c6c57e0>, <ast.Subscript object at 0x7da20c6c44c0>]]
variable[esuffix] assign[=] binary_operation[constant[_%.3f_%.3f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20c6c6200>, <ast.Subscript object at 0x7da20c6c66e0>]]]
variable[mcube_diffuse] assign[=] call[name[gta].model_counts_map, parameter[constant[diffuse]]]
variable[counts_map] assign[=] call[name[gta].counts_map, parameter[]]
if name[weighted] begin[:]
variable[wmap] assign[=] call[name[gta].weight_map, parameter[]]
variable[counts_map] assign[=] call[name[copy].deepcopy, parameter[name[counts_map]]]
variable[mcube_tot] assign[=] call[name[copy].deepcopy, parameter[name[mcube_tot]]]
<ast.AugAssign object at 0x7da20c7c8430>
<ast.AugAssign object at 0x7da20c7cb1f0>
<ast.AugAssign object at 0x7da20c7c9840>
variable[data_style] assign[=] dictionary[[<ast.Constant object at 0x7da20c7cbcd0>, <ast.Constant object at 0x7da20c7cb2b0>], [<ast.Constant object at 0x7da20c7cab90>, <ast.Constant object at 0x7da20c7c8760>]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
if compare[name[gta].projtype equal[==] constant[WCS]] begin[:]
variable[xmin] assign[=] <ast.UnaryOp object at 0x7da20c7cab30>
variable[xmax] assign[=] constant[1]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[rp] assign[=] call[name[ROIPlotter], parameter[name[mcube_tot]]]
call[name[rp].plot, parameter[]]
call[name[plt].savefig, parameter[call[name[os].path.join, parameter[call[call[name[gta].config][constant[fileio]]][constant[workdir]], binary_operation[constant[%s_model_map%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7c9ab0>, <ast.Name object at 0x7da20c7c9f00>, <ast.Name object at 0x7da20c7c8070>]]]]]]]
call[name[plt].close, parameter[name[fig]]]
variable[rp] assign[=] call[name[ROIPlotter], parameter[name[counts_map]]]
call[name[rp].plot, parameter[]]
call[name[plt].savefig, parameter[call[name[os].path.join, parameter[call[call[name[gta].config][constant[fileio]]][constant[workdir]], binary_operation[constant[%s_counts_map%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00ea10>, <ast.Name object at 0x7da18f00e710>, <ast.Name object at 0x7da18f00e740>]]]]]]]
call[name[plt].close, parameter[name[fig]]]
for taget[tuple[[<ast.Name object at 0x7da18f00fbb0>, <ast.Name object at 0x7da18f00c550>, <ast.Name object at 0x7da18f00cc10>]]] in starred[call[name[zip], parameter[list[[<ast.Constant object at 0x7da18f00dc00>, <ast.Constant object at 0x7da18f00c5e0>]], list[[<ast.Constant object at 0x7da18f00fdf0>, <ast.Constant object at 0x7da18f00f550>]], list[[<ast.Constant object at 0x7da18f00eec0>, <ast.Constant object at 0x7da18f00d750>]]]]] begin[:]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
call[name[rp].plot_projection, parameter[name[iaxis]]]
call[name[rp].plot_projection, parameter[name[iaxis]]]
call[name[rp].plot_projection, parameter[name[iaxis]]]
call[call[name[plt].gca, parameter[]].set_ylabel, parameter[constant[Counts]]]
call[call[name[plt].gca, parameter[]].set_xlabel, parameter[name[xlabel]]]
call[call[name[plt].gca, parameter[]].legend, parameter[]]
call[name[annotate], parameter[]]
call[name[plt].savefig, parameter[call[name[os].path.join, parameter[call[call[name[gta].config][constant[fileio]]][constant[workdir]], binary_operation[constant[%s_counts_map_%s%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e4820>, <ast.Name object at 0x7da20c6e70a0>, <ast.Name object at 0x7da20c6e4dc0>, <ast.Name object at 0x7da20c6e79a0>]]]]]]]
call[name[plt].close, parameter[name[fig]]] | keyword[def] identifier[make_roi_plots] ( identifier[self] , identifier[gta] , identifier[mcube_tot] ,** identifier[kwargs] ):
literal[string]
identifier[fmt] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[config] [ literal[string] ])
identifier[figsize] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[config] [ literal[string] ])
identifier[prefix] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
identifier[loge_bounds] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[weighted] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[roi_kwargs] ={}
identifier[roi_kwargs] . identifier[setdefault] ( literal[string] , identifier[loge_bounds] )
identifier[roi_kwargs] . identifier[setdefault] (
literal[string] , identifier[self] . identifier[config] [ literal[string] ])
identifier[roi_kwargs] . identifier[setdefault] ( literal[string] ,
identifier[self] . identifier[config] [ literal[string] ])
identifier[roi_kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[config] [ literal[string] ])
identifier[roi_kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[_catalogs] )
keyword[if] identifier[loge_bounds] keyword[is] keyword[None] :
identifier[loge_bounds] =( identifier[gta] . identifier[log_energies] [ literal[int] ], identifier[gta] . identifier[log_energies] [- literal[int] ])
identifier[esuffix] = literal[string] %( identifier[loge_bounds] [ literal[int] ], identifier[loge_bounds] [ literal[int] ])
identifier[mcube_diffuse] = identifier[gta] . identifier[model_counts_map] ( literal[string] )
identifier[counts_map] = identifier[gta] . identifier[counts_map] ()
keyword[if] identifier[weighted] :
identifier[wmap] = identifier[gta] . identifier[weight_map] ()
identifier[counts_map] = identifier[copy] . identifier[deepcopy] ( identifier[counts_map] )
identifier[mcube_tot] = identifier[copy] . identifier[deepcopy] ( identifier[mcube_tot] )
identifier[counts_map] . identifier[data] *= identifier[wmap] . identifier[data]
identifier[mcube_tot] . identifier[data] *= identifier[wmap] . identifier[data]
identifier[mcube_diffuse] . identifier[data] *= identifier[wmap] . identifier[data]
identifier[data_style] ={ literal[string] : literal[string] , literal[string] : literal[string] }
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
keyword[if] identifier[gta] . identifier[projtype] == literal[string] :
identifier[xmin] =- literal[int]
identifier[xmax] = literal[int]
keyword[elif] identifier[gta] . identifier[projtype] == literal[string] :
identifier[hpx2wcs] = identifier[counts_map] . identifier[make_wcs_mapping] ( identifier[proj] = literal[string] , identifier[oversample] = literal[int] )
identifier[counts_map] = identifier[counts_map] . identifier[to_wcs] ( identifier[hpx2wcs] = identifier[hpx2wcs] )
identifier[mcube_tot] = identifier[mcube_tot] . identifier[to_wcs] ( identifier[hpx2wcs] = identifier[hpx2wcs] )
identifier[mcube_diffuse] = identifier[mcube_diffuse] . identifier[to_wcs] ( identifier[hpx2wcs] = identifier[hpx2wcs] )
identifier[xmin] = keyword[None]
identifier[xmax] = keyword[None]
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
identifier[rp] = identifier[ROIPlotter] ( identifier[mcube_tot] , identifier[roi] = identifier[gta] . identifier[roi] ,** identifier[roi_kwargs] )
identifier[rp] . identifier[plot] ( identifier[cb_label] = literal[string] , identifier[zscale] = literal[string] , identifier[gamma] = literal[int] / literal[int] )
identifier[plt] . identifier[savefig] ( identifier[os] . identifier[path] . identifier[join] ( identifier[gta] . identifier[config] [ literal[string] ][ literal[string] ],
literal[string] %(
identifier[prefix] , identifier[esuffix] , identifier[fmt] )))
identifier[plt] . identifier[close] ( identifier[fig] )
identifier[rp] = identifier[ROIPlotter] ( identifier[counts_map] , identifier[roi] = identifier[gta] . identifier[roi] ,** identifier[roi_kwargs] )
identifier[rp] . identifier[plot] ( identifier[cb_label] = literal[string] , identifier[zscale] = literal[string] )
identifier[plt] . identifier[savefig] ( identifier[os] . identifier[path] . identifier[join] ( identifier[gta] . identifier[config] [ literal[string] ][ literal[string] ],
literal[string] %(
identifier[prefix] , identifier[esuffix] , identifier[fmt] )))
identifier[plt] . identifier[close] ( identifier[fig] )
keyword[for] identifier[iaxis] , identifier[xlabel] , identifier[psuffix] keyword[in] identifier[zip] ([ literal[int] , literal[int] ],
[ literal[string] , literal[string] ],
[ literal[string] , literal[string] ]):
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[figsize] )
identifier[rp] . identifier[plot_projection] ( identifier[iaxis] , identifier[label] = literal[string] , identifier[color] = literal[string] ,
identifier[xmin] = identifier[xmin] , identifier[xmax] = identifier[xmax] ,** identifier[data_style] )
identifier[rp] . identifier[plot_projection] ( identifier[iaxis] , identifier[data] = identifier[mcube_tot] , identifier[label] = literal[string] , identifier[xmin] = identifier[xmin] , identifier[xmax] = identifier[xmax] ,
identifier[noerror] = keyword[True] )
identifier[rp] . identifier[plot_projection] ( identifier[iaxis] , identifier[data] = identifier[mcube_diffuse] , identifier[label] = literal[string] , identifier[xmin] = identifier[xmin] , identifier[xmax] = identifier[xmax] ,
identifier[noerror] = keyword[True] )
identifier[plt] . identifier[gca] (). identifier[set_ylabel] ( literal[string] )
identifier[plt] . identifier[gca] (). identifier[set_xlabel] ( identifier[xlabel] )
identifier[plt] . identifier[gca] (). identifier[legend] ( identifier[frameon] = keyword[False] )
identifier[annotate] ( identifier[loge_bounds] = identifier[loge_bounds] )
identifier[plt] . identifier[savefig] ( identifier[os] . identifier[path] . identifier[join] ( identifier[gta] . identifier[config] [ literal[string] ][ literal[string] ],
literal[string] %( identifier[prefix] , identifier[psuffix] ,
identifier[esuffix] , identifier[fmt] )))
identifier[plt] . identifier[close] ( identifier[fig] ) | def make_roi_plots(self, gta, mcube_tot, **kwargs):
"""Make various diagnostic plots for the 1D and 2D
counts/model distributions.
Parameters
----------
prefix : str
Prefix that will be prepended to all filenames.
"""
fmt = kwargs.get('format', self.config['format'])
figsize = kwargs.get('figsize', self.config['figsize'])
prefix = kwargs.get('prefix', '')
loge_bounds = kwargs.get('loge_bounds', None)
weighted = kwargs.get('weighted', False)
roi_kwargs = {}
roi_kwargs.setdefault('loge_bounds', loge_bounds)
roi_kwargs.setdefault('graticule_radii', self.config['graticule_radii'])
roi_kwargs.setdefault('label_ts_threshold', self.config['label_ts_threshold'])
roi_kwargs.setdefault('cmap', self.config['cmap'])
roi_kwargs.setdefault('catalogs', self._catalogs)
if loge_bounds is None:
loge_bounds = (gta.log_energies[0], gta.log_energies[-1]) # depends on [control=['if'], data=['loge_bounds']]
esuffix = '_%.3f_%.3f' % (loge_bounds[0], loge_bounds[1])
mcube_diffuse = gta.model_counts_map('diffuse')
counts_map = gta.counts_map()
if weighted:
wmap = gta.weight_map()
counts_map = copy.deepcopy(counts_map)
mcube_tot = copy.deepcopy(mcube_tot)
counts_map.data *= wmap.data
mcube_tot.data *= wmap.data
mcube_diffuse.data *= wmap.data # depends on [control=['if'], data=[]]
# colors = ['k', 'b', 'g', 'r']
data_style = {'marker': 's', 'linestyle': 'None'}
fig = plt.figure(figsize=figsize)
if gta.projtype == 'WCS':
xmin = -1
xmax = 1 # depends on [control=['if'], data=[]]
elif gta.projtype == 'HPX':
hpx2wcs = counts_map.make_wcs_mapping(proj='CAR', oversample=2)
counts_map = counts_map.to_wcs(hpx2wcs=hpx2wcs)
mcube_tot = mcube_tot.to_wcs(hpx2wcs=hpx2wcs)
mcube_diffuse = mcube_diffuse.to_wcs(hpx2wcs=hpx2wcs)
xmin = None
xmax = None # depends on [control=['if'], data=[]]
fig = plt.figure(figsize=figsize)
rp = ROIPlotter(mcube_tot, roi=gta.roi, **roi_kwargs)
rp.plot(cb_label='Counts', zscale='pow', gamma=1.0 / 3.0)
plt.savefig(os.path.join(gta.config['fileio']['workdir'], '%s_model_map%s.%s' % (prefix, esuffix, fmt)))
plt.close(fig)
rp = ROIPlotter(counts_map, roi=gta.roi, **roi_kwargs)
rp.plot(cb_label='Counts', zscale='sqrt')
plt.savefig(os.path.join(gta.config['fileio']['workdir'], '%s_counts_map%s.%s' % (prefix, esuffix, fmt)))
plt.close(fig)
for (iaxis, xlabel, psuffix) in zip([0, 1], ['LON Offset [deg]', 'LAT Offset [deg]'], ['xproj', 'yproj']):
fig = plt.figure(figsize=figsize)
rp.plot_projection(iaxis, label='Data', color='k', xmin=xmin, xmax=xmax, **data_style)
rp.plot_projection(iaxis, data=mcube_tot, label='Model', xmin=xmin, xmax=xmax, noerror=True)
rp.plot_projection(iaxis, data=mcube_diffuse, label='Diffuse', xmin=xmin, xmax=xmax, noerror=True)
plt.gca().set_ylabel('Counts')
plt.gca().set_xlabel(xlabel)
plt.gca().legend(frameon=False)
annotate(loge_bounds=loge_bounds)
plt.savefig(os.path.join(gta.config['fileio']['workdir'], '%s_counts_map_%s%s.%s' % (prefix, psuffix, esuffix, fmt)))
plt.close(fig) # depends on [control=['for'], data=[]] |
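
A hedged usage sketch for the record above, which matches fermipy's AnalysisPlotter.make_roi_plots; the gta/plotter names, the prefix, and the energy bounds are assumptions for illustration, not taken from the source:

# gta: a configured fermipy GTAnalysis; plotter: the object owning make_roi_plots
mcube_tot = gta.model_counts_map()        # total model counts cube
plotter.make_roi_plots(gta, mcube_tot,
                       prefix='fit0',                  # assumed output prefix
                       loge_bounds=(2.0, 5.0),         # log10(E/MeV) range
                       weighted=False)
# e.g. writes fit0_model_map_2.000_5.000.png and the x/y projection plots
# into gta.config['fileio']['workdir']
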
def toProtocolElement(self):
"""
Returns the representation of this ContinuousSet as the corresponding
ProtocolElement.
"""
gaContinuousSet = protocol.ContinuousSet()
gaContinuousSet.id = self.getId()
gaContinuousSet.dataset_id = self.getParentContainer().getId()
gaContinuousSet.reference_set_id = pb.string(
self._referenceSet.getId())
gaContinuousSet.name = self._name
gaContinuousSet.source_uri = self._sourceUri
attributes = self.getAttributes()
for key in attributes:
gaContinuousSet.attributes.attr[key] \
.values.extend(protocol.encodeValue(attributes[key]))
return gaContinuousSet | def function[toProtocolElement, parameter[self]]:
constant[
Returns the representation of this ContinuousSet as the corresponding
ProtocolElement.
]
variable[gaContinuousSet] assign[=] call[name[protocol].ContinuousSet, parameter[]]
name[gaContinuousSet].id assign[=] call[name[self].getId, parameter[]]
name[gaContinuousSet].dataset_id assign[=] call[call[name[self].getParentContainer, parameter[]].getId, parameter[]]
name[gaContinuousSet].reference_set_id assign[=] call[name[pb].string, parameter[call[name[self]._referenceSet.getId, parameter[]]]]
name[gaContinuousSet].name assign[=] name[self]._name
name[gaContinuousSet].source_uri assign[=] name[self]._sourceUri
variable[attributes] assign[=] call[name[self].getAttributes, parameter[]]
for taget[name[key]] in starred[name[attributes]] begin[:]
call[call[name[gaContinuousSet].attributes.attr][name[key]].values.extend, parameter[call[name[protocol].encodeValue, parameter[call[name[attributes]][name[key]]]]]]
return[name[gaContinuousSet]] | keyword[def] identifier[toProtocolElement] ( identifier[self] ):
literal[string]
identifier[gaContinuousSet] = identifier[protocol] . identifier[ContinuousSet] ()
identifier[gaContinuousSet] . identifier[id] = identifier[self] . identifier[getId] ()
identifier[gaContinuousSet] . identifier[dataset_id] = identifier[self] . identifier[getParentContainer] (). identifier[getId] ()
identifier[gaContinuousSet] . identifier[reference_set_id] = identifier[pb] . identifier[string] (
identifier[self] . identifier[_referenceSet] . identifier[getId] ())
identifier[gaContinuousSet] . identifier[name] = identifier[self] . identifier[_name]
identifier[gaContinuousSet] . identifier[source_uri] = identifier[self] . identifier[_sourceUri]
identifier[attributes] = identifier[self] . identifier[getAttributes] ()
keyword[for] identifier[key] keyword[in] identifier[attributes] :
identifier[gaContinuousSet] . identifier[attributes] . identifier[attr] [ identifier[key] ]. identifier[values] . identifier[extend] ( identifier[protocol] . identifier[encodeValue] ( identifier[attributes] [ identifier[key] ]))
keyword[return] identifier[gaContinuousSet] | def toProtocolElement(self):
"""
Returns the representation of this ContinuousSet as the corresponding
ProtocolElement.
"""
gaContinuousSet = protocol.ContinuousSet()
gaContinuousSet.id = self.getId()
gaContinuousSet.dataset_id = self.getParentContainer().getId()
gaContinuousSet.reference_set_id = pb.string(self._referenceSet.getId())
gaContinuousSet.name = self._name
gaContinuousSet.source_uri = self._sourceUri
attributes = self.getAttributes()
for key in attributes:
gaContinuousSet.attributes.attr[key].values.extend(protocol.encodeValue(attributes[key])) # depends on [control=['for'], data=['key']]
return gaContinuousSet |
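
A brief usage sketch for the serializer above; the continuous_set object is assumed to come from a GA4GH datamodel container, and only the toProtocolElement call itself is from the record:

# continuous_set: a ContinuousSet already attached to a dataset (assumed)
message = continuous_set.toProtocolElement()
print(message.id, message.dataset_id, message.reference_set_id)
# any key/value pairs from getAttributes() are mirrored into
# message.attributes.attr[key].values
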
def Sample(self, task, status):
"""Takes a sample of the status of a task for profiling.
Args:
task (Task): a task.
status (str): status.
"""
sample_time = time.time()
sample = '{0:f}\t{1:s}\t{2:s}\n'.format(
sample_time, task.identifier, status)
self._WritesString(sample) | def function[Sample, parameter[self, task, status]]:
constant[Takes a sample of the status of a task for profiling.
Args:
task (Task): a task.
status (str): status.
]
variable[sample_time] assign[=] call[name[time].time, parameter[]]
variable[sample] assign[=] call[constant[{0:f} {1:s} {2:s}
].format, parameter[name[sample_time], name[task].identifier, name[status]]]
call[name[self]._WritesString, parameter[name[sample]]] | keyword[def] identifier[Sample] ( identifier[self] , identifier[task] , identifier[status] ):
literal[string]
identifier[sample_time] = identifier[time] . identifier[time] ()
identifier[sample] = literal[string] . identifier[format] (
identifier[sample_time] , identifier[task] . identifier[identifier] , identifier[status] )
identifier[self] . identifier[_WritesString] ( identifier[sample] ) | def Sample(self, task, status):
"""Takes a sample of the status of a task for profiling.
Args:
task (Task): a task.
status (str): status.
"""
sample_time = time.time()
sample = '{0:f}\t{1:s}\t{2:s}\n'.format(sample_time, task.identifier, status)
self._WritesString(sample) |
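
The profiler writes one tab-separated line per sample; a standalone reconstruction of the exact format string used above (the identifier and status values are made up):

import time

line = '{0:f}\t{1:s}\t{2:s}\n'.format(time.time(), 'task-4f2a', 'processing')
# e.g. '1700000000.123456\ttask-4f2a\tprocessing\n'
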
def generate_snapshot(self, prov_dep):
# type: (MutableMapping[Text, Any]) -> None
"""Copy all of the CWL files to the snapshot/ directory."""
self.self_check()
for key, value in prov_dep.items():
if key == "location" and value.split("/")[-1]:
filename = value.split("/")[-1]
path = os.path.join(self.folder, SNAPSHOT, filename)
filepath = ''
if "file://" in value:
filepath = value[7:]
else:
filepath = value
# FIXME: What if destination path already exists?
if os.path.exists(filepath):
try:
if os.path.isdir(filepath):
shutil.copytree(filepath, path)
else:
shutil.copy(filepath, path)
timestamp = datetime.datetime.fromtimestamp(os.path.getmtime(filepath))
self.add_tagfile(path, timestamp)
except PermissionError:
pass # FIXME: avoids duplicate snapshotting; need better solution
elif key in ("secondaryFiles", "listing"):
for files in value:
if isinstance(files, MutableMapping):
self.generate_snapshot(files)
else:
pass | def function[generate_snapshot, parameter[self, prov_dep]]:
constant[Copy all of the CWL files to the snapshot/ directory.]
call[name[self].self_check, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e79d0>, <ast.Name object at 0x7da20c6e6050>]]] in starred[call[name[prov_dep].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da20c6e58d0> begin[:]
variable[filename] assign[=] call[call[name[value].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da2054a53c0>]
variable[path] assign[=] call[name[os].path.join, parameter[name[self].folder, name[SNAPSHOT], name[filename]]]
variable[filepath] assign[=] constant[]
if compare[constant[file://] in name[value]] begin[:]
variable[filepath] assign[=] call[name[value]][<ast.Slice object at 0x7da2054a5f90>]
if call[name[os].path.exists, parameter[name[filepath]]] begin[:]
<ast.Try object at 0x7da2054a4e80> | keyword[def] identifier[generate_snapshot] ( identifier[self] , identifier[prov_dep] ):
literal[string]
identifier[self] . identifier[self_check] ()
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[prov_dep] . identifier[items] ():
keyword[if] identifier[key] == literal[string] keyword[and] identifier[value] . identifier[split] ( literal[string] )[- literal[int] ]:
identifier[filename] = identifier[value] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[folder] , identifier[SNAPSHOT] , identifier[filename] )
identifier[filepath] = literal[string]
keyword[if] literal[string] keyword[in] identifier[value] :
identifier[filepath] = identifier[value] [ literal[int] :]
keyword[else] :
identifier[filepath] = identifier[value]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filepath] ):
keyword[try] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[filepath] ):
identifier[shutil] . identifier[copytree] ( identifier[filepath] , identifier[path] )
keyword[else] :
identifier[shutil] . identifier[copy] ( identifier[filepath] , identifier[path] )
identifier[timestamp] = identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[os] . identifier[path] . identifier[getmtime] ( identifier[filepath] ))
identifier[self] . identifier[add_tagfile] ( identifier[path] , identifier[timestamp] )
keyword[except] identifier[PermissionError] :
keyword[pass]
keyword[elif] identifier[key] keyword[in] ( literal[string] , literal[string] ):
keyword[for] identifier[files] keyword[in] identifier[value] :
keyword[if] identifier[isinstance] ( identifier[files] , identifier[MutableMapping] ):
identifier[self] . identifier[generate_snapshot] ( identifier[files] )
keyword[else] :
keyword[pass] | def generate_snapshot(self, prov_dep):
# type: (MutableMapping[Text, Any]) -> None
'Copy all of the CWL files to the snapshot/ directory.'
self.self_check()
for (key, value) in prov_dep.items():
if key == 'location' and value.split('/')[-1]:
filename = value.split('/')[-1]
path = os.path.join(self.folder, SNAPSHOT, filename)
filepath = ''
if 'file://' in value:
filepath = value[7:] # depends on [control=['if'], data=['value']]
else:
filepath = value
# FIXME: What if destination path already exists?
if os.path.exists(filepath):
try:
if os.path.isdir(filepath):
shutil.copytree(filepath, path) # depends on [control=['if'], data=[]]
else:
shutil.copy(filepath, path)
timestamp = datetime.datetime.fromtimestamp(os.path.getmtime(filepath))
self.add_tagfile(path, timestamp) # depends on [control=['try'], data=[]]
except PermissionError:
pass # FIXME: avoids duplicate snapshotting; need better solution # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif key in ('secondaryFiles', 'listing'):
for files in value:
if isinstance(files, MutableMapping):
self.generate_snapshot(files) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['files']] # depends on [control=['if'], data=[]]
else:
pass # depends on [control=['for'], data=[]] |
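
An illustrative dependency mapping of the shape generate_snapshot walks; the keys ('location', 'secondaryFiles') follow the method above, while the paths and the research_obj name are invented for the example:

prov_dep = {
    'class': 'File',
    'location': 'file:///tmp/workflow.cwl',   # copied into the snapshot/ folder
    'secondaryFiles': [
        {'class': 'File', 'location': 'file:///tmp/inputs.yml'},  # recursed into
    ],
}
research_obj.generate_snapshot(prov_dep)  # research_obj: a cwltool ResearchObject (assumed)
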
def a_thing(random=random, *args, **kwargs):
"""
Return a ... thing.
>>> mock_random.seed(0)
>>> a_thing(random=mock_random)
'two secrets'
>>> mock_random.seed(1)
>>> a_thing(random=mock_random, capitalize=True)
'A Mighty Poop'
>>> mock_random.seed(2)
>>> a_thing(random=mock_random, slugify=True)
'a-poop'
>>> mock_random.seed(4)
>>> a_thing(random=mock_random, slugify=True)
'two-chimps'
"""
return thing(random=random, an=True, *args, **kwargs) | def function[a_thing, parameter[random]]:
constant[
Return a ... thing.
>>> mock_random.seed(0)
>>> a_thing(random=mock_random)
'two secrets'
>>> mock_random.seed(1)
>>> a_thing(random=mock_random, capitalize=True)
'A Mighty Poop'
>>> mock_random.seed(2)
>>> a_thing(random=mock_random, slugify=True)
'a-poop'
>>> mock_random.seed(4)
>>> a_thing(random=mock_random, slugify=True)
'two-chimps'
]
return[call[name[thing], parameter[<ast.Starred object at 0x7da1b0ca5150>]]] | keyword[def] identifier[a_thing] ( identifier[random] = identifier[random] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[thing] ( identifier[random] = identifier[random] , identifier[an] = keyword[True] ,* identifier[args] ,** identifier[kwargs] ) | def a_thing(random=random, *args, **kwargs):
"""
Return a ... thing.
>>> mock_random.seed(0)
>>> a_thing(random=mock_random)
'two secrets'
>>> mock_random.seed(1)
>>> a_thing(random=mock_random, capitalize=True)
'A Mighty Poop'
>>> mock_random.seed(2)
>>> a_thing(random=mock_random, slugify=True)
'a-poop'
>>> mock_random.seed(4)
>>> a_thing(random=mock_random, slugify=True)
'two-chimps'
"""
return thing(*args, random=random, an=True, **kwargs) |
def data_to_binary(self):
"""
:return: bytes
"""
return bytes([
COMMAND_CODE,
self.channels_to_byte([self.channel]),
self.disable_inhibit_forced,
self.status,
self.led_status
]) + struct.pack('>L', self.delay_time)[-3:] | def function[data_to_binary, parameter[self]]:
constant[
:return: bytes
]
return[binary_operation[call[name[bytes], parameter[list[[<ast.Name object at 0x7da18f723130>, <ast.Call object at 0x7da18f7216f0>, <ast.Attribute object at 0x7da204960430>, <ast.Attribute object at 0x7da204961f00>, <ast.Attribute object at 0x7da2049607c0>]]]] + call[call[name[struct].pack, parameter[constant[>L], name[self].delay_time]]][<ast.Slice object at 0x7da204963520>]]] | keyword[def] identifier[data_to_binary] ( identifier[self] ):
literal[string]
keyword[return] identifier[bytes] ([
identifier[COMMAND_CODE] ,
identifier[self] . identifier[channels_to_byte] ([ identifier[self] . identifier[channel] ]),
identifier[self] . identifier[disable_inhibit_forced] ,
identifier[self] . identifier[status] ,
identifier[self] . identifier[led_status]
])+ identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[delay_time] )[- literal[int] :] | def data_to_binary(self):
"""
:return: bytes
"""
return bytes([COMMAND_CODE, self.channels_to_byte([self.channel]), self.disable_inhibit_forced, self.status, self.led_status]) + struct.pack('>L', self.delay_time)[-3:] |
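
A quick check of the 3-byte delay encoding used above: struct.pack('>L', ...) yields four big-endian bytes, and the [-3:] slice keeps the 24 least-significant bits:

import struct

delay_time = 500
tail = struct.pack('>L', delay_time)[-3:]
assert tail == b'\x00\x01\xf4'   # low three bytes, big-endian
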
async def edit_settings(self, **kwargs):
"""|coro|
Edits the client user's settings.
.. note::
This only applies to non-bot accounts.
Parameters
-------
afk_timeout: :class:`int`
How long (in seconds) the user needs to be AFK until Discord
sends push notifications to your mobile device.
animate_emojis: :class:`bool`
Whether or not to animate emojis in the chat.
convert_emoticons: :class:`bool`
Whether or not to automatically convert emoticons into emojis.
e.g. :-) -> 😃
default_guilds_restricted: :class:`bool`
Whether or not to automatically disable DMs between you and
members of new guilds you join.
detect_platform_accounts: :class:`bool`
Whether or not to automatically detect accounts from services
like Steam and Blizzard when you open the Discord client.
developer_mode: :class:`bool`
Whether or not to enable developer mode.
disable_games_tab: :class:`bool`
Whether or not to disable the showing of the Games tab.
enable_tts_command: :class:`bool`
Whether or not to allow tts messages to be played/sent.
explicit_content_filter: :class:`UserContentFilter`
The filter for explicit content in all messages.
friend_source_flags: :class:`FriendFlags`
Who can add you as a friend.
gif_auto_play: :class:`bool`
Whether or not to automatically play gifs that are in the chat.
guild_positions: List[:class:`abc.Snowflake`]
A list of guilds in order of the guild/guild icons that are on
the left hand side of the UI.
inline_attachment_media: :class:`bool`
Whether or not to display attachments when they are uploaded in chat.
inline_embed_media: :class:`bool`
Whether or not to display videos and images from links posted in chat.
locale: :class:`str`
The RFC 3066 language identifier of the locale to use for the language
of the Discord client.
message_display_compact: :class:`bool`
Whether or not to use the compact Discord display mode.
render_embeds: :class:`bool`
Whether or not to render embeds that are sent in the chat.
render_reactions: :class:`bool`
Whether or not to render reactions that are added to messages.
restricted_guilds: List[:class:`abc.Snowflake`]
A list of guilds that you will not receive DMs from.
show_current_game: :class:`bool`
Whether or not to display the game that you are currently playing.
status: :class:`Status`
The client's status that is shown to others.
theme: :class:`Theme`
The theme of the Discord UI.
timezone_offset: :class:`int`
The timezone offset to use.
Raises
-------
HTTPException
Editing the settings failed.
Forbidden
The client is a bot user and not a user account.
Returns
-------
:class:`dict`
The client user's updated settings.
"""
payload = {}
content_filter = kwargs.pop('explicit_content_filter', None)
if content_filter:
payload.update({'explicit_content_filter': content_filter.value})
friend_flags = kwargs.pop('friend_source_flags', None)
if friend_flags:
dicts = [{}, {'mutual_guilds': True}, {'mutual_friends': True},
{'mutual_guilds': True, 'mutual_friends': True}, {'all': True}]
payload.update({'friend_source_flags': dicts[friend_flags.value]})
guild_positions = kwargs.pop('guild_positions', None)
if guild_positions:
guild_positions = [str(x.id) for x in guild_positions]
payload.update({'guild_positions': guild_positions})
restricted_guilds = kwargs.pop('restricted_guilds', None)
if restricted_guilds:
restricted_guilds = [str(x.id) for x in restricted_guilds]
payload.update({'restricted_guilds': restricted_guilds})
status = kwargs.pop('status', None)
if status:
payload.update({'status': status.value})
theme = kwargs.pop('theme', None)
if theme:
payload.update({'theme': theme.value})
payload.update(kwargs)
data = await self._state.http.edit_settings(**payload)
return data | <ast.AsyncFunctionDef object at 0x7da1b2062d10> | keyword[async] keyword[def] identifier[edit_settings] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[payload] ={}
identifier[content_filter] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[content_filter] :
identifier[payload] . identifier[update] ({ literal[string] : identifier[content_filter] . identifier[value] })
identifier[friend_flags] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[friend_flags] :
identifier[dicts] =[{},{ literal[string] : keyword[True] },{ literal[string] : keyword[True] },
{ literal[string] : keyword[True] , literal[string] : keyword[True] },{ literal[string] : keyword[True] }]
identifier[payload] . identifier[update] ({ literal[string] : identifier[dicts] [ identifier[friend_flags] . identifier[value] ]})
identifier[guild_positions] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[guild_positions] :
identifier[guild_positions] =[ identifier[str] ( identifier[x] . identifier[id] ) keyword[for] identifier[x] keyword[in] identifier[guild_positions] ]
identifier[payload] . identifier[update] ({ literal[string] : identifier[guild_positions] })
identifier[restricted_guilds] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[restricted_guilds] :
identifier[restricted_guilds] =[ identifier[str] ( identifier[x] . identifier[id] ) keyword[for] identifier[x] keyword[in] identifier[restricted_guilds] ]
identifier[payload] . identifier[update] ({ literal[string] : identifier[restricted_guilds] })
identifier[status] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[status] :
identifier[payload] . identifier[update] ({ literal[string] : identifier[status] . identifier[value] })
identifier[theme] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[theme] :
identifier[payload] . identifier[update] ({ literal[string] : identifier[theme] . identifier[value] })
identifier[payload] . identifier[update] ( identifier[kwargs] )
identifier[data] = keyword[await] identifier[self] . identifier[_state] . identifier[http] . identifier[edit_settings] (** identifier[payload] )
keyword[return] identifier[data] | async def edit_settings(self, **kwargs):
"""|coro|
Edits the client user's settings.
.. note::
This only applies to non-bot accounts.
Parameters
-------
afk_timeout: :class:`int`
How long (in seconds) the user needs to be AFK until Discord
sends push notifications to your mobile device.
animate_emojis: :class:`bool`
Whether or not to animate emojis in the chat.
convert_emoticons: :class:`bool`
Whether or not to automatically convert emoticons into emojis.
e.g. :-) -> 😃
default_guilds_restricted: :class:`bool`
Whether or not to automatically disable DMs between you and
members of new guilds you join.
detect_platform_accounts: :class:`bool`
Whether or not to automatically detect accounts from services
like Steam and Blizzard when you open the Discord client.
developer_mode: :class:`bool`
Whether or not to enable developer mode.
disable_games_tab: :class:`bool`
Whether or not to disable the showing of the Games tab.
enable_tts_command: :class:`bool`
Whether or not to allow tts messages to be played/sent.
explicit_content_filter: :class:`UserContentFilter`
The filter for explicit content in all messages.
friend_source_flags: :class:`FriendFlags`
Who can add you as a friend.
gif_auto_play: :class:`bool`
Whether or not to automatically play gifs that are in the chat.
guild_positions: List[:class:`abc.Snowflake`]
A list of guilds in order of the guild/guild icons that are on
the left hand side of the UI.
inline_attachment_media: :class:`bool`
Whether or not to display attachments when they are uploaded in chat.
inline_embed_media: :class:`bool`
Whether or not to display videos and images from links posted in chat.
locale: :class:`str`
The RFC 3066 language identifier of the locale to use for the language
of the Discord client.
message_display_compact: :class:`bool`
Whether or not to use the compact Discord display mode.
render_embeds: :class:`bool`
Whether or not to render embeds that are sent in the chat.
render_reactions: :class:`bool`
Whether or not to render reactions that are added to messages.
restricted_guilds: List[:class:`abc.Snowflake`]
A list of guilds that you will not receive DMs from.
show_current_game: :class:`bool`
Whether or not to display the game that you are currently playing.
status: :class:`Status`
The clients status that is shown to others.
theme: :class:`Theme`
The theme of the Discord UI.
timezone_offset: :class:`int`
The timezone offset to use.
Raises
-------
HTTPException
Editing the settings failed.
Forbidden
The client is a bot user and not a user account.
Returns
-------
:class:`dict`
The client user's updated settings.
"""
payload = {}
content_filter = kwargs.pop('explicit_content_filter', None)
if content_filter:
payload.update({'explicit_content_filter': content_filter.value}) # depends on [control=['if'], data=[]]
friend_flags = kwargs.pop('friend_source_flags', None)
if friend_flags:
dicts = [{}, {'mutual_guilds': True}, {'mutual_friends': True}, {'mutual_guilds': True, 'mutual_friends': True}, {'all': True}]
payload.update({'friend_source_flags': dicts[friend_flags.value]}) # depends on [control=['if'], data=[]]
guild_positions = kwargs.pop('guild_positions', None)
if guild_positions:
guild_positions = [str(x.id) for x in guild_positions]
payload.update({'guild_positions': guild_positions}) # depends on [control=['if'], data=[]]
restricted_guilds = kwargs.pop('restricted_guilds', None)
if restricted_guilds:
restricted_guilds = [str(x.id) for x in restricted_guilds]
payload.update({'restricted_guilds': restricted_guilds}) # depends on [control=['if'], data=[]]
status = kwargs.pop('status', None)
if status:
payload.update({'status': status.value}) # depends on [control=['if'], data=[]]
theme = kwargs.pop('theme', None)
if theme:
payload.update({'theme': theme.value}) # depends on [control=['if'], data=[]]
payload.update(kwargs)
data = await self._state.http.edit_settings(**payload)
return data |
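
A hedged usage sketch for the coroutine above (a discord.py-style user-account client is assumed; the enum values shown are illustrative):

# inside an async context, with a logged-in user account (not a bot)
settings = await client.user.edit_settings(
    status=discord.Status.idle,
    theme=discord.Theme.dark,   # assumes the library's Theme enum
    afk_timeout=300,
)
print(settings)                 # the updated settings dict returned by the API
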
def send_signals(self):
"""Shout for the world to hear whether a txn was successful."""
if self.flag:
invalid_ipn_received.send(sender=self)
return
else:
valid_ipn_received.send(sender=self) | def function[send_signals, parameter[self]]:
constant[Shout for the world to hear whether a txn was successful.]
if name[self].flag begin[:]
call[name[invalid_ipn_received].send, parameter[]]
return[None] | keyword[def] identifier[send_signals] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[flag] :
identifier[invalid_ipn_received] . identifier[send] ( identifier[sender] = identifier[self] )
keyword[return]
keyword[else] :
identifier[valid_ipn_received] . identifier[send] ( identifier[sender] = identifier[self] ) | def send_signals(self):
"""Shout for the world to hear whether a txn was successful."""
if self.flag:
invalid_ipn_received.send(sender=self)
return # depends on [control=['if'], data=[]]
else:
valid_ipn_received.send(sender=self) |
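
A self-contained sketch of the dispatch pattern send_signals relies on; the two Signal objects stand in for the ones imported by the record's module:

import django.dispatch

valid_ipn_received = django.dispatch.Signal()
invalid_ipn_received = django.dispatch.Signal()

def on_valid(sender, **kwargs):
    # sender is the transaction instance whose send_signals() fired
    print('valid IPN received from', sender)

valid_ipn_received.connect(on_valid)
valid_ipn_received.send(sender='demo-txn')   # prints: valid IPN received from demo-txn
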
def _GetParserFilters(cls, parser_filter_expression):
"""Retrieves the parsers and plugins to include and exclude.
Takes a comma-separated string and splits it up into two dictionaries,
of parsers and plugins to include and to exclude from selection. If a
particular filter is prepended with an exclamation point it will be
added to the exclude section, otherwise to the include section.
Args:
parser_filter_expression (str): parser filter expression, where None
represents all parsers and plugins.
Returns:
tuple: containing:
* dict[str, BaseParser]: included parsers and plugins by name.
* dict[str, BaseParser]: excluded parsers and plugins by name.
"""
if not parser_filter_expression:
return {}, {}
includes = {}
excludes = {}
preset_names = cls._presets.GetNames()
for parser_filter in parser_filter_expression.split(','):
parser_filter = parser_filter.strip()
if not parser_filter:
continue
if parser_filter.startswith('!'):
parser_filter = parser_filter[1:]
active_dict = excludes
else:
active_dict = includes
parser_filter = parser_filter.lower()
if parser_filter in preset_names:
for parser_in_category in cls._GetParsersFromPresetCategory(
parser_filter):
parser, _, plugin = parser_in_category.partition('/')
active_dict.setdefault(parser, [])
if plugin:
active_dict[parser].append(plugin)
else:
parser, _, plugin = parser_filter.partition('/')
active_dict.setdefault(parser, [])
if plugin:
active_dict[parser].append(plugin)
cls._ReduceParserFilters(includes, excludes)
return includes, excludes | def function[_GetParserFilters, parameter[cls, parser_filter_expression]]:
constant[Retrieves the parsers and plugins to include and exclude.
Takes a comma separated string and splits it up into two dictionaries,
of parsers and plugins to include and to exclude from selection. If a
particular filter is prepended with an exclamation point it will be
added to the exclude section, otherwise in the include.
Args:
parser_filter_expression (str): parser filter expression, where None
represents all parsers and plugins.
Returns:
tuple: containing:
* dict[str, BaseParser]: included parsers and plugins by name.
* dict[str, BaseParser]: excluded parsers and plugins by name.
]
if <ast.UnaryOp object at 0x7da18bcc88e0> begin[:]
return[tuple[[<ast.Dict object at 0x7da18bcc8e50>, <ast.Dict object at 0x7da18bcc8430>]]]
variable[includes] assign[=] dictionary[[], []]
variable[excludes] assign[=] dictionary[[], []]
variable[preset_names] assign[=] call[name[cls]._presets.GetNames, parameter[]]
for taget[name[parser_filter]] in starred[call[name[parser_filter_expression].split, parameter[constant[,]]]] begin[:]
variable[parser_filter] assign[=] call[name[parser_filter].strip, parameter[]]
if <ast.UnaryOp object at 0x7da18bcc82b0> begin[:]
continue
if call[name[parser_filter].startswith, parameter[constant[!]]] begin[:]
variable[parser_filter] assign[=] call[name[parser_filter]][<ast.Slice object at 0x7da18bccb160>]
variable[active_dict] assign[=] name[excludes]
variable[parser_filter] assign[=] call[name[parser_filter].lower, parameter[]]
if compare[name[parser_filter] in name[preset_names]] begin[:]
for taget[name[parser_in_category]] in starred[call[name[cls]._GetParsersFromPresetCategory, parameter[name[parser_filter]]]] begin[:]
<ast.Tuple object at 0x7da18bcc9090> assign[=] call[name[parser_in_category].partition, parameter[constant[/]]]
call[name[active_dict].setdefault, parameter[name[parser], list[[]]]]
if name[plugin] begin[:]
call[call[name[active_dict]][name[parser]].append, parameter[name[plugin]]]
call[name[cls]._ReduceParserFilters, parameter[name[includes], name[excludes]]]
return[tuple[[<ast.Name object at 0x7da20cabfc70>, <ast.Name object at 0x7da20cabfe50>]]] | keyword[def] identifier[_GetParserFilters] ( identifier[cls] , identifier[parser_filter_expression] ):
literal[string]
keyword[if] keyword[not] identifier[parser_filter_expression] :
keyword[return] {},{}
identifier[includes] ={}
identifier[excludes] ={}
identifier[preset_names] = identifier[cls] . identifier[_presets] . identifier[GetNames] ()
keyword[for] identifier[parser_filter] keyword[in] identifier[parser_filter_expression] . identifier[split] ( literal[string] ):
identifier[parser_filter] = identifier[parser_filter] . identifier[strip] ()
keyword[if] keyword[not] identifier[parser_filter] :
keyword[continue]
keyword[if] identifier[parser_filter] . identifier[startswith] ( literal[string] ):
identifier[parser_filter] = identifier[parser_filter] [ literal[int] :]
identifier[active_dict] = identifier[excludes]
keyword[else] :
identifier[active_dict] = identifier[includes]
identifier[parser_filter] = identifier[parser_filter] . identifier[lower] ()
keyword[if] identifier[parser_filter] keyword[in] identifier[preset_names] :
keyword[for] identifier[parser_in_category] keyword[in] identifier[cls] . identifier[_GetParsersFromPresetCategory] (
identifier[parser_filter] ):
identifier[parser] , identifier[_] , identifier[plugin] = identifier[parser_in_category] . identifier[partition] ( literal[string] )
identifier[active_dict] . identifier[setdefault] ( identifier[parser] ,[])
keyword[if] identifier[plugin] :
identifier[active_dict] [ identifier[parser] ]. identifier[append] ( identifier[plugin] )
keyword[else] :
identifier[parser] , identifier[_] , identifier[plugin] = identifier[parser_filter] . identifier[partition] ( literal[string] )
identifier[active_dict] . identifier[setdefault] ( identifier[parser] ,[])
keyword[if] identifier[plugin] :
identifier[active_dict] [ identifier[parser] ]. identifier[append] ( identifier[plugin] )
identifier[cls] . identifier[_ReduceParserFilters] ( identifier[includes] , identifier[excludes] )
keyword[return] identifier[includes] , identifier[excludes] | def _GetParserFilters(cls, parser_filter_expression):
"""Retrieves the parsers and plugins to include and exclude.
Takes a comma separated string and splits it up into two dictionaries,
of parsers and plugins to include and to exclude from selection. If a
particular filter is prepended with an exclamation point it will be
added to the exclude section, otherwise in the include.
Args:
parser_filter_expression (str): parser filter expression, where None
represents all parsers and plugins.
Returns:
tuple: containing:
* dict[str, BaseParser]: included parsers and plugins by name.
* dict[str, BaseParser]: excluded parsers and plugins by name.
"""
if not parser_filter_expression:
return ({}, {}) # depends on [control=['if'], data=[]]
includes = {}
excludes = {}
preset_names = cls._presets.GetNames()
for parser_filter in parser_filter_expression.split(','):
parser_filter = parser_filter.strip()
if not parser_filter:
continue # depends on [control=['if'], data=[]]
if parser_filter.startswith('!'):
parser_filter = parser_filter[1:]
active_dict = excludes # depends on [control=['if'], data=[]]
else:
active_dict = includes
parser_filter = parser_filter.lower()
if parser_filter in preset_names:
for parser_in_category in cls._GetParsersFromPresetCategory(parser_filter):
(parser, _, plugin) = parser_in_category.partition('/')
active_dict.setdefault(parser, [])
if plugin:
active_dict[parser].append(plugin) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parser_in_category']] # depends on [control=['if'], data=['parser_filter']]
else:
(parser, _, plugin) = parser_filter.partition('/')
active_dict.setdefault(parser, [])
if plugin:
active_dict[parser].append(plugin) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parser_filter']]
cls._ReduceParserFilters(includes, excludes)
return (includes, excludes) |
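
A standalone re-implementation of just the include/exclude split performed above (preset expansion and _ReduceParserFilters are omitted), to make the filter-expression syntax concrete:

def split_filter_expression(expression):
    """Minimal sketch of the '!'-prefixed exclude / 'parser/plugin' syntax."""
    includes, excludes = {}, {}
    for entry in (item.strip() for item in expression.split(',')):
        if not entry:
            continue
        target = excludes if entry.startswith('!') else includes
        parser, _, plugin = entry.lstrip('!').lower().partition('/')
        target.setdefault(parser, [])
        if plugin:
            target[parser].append(plugin)
    return includes, excludes

print(split_filter_expression('winreg/appcompatcache,!filestat'))
# ({'winreg': ['appcompatcache']}, {'filestat': []})
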