Dataset columns (four parallel string representations per row; lengths are the min–max string lengths observed):

    column            dtype   lengths
    code              string  75 – 104k
    code_sememe       string  47 – 309k
    token_type        string  215 – 214k
    code_dependency   string  75 – 155k
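Each row pairs a Python function (code) with three machine-derived views of it: an AST-style statement dump (code_sememe), a keyword/identifier/literal token stream (token_type), and a rewrite annotated with control-dependency comments (code_dependency). Below is a minimal sketch of how such rows could be loaded and inspected, assuming the samples ship as a Hugging Face dataset; the dataset path is a placeholder, not the real identifier.

    # Hypothetical loading sketch; "user/code-views" is a placeholder dataset path.
    from datasets import load_dataset

    ds = load_dataset("user/code-views", split="train")
    row = ds[0]
    for column in ("code", "code_sememe", "token_type", "code_dependency"):
        # Show the first 80 characters of each representation of the first row.
        print(column, "->", row[column][:80].replace("\n", " "), "...")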
def registerIcon(self, fileName, glyph, isOpen=None):
    """ Register an icon SVG file given a glyph, and optionally the open/close state.

        :param fileName: filename to the SVG file.
            If the filename is a relative path, the ICONS_DIRECTORY will be prepended.
        :param glyph: a string describing the glyph (e.g. 'file', 'array')
        :param isOpen: boolean that indicates if the RTI is open or closed.
            If None, the icon will be registered for open is both True and False
        :return: QIcon
    """
    check_class(isOpen, bool, allow_none=True)

    if fileName and not os.path.isabs(fileName):
        fileName = os.path.join(self.ICONS_DIRECTORY, fileName)

    if isOpen is None:
        # Register both opened and closed variants
        self._registry[(glyph, True)] = fileName
        self._registry[(glyph, False)] = fileName
    else:
        self._registry[(glyph, isOpen)] = fileName
def function[registerIcon, parameter[self, fileName, glyph, isOpen]]: constant[ Register an icon SVG file given a glyph, and optionally the open/close state. :param fileName: filename to the SVG file. If the filename is a relative path, the ICONS_DIRECTORY will be prepended. :param glyph: a string describing the glyph (e.g. 'file', 'array') :param isOpen: boolean that indicates if the RTI is open or closed. If None, the icon will be registered for open is both True and False :return: QIcon ] call[name[check_class], parameter[name[isOpen], name[bool]]] if <ast.BoolOp object at 0x7da1b0535cf0> begin[:] variable[fileName] assign[=] call[name[os].path.join, parameter[name[self].ICONS_DIRECTORY, name[fileName]]] if compare[name[isOpen] is constant[None]] begin[:] call[name[self]._registry][tuple[[<ast.Name object at 0x7da1b0534ee0>, <ast.Constant object at 0x7da1b0535fc0>]]] assign[=] name[fileName] call[name[self]._registry][tuple[[<ast.Name object at 0x7da1b05351e0>, <ast.Constant object at 0x7da1b0534d90>]]] assign[=] name[fileName]
keyword[def] identifier[registerIcon] ( identifier[self] , identifier[fileName] , identifier[glyph] , identifier[isOpen] = keyword[None] ): literal[string] identifier[check_class] ( identifier[isOpen] , identifier[bool] , identifier[allow_none] = keyword[True] ) keyword[if] identifier[fileName] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isabs] ( identifier[fileName] ): identifier[fileName] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[ICONS_DIRECTORY] , identifier[fileName] ) keyword[if] identifier[isOpen] keyword[is] keyword[None] : identifier[self] . identifier[_registry] [( identifier[glyph] , keyword[True] )]= identifier[fileName] identifier[self] . identifier[_registry] [( identifier[glyph] , keyword[False] )]= identifier[fileName] keyword[else] : identifier[self] . identifier[_registry] [( identifier[glyph] , identifier[isOpen] )]= identifier[fileName]
def registerIcon(self, fileName, glyph, isOpen=None):
    """ Register an icon SVG file given a glyph, and optionally the open/close state.

        :param fileName: filename to the SVG file.
            If the filename is a relative path, the ICONS_DIRECTORY will be prepended.
        :param glyph: a string describing the glyph (e.g. 'file', 'array')
        :param isOpen: boolean that indicates if the RTI is open or closed.
            If None, the icon will be registered for open is both True and False
        :return: QIcon
    """
    check_class(isOpen, bool, allow_none=True)
    if fileName and (not os.path.isabs(fileName)):
        fileName = os.path.join(self.ICONS_DIRECTORY, fileName) # depends on [control=['if'], data=[]]
    if isOpen is None:
        # Register both opened and closed variants
        self._registry[glyph, True] = fileName
        self._registry[glyph, False] = fileName # depends on [control=['if'], data=[]]
    else:
        self._registry[glyph, isOpen] = fileName
def principal_direction_extent(points):
    '''Calculate the extent of a set of 3D points.

    The extent is defined as the maximum distance between
    the projections on the principal directions of the covariance matrix
    of the points.

    Parameter:
        points : a 2D numpy array of points

    Returns:
        extents : the extents for each of the eigenvectors of the cov matrix
        eigs : eigenvalues of the covariance matrix
        eigv : respective eigenvectors of the covariance matrix
    '''
    # center the points around 0.0
    points = np.copy(points)
    points -= np.mean(points, axis=0)

    # principal components
    _, eigv = pca(points)

    extent = np.zeros(3)
    for i in range(eigv.shape[1]):
        # orthogonal projection onto the direction of the v component
        scalar_projs = np.sort(np.array([np.dot(p, eigv[:, i]) for p in points]))
        extent[i] = scalar_projs[-1]
        if scalar_projs[0] < 0.:
            extent -= scalar_projs[0]

    return extent
def function[principal_direction_extent, parameter[points]]: constant[Calculate the extent of a set of 3D points. The extent is defined as the maximum distance between the projections on the principal directions of the covariance matrix of the points. Parameter: points : a 2D numpy array of points Returns: extents : the extents for each of the eigenvectors of the cov matrix eigs : eigenvalues of the covariance matrix eigv : respective eigenvectors of the covariance matrix ] variable[points] assign[=] call[name[np].copy, parameter[name[points]]] <ast.AugAssign object at 0x7da20c76f8e0> <ast.Tuple object at 0x7da20c76fbb0> assign[=] call[name[pca], parameter[name[points]]] variable[extent] assign[=] call[name[np].zeros, parameter[constant[3]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[eigv].shape][constant[1]]]]] begin[:] variable[scalar_projs] assign[=] call[name[np].sort, parameter[call[name[np].array, parameter[<ast.ListComp object at 0x7da20c76d5d0>]]]] call[name[extent]][name[i]] assign[=] call[name[scalar_projs]][<ast.UnaryOp object at 0x7da2047ea770>] if compare[call[name[scalar_projs]][constant[0]] less[<] constant[0.0]] begin[:] <ast.AugAssign object at 0x7da18fe92770> return[name[extent]]
keyword[def] identifier[principal_direction_extent] ( identifier[points] ): literal[string] identifier[points] = identifier[np] . identifier[copy] ( identifier[points] ) identifier[points] -= identifier[np] . identifier[mean] ( identifier[points] , identifier[axis] = literal[int] ) identifier[_] , identifier[eigv] = identifier[pca] ( identifier[points] ) identifier[extent] = identifier[np] . identifier[zeros] ( literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[eigv] . identifier[shape] [ literal[int] ]): identifier[scalar_projs] = identifier[np] . identifier[sort] ( identifier[np] . identifier[array] ([ identifier[np] . identifier[dot] ( identifier[p] , identifier[eigv] [:, identifier[i] ]) keyword[for] identifier[p] keyword[in] identifier[points] ])) identifier[extent] [ identifier[i] ]= identifier[scalar_projs] [- literal[int] ] keyword[if] identifier[scalar_projs] [ literal[int] ]< literal[int] : identifier[extent] -= identifier[scalar_projs] [ literal[int] ] keyword[return] identifier[extent]
def principal_direction_extent(points):
    """Calculate the extent of a set of 3D points.

    The extent is defined as the maximum distance between
    the projections on the principal directions of the covariance matrix
    of the points.

    Parameter:
        points : a 2D numpy array of points

    Returns:
        extents : the extents for each of the eigenvectors of the cov matrix
        eigs : eigenvalues of the covariance matrix
        eigv : respective eigenvectors of the covariance matrix
    """
    # center the points around 0.0
    points = np.copy(points)
    points -= np.mean(points, axis=0)
    # principal components
    (_, eigv) = pca(points)
    extent = np.zeros(3)
    for i in range(eigv.shape[1]):
        # orthogonal projection onto the direction of the v component
        scalar_projs = np.sort(np.array([np.dot(p, eigv[:, i]) for p in points]))
        extent[i] = scalar_projs[-1]
        if scalar_projs[0] < 0.0:
            extent -= scalar_projs[0] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
    return extent
def search(d, recursive=True, store_meta=True):
    '''
    Search for DICOM files within a given directory and receive back
    a dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}

    Example usage::
        >>> import yaxil.dicom
        >>> yaxil.dicom.search("~/dicoms").keys()
        ['1.2.340.500067.8.9.10.11012.13000001401516017181900000200']

    :param d: Directory name
    :type d: str
    :param recursive: Search recursively
    :type recursive: bool
    :param store_meta: Read and store metadata for each file for fast lookups
    :type store_meta: bool
    :returns: Dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}
    :rtype: dict
    '''
    # say this fast three times
    scans = col.defaultdict(lambda: col.defaultdict(lambda: col.defaultdict(list)))
    for dirpath, dirnames, filenames in os.walk(os.path.expanduser(d)):
        for f in filenames:
            fullfile = os.path.join(dirpath, f)
            try:
                d = pydicom.read_file(fullfile, stop_before_pixels=True)
            except pydicom.filereader.InvalidDicomError:
                continue
            meta = {k: getattr(d, k, None) for k in d.dir()} if store_meta else None
            scans[d.StudyInstanceUID][d.SeriesNumber][d.InstanceNumber].append(DicomFile(meta=meta, file=fullfile))
        if not recursive:
            del dirnames[:]
    return scans
def function[search, parameter[d, recursive, store_meta]]: constant[ Search for DICOM files within a given directory and receive back a dictionary of {StudyInstanceUID: {SeriesNumber: [files]}} Example usage:: >>> import yaxil.dicom >>> yaxil.dicom.search("~/dicoms").keys() ['1.2.340.500067.8.9.10.11012.13000001401516017181900000200'] :param d: Directory name :type d: str :param recursive: Search recursively :type recursive: bool :param store_meta: Read and store metadata for each file for fast lookups :type store_meta: bool :returns: Dictionary of {StudyInstanceUID: {SeriesNumber: [files]}} :rtype: dict ] variable[scans] assign[=] call[name[col].defaultdict, parameter[<ast.Lambda object at 0x7da18fe91c30>]] for taget[tuple[[<ast.Name object at 0x7da18fe93220>, <ast.Name object at 0x7da18fe92470>, <ast.Name object at 0x7da18fe93370>]]] in starred[call[name[os].walk, parameter[call[name[os].path.expanduser, parameter[name[d]]]]]] begin[:] for taget[name[f]] in starred[name[filenames]] begin[:] variable[fullfile] assign[=] call[name[os].path.join, parameter[name[dirpath], name[f]]] <ast.Try object at 0x7da1b26affa0> variable[meta] assign[=] <ast.IfExp object at 0x7da1b26ada50> call[call[call[call[name[scans]][name[d].StudyInstanceUID]][name[d].SeriesNumber]][name[d].InstanceNumber].append, parameter[call[name[DicomFile], parameter[]]]] if <ast.UnaryOp object at 0x7da1b26ad1e0> begin[:] <ast.Delete object at 0x7da1b26ae320> return[name[scans]]
keyword[def] identifier[search] ( identifier[d] , identifier[recursive] = keyword[True] , identifier[store_meta] = keyword[True] ): literal[string] identifier[scans] = identifier[col] . identifier[defaultdict] ( keyword[lambda] : identifier[col] . identifier[defaultdict] ( keyword[lambda] : identifier[col] . identifier[defaultdict] ( identifier[list] ))) keyword[for] identifier[dirpath] , identifier[dirnames] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[d] )): keyword[for] identifier[f] keyword[in] identifier[filenames] : identifier[fullfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirpath] , identifier[f] ) keyword[try] : identifier[d] = identifier[pydicom] . identifier[read_file] ( identifier[fullfile] , identifier[stop_before_pixels] = keyword[True] ) keyword[except] identifier[pydicom] . identifier[filereader] . identifier[InvalidDicomError] : keyword[continue] identifier[meta] ={ identifier[k] : identifier[getattr] ( identifier[d] , identifier[k] , keyword[None] ) keyword[for] identifier[k] keyword[in] identifier[d] . identifier[dir] ()} keyword[if] identifier[store_meta] keyword[else] keyword[None] identifier[scans] [ identifier[d] . identifier[StudyInstanceUID] ][ identifier[d] . identifier[SeriesNumber] ][ identifier[d] . identifier[InstanceNumber] ]. identifier[append] ( identifier[DicomFile] ( identifier[meta] = identifier[meta] , identifier[file] = identifier[fullfile] )) keyword[if] keyword[not] identifier[recursive] : keyword[del] identifier[dirnames] [:] keyword[return] identifier[scans]
def search(d, recursive=True, store_meta=True):
    """
    Search for DICOM files within a given directory and receive back
    a dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}

    Example usage::
        >>> import yaxil.dicom
        >>> yaxil.dicom.search("~/dicoms").keys()
        ['1.2.340.500067.8.9.10.11012.13000001401516017181900000200']

    :param d: Directory name
    :type d: str
    :param recursive: Search recursively
    :type recursive: bool
    :param store_meta: Read and store metadata for each file for fast lookups
    :type store_meta: bool
    :returns: Dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}
    :rtype: dict
    """
    # say this fast three times
    scans = col.defaultdict(lambda : col.defaultdict(lambda : col.defaultdict(list)))
    for (dirpath, dirnames, filenames) in os.walk(os.path.expanduser(d)):
        for f in filenames:
            fullfile = os.path.join(dirpath, f)
            try:
                d = pydicom.read_file(fullfile, stop_before_pixels=True) # depends on [control=['try'], data=[]]
            except pydicom.filereader.InvalidDicomError:
                continue # depends on [control=['except'], data=[]]
            meta = {k: getattr(d, k, None) for k in d.dir()} if store_meta else None
            scans[d.StudyInstanceUID][d.SeriesNumber][d.InstanceNumber].append(DicomFile(meta=meta, file=fullfile)) # depends on [control=['for'], data=['f']]
        if not recursive:
            del dirnames[:] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    return scans
def fixup_comments(self):
    """Remove any style bytes that are marked as commented but have no
    comment, and add any style bytes where there's a comment but it isn't
    marked in the style data.

    This happens on the base data, so only need to do this on one segment
    that uses this base data.
    """
    style_base = self.rawdata.style_base
    comment_text_indexes = np.asarray(list(self.rawdata.extra.comments.keys()), dtype=np.uint32)
    comment_mask = self.get_style_mask(comment=True)
    has_comments = np.where(style_base & comment_bit_mask > 0)[0]
    both = np.intersect1d(comment_text_indexes, has_comments)
    log.info("fixup comments: %d correctly marked, %d without style, %d empty text" % (np.alen(both), np.alen(comment_text_indexes) - np.alen(both), np.alen(has_comments) - np.alen(both)))
    style_base &= comment_mask
    comment_style = self.get_style_bits(comment=True)
    style_base[comment_text_indexes] |= comment_style
def function[fixup_comments, parameter[self]]: constant[Remove any style bytes that are marked as commented but have no comment, and add any style bytes where there's a comment but it isn't marked in the style data. This happens on the base data, so only need to do this on one segment that uses this base data. ] variable[style_base] assign[=] name[self].rawdata.style_base variable[comment_text_indexes] assign[=] call[name[np].asarray, parameter[call[name[list], parameter[call[name[self].rawdata.extra.comments.keys, parameter[]]]]]] variable[comment_mask] assign[=] call[name[self].get_style_mask, parameter[]] variable[has_comments] assign[=] call[call[name[np].where, parameter[compare[binary_operation[name[style_base] <ast.BitAnd object at 0x7da2590d6b60> name[comment_bit_mask]] greater[>] constant[0]]]]][constant[0]] variable[both] assign[=] call[name[np].intersect1d, parameter[name[comment_text_indexes], name[has_comments]]] call[name[log].info, parameter[binary_operation[constant[fixup comments: %d correctly marked, %d without style, %d empty text] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1833430>, <ast.BinOp object at 0x7da1b1833be0>, <ast.BinOp object at 0x7da1b1830f70>]]]]] <ast.AugAssign object at 0x7da1b1832e00> variable[comment_style] assign[=] call[name[self].get_style_bits, parameter[]] <ast.AugAssign object at 0x7da1b1832830>
keyword[def] identifier[fixup_comments] ( identifier[self] ): literal[string] identifier[style_base] = identifier[self] . identifier[rawdata] . identifier[style_base] identifier[comment_text_indexes] = identifier[np] . identifier[asarray] ( identifier[list] ( identifier[self] . identifier[rawdata] . identifier[extra] . identifier[comments] . identifier[keys] ()), identifier[dtype] = identifier[np] . identifier[uint32] ) identifier[comment_mask] = identifier[self] . identifier[get_style_mask] ( identifier[comment] = keyword[True] ) identifier[has_comments] = identifier[np] . identifier[where] ( identifier[style_base] & identifier[comment_bit_mask] > literal[int] )[ literal[int] ] identifier[both] = identifier[np] . identifier[intersect1d] ( identifier[comment_text_indexes] , identifier[has_comments] ) identifier[log] . identifier[info] ( literal[string] %( identifier[np] . identifier[alen] ( identifier[both] ), identifier[np] . identifier[alen] ( identifier[comment_text_indexes] )- identifier[np] . identifier[alen] ( identifier[both] ), identifier[np] . identifier[alen] ( identifier[has_comments] )- identifier[np] . identifier[alen] ( identifier[both] ))) identifier[style_base] &= identifier[comment_mask] identifier[comment_style] = identifier[self] . identifier[get_style_bits] ( identifier[comment] = keyword[True] ) identifier[style_base] [ identifier[comment_text_indexes] ]|= identifier[comment_style]
def fixup_comments(self):
    """Remove any style bytes that are marked as commented but have no
    comment, and add any style bytes where there's a comment but it isn't
    marked in the style data.

    This happens on the base data, so only need to do this on one segment
    that uses this base data.
    """
    style_base = self.rawdata.style_base
    comment_text_indexes = np.asarray(list(self.rawdata.extra.comments.keys()), dtype=np.uint32)
    comment_mask = self.get_style_mask(comment=True)
    has_comments = np.where(style_base & comment_bit_mask > 0)[0]
    both = np.intersect1d(comment_text_indexes, has_comments)
    log.info('fixup comments: %d correctly marked, %d without style, %d empty text' % (np.alen(both), np.alen(comment_text_indexes) - np.alen(both), np.alen(has_comments) - np.alen(both)))
    style_base &= comment_mask
    comment_style = self.get_style_bits(comment=True)
    style_base[comment_text_indexes] |= comment_style
def _verify_include_files_used(self, file_uses, included_files):
    """Find all #include files that are unnecessary."""
    for include_file, use in file_uses.items():
        if not use & USES_DECLARATION:
            node, module = included_files[include_file]
            if module.ast_list is not None:
                msg = "'{}' does not need to be #included".format(
                    node.filename)
                if use & USES_REFERENCE:
                    msg += '; use a forward declaration instead'
                self._add_warning(msg, node)
def function[_verify_include_files_used, parameter[self, file_uses, included_files]]: constant[Find all #include files that are unnecessary.] for taget[tuple[[<ast.Name object at 0x7da1b0b9d360>, <ast.Name object at 0x7da1b0b9cf10>]]] in starred[call[name[file_uses].items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da1b0b9dd50> begin[:] <ast.Tuple object at 0x7da1b0b9da80> assign[=] call[name[included_files]][name[include_file]] if compare[name[module].ast_list is_not constant[None]] begin[:] variable[msg] assign[=] call[constant['{}' does not need to be #included].format, parameter[name[node].filename]] if binary_operation[name[use] <ast.BitAnd object at 0x7da2590d6b60> name[USES_REFERENCE]] begin[:] <ast.AugAssign object at 0x7da1b0b9f4f0> call[name[self]._add_warning, parameter[name[msg], name[node]]]
keyword[def] identifier[_verify_include_files_used] ( identifier[self] , identifier[file_uses] , identifier[included_files] ): literal[string] keyword[for] identifier[include_file] , identifier[use] keyword[in] identifier[file_uses] . identifier[items] (): keyword[if] keyword[not] identifier[use] & identifier[USES_DECLARATION] : identifier[node] , identifier[module] = identifier[included_files] [ identifier[include_file] ] keyword[if] identifier[module] . identifier[ast_list] keyword[is] keyword[not] keyword[None] : identifier[msg] = literal[string] . identifier[format] ( identifier[node] . identifier[filename] ) keyword[if] identifier[use] & identifier[USES_REFERENCE] : identifier[msg] += literal[string] identifier[self] . identifier[_add_warning] ( identifier[msg] , identifier[node] )
def _verify_include_files_used(self, file_uses, included_files):
    """Find all #include files that are unnecessary.""" 
    for (include_file, use) in file_uses.items():
        if not use & USES_DECLARATION:
            (node, module) = included_files[include_file]
            if module.ast_list is not None:
                msg = "'{}' does not need to be #included".format(node.filename)
                if use & USES_REFERENCE:
                    msg += '; use a forward declaration instead' # depends on [control=['if'], data=[]]
                self._add_warning(msg, node) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def index_scan_prefix_and_return_key(self, idx_name, val_prefix):
    '''Returns ids that match a prefix of an indexed value, and the
    specific key that matched the search prefix.

    Returns a generator of (index key, content identifier) that have
    an entry in the index ``idx_name`` with prefix ``val_prefix``
    (after index transforms are applied).

    If the index named by ``idx_name`` is not registered, then a
    :exc:`~exceptions.KeyError` is raised.

    :param unicode idx_name: name of index
    :param val_prefix: the value to use to search the index
    :type val: unspecified (depends on the index, usually ``unicode``)
    :rtype: generator of (``index key``, ``content_id``)
    :raises: :exc:`~exceptions.KeyError`
    '''
    return self._index_scan_prefix_impl(
        idx_name, val_prefix, lambda k: (k[0], k[2]))
def function[index_scan_prefix_and_return_key, parameter[self, idx_name, val_prefix]]: constant[Returns ids that match a prefix of an indexed value, and the specific key that matched the search prefix. Returns a generator of (index key, content identifier) that have an entry in the index ``idx_name`` with prefix ``val_prefix`` (after index transforms are applied). If the index named by ``idx_name`` is not registered, then a :exc:`~exceptions.KeyError` is raised. :param unicode idx_name: name of index :param val_prefix: the value to use to search the index :type val: unspecified (depends on the index, usually ``unicode``) :rtype: generator of (``index key``, ``content_id``) :raises: :exc:`~exceptions.KeyError` ] return[call[name[self]._index_scan_prefix_impl, parameter[name[idx_name], name[val_prefix], <ast.Lambda object at 0x7da20e955ff0>]]]
keyword[def] identifier[index_scan_prefix_and_return_key] ( identifier[self] , identifier[idx_name] , identifier[val_prefix] ): literal[string] keyword[return] identifier[self] . identifier[_index_scan_prefix_impl] ( identifier[idx_name] , identifier[val_prefix] , keyword[lambda] identifier[k] :( identifier[k] [ literal[int] ], identifier[k] [ literal[int] ]))
def index_scan_prefix_and_return_key(self, idx_name, val_prefix):
    """Returns ids that match a prefix of an indexed value, and the
    specific key that matched the search prefix.

    Returns a generator of (index key, content identifier) that have
    an entry in the index ``idx_name`` with prefix ``val_prefix``
    (after index transforms are applied).

    If the index named by ``idx_name`` is not registered, then a
    :exc:`~exceptions.KeyError` is raised.

    :param unicode idx_name: name of index
    :param val_prefix: the value to use to search the index
    :type val: unspecified (depends on the index, usually ``unicode``)
    :rtype: generator of (``index key``, ``content_id``)
    :raises: :exc:`~exceptions.KeyError`
    """
    return self._index_scan_prefix_impl(idx_name, val_prefix, lambda k: (k[0], k[2]))
def get_celery_app(name=None, **kwargs):  # nocv
    # pylint: disable=import-error
    '''
    Function to return celery-app. Works only if celery installed.
    :param name: Application name
    :param kwargs: overrided env-settings
    :return: Celery-app object
    '''
    from celery import Celery
    prepare_environment(**kwargs)
    name = name or os.getenv("VST_PROJECT")
    celery_app = Celery(name)
    celery_app.config_from_object('django.conf:settings', namespace='CELERY')
    celery_app.autodiscover_tasks()
    return celery_app
def function[get_celery_app, parameter[name]]: constant[ Function to return celery-app. Works only if celery installed. :param name: Application name :param kwargs: overrided env-settings :return: Celery-app object ] from relative_module[celery] import module[Celery] call[name[prepare_environment], parameter[]] variable[name] assign[=] <ast.BoolOp object at 0x7da1b0538400> variable[celery_app] assign[=] call[name[Celery], parameter[name[name]]] call[name[celery_app].config_from_object, parameter[constant[django.conf:settings]]] call[name[celery_app].autodiscover_tasks, parameter[]] return[name[celery_app]]
keyword[def] identifier[get_celery_app] ( identifier[name] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[celery] keyword[import] identifier[Celery] identifier[prepare_environment] (** identifier[kwargs] ) identifier[name] = identifier[name] keyword[or] identifier[os] . identifier[getenv] ( literal[string] ) identifier[celery_app] = identifier[Celery] ( identifier[name] ) identifier[celery_app] . identifier[config_from_object] ( literal[string] , identifier[namespace] = literal[string] ) identifier[celery_app] . identifier[autodiscover_tasks] () keyword[return] identifier[celery_app]
def get_celery_app(name=None, **kwargs):  # nocv
    # pylint: disable=import-error
    '\n    Function to return celery-app. Works only if celery installed.\n    :param name: Application name\n    :param kwargs: overrided env-settings\n    :return: Celery-app object\n    '
    from celery import Celery
    prepare_environment(**kwargs)
    name = name or os.getenv('VST_PROJECT')
    celery_app = Celery(name)
    celery_app.config_from_object('django.conf:settings', namespace='CELERY')
    celery_app.autodiscover_tasks()
    return celery_app
def _format_explain(self):
    """ Format the results of an EXPLAIN """
    lines = []
    for (command, kwargs) in self._call_list:
        lines.append(command + " " + pformat(kwargs))
    return "\n".join(lines)
def function[_format_explain, parameter[self]]: constant[ Format the results of an EXPLAIN ] variable[lines] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b26af370>, <ast.Name object at 0x7da1b26ae4d0>]]] in starred[name[self]._call_list] begin[:] call[name[lines].append, parameter[binary_operation[binary_operation[name[command] + constant[ ]] + call[name[pformat], parameter[name[kwargs]]]]]] return[call[constant[ ].join, parameter[name[lines]]]]
keyword[def] identifier[_format_explain] ( identifier[self] ): literal[string] identifier[lines] =[] keyword[for] ( identifier[command] , identifier[kwargs] ) keyword[in] identifier[self] . identifier[_call_list] : identifier[lines] . identifier[append] ( identifier[command] + literal[string] + identifier[pformat] ( identifier[kwargs] )) keyword[return] literal[string] . identifier[join] ( identifier[lines] )
def _format_explain(self):
    """ Format the results of an EXPLAIN """
    lines = []
    for (command, kwargs) in self._call_list:
        lines.append(command + ' ' + pformat(kwargs)) # depends on [control=['for'], data=[]]
    return '\n'.join(lines)
def get_state_model_by_path(self, path):
    """Returns the `StateModel` for the given `path`

    Searches a `StateModel` in the state machine, who's path is given by `path`.

    :param str path: Path of the searched state
    :return: The state with that path
    :rtype: StateModel
    :raises: ValueError, if path is invalid/not existing with this state machine
    """
    path_elements = path.split('/')
    path_elements.pop(0)
    current_state_model = self.root_state
    for state_id in path_elements:
        if isinstance(current_state_model, ContainerStateModel):
            if state_id in current_state_model.states:
                current_state_model = current_state_model.states[state_id]
            else:
                raise ValueError("Invalid path: State with id '{}' not found in state with id {}".format(
                    state_id, current_state_model.state.state_id))
        elif isinstance(current_state_model, LibraryStateModel):
            if state_id == current_state_model.state_copy.state.state_id:
                current_state_model = current_state_model.state_copy
            else:
                raise ValueError("Invalid path: state id '{}' does not coincide with state id '{}' of state_copy "
                                 "of library state with id '{}'".format(
                                     state_id, current_state_model.state_copy.state.state_id,
                                     current_state_model.state.state_id))
        else:
            raise ValueError("Invalid path: State with id '{}' has no children".format(
                current_state_model.state.state_id))
    return current_state_model
def function[get_state_model_by_path, parameter[self, path]]: constant[Returns the `StateModel` for the given `path` Searches a `StateModel` in the state machine, who's path is given by `path`. :param str path: Path of the searched state :return: The state with that path :rtype: StateModel :raises: ValueError, if path is invalid/not existing with this state machine ] variable[path_elements] assign[=] call[name[path].split, parameter[constant[/]]] call[name[path_elements].pop, parameter[constant[0]]] variable[current_state_model] assign[=] name[self].root_state for taget[name[state_id]] in starred[name[path_elements]] begin[:] if call[name[isinstance], parameter[name[current_state_model], name[ContainerStateModel]]] begin[:] if compare[name[state_id] in name[current_state_model].states] begin[:] variable[current_state_model] assign[=] call[name[current_state_model].states][name[state_id]] return[name[current_state_model]]
keyword[def] identifier[get_state_model_by_path] ( identifier[self] , identifier[path] ): literal[string] identifier[path_elements] = identifier[path] . identifier[split] ( literal[string] ) identifier[path_elements] . identifier[pop] ( literal[int] ) identifier[current_state_model] = identifier[self] . identifier[root_state] keyword[for] identifier[state_id] keyword[in] identifier[path_elements] : keyword[if] identifier[isinstance] ( identifier[current_state_model] , identifier[ContainerStateModel] ): keyword[if] identifier[state_id] keyword[in] identifier[current_state_model] . identifier[states] : identifier[current_state_model] = identifier[current_state_model] . identifier[states] [ identifier[state_id] ] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[state_id] , identifier[current_state_model] . identifier[state] . identifier[state_id] )) keyword[elif] identifier[isinstance] ( identifier[current_state_model] , identifier[LibraryStateModel] ): keyword[if] identifier[state_id] == identifier[current_state_model] . identifier[state_copy] . identifier[state] . identifier[state_id] : identifier[current_state_model] = identifier[current_state_model] . identifier[state_copy] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[state_id] , identifier[current_state_model] . identifier[state_copy] . identifier[state] . identifier[state_id] , identifier[current_state_model] . identifier[state] . identifier[state_id] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[current_state_model] . identifier[state] . identifier[state_id] )) keyword[return] identifier[current_state_model]
def get_state_model_by_path(self, path):
    """Returns the `StateModel` for the given `path`

    Searches a `StateModel` in the state machine, who's path is given by `path`.

    :param str path: Path of the searched state
    :return: The state with that path
    :rtype: StateModel
    :raises: ValueError, if path is invalid/not existing with this state machine
    """
    path_elements = path.split('/')
    path_elements.pop(0)
    current_state_model = self.root_state
    for state_id in path_elements:
        if isinstance(current_state_model, ContainerStateModel):
            if state_id in current_state_model.states:
                current_state_model = current_state_model.states[state_id] # depends on [control=['if'], data=['state_id']]
            else:
                raise ValueError("Invalid path: State with id '{}' not found in state with id {}".format(state_id, current_state_model.state.state_id)) # depends on [control=['if'], data=[]]
        elif isinstance(current_state_model, LibraryStateModel):
            if state_id == current_state_model.state_copy.state.state_id:
                current_state_model = current_state_model.state_copy # depends on [control=['if'], data=[]]
            else:
                raise ValueError("Invalid path: state id '{}' does not coincide with state id '{}' of state_copy of library state with id '{}'".format(state_id, current_state_model.state_copy.state.state_id, current_state_model.state.state_id)) # depends on [control=['if'], data=[]]
        else:
            raise ValueError("Invalid path: State with id '{}' has no children".format(current_state_model.state.state_id)) # depends on [control=['for'], data=['state_id']]
    return current_state_model
def addOptionString(self, name, value, append=False):
    """
    .. _addOptionString:

    Add a string option.

    :param name: The name of the option. Option names are case insensitive and
        must be unique.
    :type name: str
    :param value: The value of the option.
    :type value: str
    :param append: Setting append to true will cause values read from the command
        line or XML file to be concatenated into a comma delimited set. If _append
        is false, newer values will overwrite older ones.
    :type append: boolean
    :return: The result of the operation.
    :rtype: bool
    :see: addOption_, addOptionBool_, addOptionInt_
    """
    return self.options.AddOptionString(
        str_to_cppstr(name), str_to_cppstr(value), append)
def function[addOptionString, parameter[self, name, value, append]]: constant[ .. _addOptionString: Add a string option. :param name: The name of the option. Option names are case insensitive and must be unique. :type name: str :param value: The value of the option. :type value: str :param append: Setting append to true will cause values read from the command line or XML file to be concatenated into a comma delimited set. If _append is false, newer values will overwrite older ones. :type append: boolean :return: The result of the operation. :rtype: bool :see: addOption_, addOptionBool_, addOptionInt_ ] return[call[name[self].options.AddOptionString, parameter[call[name[str_to_cppstr], parameter[name[name]]], call[name[str_to_cppstr], parameter[name[value]]], name[append]]]]
keyword[def] identifier[addOptionString] ( identifier[self] , identifier[name] , identifier[value] , identifier[append] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[options] . identifier[AddOptionString] ( identifier[str_to_cppstr] ( identifier[name] ), identifier[str_to_cppstr] ( identifier[value] ), identifier[append] )
def addOptionString(self, name, value, append=False):
    """
    .. _addOptionString:

    Add a string option.

    :param name: The name of the option. Option names are case insensitive and
        must be unique.
    :type name: str
    :param value: The value of the option.
    :type value: str
    :param append: Setting append to true will cause values read from the command
        line or XML file to be concatenated into a comma delimited set. If _append
        is false, newer values will overwrite older ones.
    :type append: boolean
    :return: The result of the operation.
    :rtype: bool
    :see: addOption_, addOptionBool_, addOptionInt_
    """
    return self.options.AddOptionString(str_to_cppstr(name), str_to_cppstr(value), append)
def draw_tree(node, child_iter=lambda n: n.children, text_str=str):
    """Support asciitree 0.2 API.

    This function solely exist to not break old code (using asciitree 0.2).
    Its use is deprecated."""
    return LeftAligned(traverse=Traversal(get_text=text_str,
                                          get_children=child_iter),
                       draw=LegacyStyle())(node)
def function[draw_tree, parameter[node, child_iter, text_str]]: constant[Support asciitree 0.2 API. This function solely exist to not break old code (using asciitree 0.2). Its use is deprecated.] return[call[call[name[LeftAligned], parameter[]], parameter[name[node]]]]
keyword[def] identifier[draw_tree] ( identifier[node] , identifier[child_iter] = keyword[lambda] identifier[n] : identifier[n] . identifier[children] , identifier[text_str] = identifier[str] ): literal[string] keyword[return] identifier[LeftAligned] ( identifier[traverse] = identifier[Traversal] ( identifier[get_text] = identifier[text_str] , identifier[get_children] = identifier[child_iter] ), identifier[draw] = identifier[LegacyStyle] ())( identifier[node] )
def draw_tree(node, child_iter=lambda n: n.children, text_str=str):
    """Support asciitree 0.2 API.

    This function solely exist to not break old code (using asciitree 0.2).
    Its use is deprecated."""
    return LeftAligned(traverse=Traversal(get_text=text_str, get_children=child_iter), draw=LegacyStyle())(node)
def initialize_communities_bucket():
    """Initialize the communities file bucket.

    :raises: `invenio_files_rest.errors.FilesException`
    """
    bucket_id = UUID(current_app.config['COMMUNITIES_BUCKET_UUID'])

    if Bucket.query.get(bucket_id):
        raise FilesException("Bucket with UUID {} already exists.".format(
            bucket_id))
    else:
        storage_class = current_app.config['FILES_REST_DEFAULT_STORAGE_CLASS']
        location = Location.get_default()
        bucket = Bucket(id=bucket_id,
                        location=location,
                        default_storage_class=storage_class)
        db.session.add(bucket)
        db.session.commit()
def function[initialize_communities_bucket, parameter[]]: constant[Initialize the communities file bucket. :raises: `invenio_files_rest.errors.FilesException` ] variable[bucket_id] assign[=] call[name[UUID], parameter[call[name[current_app].config][constant[COMMUNITIES_BUCKET_UUID]]]] if call[name[Bucket].query.get, parameter[name[bucket_id]]] begin[:] <ast.Raise object at 0x7da18f7228c0>
keyword[def] identifier[initialize_communities_bucket] (): literal[string] identifier[bucket_id] = identifier[UUID] ( identifier[current_app] . identifier[config] [ literal[string] ]) keyword[if] identifier[Bucket] . identifier[query] . identifier[get] ( identifier[bucket_id] ): keyword[raise] identifier[FilesException] ( literal[string] . identifier[format] ( identifier[bucket_id] )) keyword[else] : identifier[storage_class] = identifier[current_app] . identifier[config] [ literal[string] ] identifier[location] = identifier[Location] . identifier[get_default] () identifier[bucket] = identifier[Bucket] ( identifier[id] = identifier[bucket_id] , identifier[location] = identifier[location] , identifier[default_storage_class] = identifier[storage_class] ) identifier[db] . identifier[session] . identifier[add] ( identifier[bucket] ) identifier[db] . identifier[session] . identifier[commit] ()
def initialize_communities_bucket():
    """Initialize the communities file bucket.

    :raises: `invenio_files_rest.errors.FilesException`
    """
    bucket_id = UUID(current_app.config['COMMUNITIES_BUCKET_UUID'])
    if Bucket.query.get(bucket_id):
        raise FilesException('Bucket with UUID {} already exists.'.format(bucket_id)) # depends on [control=['if'], data=[]]
    else:
        storage_class = current_app.config['FILES_REST_DEFAULT_STORAGE_CLASS']
        location = Location.get_default()
        bucket = Bucket(id=bucket_id, location=location, default_storage_class=storage_class)
        db.session.add(bucket)
        db.session.commit()
def gene_ids_of_gene_name(self, gene_name):
    """
    What are the gene IDs associated with a given gene name?
    (due to copy events, there might be multiple genes per name)
    """
    results = self._query_gene_ids("gene_name", gene_name)
    if len(results) == 0:
        raise ValueError("Gene name not found: %s" % gene_name)
    return results
def function[gene_ids_of_gene_name, parameter[self, gene_name]]: constant[ What are the gene IDs associated with a given gene name? (due to copy events, there might be multiple genes per name) ] variable[results] assign[=] call[name[self]._query_gene_ids, parameter[constant[gene_name], name[gene_name]]] if compare[call[name[len], parameter[name[results]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b08f8760> return[name[results]]
keyword[def] identifier[gene_ids_of_gene_name] ( identifier[self] , identifier[gene_name] ): literal[string] identifier[results] = identifier[self] . identifier[_query_gene_ids] ( literal[string] , identifier[gene_name] ) keyword[if] identifier[len] ( identifier[results] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[gene_name] ) keyword[return] identifier[results]
def gene_ids_of_gene_name(self, gene_name):
    """
    What are the gene IDs associated with a given gene name?
    (due to copy events, there might be multiple genes per name)
    """
    results = self._query_gene_ids('gene_name', gene_name)
    if len(results) == 0:
        raise ValueError('Gene name not found: %s' % gene_name) # depends on [control=['if'], data=[]]
    return results
def logical_or(lhs, rhs):
    """Returns the result of element-wise **logical or** comparison
    operation with broadcasting.

    For each element in input arrays, return 1(true) if lhs elements or rhs
    elements are true, otherwise return 0(false).

    Equivalent to ``lhs or rhs`` and ``mx.nd.broadcast_logical_or(lhs, rhs)``.

    .. note::

       If the corresponding dimensions of two arrays have the same size or
       one of them has size 1, then the arrays are broadcastable to a common shape.

    Parameters
    ----------
    lhs : scalar or mxnet.ndarray.array
        First input of the function.
    rhs : scalar or mxnet.ndarray.array
        Second input of the function. If ``lhs.shape != rhs.shape``, they must be
        broadcastable to a common shape.

    Returns
    -------
    NDArray
        Output array of boolean values.

    Examples
    --------
    >>> x = mx.nd.ones((2,3))
    >>> y = mx.nd.arange(2).reshape((2,1))
    >>> z = mx.nd.arange(2).reshape((1,2))
    >>> x.asnumpy()
    array([[ 1.,  1.,  1.],
           [ 1.,  1.,  1.]], dtype=float32)
    >>> y.asnumpy()
    array([[ 0.],
           [ 1.]], dtype=float32)
    >>> z.asnumpy()
    array([[ 0.,  1.]], dtype=float32)
    >>> mx.nd.logical_or(x, 1).asnumpy()
    array([[ 1.,  1.,  1.],
           [ 1.,  1.,  1.]], dtype=float32)
    >>> mx.nd.logical_or(x, y).asnumpy()
    array([[ 1.,  1.,  1.],
           [ 1.,  1.,  1.]], dtype=float32)
    >>> mx.nd.logical_or(z, y).asnumpy()
    array([[ 0.,  1.],
           [ 1.,  1.]], dtype=float32)
    """
    # pylint: disable= no-member, protected-access
    return _ufunc_helper(
        lhs,
        rhs,
        op.broadcast_logical_or,
        lambda x, y: 1 if x or y else 0,
        _internal._logical_or_scalar,
        None)
def function[logical_or, parameter[lhs, rhs]]: constant[Returns the result of element-wise **logical or** comparison operation with broadcasting. For each element in input arrays, return 1(true) if lhs elements or rhs elements are true, otherwise return 0(false). Equivalent to ``lhs or rhs`` and ``mx.nd.broadcast_logical_or(lhs, rhs)``. .. note:: If the corresponding dimensions of two arrays have the same size or one of them has size 1, then the arrays are broadcastable to a common shape. Parameters ---------- lhs : scalar or mxnet.ndarray.array First input of the function. rhs : scalar or mxnet.ndarray.array Second input of the function. If ``lhs.shape != rhs.shape``, they must be broadcastable to a common shape. Returns ------- NDArray Output array of boolean values. Examples -------- >>> x = mx.nd.ones((2,3)) >>> y = mx.nd.arange(2).reshape((2,1)) >>> z = mx.nd.arange(2).reshape((1,2)) >>> x.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> y.asnumpy() array([[ 0.], [ 1.]], dtype=float32) >>> z.asnumpy() array([[ 0., 1.]], dtype=float32) >>> mx.nd.logical_or(x, 1).asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> mx.nd.logical_or(x, y).asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> mx.nd.logical_or(z, y).asnumpy() array([[ 0., 1.], [ 1., 1.]], dtype=float32) ] return[call[name[_ufunc_helper], parameter[name[lhs], name[rhs], name[op].broadcast_logical_or, <ast.Lambda object at 0x7da1b2009ba0>, name[_internal]._logical_or_scalar, constant[None]]]]
keyword[def] identifier[logical_or] ( identifier[lhs] , identifier[rhs] ): literal[string] keyword[return] identifier[_ufunc_helper] ( identifier[lhs] , identifier[rhs] , identifier[op] . identifier[broadcast_logical_or] , keyword[lambda] identifier[x] , identifier[y] : literal[int] keyword[if] identifier[x] keyword[or] identifier[y] keyword[else] literal[int] , identifier[_internal] . identifier[_logical_or_scalar] , keyword[None] )
def logical_or(lhs, rhs):
    """Returns the result of element-wise **logical or** comparison
    operation with broadcasting.

    For each element in input arrays, return 1(true) if lhs elements or rhs
    elements are true, otherwise return 0(false).

    Equivalent to ``lhs or rhs`` and ``mx.nd.broadcast_logical_or(lhs, rhs)``.

    .. note::

       If the corresponding dimensions of two arrays have the same size or
       one of them has size 1, then the arrays are broadcastable to a common shape.

    Parameters
    ----------
    lhs : scalar or mxnet.ndarray.array
        First input of the function.
    rhs : scalar or mxnet.ndarray.array
        Second input of the function. If ``lhs.shape != rhs.shape``, they must be
        broadcastable to a common shape.

    Returns
    -------
    NDArray
        Output array of boolean values.

    Examples
    --------
    >>> x = mx.nd.ones((2,3))
    >>> y = mx.nd.arange(2).reshape((2,1))
    >>> z = mx.nd.arange(2).reshape((1,2))
    >>> x.asnumpy()
    array([[ 1.,  1.,  1.],
           [ 1.,  1.,  1.]], dtype=float32)
    >>> y.asnumpy()
    array([[ 0.],
           [ 1.]], dtype=float32)
    >>> z.asnumpy()
    array([[ 0.,  1.]], dtype=float32)
    >>> mx.nd.logical_or(x, 1).asnumpy()
    array([[ 1.,  1.,  1.],
           [ 1.,  1.,  1.]], dtype=float32)
    >>> mx.nd.logical_or(x, y).asnumpy()
    array([[ 1.,  1.,  1.],
           [ 1.,  1.,  1.]], dtype=float32)
    >>> mx.nd.logical_or(z, y).asnumpy()
    array([[ 0.,  1.],
           [ 1.,  1.]], dtype=float32)
    """
    # pylint: disable= no-member, protected-access
    return _ufunc_helper(lhs, rhs, op.broadcast_logical_or, lambda x, y: 1 if x or y else 0, _internal._logical_or_scalar, None)
def spectral_entropy(X, Band, Fs, Power_Ratio=None):
    """Compute spectral entropy of a time series from either two cases below:
    1. X, the time series (default)
    2. Power_Ratio, a list of normalized signal power in a set of frequency
    bins defined in Band (if Power_Ratio is provided, recommended to speed up)

    In case 1, Power_Ratio is computed by bin_power() function.

    Notes
    -----
    To speed up, it is recommended to compute Power_Ratio before calling this
    function because it may also be used by other functions whereas computing
    it here again will slow down.

    Parameters
    ----------
    Band
        list

        boundary frequencies (in Hz) of bins. They can be unequal bins, e.g.
        [0.5,4,7,12,30] which are delta, theta, alpha and beta respectively.
        You can also use range() function of Python to generate equal bins and
        pass the generated list to this function.

        Each element of Band is a physical frequency and shall not exceed the
        Nyquist frequency, i.e., half of sampling frequency.

    X
        list

        a 1-D real time series.

    Fs
        integer

        the sampling rate in physical frequency

    Returns
    -------
    As indicated in return line

    See Also
    --------
    bin_power: pyeeg function that computes spectral power in frequency bins
    """

    if Power_Ratio is None:
        Power, Power_Ratio = bin_power(X, Band, Fs)

    Spectral_Entropy = 0
    for i in range(0, len(Power_Ratio) - 1):
        Spectral_Entropy += Power_Ratio[i] * numpy.log(Power_Ratio[i])
    Spectral_Entropy /= numpy.log(
        len(Power_Ratio)
    )  # to save time, minus one is omitted
    return -1 * Spectral_Entropy
def function[spectral_entropy, parameter[X, Band, Fs, Power_Ratio]]: constant[Compute spectral entropy of a time series from either two cases below: 1. X, the time series (default) 2. Power_Ratio, a list of normalized signal power in a set of frequency bins defined in Band (if Power_Ratio is provided, recommended to speed up) In case 1, Power_Ratio is computed by bin_power() function. Notes ----- To speed up, it is recommended to compute Power_Ratio before calling this function because it may also be used by other functions whereas computing it here again will slow down. Parameters ---------- Band list boundary frequencies (in Hz) of bins. They can be unequal bins, e.g. [0.5,4,7,12,30] which are delta, theta, alpha and beta respectively. You can also use range() function of Python to generate equal bins and pass the generated list to this function. Each element of Band is a physical frequency and shall not exceed the Nyquist frequency, i.e., half of sampling frequency. X list a 1-D real time series. Fs integer the sampling rate in physical frequency Returns ------- As indicated in return line See Also -------- bin_power: pyeeg function that computes spectral power in frequency bins ] if compare[name[Power_Ratio] is constant[None]] begin[:] <ast.Tuple object at 0x7da2046208b0> assign[=] call[name[bin_power], parameter[name[X], name[Band], name[Fs]]] variable[Spectral_Entropy] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[constant[0], binary_operation[call[name[len], parameter[name[Power_Ratio]]] - constant[1]]]]] begin[:] <ast.AugAssign object at 0x7da204620a00> <ast.AugAssign object at 0x7da2046234c0> return[binary_operation[<ast.UnaryOp object at 0x7da204620f70> * name[Spectral_Entropy]]]
keyword[def] identifier[spectral_entropy] ( identifier[X] , identifier[Band] , identifier[Fs] , identifier[Power_Ratio] = keyword[None] ): literal[string] keyword[if] identifier[Power_Ratio] keyword[is] keyword[None] : identifier[Power] , identifier[Power_Ratio] = identifier[bin_power] ( identifier[X] , identifier[Band] , identifier[Fs] ) identifier[Spectral_Entropy] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[Power_Ratio] )- literal[int] ): identifier[Spectral_Entropy] += identifier[Power_Ratio] [ identifier[i] ]* identifier[numpy] . identifier[log] ( identifier[Power_Ratio] [ identifier[i] ]) identifier[Spectral_Entropy] /= identifier[numpy] . identifier[log] ( identifier[len] ( identifier[Power_Ratio] ) ) keyword[return] - literal[int] * identifier[Spectral_Entropy]
def spectral_entropy(X, Band, Fs, Power_Ratio=None):
    """Compute spectral entropy of a time series from either two cases below:
    1. X, the time series (default)
    2. Power_Ratio, a list of normalized signal power in a set of frequency
    bins defined in Band (if Power_Ratio is provided, recommended to speed up)

    In case 1, Power_Ratio is computed by bin_power() function.

    Notes
    -----
    To speed up, it is recommended to compute Power_Ratio before calling this
    function because it may also be used by other functions whereas computing
    it here again will slow down.

    Parameters
    ----------
    Band
        list

        boundary frequencies (in Hz) of bins. They can be unequal bins, e.g.
        [0.5,4,7,12,30] which are delta, theta, alpha and beta respectively.
        You can also use range() function of Python to generate equal bins and
        pass the generated list to this function.

        Each element of Band is a physical frequency and shall not exceed the
        Nyquist frequency, i.e., half of sampling frequency.

    X
        list

        a 1-D real time series.

    Fs
        integer

        the sampling rate in physical frequency

    Returns
    -------
    As indicated in return line

    See Also
    --------
    bin_power: pyeeg function that computes spectral power in frequency bins
    """
    if Power_Ratio is None:
        (Power, Power_Ratio) = bin_power(X, Band, Fs) # depends on [control=['if'], data=['Power_Ratio']]
    Spectral_Entropy = 0
    for i in range(0, len(Power_Ratio) - 1):
        Spectral_Entropy += Power_Ratio[i] * numpy.log(Power_Ratio[i]) # depends on [control=['for'], data=['i']]
    Spectral_Entropy /= numpy.log(len(Power_Ratio))  # to save time, minus one is omitted
    return -1 * Spectral_Entropy
def get_child_bin_ids(self, bin_id):
    """Gets the child ``Ids`` of the given bin.

    arg:    bin_id (osid.id.Id): the ``Id`` to query
    return: (osid.id.IdList) - the children of the bin
    raise:  NotFound - ``bin_id`` not found
    raise:  NullArgument - ``bin_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for
    # osid.resource.BinHierarchySession.get_child_bin_ids
    if self._catalog_session is not None:
        return self._catalog_session.get_child_catalog_ids(catalog_id=bin_id)
    return self._hierarchy_session.get_children(id_=bin_id)
def function[get_child_bin_ids, parameter[self, bin_id]]: constant[Gets the child ``Ids`` of the given bin. arg: bin_id (osid.id.Id): the ``Id`` to query return: (osid.id.IdList) - the children of the bin raise: NotFound - ``bin_id`` not found raise: NullArgument - ``bin_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* ] if compare[name[self]._catalog_session is_not constant[None]] begin[:] return[call[name[self]._catalog_session.get_child_catalog_ids, parameter[]]] return[call[name[self]._hierarchy_session.get_children, parameter[]]]
keyword[def] identifier[get_child_bin_ids] ( identifier[self] , identifier[bin_id] ): literal[string] keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[_catalog_session] . identifier[get_child_catalog_ids] ( identifier[catalog_id] = identifier[bin_id] ) keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[get_children] ( identifier[id_] = identifier[bin_id] )
def get_child_bin_ids(self, bin_id):
    """Gets the child ``Ids`` of the given bin.

    arg:    bin_id (osid.id.Id): the ``Id`` to query
    return: (osid.id.IdList) - the children of the bin
    raise:  NotFound - ``bin_id`` not found
    raise:  NullArgument - ``bin_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for
    # osid.resource.BinHierarchySession.get_child_bin_ids
    if self._catalog_session is not None:
        return self._catalog_session.get_child_catalog_ids(catalog_id=bin_id) # depends on [control=['if'], data=[]]
    return self._hierarchy_session.get_children(id_=bin_id)
def discover(uri):
    """Discover services for a given URI.

    @param uri: The identity URI as a well-formed http or https
        URI. The well-formedness and the protocol are not checked, but
        the results of this function are undefined if those properties
        do not hold.

    @return: DiscoveryResult object

    @raises Exception: Any exception that can be raised by fetching a URL with
        the given fetcher.
    @raises DiscoveryFailure: When the HTTP response does not have a 200 code.
    """
    result = DiscoveryResult(uri)
    resp = fetchers.fetch(uri, headers={'Accept': YADIS_ACCEPT_HEADER})
    if resp.status not in (200, 206):
        raise DiscoveryFailure(
            'HTTP Response status from identity URL host is not 200. '
            'Got status %r' % (resp.status,), resp)

    # Note the URL after following redirects
    result.normalized_uri = resp.final_url

    # Attempt to find out where to go to discover the document
    # or if we already have it
    result.content_type = resp.headers.get('content-type')

    result.xrds_uri = whereIsYadis(resp)

    if result.xrds_uri and result.usedYadisLocation():
        resp = fetchers.fetch(result.xrds_uri)
        if resp.status not in (200, 206):
            exc = DiscoveryFailure(
                'HTTP Response status from Yadis host is not 200. '
                'Got status %r' % (resp.status,), resp)
            exc.identity_url = result.normalized_uri
            raise exc
        result.content_type = resp.headers.get('content-type')

    result.response_text = resp.body
    return result
def function[discover, parameter[uri]]: constant[Discover services for a given URI. @param uri: The identity URI as a well-formed http or https URI. The well-formedness and the protocol are not checked, but the results of this function are undefined if those properties do not hold. @return: DiscoveryResult object @raises Exception: Any exception that can be raised by fetching a URL with the given fetcher. @raises DiscoveryFailure: When the HTTP response does not have a 200 code. ] variable[result] assign[=] call[name[DiscoveryResult], parameter[name[uri]]] variable[resp] assign[=] call[name[fetchers].fetch, parameter[name[uri]]] if compare[name[resp].status <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18f723eb0>, <ast.Constant object at 0x7da18f723400>]]] begin[:] <ast.Raise object at 0x7da18f722350> name[result].normalized_uri assign[=] name[resp].final_url name[result].content_type assign[=] call[name[resp].headers.get, parameter[constant[content-type]]] name[result].xrds_uri assign[=] call[name[whereIsYadis], parameter[name[resp]]] if <ast.BoolOp object at 0x7da18f720280> begin[:] variable[resp] assign[=] call[name[fetchers].fetch, parameter[name[result].xrds_uri]] if compare[name[resp].status <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da18f721e70>, <ast.Constant object at 0x7da18f723e50>]]] begin[:] variable[exc] assign[=] call[name[DiscoveryFailure], parameter[binary_operation[constant[HTTP Response status from Yadis host is not 200. Got status %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f723460>]]], name[resp]]] name[exc].identity_url assign[=] name[result].normalized_uri <ast.Raise object at 0x7da18f723a60> name[result].content_type assign[=] call[name[resp].headers.get, parameter[constant[content-type]]] name[result].response_text assign[=] name[resp].body return[name[result]]
keyword[def] identifier[discover] ( identifier[uri] ): literal[string] identifier[result] = identifier[DiscoveryResult] ( identifier[uri] ) identifier[resp] = identifier[fetchers] . identifier[fetch] ( identifier[uri] , identifier[headers] ={ literal[string] : identifier[YADIS_ACCEPT_HEADER] }) keyword[if] identifier[resp] . identifier[status] keyword[not] keyword[in] ( literal[int] , literal[int] ): keyword[raise] identifier[DiscoveryFailure] ( literal[string] literal[string] %( identifier[resp] . identifier[status] ,), identifier[resp] ) identifier[result] . identifier[normalized_uri] = identifier[resp] . identifier[final_url] identifier[result] . identifier[content_type] = identifier[resp] . identifier[headers] . identifier[get] ( literal[string] ) identifier[result] . identifier[xrds_uri] = identifier[whereIsYadis] ( identifier[resp] ) keyword[if] identifier[result] . identifier[xrds_uri] keyword[and] identifier[result] . identifier[usedYadisLocation] (): identifier[resp] = identifier[fetchers] . identifier[fetch] ( identifier[result] . identifier[xrds_uri] ) keyword[if] identifier[resp] . identifier[status] keyword[not] keyword[in] ( literal[int] , literal[int] ): identifier[exc] = identifier[DiscoveryFailure] ( literal[string] literal[string] %( identifier[resp] . identifier[status] ,), identifier[resp] ) identifier[exc] . identifier[identity_url] = identifier[result] . identifier[normalized_uri] keyword[raise] identifier[exc] identifier[result] . identifier[content_type] = identifier[resp] . identifier[headers] . identifier[get] ( literal[string] ) identifier[result] . identifier[response_text] = identifier[resp] . identifier[body] keyword[return] identifier[result]
def discover(uri): """Discover services for a given URI. @param uri: The identity URI as a well-formed http or https URI. The well-formedness and the protocol are not checked, but the results of this function are undefined if those properties do not hold. @return: DiscoveryResult object @raises Exception: Any exception that can be raised by fetching a URL with the given fetcher. @raises DiscoveryFailure: When the HTTP response does not have a 200 code. """ result = DiscoveryResult(uri) resp = fetchers.fetch(uri, headers={'Accept': YADIS_ACCEPT_HEADER}) if resp.status not in (200, 206): raise DiscoveryFailure('HTTP Response status from identity URL host is not 200. Got status %r' % (resp.status,), resp) # depends on [control=['if'], data=[]] # Note the URL after following redirects result.normalized_uri = resp.final_url # Attempt to find out where to go to discover the document # or if we already have it result.content_type = resp.headers.get('content-type') result.xrds_uri = whereIsYadis(resp) if result.xrds_uri and result.usedYadisLocation(): resp = fetchers.fetch(result.xrds_uri) if resp.status not in (200, 206): exc = DiscoveryFailure('HTTP Response status from Yadis host is not 200. Got status %r' % (resp.status,), resp) exc.identity_url = result.normalized_uri raise exc # depends on [control=['if'], data=[]] result.content_type = resp.headers.get('content-type') # depends on [control=['if'], data=[]] result.response_text = resp.body return result
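A minimal usage sketch for the discover() row above, assuming the function and DiscoveryFailure are importable from openid.yadis.discover as in the python-openid package layout (the import path is an assumption)::

    from openid.yadis.discover import discover, DiscoveryFailure  # assumed path

    try:
        result = discover("https://example.com/identity")
    except DiscoveryFailure as exc:
        print("discovery failed:", exc)
    else:
        print(result.normalized_uri)   # URL after following redirects
        print(result.xrds_uri)         # where the XRDS document lives, if any
        print(result.content_type)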
def time_limited(limit_seconds, iterable): """ Yield items from *iterable* until *limit_seconds* have passed. >>> from time import sleep >>> def generator(): ... yield 1 ... yield 2 ... sleep(0.2) ... yield 3 >>> iterable = generator() >>> list(time_limited(0.1, iterable)) [1, 2] Note that the time is checked before each item is yielded, and iteration stops if the time elapsed is greater than *limit_seconds*. If your time limit is 1 second, but it takes 2 seconds to generate the first item from the iterable, the function will run for 2 seconds and not yield anything. """ if limit_seconds < 0: raise ValueError('limit_seconds must be positive') start_time = monotonic() for item in iterable: if monotonic() - start_time > limit_seconds: break yield item
def function[time_limited, parameter[limit_seconds, iterable]]: constant[ Yield items from *iterable* until *limit_seconds* have passed. >>> from time import sleep >>> def generator(): ... yield 1 ... yield 2 ... sleep(0.2) ... yield 3 >>> iterable = generator() >>> list(time_limited(0.1, iterable)) [1, 2] Note that the time is checked before each item is yielded, and iteration stops if the time elapsed is greater than *limit_seconds*. If your time limit is 1 second, but it takes 2 seconds to generate the first item from the iterable, the function will run for 2 seconds and not yield anything. ] if compare[name[limit_seconds] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b1ddb220> variable[start_time] assign[=] call[name[monotonic], parameter[]] for taget[name[item]] in starred[name[iterable]] begin[:] if compare[binary_operation[call[name[monotonic], parameter[]] - name[start_time]] greater[>] name[limit_seconds]] begin[:] break <ast.Yield object at 0x7da1b1dda980>
keyword[def] identifier[time_limited] ( identifier[limit_seconds] , identifier[iterable] ): literal[string] keyword[if] identifier[limit_seconds] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[start_time] = identifier[monotonic] () keyword[for] identifier[item] keyword[in] identifier[iterable] : keyword[if] identifier[monotonic] ()- identifier[start_time] > identifier[limit_seconds] : keyword[break] keyword[yield] identifier[item]
def time_limited(limit_seconds, iterable): """ Yield items from *iterable* until *limit_seconds* have passed. >>> from time import sleep >>> def generator(): ... yield 1 ... yield 2 ... sleep(0.2) ... yield 3 >>> iterable = generator() >>> list(time_limited(0.1, iterable)) [1, 2] Note that the time is checked before each item is yielded, and iteration stops if the time elapsed is greater than *limit_seconds*. If your time limit is 1 second, but it takes 2 seconds to generate the first item from the iterable, the function will run for 2 seconds and not yield anything. """ if limit_seconds < 0: raise ValueError('limit_seconds must be positive') # depends on [control=['if'], data=[]] start_time = monotonic() for item in iterable: if monotonic() - start_time > limit_seconds: break # depends on [control=['if'], data=[]] yield item # depends on [control=['for'], data=['item']]
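One more hedged sketch beyond the doctest above, assuming this is the time_limited recipe shipped by more_itertools (the import path is an assumption)::

    from time import sleep
    from more_itertools import time_limited  # assumed import path

    def slow_count():
        n = 0
        while True:
            yield n
            n += 1
            sleep(0.05)

    # Stops once roughly 0.2s have elapsed, however many items remain.
    print(list(time_limited(0.2, slow_count())))  # e.g. [0, 1, 2, 3]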
def write_to_conll_eval_file(prediction_file: TextIO, gold_file: TextIO, verb_index: Optional[int], sentence: List[str], prediction: List[str], gold_labels: List[str]): """ Prints predicate argument predictions and gold labels for a single verbal predicate in a sentence to two provided file references. Parameters ---------- prediction_file : TextIO, required. A file reference to print predictions to. gold_file : TextIO, required. A file reference to print gold labels to. verb_index : Optional[int], required. The index of the verbal predicate in the sentence which the gold labels are the arguments for, or None if the sentence contains no verbal predicate. sentence : List[str], required. The word tokens. prediction : List[str], required. The predicted BIO labels. gold_labels : List[str], required. The gold BIO labels. """ verb_only_sentence = ["-"] * len(sentence) if verb_index: verb_only_sentence[verb_index] = sentence[verb_index] conll_format_predictions = convert_bio_tags_to_conll_format(prediction) conll_format_gold_labels = convert_bio_tags_to_conll_format(gold_labels) for word, predicted, gold in zip(verb_only_sentence, conll_format_predictions, conll_format_gold_labels): prediction_file.write(word.ljust(15)) prediction_file.write(predicted.rjust(15) + "\n") gold_file.write(word.ljust(15)) gold_file.write(gold.rjust(15) + "\n") prediction_file.write("\n") gold_file.write("\n")
def function[write_to_conll_eval_file, parameter[prediction_file, gold_file, verb_index, sentence, prediction, gold_labels]]: constant[ Prints predicate argument predictions and gold labels for a single verbal predicate in a sentence to two provided file references. Parameters ---------- prediction_file : TextIO, required. A file reference to print predictions to. gold_file : TextIO, required. A file reference to print gold labels to. verb_index : Optional[int], required. The index of the verbal predicate in the sentence which the gold labels are the arguments for, or None if the sentence contains no verbal predicate. sentence : List[str], required. The word tokens. prediction : List[str], required. The predicted BIO labels. gold_labels : List[str], required. The gold BIO labels. ] variable[verb_only_sentence] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1ffd330>]] * call[name[len], parameter[name[sentence]]]] if name[verb_index] begin[:] call[name[verb_only_sentence]][name[verb_index]] assign[=] call[name[sentence]][name[verb_index]] variable[conll_format_predictions] assign[=] call[name[convert_bio_tags_to_conll_format], parameter[name[prediction]]] variable[conll_format_gold_labels] assign[=] call[name[convert_bio_tags_to_conll_format], parameter[name[gold_labels]]] for taget[tuple[[<ast.Name object at 0x7da18f8127d0>, <ast.Name object at 0x7da18f8131c0>, <ast.Name object at 0x7da18f810820>]]] in starred[call[name[zip], parameter[name[verb_only_sentence], name[conll_format_predictions], name[conll_format_gold_labels]]]] begin[:] call[name[prediction_file].write, parameter[call[name[word].ljust, parameter[constant[15]]]]] call[name[prediction_file].write, parameter[binary_operation[call[name[predicted].rjust, parameter[constant[15]]] + constant[ ]]]] call[name[gold_file].write, parameter[call[name[word].ljust, parameter[constant[15]]]]] call[name[gold_file].write, parameter[binary_operation[call[name[gold].rjust, parameter[constant[15]]] + constant[ ]]]] call[name[prediction_file].write, parameter[constant[ ]]] call[name[gold_file].write, parameter[constant[ ]]]
keyword[def] identifier[write_to_conll_eval_file] ( identifier[prediction_file] : identifier[TextIO] , identifier[gold_file] : identifier[TextIO] , identifier[verb_index] : identifier[Optional] [ identifier[int] ], identifier[sentence] : identifier[List] [ identifier[str] ], identifier[prediction] : identifier[List] [ identifier[str] ], identifier[gold_labels] : identifier[List] [ identifier[str] ]): literal[string] identifier[verb_only_sentence] =[ literal[string] ]* identifier[len] ( identifier[sentence] ) keyword[if] identifier[verb_index] : identifier[verb_only_sentence] [ identifier[verb_index] ]= identifier[sentence] [ identifier[verb_index] ] identifier[conll_format_predictions] = identifier[convert_bio_tags_to_conll_format] ( identifier[prediction] ) identifier[conll_format_gold_labels] = identifier[convert_bio_tags_to_conll_format] ( identifier[gold_labels] ) keyword[for] identifier[word] , identifier[predicted] , identifier[gold] keyword[in] identifier[zip] ( identifier[verb_only_sentence] , identifier[conll_format_predictions] , identifier[conll_format_gold_labels] ): identifier[prediction_file] . identifier[write] ( identifier[word] . identifier[ljust] ( literal[int] )) identifier[prediction_file] . identifier[write] ( identifier[predicted] . identifier[rjust] ( literal[int] )+ literal[string] ) identifier[gold_file] . identifier[write] ( identifier[word] . identifier[ljust] ( literal[int] )) identifier[gold_file] . identifier[write] ( identifier[gold] . identifier[rjust] ( literal[int] )+ literal[string] ) identifier[prediction_file] . identifier[write] ( literal[string] ) identifier[gold_file] . identifier[write] ( literal[string] )
def write_to_conll_eval_file(prediction_file: TextIO, gold_file: TextIO, verb_index: Optional[int], sentence: List[str], prediction: List[str], gold_labels: List[str]): """ Prints predicate argument predictions and gold labels for a single verbal predicate in a sentence to two provided file references. Parameters ---------- prediction_file : TextIO, required. A file reference to print predictions to. gold_file : TextIO, required. A file reference to print gold labels to. verb_index : Optional[int], required. The index of the verbal predicate in the sentence which the gold labels are the arguments for, or None if the sentence contains no verbal predicate. sentence : List[str], required. The word tokens. prediction : List[str], required. The predicted BIO labels. gold_labels : List[str], required. The gold BIO labels. """ verb_only_sentence = ['-'] * len(sentence) if verb_index: verb_only_sentence[verb_index] = sentence[verb_index] # depends on [control=['if'], data=[]] conll_format_predictions = convert_bio_tags_to_conll_format(prediction) conll_format_gold_labels = convert_bio_tags_to_conll_format(gold_labels) for (word, predicted, gold) in zip(verb_only_sentence, conll_format_predictions, conll_format_gold_labels): prediction_file.write(word.ljust(15)) prediction_file.write(predicted.rjust(15) + '\n') gold_file.write(word.ljust(15)) gold_file.write(gold.rjust(15) + '\n') # depends on [control=['for'], data=[]] prediction_file.write('\n') gold_file.write('\n')
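A usage sketch, assuming write_to_conll_eval_file and its convert_bio_tags_to_conll_format helper are importable from the surrounding module (both names are taken from the row, not re-verified)::

    sentence = ["The", "cat", "sat"]          # verbal predicate at index 2
    prediction = ["B-ARG0", "I-ARG0", "B-V"]
    gold = ["B-ARG1", "I-ARG1", "B-V"]

    with open("pred.conll", "w") as pf, open("gold.conll", "w") as gf:
        write_to_conll_eval_file(pf, gf, verb_index=2,
                                 sentence=sentence,
                                 prediction=prediction,
                                 gold_labels=gold)
    # Each file gets one word per line: the verb column on the left,
    # the CoNLL-formatted argument span on the right.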
async def increment(self, key, delta=1, namespace=None, _conn=None): """ Increments value stored in key by delta (can be negative). If key doesn't exist, it creates the key with delta as value. :param key: str key to check :param delta: int amount to increment/decrement :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: Value of the key once incremented. -1 if key is not found. :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout :raises: :class:`TypeError` if value is not incrementable """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._increment(ns_key, delta, _conn=_conn) logger.debug("INCREMENT %s %d (%.4f)s", ns_key, ret, time.monotonic() - start) return ret
<ast.AsyncFunctionDef object at 0x7da18f58c6a0>
keyword[async] keyword[def] identifier[increment] ( identifier[self] , identifier[key] , identifier[delta] = literal[int] , identifier[namespace] = keyword[None] , identifier[_conn] = keyword[None] ): literal[string] identifier[start] = identifier[time] . identifier[monotonic] () identifier[ns_key] = identifier[self] . identifier[build_key] ( identifier[key] , identifier[namespace] = identifier[namespace] ) identifier[ret] = keyword[await] identifier[self] . identifier[_increment] ( identifier[ns_key] , identifier[delta] , identifier[_conn] = identifier[_conn] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[ns_key] , identifier[ret] , identifier[time] . identifier[monotonic] ()- identifier[start] ) keyword[return] identifier[ret]
async def increment(self, key, delta=1, namespace=None, _conn=None): """ Increments value stored in key by delta (can be negative). If key doesn't exist, it creates the key with delta as value. :param key: str key to check :param delta: int amount to increment/decrement :param namespace: str alternative namespace to use :param timeout: int or float in seconds specifying maximum timeout for the operations to last :returns: Value of the key once incremented. -1 if key is not found. :raises: :class:`asyncio.TimeoutError` if it lasts more than self.timeout :raises: :class:`TypeError` if value is not incrementable """ start = time.monotonic() ns_key = self.build_key(key, namespace=namespace) ret = await self._increment(ns_key, delta, _conn=_conn) logger.debug('INCREMENT %s %d (%.4f)s', ns_key, ret, time.monotonic() - start) return ret
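An await-style sketch of the coroutine above, assuming an aiocache-style backend (SimpleMemoryCache as the concrete class is an assumption)::

    import asyncio
    from aiocache import SimpleMemoryCache  # assumed backend

    async def main():
        cache = SimpleMemoryCache()
        await cache.set("hits", 10)
        print(await cache.increment("hits"))            # 11
        print(await cache.increment("hits", delta=-5))  # 6
        print(await cache.increment("fresh", 3))        # missing key created as 3

    asyncio.run(main())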
def save_estimations(file_struct, times, labels, boundaries_id, labels_id, **params): """Saves the segment estimations in a JAMS file. Parameters ---------- file_struct : FileStruct Object with the different file paths of the current file. times : np.array or list Estimated boundary times. If `list`, estimated hierarchical boundaries. labels : np.array(N, 2) Estimated labels (None in case we are only storing boundary evaluations). boundaries_id : str Boundary algorithm identifier. labels_id : str Labels algorithm identifier. params : dict Dictionary with additional parameters for both algorithms. """ # Remove features if they exist params.pop("features", None) # Get duration dur = get_duration(file_struct.features_file) # Convert to intervals and sanity check if 'numpy' in str(type(times)): # Flat check inters = utils.times_to_intervals(times) assert len(inters) == len(labels), "Number of boundary intervals " \ "(%d) and labels (%d) do not match" % (len(inters), len(labels)) # Put into lists to simplify the writing process later inters = [inters] labels = [labels] else: # Hierarchical check inters = [] for level in range(len(times)): est_inters = utils.times_to_intervals(times[level]) inters.append(est_inters) assert len(inters[level]) == len(labels[level]), \ "Number of boundary intervals (%d) and labels (%d) do not " \ "match in level %d" % (len(inters[level]), len(labels[level]), level) # Create new estimation namespace = "multi_segment" if params["hier"] else "segment_open" ann = jams.Annotation(namespace=namespace) # Find estimation in file if os.path.isfile(file_struct.est_file): jam = jams.load(file_struct.est_file, validate=False) curr_ann = find_estimation(jam, boundaries_id, labels_id, params) if curr_ann is not None: curr_ann.data = ann.data # cleanup all data ann = curr_ann # This will overwrite the existing estimation else: jam.annotations.append(ann) else: # Create new JAMS if it doesn't exist jam = jams.JAMS() jam.file_metadata.duration = dur jam.annotations.append(ann) # Save metadata and parameters ann.annotation_metadata.version = msaf.__version__ ann.annotation_metadata.data_source = "MSAF" sandbox = {} sandbox["boundaries_id"] = boundaries_id sandbox["labels_id"] = labels_id sandbox["timestamp"] = \ datetime.datetime.today().strftime("%Y/%m/%d %H:%M:%S") for key in params: sandbox[key] = params[key] ann.sandbox = sandbox # Save actual data for i, (level_inters, level_labels) in enumerate(zip(inters, labels)): for bound_inter, label in zip(level_inters, level_labels): dur = float(bound_inter[1]) - float(bound_inter[0]) label = chr(int(label) + 65) if params["hier"]: value = {"label": label, "level": i} else: value = label ann.append(time=bound_inter[0], duration=dur, value=value) # Write results jam.save(file_struct.est_file)
def function[save_estimations, parameter[file_struct, times, labels, boundaries_id, labels_id]]: constant[Saves the segment estimations in a JAMS file. Parameters ---------- file_struct : FileStruct Object with the different file paths of the current file. times : np.array or list Estimated boundary times. If `list`, estimated hierarchical boundaries. labels : np.array(N, 2) Estimated labels (None in case we are only storing boundary evaluations). boundaries_id : str Boundary algorithm identifier. labels_id : str Labels algorithm identifier. params : dict Dictionary with additional parameters for both algorithms. ] call[name[params].pop, parameter[constant[features], constant[None]]] variable[dur] assign[=] call[name[get_duration], parameter[name[file_struct].features_file]] if compare[constant[numpy] in call[name[str], parameter[call[name[type], parameter[name[times]]]]]] begin[:] variable[inters] assign[=] call[name[utils].times_to_intervals, parameter[name[times]]] assert[compare[call[name[len], parameter[name[inters]]] equal[==] call[name[len], parameter[name[labels]]]]] variable[inters] assign[=] list[[<ast.Name object at 0x7da1b02dd900>]] variable[labels] assign[=] list[[<ast.Name object at 0x7da1b02dd990>]] variable[namespace] assign[=] <ast.IfExp object at 0x7da1b02dc460> variable[ann] assign[=] call[name[jams].Annotation, parameter[]] if call[name[os].path.isfile, parameter[name[file_struct].est_file]] begin[:] variable[jam] assign[=] call[name[jams].load, parameter[name[file_struct].est_file]] variable[curr_ann] assign[=] call[name[find_estimation], parameter[name[jam], name[boundaries_id], name[labels_id], name[params]]] if compare[name[curr_ann] is_not constant[None]] begin[:] name[curr_ann].data assign[=] name[ann].data variable[ann] assign[=] name[curr_ann] name[ann].annotation_metadata.version assign[=] name[msaf].__version__ name[ann].annotation_metadata.data_source assign[=] constant[MSAF] variable[sandbox] assign[=] dictionary[[], []] call[name[sandbox]][constant[boundaries_id]] assign[=] name[boundaries_id] call[name[sandbox]][constant[labels_id]] assign[=] name[labels_id] call[name[sandbox]][constant[timestamp]] assign[=] call[call[name[datetime].datetime.today, parameter[]].strftime, parameter[constant[%Y/%m/%d %H:%M:%S]]] for taget[name[key]] in starred[name[params]] begin[:] call[name[sandbox]][name[key]] assign[=] call[name[params]][name[key]] name[ann].sandbox assign[=] name[sandbox] for taget[tuple[[<ast.Name object at 0x7da1b02df0d0>, <ast.Tuple object at 0x7da1b02dca60>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[inters], name[labels]]]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b02de620>, <ast.Name object at 0x7da1b02dd6c0>]]] in starred[call[name[zip], parameter[name[level_inters], name[level_labels]]]] begin[:] variable[dur] assign[=] binary_operation[call[name[float], parameter[call[name[bound_inter]][constant[1]]]] - call[name[float], parameter[call[name[bound_inter]][constant[0]]]]] variable[label] assign[=] call[name[chr], parameter[binary_operation[call[name[int], parameter[name[label]]] + constant[65]]]] if call[name[params]][constant[hier]] begin[:] variable[value] assign[=] dictionary[[<ast.Constant object at 0x7da1b02dfa30>, <ast.Constant object at 0x7da1b02dfeb0>], [<ast.Name object at 0x7da1b02de5c0>, <ast.Name object at 0x7da1b02dfd00>]] call[name[ann].append, parameter[]] call[name[jam].save, parameter[name[file_struct].est_file]]
keyword[def] identifier[save_estimations] ( identifier[file_struct] , identifier[times] , identifier[labels] , identifier[boundaries_id] , identifier[labels_id] , ** identifier[params] ): literal[string] identifier[params] . identifier[pop] ( literal[string] , keyword[None] ) identifier[dur] = identifier[get_duration] ( identifier[file_struct] . identifier[features_file] ) keyword[if] literal[string] keyword[in] identifier[str] ( identifier[type] ( identifier[times] )): identifier[inters] = identifier[utils] . identifier[times_to_intervals] ( identifier[times] ) keyword[assert] identifier[len] ( identifier[inters] )== identifier[len] ( identifier[labels] ), literal[string] literal[string] %( identifier[len] ( identifier[inters] ), identifier[len] ( identifier[labels] )) identifier[inters] =[ identifier[inters] ] identifier[labels] =[ identifier[labels] ] keyword[else] : identifier[inters] =[] keyword[for] identifier[level] keyword[in] identifier[range] ( identifier[len] ( identifier[times] )): identifier[est_inters] = identifier[utils] . identifier[times_to_intervals] ( identifier[times] [ identifier[level] ]) identifier[inters] . identifier[append] ( identifier[est_inters] ) keyword[assert] identifier[len] ( identifier[inters] [ identifier[level] ])== identifier[len] ( identifier[labels] [ identifier[level] ]), literal[string] literal[string] %( identifier[len] ( identifier[inters] [ identifier[level] ]), identifier[len] ( identifier[labels] [ identifier[level] ]), identifier[level] ) identifier[namespace] = literal[string] keyword[if] identifier[params] [ literal[string] ] keyword[else] literal[string] identifier[ann] = identifier[jams] . identifier[Annotation] ( identifier[namespace] = identifier[namespace] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[file_struct] . identifier[est_file] ): identifier[jam] = identifier[jams] . identifier[load] ( identifier[file_struct] . identifier[est_file] , identifier[validate] = keyword[False] ) identifier[curr_ann] = identifier[find_estimation] ( identifier[jam] , identifier[boundaries_id] , identifier[labels_id] , identifier[params] ) keyword[if] identifier[curr_ann] keyword[is] keyword[not] keyword[None] : identifier[curr_ann] . identifier[data] = identifier[ann] . identifier[data] identifier[ann] = identifier[curr_ann] keyword[else] : identifier[jam] . identifier[annotations] . identifier[append] ( identifier[ann] ) keyword[else] : identifier[jam] = identifier[jams] . identifier[JAMS] () identifier[jam] . identifier[file_metadata] . identifier[duration] = identifier[dur] identifier[jam] . identifier[annotations] . identifier[append] ( identifier[ann] ) identifier[ann] . identifier[annotation_metadata] . identifier[version] = identifier[msaf] . identifier[__version__] identifier[ann] . identifier[annotation_metadata] . identifier[data_source] = literal[string] identifier[sandbox] ={} identifier[sandbox] [ literal[string] ]= identifier[boundaries_id] identifier[sandbox] [ literal[string] ]= identifier[labels_id] identifier[sandbox] [ literal[string] ]= identifier[datetime] . identifier[datetime] . identifier[today] (). identifier[strftime] ( literal[string] ) keyword[for] identifier[key] keyword[in] identifier[params] : identifier[sandbox] [ identifier[key] ]= identifier[params] [ identifier[key] ] identifier[ann] . identifier[sandbox] = identifier[sandbox] keyword[for] identifier[i] ,( identifier[level_inters] , identifier[level_labels] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[inters] , identifier[labels] )): keyword[for] identifier[bound_inter] , identifier[label] keyword[in] identifier[zip] ( identifier[level_inters] , identifier[level_labels] ): identifier[dur] = identifier[float] ( identifier[bound_inter] [ literal[int] ])- identifier[float] ( identifier[bound_inter] [ literal[int] ]) identifier[label] = identifier[chr] ( identifier[int] ( identifier[label] )+ literal[int] ) keyword[if] identifier[params] [ literal[string] ]: identifier[value] ={ literal[string] : identifier[label] , literal[string] : identifier[i] } keyword[else] : identifier[value] = identifier[label] identifier[ann] . identifier[append] ( identifier[time] = identifier[bound_inter] [ literal[int] ], identifier[duration] = identifier[dur] , identifier[value] = identifier[value] ) identifier[jam] . identifier[save] ( identifier[file_struct] . identifier[est_file] )
def save_estimations(file_struct, times, labels, boundaries_id, labels_id, **params): """Saves the segment estimations in a JAMS file. Parameters ---------- file_struct : FileStruct Object with the different file paths of the current file. times : np.array or list Estimated boundary times. If `list`, estimated hierarchical boundaries. labels : np.array(N, 2) Estimated labels (None in case we are only storing boundary evaluations). boundaries_id : str Boundary algorithm identifier. labels_id : str Labels algorithm identifier. params : dict Dictionary with additional parameters for both algorithms. """ # Remove features if they exist params.pop('features', None) # Get duration dur = get_duration(file_struct.features_file) # Convert to intervals and sanity check if 'numpy' in str(type(times)): # Flat check inters = utils.times_to_intervals(times) assert len(inters) == len(labels), 'Number of boundary intervals (%d) and labels (%d) do not match' % (len(inters), len(labels)) # Put into lists to simplify the writing process later inters = [inters] labels = [labels] # depends on [control=['if'], data=[]] else: # Hierarchical check inters = [] for level in range(len(times)): est_inters = utils.times_to_intervals(times[level]) inters.append(est_inters) assert len(inters[level]) == len(labels[level]), 'Number of boundary intervals (%d) and labels (%d) do not match in level %d' % (len(inters[level]), len(labels[level]), level) # depends on [control=['for'], data=['level']] # Create new estimation namespace = 'multi_segment' if params['hier'] else 'segment_open' ann = jams.Annotation(namespace=namespace) # Find estimation in file if os.path.isfile(file_struct.est_file): jam = jams.load(file_struct.est_file, validate=False) curr_ann = find_estimation(jam, boundaries_id, labels_id, params) if curr_ann is not None: curr_ann.data = ann.data # cleanup all data ann = curr_ann # This will overwrite the existing estimation # depends on [control=['if'], data=['curr_ann']] else: jam.annotations.append(ann) # depends on [control=['if'], data=[]] else: # Create new JAMS if it doesn't exist jam = jams.JAMS() jam.file_metadata.duration = dur jam.annotations.append(ann) # Save metadata and parameters ann.annotation_metadata.version = msaf.__version__ ann.annotation_metadata.data_source = 'MSAF' sandbox = {} sandbox['boundaries_id'] = boundaries_id sandbox['labels_id'] = labels_id sandbox['timestamp'] = datetime.datetime.today().strftime('%Y/%m/%d %H:%M:%S') for key in params: sandbox[key] = params[key] # depends on [control=['for'], data=['key']] ann.sandbox = sandbox # Save actual data for (i, (level_inters, level_labels)) in enumerate(zip(inters, labels)): for (bound_inter, label) in zip(level_inters, level_labels): dur = float(bound_inter[1]) - float(bound_inter[0]) label = chr(int(label) + 65) if params['hier']: value = {'label': label, 'level': i} # depends on [control=['if'], data=[]] else: value = label ann.append(time=bound_inter[0], duration=dur, value=value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # Write results jam.save(file_struct.est_file)
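The flat branch above leans on utils.times_to_intervals; a self-contained sketch of its assumed behavior, inferred only from the docstring and the length assert (N+1 boundary times become N intervals, the helper's real source is not verified here)::

    import numpy as np

    def times_to_intervals(times):
        # Assumed semantics: pair each boundary with the next one.
        return np.array([times[:-1], times[1:]]).T

    print(times_to_intervals(np.array([0.0, 10.0, 20.0, 30.0])))
    # [[ 0. 10.]
    #  [10. 20.]
    #  [20. 30.]]  -> three intervals, so three labels are expected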
def set_global(self, name, value): """Set a global variable. Equivalent to ``! global`` in RiveScript code. :param str name: The name of the variable to set. :param str value: The value of the variable. Set this to ``None`` to delete the variable. """ if value is None: # Unset the variable. if name in self._global: del self._global[name] self._global[name] = value
def function[set_global, parameter[self, name, value]]: constant[Set a global variable. Equivalent to ``! global`` in RiveScript code. :param str name: The name of the variable to set. :param str value: The value of the variable. Set this to ``None`` to delete the variable. ] if compare[name[value] is constant[None]] begin[:] if compare[name[name] in name[self]._global] begin[:] <ast.Delete object at 0x7da20c6c5030> call[name[self]._global][name[name]] assign[=] name[value]
keyword[def] identifier[set_global] ( identifier[self] , identifier[name] , identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[None] : keyword[if] identifier[name] keyword[in] identifier[self] . identifier[_global] : keyword[del] identifier[self] . identifier[_global] [ identifier[name] ] identifier[self] . identifier[_global] [ identifier[name] ]= identifier[value]
def set_global(self, name, value): """Set a global variable. Equivalent to ``! global`` in RiveScript code. :param str name: The name of the variable to set. :param str value: The value of the variable. Set this to ``None`` to delete the variable. """ if value is None: # Unset the variable. if name in self._global: del self._global[name] # depends on [control=['if'], data=['name']] # depends on [control=['if'], data=[]] self._global[name] = value
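A usage sketch, assuming a RiveScript-style interpreter object. Note that in the row's code the final assignment is unconditional, so passing None deletes the key and then immediately stores None under it::

    from rivescript import RiveScript  # assumed package

    rs = RiveScript()
    rs.set_global("debug", "true")  # same effect as `! global debug = true`
    rs.set_global("debug", None)    # deletes, then re-stores None (see above)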
def toYearFraction(date): """Converts :class:`datetime.date` or :class:`datetime.datetime` to decimal year. Parameters ========== date : :class:`datetime.date` or :class:`datetime.datetime` Returns ======= year : float Decimal year Notes ===== The algorithm is taken from http://stackoverflow.com/a/6451892/2978652 """ def sinceEpoch(date): """returns seconds since epoch""" return time.mktime(date.timetuple()) year = date.year startOfThisYear = dt.datetime(year=year, month=1, day=1) startOfNextYear = dt.datetime(year=year+1, month=1, day=1) yearElapsed = sinceEpoch(date) - sinceEpoch(startOfThisYear) yearDuration = sinceEpoch(startOfNextYear) - sinceEpoch(startOfThisYear) fraction = yearElapsed/yearDuration return date.year + fraction
def function[toYearFraction, parameter[date]]: constant[Converts :class:`datetime.date` or :class:`datetime.datetime` to decimal year. Parameters ========== date : :class:`datetime.date` or :class:`datetime.datetime` Returns ======= year : float Decimal year Notes ===== The algorithm is taken from http://stackoverflow.com/a/6451892/2978652 ] def function[sinceEpoch, parameter[date]]: constant[returns seconds since epoch] return[call[name[time].mktime, parameter[call[name[date].timetuple, parameter[]]]]] variable[year] assign[=] name[date].year variable[startOfThisYear] assign[=] call[name[dt].datetime, parameter[]] variable[startOfNextYear] assign[=] call[name[dt].datetime, parameter[]] variable[yearElapsed] assign[=] binary_operation[call[name[sinceEpoch], parameter[name[date]]] - call[name[sinceEpoch], parameter[name[startOfThisYear]]]] variable[yearDuration] assign[=] binary_operation[call[name[sinceEpoch], parameter[name[startOfNextYear]]] - call[name[sinceEpoch], parameter[name[startOfThisYear]]]] variable[fraction] assign[=] binary_operation[name[yearElapsed] / name[yearDuration]] return[binary_operation[name[date].year + name[fraction]]]
keyword[def] identifier[toYearFraction] ( identifier[date] ): literal[string] keyword[def] identifier[sinceEpoch] ( identifier[date] ): literal[string] keyword[return] identifier[time] . identifier[mktime] ( identifier[date] . identifier[timetuple] ()) identifier[year] = identifier[date] . identifier[year] identifier[startOfThisYear] = identifier[dt] . identifier[datetime] ( identifier[year] = identifier[year] , identifier[month] = literal[int] , identifier[day] = literal[int] ) identifier[startOfNextYear] = identifier[dt] . identifier[datetime] ( identifier[year] = identifier[year] + literal[int] , identifier[month] = literal[int] , identifier[day] = literal[int] ) identifier[yearElapsed] = identifier[sinceEpoch] ( identifier[date] )- identifier[sinceEpoch] ( identifier[startOfThisYear] ) identifier[yearDuration] = identifier[sinceEpoch] ( identifier[startOfNextYear] )- identifier[sinceEpoch] ( identifier[startOfThisYear] ) identifier[fraction] = identifier[yearElapsed] / identifier[yearDuration] keyword[return] identifier[date] . identifier[year] + identifier[fraction]
def toYearFraction(date): """Converts :class:`datetime.date` or :class:`datetime.datetime` to decimal year. Parameters ========== date : :class:`datetime.date` or :class:`datetime.datetime` Returns ======= year : float Decimal year Notes ===== The algorithm is taken from http://stackoverflow.com/a/6451892/2978652 """ def sinceEpoch(date): """returns seconds since epoch""" return time.mktime(date.timetuple()) year = date.year startOfThisYear = dt.datetime(year=year, month=1, day=1) startOfNextYear = dt.datetime(year=year + 1, month=1, day=1) yearElapsed = sinceEpoch(date) - sinceEpoch(startOfThisYear) yearDuration = sinceEpoch(startOfNextYear) - sinceEpoch(startOfThisYear) fraction = yearElapsed / yearDuration return date.year + fraction
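A self-contained worked check of the decimal-year arithmetic; it re-derives the fraction instead of importing the row's function, and the result shifts slightly with the local timezone because time.mktime is local-time based::

    import time
    import datetime as dt

    def since_epoch(d):
        return time.mktime(d.timetuple())

    d = dt.datetime(2014, 7, 2, 12, 0)  # roughly mid-year
    start = dt.datetime(2014, 1, 1)
    nxt = dt.datetime(2015, 1, 1)
    fraction = (since_epoch(d) - since_epoch(start)) / (since_epoch(nxt) - since_epoch(start))
    print(2014 + fraction)  # approximately 2014.5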
def get_last_month_range(): """ Gets the date for the first and the last day of the previous complete month. :returns: A tuple containing two date objects, for the first and the last day of the month respectively. """ today = date.today() # Get the last day for the previous month. end_of_last_month = snap_to_beginning_of_month(today) - timedelta(days=1) start_of_last_month = snap_to_beginning_of_month(end_of_last_month) return (start_of_last_month, end_of_last_month)
def function[get_last_month_range, parameter[]]: constant[ Gets the date for the first and the last day of the previous complete month. :returns: A tuple containing two date objects, for the first and the last day of the month respectively. ] variable[today] assign[=] call[name[date].today, parameter[]] variable[end_of_last_month] assign[=] binary_operation[call[name[snap_to_beginning_of_month], parameter[name[today]]] - call[name[timedelta], parameter[]]] variable[start_of_last_month] assign[=] call[name[snap_to_beginning_of_month], parameter[name[end_of_last_month]]] return[tuple[[<ast.Name object at 0x7da1b0492620>, <ast.Name object at 0x7da1b0490fa0>]]]
keyword[def] identifier[get_last_month_range] (): literal[string] identifier[today] = identifier[date] . identifier[today] () identifier[end_of_last_month] = identifier[snap_to_beginning_of_month] ( identifier[today] )- identifier[timedelta] ( identifier[days] = literal[int] ) identifier[start_of_last_month] = identifier[snap_to_beginning_of_month] ( identifier[end_of_last_month] ) keyword[return] ( identifier[start_of_last_month] , identifier[end_of_last_month] )
def get_last_month_range(): """ Gets the date for the first and the last day of the previous complete month. :returns: A tuple containing two date objects, for the first and the last day of the month respectively. """ today = date.today() # Get the last day for the previous month. end_of_last_month = snap_to_beginning_of_month(today) - timedelta(days=1) start_of_last_month = snap_to_beginning_of_month(end_of_last_month) return (start_of_last_month, end_of_last_month)
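A self-contained sketch; snap_to_beginning_of_month is not defined in this row, so it is assumed to be equivalent to d.replace(day=1)::

    from datetime import date, timedelta

    def snap_to_beginning_of_month(d):
        return d.replace(day=1)  # assumption about the undefined helper

    today = date(2024, 3, 15)
    end_of_last_month = snap_to_beginning_of_month(today) - timedelta(days=1)
    start_of_last_month = snap_to_beginning_of_month(end_of_last_month)
    print(start_of_last_month, end_of_last_month)  # 2024-02-01 2024-02-29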
def has_aligned_reads(align_bam, region=None): """Check if the aligned BAM file has any reads in the region. region can be a chromosome string ("chr22"), a tuple region (("chr22", 1, 100)) or a file of regions. """ import pybedtools if region is not None: if isinstance(region, six.string_types) and os.path.isfile(region): regions = [tuple(r) for r in pybedtools.BedTool(region)] else: regions = [region] with pysam.Samfile(align_bam, "rb") as cur_bam: if region is not None: for region in regions: if isinstance(region, six.string_types): for item in cur_bam.fetch(str(region)): return True else: for item in cur_bam.fetch(str(region[0]), int(region[1]), int(region[2])): return True else: for item in cur_bam: if not item.is_unmapped: return True return False
def function[has_aligned_reads, parameter[align_bam, region]]: constant[Check if the aligned BAM file has any reads in the region. region can be a chromosome string ("chr22"), a tuple region (("chr22", 1, 100)) or a file of regions. ] import module[pybedtools] if compare[name[region] is_not constant[None]] begin[:] if <ast.BoolOp object at 0x7da1b1845210> begin[:] variable[regions] assign[=] <ast.ListComp object at 0x7da1b1845570> with call[name[pysam].Samfile, parameter[name[align_bam], constant[rb]]] begin[:] if compare[name[region] is_not constant[None]] begin[:] for taget[name[region]] in starred[name[regions]] begin[:] if call[name[isinstance], parameter[name[region], name[six].string_types]] begin[:] for taget[name[item]] in starred[call[name[cur_bam].fetch, parameter[call[name[str], parameter[name[region]]]]]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[has_aligned_reads] ( identifier[align_bam] , identifier[region] = keyword[None] ): literal[string] keyword[import] identifier[pybedtools] keyword[if] identifier[region] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[isinstance] ( identifier[region] , identifier[six] . identifier[string_types] ) keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[region] ): identifier[regions] =[ identifier[tuple] ( identifier[r] ) keyword[for] identifier[r] keyword[in] identifier[pybedtools] . identifier[BedTool] ( identifier[region] )] keyword[else] : identifier[regions] =[ identifier[region] ] keyword[with] identifier[pysam] . identifier[Samfile] ( identifier[align_bam] , literal[string] ) keyword[as] identifier[cur_bam] : keyword[if] identifier[region] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[region] keyword[in] identifier[regions] : keyword[if] identifier[isinstance] ( identifier[region] , identifier[six] . identifier[string_types] ): keyword[for] identifier[item] keyword[in] identifier[cur_bam] . identifier[fetch] ( identifier[str] ( identifier[region] )): keyword[return] keyword[True] keyword[else] : keyword[for] identifier[item] keyword[in] identifier[cur_bam] . identifier[fetch] ( identifier[str] ( identifier[region] [ literal[int] ]), identifier[int] ( identifier[region] [ literal[int] ]), identifier[int] ( identifier[region] [ literal[int] ])): keyword[return] keyword[True] keyword[else] : keyword[for] identifier[item] keyword[in] identifier[cur_bam] : keyword[if] keyword[not] identifier[item] . identifier[is_unmapped] : keyword[return] keyword[True] keyword[return] keyword[False]
def has_aligned_reads(align_bam, region=None): """Check if the aligned BAM file has any reads in the region. region can be a chromosome string ("chr22"), a tuple region (("chr22", 1, 100)) or a file of regions. """ import pybedtools if region is not None: if isinstance(region, six.string_types) and os.path.isfile(region): regions = [tuple(r) for r in pybedtools.BedTool(region)] # depends on [control=['if'], data=[]] else: regions = [region] # depends on [control=['if'], data=['region']] with pysam.Samfile(align_bam, 'rb') as cur_bam: if region is not None: for region in regions: if isinstance(region, six.string_types): for item in cur_bam.fetch(str(region)): return True # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: for item in cur_bam.fetch(str(region[0]), int(region[1]), int(region[2])): return True # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['region']] # depends on [control=['if'], data=['region']] else: for item in cur_bam: if not item.is_unmapped: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['with'], data=['cur_bam']] return False
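A usage sketch covering the three region forms named in the docstring, assuming the function is importable and aln.bam is a coordinate-sorted, indexed BAM (all file names are placeholders)::

    print(has_aligned_reads("aln.bam"))                     # any mapped read at all
    print(has_aligned_reads("aln.bam", "chr22"))            # chromosome string
    print(has_aligned_reads("aln.bam", ("chr22", 1, 100)))  # (chrom, start, end) tuple
    print(has_aligned_reads("aln.bam", "regions.bed"))      # BED file of regions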
def validate_body(schema): """Validate the body of incoming requests for a flask view. An example usage might look like this:: from snapstore_schemas import validate_body @validate_body({ 'type': 'array', 'items': { 'type': 'object', 'properties': { 'snap_id': {'type': 'string'}, 'series': {'type': 'string'}, 'name': {'type': 'string'}, 'title': {'type': 'string'}, 'keywords': { 'type': 'array', 'items': {'type': 'string'} }, 'summary': {'type': 'string'}, 'description': {'type': 'string'}, 'created_at': {'type': 'string'}, }, 'required': ['snap_id', 'series'], 'additionalProperties': False } }) def my_flask_view(): # view code here return "Hello World", 200 All incoming requests that have been routed to this view will be matched against the specified schema. If the request body does not match the schema an instance of `DataValidationError` will be raised. By default this will cause the flask application to return a 500 response, but this can be customised by telling flask how to handle these exceptions. The exception instance has an 'error_list' attribute that contains a list of all the errors encountered while processing the request body. """ location = get_callsite_location() def decorator(fn): validate_schema(schema) wrapper = wrap_request(fn, schema) record_schemas( fn, wrapper, location, request_schema=sort_schema(schema)) return wrapper return decorator
def function[validate_body, parameter[schema]]: constant[Validate the body of incoming requests for a flask view. An example usage might look like this:: from snapstore_schemas import validate_body @validate_body({ 'type': 'array', 'items': { 'type': 'object', 'properties': { 'snap_id': {'type': 'string'}, 'series': {'type': 'string'}, 'name': {'type': 'string'}, 'title': {'type': 'string'}, 'keywords': { 'type': 'array', 'items': {'type': 'string'} }, 'summary': {'type': 'string'}, 'description': {'type': 'string'}, 'created_at': {'type': 'string'}, }, 'required': ['snap_id', 'series'], 'additionalProperties': False } }) def my_flask_view(): # view code here return "Hello World", 200 All incoming requests that have been routed to this view will be matched against the specified schema. If the request body does not match the schema an instance of `DataValidationError` will be raised. By default this will cause the flask application to return a 500 response, but this can be customised by telling flask how to handle these exceptions. The exception instance has an 'error_list' attribute that contains a list of all the errors encountered while processing the request body. ] variable[location] assign[=] call[name[get_callsite_location], parameter[]] def function[decorator, parameter[fn]]: call[name[validate_schema], parameter[name[schema]]] variable[wrapper] assign[=] call[name[wrap_request], parameter[name[fn], name[schema]]] call[name[record_schemas], parameter[name[fn], name[wrapper], name[location]]] return[name[wrapper]] return[name[decorator]]
keyword[def] identifier[validate_body] ( identifier[schema] ): literal[string] identifier[location] = identifier[get_callsite_location] () keyword[def] identifier[decorator] ( identifier[fn] ): identifier[validate_schema] ( identifier[schema] ) identifier[wrapper] = identifier[wrap_request] ( identifier[fn] , identifier[schema] ) identifier[record_schemas] ( identifier[fn] , identifier[wrapper] , identifier[location] , identifier[request_schema] = identifier[sort_schema] ( identifier[schema] )) keyword[return] identifier[wrapper] keyword[return] identifier[decorator]
def validate_body(schema): """Validate the body of incoming requests for a flask view. An example usage might look like this:: from snapstore_schemas import validate_body @validate_body({ 'type': 'array', 'items': { 'type': 'object', 'properties': { 'snap_id': {'type': 'string'}, 'series': {'type': 'string'}, 'name': {'type': 'string'}, 'title': {'type': 'string'}, 'keywords': { 'type': 'array', 'items': {'type': 'string'} }, 'summary': {'type': 'string'}, 'description': {'type': 'string'}, 'created_at': {'type': 'string'}, }, 'required': ['snap_id', 'series'], 'additionalProperties': False } }) def my_flask_view(): # view code here return "Hello World", 200 All incoming requests that have been routed to this view will be matched against the specified schema. If the request body does not match the schema an instance of `DataValidationError` will be raised. By default this will cause the flask application to return a 500 response, but this can be customised by telling flask how to handle these exceptions. The exception instance has an 'error_list' attribute that contains a list of all the errors encountered while processing the request body. """ location = get_callsite_location() def decorator(fn): validate_schema(schema) wrapper = wrap_request(fn, schema) record_schemas(fn, wrapper, location, request_schema=sort_schema(schema)) return wrapper return decorator
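The docstring notes the default 500 "can be customised by telling flask how to handle these exceptions"; a sketch of such a handler, assuming DataValidationError is importable from snapstore_schemas as the docstring implies::

    from flask import Flask, jsonify
    from snapstore_schemas import DataValidationError  # name taken from the docstring

    app = Flask(__name__)

    @app.errorhandler(DataValidationError)
    def on_invalid_body(exc):
        # error_list is documented above as holding every violation found.
        return jsonify({"errors": [str(e) for e in exc.error_list]}), 400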
def user_upsert(self, domain, userid, password=None, roles=None, name=None): """ Upsert a user in the cluster :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password :param roles: A list of roles. A role can either be a simple string, or a list of `(role, bucket)` pairs. :param name: Human-readable name :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` Creating a new read-only admin user :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) An example of using more complex roles :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', [('data_reader', '*'), ('data_writer', 'inbox')]) .. warning:: Due to the asynchronous nature of Couchbase management APIs, it may take a few moments for the new user settings to take effect. """ if not roles or not isinstance(roles, list): raise E.ArgumentError("Roles must be a non-empty list") if password and domain == AuthDomain.External: raise E.ArgumentError("External domains must not have passwords") tmplist = [] for role in roles: if isinstance(role, basestring): tmplist.append(role) else: tmplist.append('{0}[{1}]'.format(*role)) role_string = ','.join(tmplist) params = { 'roles': role_string, } if password: params['password'] = password if name: params['name'] = name form = self._mk_formstr(params) path = self._get_management_path(domain, userid) return self.http_request(path=path, method='PUT', content_type='application/x-www-form-urlencoded', content=form)
def function[user_upsert, parameter[self, domain, userid, password, roles, name]]: constant[ Upsert a user in the cluster :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password :param roles: A list of roles. A role can either be a simple string, or a list of `(role, bucket)` pairs. :param name: Human-readable name :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` Creating a new read-only admin user :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) An example of using more complex roles :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', [('data_reader', '*'), ('data_writer', 'inbox')]) .. warning:: Due to the asynchronous nature of Couchbase management APIs, it may take a few moments for the new user settings to take effect. ] if <ast.BoolOp object at 0x7da20c6e6860> begin[:] <ast.Raise object at 0x7da20c6e7910> if <ast.BoolOp object at 0x7da20c6e5d80> begin[:] <ast.Raise object at 0x7da20c6e4ca0> variable[tmplist] assign[=] list[[]] for taget[name[role]] in starred[name[roles]] begin[:] if call[name[isinstance], parameter[name[role], name[basestring]]] begin[:] call[name[tmplist].append, parameter[name[role]]] variable[role_string] assign[=] call[constant[,].join, parameter[name[tmplist]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e5810>], [<ast.Name object at 0x7da20c6e7790>]] if name[password] begin[:] call[name[params]][constant[password]] assign[=] name[password] if name[name] begin[:] call[name[params]][constant[name]] assign[=] name[name] variable[form] assign[=] call[name[self]._mk_formstr, parameter[name[params]]] variable[path] assign[=] call[name[self]._get_management_path, parameter[name[domain], name[userid]]] return[call[name[self].http_request, parameter[]]]
keyword[def] identifier[user_upsert] ( identifier[self] , identifier[domain] , identifier[userid] , identifier[password] = keyword[None] , identifier[roles] = keyword[None] , identifier[name] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[roles] keyword[or] keyword[not] identifier[isinstance] ( identifier[roles] , identifier[list] ): keyword[raise] identifier[E] . identifier[ArgumentError] ( literal[string] ) keyword[if] identifier[password] keyword[and] identifier[domain] == identifier[AuthDomain] . identifier[External] : keyword[raise] identifier[E] . identifier[ArgumentError] ( literal[string] ) identifier[tmplist] =[] keyword[for] identifier[role] keyword[in] identifier[roles] : keyword[if] identifier[isinstance] ( identifier[role] , identifier[basestring] ): identifier[tmplist] . identifier[append] ( identifier[role] ) keyword[else] : identifier[tmplist] . identifier[append] ( literal[string] . identifier[format] (* identifier[role] )) identifier[role_string] = literal[string] . identifier[join] ( identifier[tmplist] ) identifier[params] ={ literal[string] : identifier[role_string] , } keyword[if] identifier[password] : identifier[params] [ literal[string] ]= identifier[password] keyword[if] identifier[name] : identifier[params] [ literal[string] ]= identifier[name] identifier[form] = identifier[self] . identifier[_mk_formstr] ( identifier[params] ) identifier[path] = identifier[self] . identifier[_get_management_path] ( identifier[domain] , identifier[userid] ) keyword[return] identifier[self] . identifier[http_request] ( identifier[path] = identifier[path] , identifier[method] = literal[string] , identifier[content_type] = literal[string] , identifier[content] = identifier[form] )
def user_upsert(self, domain, userid, password=None, roles=None, name=None): """ Upsert a user in the cluster :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password :param roles: A list of roles. A role can either be a simple string, or a list of `(role, bucket)` pairs. :param name: Human-readable name :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` Creating a new read-only admin user :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) An example of using more complex roles :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', [('data_reader', '*'), ('data_writer', 'inbox')]) .. warning:: Due to the asynchronous nature of Couchbase management APIs, it may take a few moments for the new user settings to take effect. """ if not roles or not isinstance(roles, list): raise E.ArgumentError('Roles must be a non-empty list') # depends on [control=['if'], data=[]] if password and domain == AuthDomain.External: raise E.ArgumentError('External domains must not have passwords') # depends on [control=['if'], data=[]] tmplist = [] for role in roles: if isinstance(role, basestring): tmplist.append(role) # depends on [control=['if'], data=[]] else: tmplist.append('{0}[{1}]'.format(*role)) # depends on [control=['for'], data=['role']] role_string = ','.join(tmplist) params = {'roles': role_string} if password: params['password'] = password # depends on [control=['if'], data=[]] if name: params['name'] = name # depends on [control=['if'], data=[]] form = self._mk_formstr(params) path = self._get_management_path(domain, userid) return self.http_request(path=path, method='PUT', content_type='application/x-www-form-urlencoded', content=form)
def to_block(self, env, parent=None): """Convert the transient block to a :class:`ethereum.blocks.Block`""" return Block(self.header, self.transaction_list, self.uncles, env=env, parent=parent)
def function[to_block, parameter[self, env, parent]]: constant[Convert the transient block to a :class:`ethereum.blocks.Block`] return[call[name[Block], parameter[name[self].header, name[self].transaction_list, name[self].uncles]]]
keyword[def] identifier[to_block] ( identifier[self] , identifier[env] , identifier[parent] = keyword[None] ): literal[string] keyword[return] identifier[Block] ( identifier[self] . identifier[header] , identifier[self] . identifier[transaction_list] , identifier[self] . identifier[uncles] , identifier[env] = identifier[env] , identifier[parent] = identifier[parent] )
def to_block(self, env, parent=None): """Convert the transient block to a :class:`ethereum.blocks.Block`""" return Block(self.header, self.transaction_list, self.uncles, env=env, parent=parent)
def pull_status(self, param=None, must=[APIKEY]):
        '''Fetch status reports

        Parameter   Required   Description                           Example
        apikey      yes        unique user identifier                9b11127a9701975c734b8aee81ee3526
        page_size   no         items per page, max 100, default 20   20

        Args:
            param:
        Results:
            Result
        '''
        param = {} if param is None else param
        r = self.verify_param(param, must)
        if not r.is_succ():
            return r

        h = CommonResultHandler(lambda rsp: {VERSION_V1:rsp[FLOW_STATUS] if FLOW_STATUS in rsp else None, VERSION_V2:rsp}[self.version()])
        return self.path('pull_status.json').post(param, h, r)
def function[pull_status, parameter[self, param, must]]: constant[Fetch status reports

        Parameter   Required   Description                           Example
        apikey      yes        unique user identifier                9b11127a9701975c734b8aee81ee3526
        page_size   no         items per page, max 100, default 20   20

    Args:
        param:
    Results:
        Result
    ] variable[param] assign[=] <ast.IfExp object at 0x7da20c7ca500> variable[r] assign[=] call[name[self].verify_param, parameter[name[param], name[must]]] if <ast.UnaryOp object at 0x7da20c7cb460> begin[:] return[name[r]] variable[h] assign[=] call[name[CommonResultHandler], parameter[<ast.Lambda object at 0x7da20c7cb6d0>]] return[call[call[name[self].path, parameter[constant[pull_status.json]]].post, parameter[name[param], name[h], name[r]]]]
keyword[def] identifier[pull_status] ( identifier[self] , identifier[param] = keyword[None] , identifier[must] =[ identifier[APIKEY] ]): literal[string] identifier[param] ={} keyword[if] identifier[param] keyword[is] keyword[None] keyword[else] identifier[param] identifier[r] = identifier[self] . identifier[verify_param] ( identifier[param] , identifier[must] ) keyword[if] keyword[not] identifier[r] . identifier[is_succ] (): keyword[return] identifier[r] identifier[h] = identifier[CommonResultHandler] ( keyword[lambda] identifier[rsp] :{ identifier[VERSION_V1] : identifier[rsp] [ identifier[FLOW_STATUS] ] keyword[if] identifier[FLOW_STATUS] keyword[in] identifier[rsp] keyword[else] keyword[None] , identifier[VERSION_V2] : identifier[rsp] }[ identifier[self] . identifier[version] ()]) keyword[return] identifier[self] . identifier[path] ( literal[string] ). identifier[post] ( identifier[param] , identifier[h] , identifier[r] )
def pull_status(self, param=None, must=[APIKEY]):
    """Fetch status reports

        Parameter   Required   Description                           Example
        apikey      yes        unique user identifier                9b11127a9701975c734b8aee81ee3526
        page_size   no         items per page, max 100, default 20   20

    Args:
        param:
    Results:
        Result
    """
    param = {} if param is None else param
    r = self.verify_param(param, must)
    if not r.is_succ():
        return r # depends on [control=['if'], data=[]]
    h = CommonResultHandler(lambda rsp: {VERSION_V1: rsp[FLOW_STATUS] if FLOW_STATUS in rsp else None, VERSION_V2: rsp}[self.version()])
    return self.path('pull_status.json').post(param, h, r)
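A usage sketch, assuming a yunpian-style SDK client whose flow() operator exposes this method (the client construction and import path are assumptions)::

    from yunpian_python_sdk.ypclient import YunpianClient  # assumed SDK layout

    clnt = YunpianClient('your-apikey')
    r = clnt.flow().pull_status({'page_size': '20'})
    if r.is_succ():
        print(r.data())           # the parsed status reports
    else:
        print(r.code(), r.msg())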
def extend(self, records): """ records - MUST HAVE FORM OF [{"value":value}, ... {"value":value}] OR [{"json":json}, ... {"json":json}] OPTIONAL "id" PROPERTY IS ALSO ACCEPTED """ if self.settings.read_only: Log.error("Index opened in read only mode, no changes allowed") lines = [] try: for r in records: if '_id' in r or 'value' not in r: # I MAKE THIS MISTAKE SO OFTEN, I NEED A CHECK Log.error('Expecting {"id":id, "value":document} form. Not expecting _id') id, version, json_bytes = self.encode(r) if '"_id":' in json_bytes: id, version, json_bytes = self.encode(r) if version: lines.append(value2json({"index": {"_id": id, "version": int(version), "version_type": "external_gte"}})) else: lines.append('{"index":{"_id": ' + value2json(id) + '}}') lines.append(json_bytes) del records if not lines: return with Timer("Add {{num}} documents to {{index}}", {"num": int(len(lines) / 2), "index": self.settings.index}, silent=not self.debug): try: data_string = "\n".join(l for l in lines) + "\n" except Exception as e: raise Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e) wait_for_active_shards = coalesce( self.settings.wait_for_active_shards, {"one": 1, None: None}[self.settings.consistency] ) response = self.cluster.post( self.path + "/_bulk", data=data_string, headers={"Content-Type": "application/x-ndjson"}, timeout=self.settings.timeout, retry=self.settings.retry, params={"wait_for_active_shards": wait_for_active_shards} ) items = response["items"] fails = [] if self.cluster.version.startswith("0.90."): for i, item in enumerate(items): if not item.index.ok: fails.append(i) elif self.cluster.version.startswith(("1.4.", "1.5.", "1.6.", "1.7.", "5.", "6.")): for i, item in enumerate(items): if item.index.status == 409: # 409 ARE VERSION CONFLICTS if "version conflict" not in item.index.error.reason: fails.append(i) # IF NOT A VERSION CONFLICT, REPORT AS FAILURE elif item.index.status not in [200, 201]: fails.append(i) else: Log.error("version not supported {{version}}", version=self.cluster.version) if fails: if len(fails) <= 3: cause = [ Except( template="{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}} (typed={{typed}}):\n{{line}}", params={ "status":items[i].index.status, "error":items[i].index.error, "some":len(fails) - 1, "line":strings.limit(lines[i * 2 + 1], 500 if not self.debug else 100000), "index":self.settings.index, "typed":self.settings.typed, "id":items[i].index._id } ) for i in fails ] else: i=fails[0] cause = Except( template="{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}} (typed={{typed}}):\n{{line}}", params={ "status":items[i].index.status, "error":items[i].index.error, "some":len(fails) - 1, "line":strings.limit(lines[i * 2 + 1], 500 if not self.debug else 100000), "index":self.settings.index, "typed":self.settings.typed, "id":items[i].index._id } ) Log.error("Problems with insert", cause=cause) pass except Exception as e: e = Except.wrap(e) if e.message.startswith("sequence item "): Log.error("problem with {{data}}", data=text_type(repr(lines[int(e.message[14:16].strip())])), cause=e) Log.error("problem sending to ES", cause=e)
def function[extend, parameter[self, records]]: constant[ records - MUST HAVE FORM OF [{"value":value}, ... {"value":value}] OR [{"json":json}, ... {"json":json}] OPTIONAL "id" PROPERTY IS ALSO ACCEPTED ] if name[self].settings.read_only begin[:] call[name[Log].error, parameter[constant[Index opened in read only mode, no changes allowed]]] variable[lines] assign[=] list[[]] <ast.Try object at 0x7da1b0b6d4b0>
keyword[def] identifier[extend] ( identifier[self] , identifier[records] ): literal[string] keyword[if] identifier[self] . identifier[settings] . identifier[read_only] : identifier[Log] . identifier[error] ( literal[string] ) identifier[lines] =[] keyword[try] : keyword[for] identifier[r] keyword[in] identifier[records] : keyword[if] literal[string] keyword[in] identifier[r] keyword[or] literal[string] keyword[not] keyword[in] identifier[r] : identifier[Log] . identifier[error] ( literal[string] ) identifier[id] , identifier[version] , identifier[json_bytes] = identifier[self] . identifier[encode] ( identifier[r] ) keyword[if] literal[string] keyword[in] identifier[json_bytes] : identifier[id] , identifier[version] , identifier[json_bytes] = identifier[self] . identifier[encode] ( identifier[r] ) keyword[if] identifier[version] : identifier[lines] . identifier[append] ( identifier[value2json] ({ literal[string] :{ literal[string] : identifier[id] , literal[string] : identifier[int] ( identifier[version] ), literal[string] : literal[string] }})) keyword[else] : identifier[lines] . identifier[append] ( literal[string] + identifier[value2json] ( identifier[id] )+ literal[string] ) identifier[lines] . identifier[append] ( identifier[json_bytes] ) keyword[del] identifier[records] keyword[if] keyword[not] identifier[lines] : keyword[return] keyword[with] identifier[Timer] ( literal[string] ,{ literal[string] : identifier[int] ( identifier[len] ( identifier[lines] )/ literal[int] ), literal[string] : identifier[self] . identifier[settings] . identifier[index] }, identifier[silent] = keyword[not] identifier[self] . identifier[debug] ): keyword[try] : identifier[data_string] = literal[string] . identifier[join] ( identifier[l] keyword[for] identifier[l] keyword[in] identifier[lines] )+ literal[string] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[Log] . identifier[error] ( literal[string] , identifier[lines] = identifier[lines] , identifier[cause] = identifier[e] ) identifier[wait_for_active_shards] = identifier[coalesce] ( identifier[self] . identifier[settings] . identifier[wait_for_active_shards] , { literal[string] : literal[int] , keyword[None] : keyword[None] }[ identifier[self] . identifier[settings] . identifier[consistency] ] ) identifier[response] = identifier[self] . identifier[cluster] . identifier[post] ( identifier[self] . identifier[path] + literal[string] , identifier[data] = identifier[data_string] , identifier[headers] ={ literal[string] : literal[string] }, identifier[timeout] = identifier[self] . identifier[settings] . identifier[timeout] , identifier[retry] = identifier[self] . identifier[settings] . identifier[retry] , identifier[params] ={ literal[string] : identifier[wait_for_active_shards] } ) identifier[items] = identifier[response] [ literal[string] ] identifier[fails] =[] keyword[if] identifier[self] . identifier[cluster] . identifier[version] . identifier[startswith] ( literal[string] ): keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[items] ): keyword[if] keyword[not] identifier[item] . identifier[index] . identifier[ok] : identifier[fails] . identifier[append] ( identifier[i] ) keyword[elif] identifier[self] . identifier[cluster] . identifier[version] . identifier[startswith] (( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )): keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[items] ): keyword[if] identifier[item] . identifier[index] . identifier[status] == literal[int] : keyword[if] literal[string] keyword[not] keyword[in] identifier[item] . identifier[index] . identifier[error] . identifier[reason] : identifier[fails] . identifier[append] ( identifier[i] ) keyword[elif] identifier[item] . identifier[index] . identifier[status] keyword[not] keyword[in] [ literal[int] , literal[int] ]: identifier[fails] . identifier[append] ( identifier[i] ) keyword[else] : identifier[Log] . identifier[error] ( literal[string] , identifier[version] = identifier[self] . identifier[cluster] . identifier[version] ) keyword[if] identifier[fails] : keyword[if] identifier[len] ( identifier[fails] )<= literal[int] : identifier[cause] =[ identifier[Except] ( identifier[template] = literal[string] , identifier[params] ={ literal[string] : identifier[items] [ identifier[i] ]. identifier[index] . identifier[status] , literal[string] : identifier[items] [ identifier[i] ]. identifier[index] . identifier[error] , literal[string] : identifier[len] ( identifier[fails] )- literal[int] , literal[string] : identifier[strings] . identifier[limit] ( identifier[lines] [ identifier[i] * literal[int] + literal[int] ], literal[int] keyword[if] keyword[not] identifier[self] . identifier[debug] keyword[else] literal[int] ), literal[string] : identifier[self] . identifier[settings] . identifier[index] , literal[string] : identifier[self] . identifier[settings] . identifier[typed] , literal[string] : identifier[items] [ identifier[i] ]. identifier[index] . identifier[_id] } ) keyword[for] identifier[i] keyword[in] identifier[fails] ] keyword[else] : identifier[i] = identifier[fails] [ literal[int] ] identifier[cause] = identifier[Except] ( identifier[template] = literal[string] , identifier[params] ={ literal[string] : identifier[items] [ identifier[i] ]. identifier[index] . identifier[status] , literal[string] : identifier[items] [ identifier[i] ]. identifier[index] . identifier[error] , literal[string] : identifier[len] ( identifier[fails] )- literal[int] , literal[string] : identifier[strings] . identifier[limit] ( identifier[lines] [ identifier[i] * literal[int] + literal[int] ], literal[int] keyword[if] keyword[not] identifier[self] . identifier[debug] keyword[else] literal[int] ), literal[string] : identifier[self] . identifier[settings] . identifier[index] , literal[string] : identifier[self] . identifier[settings] . identifier[typed] , literal[string] : identifier[items] [ identifier[i] ]. identifier[index] . identifier[_id] } ) identifier[Log] . identifier[error] ( literal[string] , identifier[cause] = identifier[cause] ) keyword[pass] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[e] = identifier[Except] . identifier[wrap] ( identifier[e] ) keyword[if] identifier[e] . identifier[message] . identifier[startswith] ( literal[string] ): identifier[Log] . identifier[error] ( literal[string] , identifier[data] = identifier[text_type] ( identifier[repr] ( identifier[lines] [ identifier[int] ( identifier[e] . identifier[message] [ literal[int] : literal[int] ]. identifier[strip] ())])), identifier[cause] = identifier[e] ) identifier[Log] . identifier[error] ( literal[string] , identifier[cause] = identifier[e] )
def extend(self, records):
    """
    records - MUST HAVE FORM OF
        [{"value":value}, ... {"value":value}] OR
        [{"json":json}, ... {"json":json}]
        OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
    """
    if self.settings.read_only:
        Log.error('Index opened in read only mode, no changes allowed') # depends on [control=['if'], data=[]]
    lines = []
    try:
        for r in records:
            if '_id' in r or 'value' not in r: # I MAKE THIS MISTAKE SO OFTEN, I NEED A CHECK
                Log.error('Expecting {"id":id, "value":document} form. Not expecting _id') # depends on [control=['if'], data=[]]
            (id, version, json_bytes) = self.encode(r)
            if '"_id":' in json_bytes:
                (id, version, json_bytes) = self.encode(r) # depends on [control=['if'], data=['json_bytes']]
            if version:
                lines.append(value2json({'index': {'_id': id, 'version': int(version), 'version_type': 'external_gte'}})) # depends on [control=['if'], data=[]]
            else:
                lines.append('{"index":{"_id": ' + value2json(id) + '}}')
            lines.append(json_bytes) # depends on [control=['for'], data=['r']]
        del records
        if not lines:
            return # depends on [control=['if'], data=[]]
        with Timer('Add {{num}} documents to {{index}}', {'num': int(len(lines) / 2), 'index': self.settings.index}, silent=not self.debug):
            try:
                data_string = '\n'.join((l for l in lines)) + '\n' # depends on [control=['try'], data=[]]
            except Exception as e:
                raise Log.error('can not make request body from\n{{lines|indent}}', lines=lines, cause=e) # depends on [control=['except'], data=['e']]
            wait_for_active_shards = coalesce(self.settings.wait_for_active_shards, {'one': 1, None: None}[self.settings.consistency])
            response = self.cluster.post(self.path + '/_bulk', data=data_string, headers={'Content-Type': 'application/x-ndjson'}, timeout=self.settings.timeout, retry=self.settings.retry, params={'wait_for_active_shards': wait_for_active_shards})
            items = response['items']
            fails = []
            if self.cluster.version.startswith('0.90.'):
                for (i, item) in enumerate(items):
                    if not item.index.ok:
                        fails.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
            elif self.cluster.version.startswith(('1.4.', '1.5.', '1.6.', '1.7.', '5.', '6.')):
                for (i, item) in enumerate(items):
                    if item.index.status == 409: # 409 ARE VERSION CONFLICTS
                        if 'version conflict' not in item.index.error.reason:
                            fails.append(i) # IF NOT A VERSION CONFLICT, REPORT AS FAILURE # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
                    elif item.index.status not in [200, 201]:
                        fails.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
            else:
                Log.error('version not supported {{version}}', version=self.cluster.version)
            if fails:
                if len(fails) <= 3:
                    cause = [Except(template='{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}} (typed={{typed}}):\n{{line}}', params={'status': items[i].index.status, 'error': items[i].index.error, 'some': len(fails) - 1, 'line': strings.limit(lines[i * 2 + 1], 500 if not self.debug else 100000), 'index': self.settings.index, 'typed': self.settings.typed, 'id': items[i].index._id}) for i in fails] # depends on [control=['if'], data=[]]
                else:
                    i = fails[0]
                    cause = Except(template='{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}} (typed={{typed}}):\n{{line}}', params={'status': items[i].index.status, 'error': items[i].index.error, 'some': len(fails) - 1, 'line': strings.limit(lines[i * 2 + 1], 500 if not self.debug else 100000), 'index': self.settings.index, 'typed': self.settings.typed, 'id': items[i].index._id})
                Log.error('Problems with insert', cause=cause) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]]
        pass # depends on [control=['try'], data=[]]
    except Exception as e:
        e = Except.wrap(e)
        if e.message.startswith('sequence item '):
            Log.error('problem with {{data}}', data=text_type(repr(lines[int(e.message[14:16].strip())])), cause=e) # depends on [control=['if'], data=[]]
        Log.error('problem sending to ES', cause=e) # depends on [control=['except'], data=['e']]
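A minimal sketch of the record shape extend() expects; `index` stands in for an already-constructed wrapper around an Elasticsearch index and is an assumption here.

records = [
    {"value": {"name": "alpha", "count": 1}},  # documents to store; an optional "id" is also accepted
    {"value": {"name": "beta", "count": 2}},
]
index.extend(records)  # both documents go out in a single _bulk request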
def _parse_process_name(name_str):
    """Parses the process string and returns the process name and its
    directives

    Process strings may contain directive information with the following
    syntax::

        proc_name={'directive':'val'}

    This method parses this string and returns the process name as a
    string and the directives information as a dictionary.

    Parameters
    ----------
    name_str : str
        Raw string with process name and, potentially, directive information

    Returns
    -------
    str
        Process name
    dict or None
        Process directives
    """

    directives = None

    fields = name_str.split("=")
    process_name = fields[0]

    if len(fields) == 2:
        _directives = fields[1].replace("'", '"')
        try:
            directives = json.loads(_directives)
        except json.decoder.JSONDecodeError:
            raise eh.ProcessError(
                "Could not parse directives for process '{}'. The raw"
                " string is: {}\n"
                "Possible causes include:\n"
                "\t1. Spaces inside directives\n"
                "\t2. Missing '=' symbol before directives\n"
                "\t3. Missing quotes (' or \") around directives\n"
                "A valid example: process_name={{'cpus':'2'}}".format(
                    process_name, name_str))

    return process_name, directives
def function[_parse_process_name, parameter[name_str]]: constant[Parses the process string and returns the process name and its directives Process strings my contain directive information with the following syntax:: proc_name={'directive':'val'} This method parses this string and returns the process name as a string and the directives information as a dictionary. Parameters ---------- name_str : str Raw string with process name and, potentially, directive information Returns ------- str Process name dict or None Process directives ] variable[directives] assign[=] constant[None] variable[fields] assign[=] call[name[name_str].split, parameter[constant[=]]] variable[process_name] assign[=] call[name[fields]][constant[0]] if compare[call[name[len], parameter[name[fields]]] equal[==] constant[2]] begin[:] variable[_directives] assign[=] call[call[name[fields]][constant[1]].replace, parameter[constant['], constant["]]] <ast.Try object at 0x7da1b021bf40> return[tuple[[<ast.Name object at 0x7da1b02167d0>, <ast.Name object at 0x7da1b0215ab0>]]]
keyword[def] identifier[_parse_process_name] ( identifier[name_str] ): literal[string] identifier[directives] = keyword[None] identifier[fields] = identifier[name_str] . identifier[split] ( literal[string] ) identifier[process_name] = identifier[fields] [ literal[int] ] keyword[if] identifier[len] ( identifier[fields] )== literal[int] : identifier[_directives] = identifier[fields] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ) keyword[try] : identifier[directives] = identifier[json] . identifier[loads] ( identifier[_directives] ) keyword[except] identifier[json] . identifier[decoder] . identifier[JSONDecodeError] : keyword[raise] identifier[eh] . identifier[ProcessError] ( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[process_name] , identifier[name_str] )) keyword[return] identifier[process_name] , identifier[directives]
def _parse_process_name(name_str):
    """Parses the process string and returns the process name and its
    directives

    Process strings may contain directive information with the following
    syntax::

        proc_name={'directive':'val'}

    This method parses this string and returns the process name as a
    string and the directives information as a dictionary.

    Parameters
    ----------
    name_str : str
        Raw string with process name and, potentially, directive information

    Returns
    -------
    str
        Process name
    dict or None
        Process directives
    """
    directives = None
    fields = name_str.split('=')
    process_name = fields[0]
    if len(fields) == 2:
        _directives = fields[1].replace("'", '"')
        try:
            directives = json.loads(_directives) # depends on [control=['try'], data=[]]
        except json.decoder.JSONDecodeError:
            raise eh.ProcessError('Could not parse directives for process \'{}\'. The raw string is: {}\nPossible causes include:\n\t1. Spaces inside directives\n\t2. Missing \'=\' symbol before directives\n\t3. Missing quotes (\' or ") around directives\nA valid example: process_name={{\'cpus\':\'2\'}}'.format(process_name, name_str)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    return (process_name, directives)
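The parser is self-contained enough to exercise directly; this sketch assumes json has been imported and shows both the directive and the plain form.

name, directives = _parse_process_name("integrity_coverage={'cpus':'2'}")
assert name == "integrity_coverage"
assert directives == {"cpus": "2"}   # single quotes were rewritten into valid JSON first

plain, none_directives = _parse_process_name("fastqc")
assert plain == "fastqc" and none_directives is None   # no '=' means no directives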
def function_trace(function_name): """ Wraps a chunk of code that we want to appear as a separate, explicit, segment in our monitoring tools. """ if newrelic: nr_transaction = newrelic.agent.current_transaction() with newrelic.agent.FunctionTrace(nr_transaction, function_name): yield else: yield
def function[function_trace, parameter[function_name]]: constant[ Wraps a chunk of code that we want to appear as a separate, explicit, segment in our monitoring tools. ] if name[newrelic] begin[:] variable[nr_transaction] assign[=] call[name[newrelic].agent.current_transaction, parameter[]] with call[name[newrelic].agent.FunctionTrace, parameter[name[nr_transaction], name[function_name]]] begin[:] <ast.Yield object at 0x7da1b1b14a00>
keyword[def] identifier[function_trace] ( identifier[function_name] ): literal[string] keyword[if] identifier[newrelic] : identifier[nr_transaction] = identifier[newrelic] . identifier[agent] . identifier[current_transaction] () keyword[with] identifier[newrelic] . identifier[agent] . identifier[FunctionTrace] ( identifier[nr_transaction] , identifier[function_name] ): keyword[yield] keyword[else] : keyword[yield]
def function_trace(function_name): """ Wraps a chunk of code that we want to appear as a separate, explicit, segment in our monitoring tools. """ if newrelic: nr_transaction = newrelic.agent.current_transaction() with newrelic.agent.FunctionTrace(nr_transaction, function_name): yield # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: yield
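function_trace is written as a generator, so it is presumably wrapped as a context manager elsewhere in the real module; a sketch under that assumption:

from contextlib import contextmanager

function_trace = contextmanager(function_trace)  # assumption: the real module applies this decoration

with function_trace('build-report'):
    build_report()  # hypothetical workload; appears as its own segment when newrelic is installed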
def finish_displayhook(self): """Finish up all displayhook activities.""" sys.stdout.flush() sys.stderr.flush() self.session.send(self.pub_socket, self.msg, ident=self.topic) self.msg = None
def function[finish_displayhook, parameter[self]]: constant[Finish up all displayhook activities.] call[name[sys].stdout.flush, parameter[]] call[name[sys].stderr.flush, parameter[]] call[name[self].session.send, parameter[name[self].pub_socket, name[self].msg]] name[self].msg assign[=] constant[None]
keyword[def] identifier[finish_displayhook] ( identifier[self] ): literal[string] identifier[sys] . identifier[stdout] . identifier[flush] () identifier[sys] . identifier[stderr] . identifier[flush] () identifier[self] . identifier[session] . identifier[send] ( identifier[self] . identifier[pub_socket] , identifier[self] . identifier[msg] , identifier[ident] = identifier[self] . identifier[topic] ) identifier[self] . identifier[msg] = keyword[None]
def finish_displayhook(self): """Finish up all displayhook activities.""" sys.stdout.flush() sys.stderr.flush() self.session.send(self.pub_socket, self.msg, ident=self.topic) self.msg = None
def expect_column_values_to_be_decreasing(self,
                                          column,
                                          strictly=None,
                                          parse_strings_as_datetimes=None,
                                          mostly=None,
                                          result_format=None, include_config=False, catch_exceptions=None, meta=None
                                          ):
    """Expect column values to be decreasing.

    By default, this expectation only works for numeric or datetime data.
    When `parse_strings_as_datetimes=True`, it can also parse strings to datetimes.

    If `strictly=True`, then this expectation is only satisfied if each consecutive value
    is strictly decreasing--equal values are treated as failures.

    expect_column_values_to_be_decreasing is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`.

    Args:
        column (str): \
            The column name.

    Keyword Args:
        strictly (Boolean or None): \
            If True, values must be strictly less than previous values
        parse_strings_as_datetimes (boolean or None) : \
            If True, all non-null column values are parsed to datetimes before making comparisons
        mostly (None or a float between 0 and 1): \
            Return `"success": True` if at least mostly percent of values match the expectation. \
            For more detail, see :ref:`mostly`.

    Other Parameters:
        result_format (str or None): \
            Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`.
            For more detail, see :ref:`result_format <result_format>`.
        include_config (boolean): \
            If True, then include the expectation config as part of the result object. \
            For more detail, see :ref:`include_config`.
        catch_exceptions (boolean or None): \
            If True, then catch exceptions and include them as part of the result object. \
            For more detail, see :ref:`catch_exceptions`.
        meta (dict or None): \
            A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. \
            For more detail, see :ref:`meta`.

    Returns:
        A JSON-serializable expectation result object.

        Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and
        :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.

    See Also:
        expect_column_values_to_be_increasing

    """
    raise NotImplementedError
def function[expect_column_values_to_be_decreasing, parameter[self, column, strictly, parse_strings_as_datetimes, mostly, result_format, include_config, catch_exceptions, meta]]: constant[Expect column values to be decreasing. By default, this expectation only works for numeric or datetime data. When `parse_strings_as_datetimes=True`, it can also parse strings to datetimes. If `strictly=True`, then this expectation is only satisfied if each consecutive value is strictly decreasing--equal values are treated as failures. expect_column_values_to_be_decreasing is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`. Args: column (str): The column name. Keyword Args: strictly (Boolean or None): If True, values must be strictly greater than previous values parse_strings_as_datetimes (boolean or None) : If True, all non-null column values to datetimes before making comparisons mostly (None or a float between 0 and 1): Return `"success": True` if at least mostly percent of values match the expectation. For more detail, see :ref:`mostly`. Other Parameters: result_format (str or None): Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`. For more detail, see :ref:`result_format <result_format>`. include_config (boolean): If True, then include the expectation config as part of the result object. For more detail, see :ref:`include_config`. catch_exceptions (boolean or None): If True, then catch exceptions and include them as part of the result object. For more detail, see :ref:`catch_exceptions`. meta (dict or None): A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. For more detail, see :ref:`meta`. Returns: A JSON-serializable expectation result object. Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`. See Also: expect_column_values_to_be_increasing ] <ast.Raise object at 0x7da1b17bd3f0>
keyword[def] identifier[expect_column_values_to_be_decreasing] ( identifier[self] , identifier[column] , identifier[strictly] = keyword[None] , identifier[parse_strings_as_datetimes] = keyword[None] , identifier[mostly] = keyword[None] , identifier[result_format] = keyword[None] , identifier[include_config] = keyword[False] , identifier[catch_exceptions] = keyword[None] , identifier[meta] = keyword[None] ): literal[string] keyword[raise] identifier[NotImplementedError]
def expect_column_values_to_be_decreasing(self, column, strictly=None, parse_strings_as_datetimes=None, mostly=None, result_format=None, include_config=False, catch_exceptions=None, meta=None):
    """Expect column values to be decreasing.

    By default, this expectation only works for numeric or datetime data.
    When `parse_strings_as_datetimes=True`, it can also parse strings to datetimes.

    If `strictly=True`, then this expectation is only satisfied if each consecutive value
    is strictly decreasing--equal values are treated as failures.

    expect_column_values_to_be_decreasing is a :func:`column_map_expectation <great_expectations.data_asset.dataset.Dataset.column_map_expectation>`.

    Args:
        column (str): The column name.

    Keyword Args:
        strictly (Boolean or None): If True, values must be strictly less than previous values
        parse_strings_as_datetimes (boolean or None) : If True, all non-null column values are parsed to datetimes before making comparisons
        mostly (None or a float between 0 and 1): Return `"success": True` if at least mostly percent of values match the expectation. For more detail, see :ref:`mostly`.

    Other Parameters:
        result_format (str or None): Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`. For more detail, see :ref:`result_format <result_format>`.
        include_config (boolean): If True, then include the expectation config as part of the result object. For more detail, see :ref:`include_config`.
        catch_exceptions (boolean or None): If True, then catch exceptions and include them as part of the result object. For more detail, see :ref:`catch_exceptions`.
        meta (dict or None): A JSON-serializable dictionary (nesting allowed) that will be included in the output without modification. For more detail, see :ref:`meta`.

    Returns:
        A JSON-serializable expectation result object.

        Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.

    See Also:
        expect_column_values_to_be_increasing
    """
    raise NotImplementedError
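A hedged usage sketch: a concrete Dataset subclass supplies the implementation, and the column name and thresholds here are illustrative only.

result = my_dataset.expect_column_values_to_be_decreasing(
    "ranking",       # hypothetical column
    strictly=True,   # equal consecutive values count as failures
    mostly=0.95,     # tolerate up to 5% violations
)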
def cyclic_time_shift(self, dt):
    """Shift the data and timestamps by a given number of seconds

    Shift the data and timestamps in the time domain a given number of
    seconds. To just change the time stamps, do ts.start_time += dt.
    The time shift may be smaller than the intrinsic sample rate of the data.
    Note that data will be cyclically rotated, so if you shift by 2
    seconds, the final 2 seconds of your data will now be at the
    beginning of the data set.

    Parameters
    ----------
    dt : float
        Amount of time to shift the vector.

    Returns
    -------
    data : pycbc.types.FrequencySeries
        The time shifted frequency series.
    """
    from pycbc.waveform import apply_fseries_time_shift
    data = apply_fseries_time_shift(self, dt)
    data.start_time = self.start_time - dt
    return data
def function[cyclic_time_shift, parameter[self, dt]]: constant[Shift the data and timestamps by a given number of seconds Shift the data and timestamps in the time domain a given number of seconds. To just change the time stamps, do ts.start_time += dt. The time shift may be smaller than the intrinsic sample rate of the data. Note that data will be cycliclly rotated, so if you shift by 2 seconds, the final 2 seconds of your data will now be at the beginning of the data set. Parameters ---------- dt : float Amount of time to shift the vector. Returns ------- data : pycbc.types.FrequencySeries The time shifted frequency series. ] from relative_module[pycbc.waveform] import module[apply_fseries_time_shift] variable[data] assign[=] call[name[apply_fseries_time_shift], parameter[name[self], name[dt]]] name[data].start_time assign[=] binary_operation[name[self].start_time - name[dt]] return[name[data]]
keyword[def] identifier[cyclic_time_shift] ( identifier[self] , identifier[dt] ): literal[string] keyword[from] identifier[pycbc] . identifier[waveform] keyword[import] identifier[apply_fseries_time_shift] identifier[data] = identifier[apply_fseries_time_shift] ( identifier[self] , identifier[dt] ) identifier[data] . identifier[start_time] = identifier[self] . identifier[start_time] - identifier[dt] keyword[return] identifier[data]
def cyclic_time_shift(self, dt):
    """Shift the data and timestamps by a given number of seconds

    Shift the data and timestamps in the time domain a given number of
    seconds. To just change the time stamps, do ts.start_time += dt.
    The time shift may be smaller than the intrinsic sample rate of the data.
    Note that data will be cyclically rotated, so if you shift by 2
    seconds, the final 2 seconds of your data will now be at the
    beginning of the data set.

    Parameters
    ----------
    dt : float
        Amount of time to shift the vector.

    Returns
    -------
    data : pycbc.types.FrequencySeries
        The time shifted frequency series.
    """
    from pycbc.waveform import apply_fseries_time_shift
    data = apply_fseries_time_shift(self, dt)
    data.start_time = self.start_time - dt
    return data
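A sketch of the cyclic shift in use; the FrequencySeries construction is illustrative and the epoch argument is an assumption about the constructor defaults.

from pycbc.types import FrequencySeries
import numpy

fs = FrequencySeries(numpy.ones(129, dtype=complex), delta_f=0.25, epoch=0)
shifted = fs.cyclic_time_shift(2.0)   # data rotated by 2 s in the time domain
assert shifted.start_time == fs.start_time - 2.0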
def by_organizations(self, field=None):
    """
    Used to segregate the data according to organizations. This method
    pops the latest aggregation from the self.aggregations dict and
    adds it as a nested aggregation under itself

    :param field: the field to create the parent agg (optional)
                  default: author_org_name
    :returns: self, which allows the method to be chainable with the other methods
    """
    # this function is currently only for issues and PRs
    agg_field = field if field else "author_org_name"
    agg_key = "terms_" + agg_field
    if agg_key in self.aggregations.keys():
        agg = self.aggregations[agg_key]
    else:
        agg = A("terms", field=agg_field, missing="others", size=self.size)

    child_agg_counter = self.child_agg_counter_dict[agg_key]  # 0 if not present because defaultdict
    child_name, child_agg = self.aggregations.popitem()

    agg.metric(child_agg_counter, child_agg)
    self.aggregations[agg_key] = agg
    self.child_agg_counter_dict[agg_key] += 1

    return self
def function[by_organizations, parameter[self, field]]: constant[ Used to seggregate the data acording to organizations. This method pops the latest aggregation from the self.aggregations dict and adds it as a nested aggregation under itself :param field: the field to create the parent agg (optional) default: author_org_name :returns: self, which allows the method to be chainable with the other methods ] variable[agg_field] assign[=] <ast.IfExp object at 0x7da1b26a20b0> variable[agg_key] assign[=] binary_operation[constant[terms_] + name[agg_field]] if compare[name[agg_key] in call[name[self].aggregations.keys, parameter[]]] begin[:] variable[agg] assign[=] call[name[self].aggregations][name[agg_key]] variable[child_agg_counter] assign[=] call[name[self].child_agg_counter_dict][name[agg_key]] <ast.Tuple object at 0x7da1b26a1d20> assign[=] call[name[self].aggregations.popitem, parameter[]] call[name[agg].metric, parameter[name[child_agg_counter], name[child_agg]]] call[name[self].aggregations][name[agg_key]] assign[=] name[agg] <ast.AugAssign object at 0x7da1b26a31c0> return[name[self]]
keyword[def] identifier[by_organizations] ( identifier[self] , identifier[field] = keyword[None] ): literal[string] identifier[agg_field] = identifier[field] keyword[if] identifier[field] keyword[else] literal[string] identifier[agg_key] = literal[string] + identifier[agg_field] keyword[if] identifier[agg_key] keyword[in] identifier[self] . identifier[aggregations] . identifier[keys] (): identifier[agg] = identifier[self] . identifier[aggregations] [ identifier[agg_key] ] keyword[else] : identifier[agg] = identifier[A] ( literal[string] , identifier[field] = identifier[agg_field] , identifier[missing] = literal[string] , identifier[size] = identifier[self] . identifier[size] ) identifier[child_agg_counter] = identifier[self] . identifier[child_agg_counter_dict] [ identifier[agg_key] ] identifier[child_name] , identifier[child_agg] = identifier[self] . identifier[aggregations] . identifier[popitem] () identifier[agg] . identifier[metric] ( identifier[child_agg_counter] , identifier[child_agg] ) identifier[self] . identifier[aggregations] [ identifier[agg_key] ]= identifier[agg] identifier[self] . identifier[child_agg_counter_dict] [ identifier[agg_key] ]+= literal[int] keyword[return] identifier[self]
def by_organizations(self, field=None):
    """
    Used to segregate the data according to organizations. This method
    pops the latest aggregation from the self.aggregations dict and
    adds it as a nested aggregation under itself

    :param field: the field to create the parent agg (optional)
                  default: author_org_name
    :returns: self, which allows the method to be chainable with the other methods
    """
    # this function is currently only for issues and PRs
    agg_field = field if field else 'author_org_name'
    agg_key = 'terms_' + agg_field
    if agg_key in self.aggregations.keys():
        agg = self.aggregations[agg_key] # depends on [control=['if'], data=['agg_key']]
    else:
        agg = A('terms', field=agg_field, missing='others', size=self.size)
    child_agg_counter = self.child_agg_counter_dict[agg_key] # 0 if not present because defaultdict
    (child_name, child_agg) = self.aggregations.popitem()
    agg.metric(child_agg_counter, child_agg)
    self.aggregations[agg_key] = agg
    self.child_agg_counter_dict[agg_key] += 1
    return self
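A hedged chaining sketch: some earlier call must have pushed a child aggregation onto self.aggregations; every name below is hypothetical.

q = PullRequests(es_url, index)    # hypothetical query builder owning self.aggregations
q.get_cardinality("author_uuid")   # hypothetical earlier call that pushes a child agg
q.by_organizations()               # nests that agg under terms(author_org_name); chainable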
def PublishSystem(self,
                  fromUserId,
                  toUserId,
                  objectName,
                  content,
                  pushContent=None,
                  pushData=None,
                  isPersisted=None,
                  isCounted=None):
    """
    Send a system message (one user sends a system message to one or more users; a single message is at most 128k and the conversation type is SYSTEM. At most 100 messages may be sent per second, to at most 100 recipients per call; e.g. one call to 100 recipients counts as 100 messages.) method
    @param  fromUserId: sender user Id. (required)
    @param  toUserId: recipient user Id; passing several values sends the message to multiple users, up to 1000. (required)
    @param  objectName: message type. (required)
    @param  content: message payload to send. (required)
    @param  pushContent: for custom messages, the Push content to display; placeholders in the content are replaced with the values configured in `values`. If the custom message type needs no Push notification, pass an empty value. (optional)
    @param  pushData: on iOS, appended to the Push payload; on Android the client receives it in a field named pushData. If Push is not needed, pass an empty value. (optional)
    @param  isPersisted: when the current version defines a new custom message type that older versions lack, whether an older client stores the message on receipt: 0 = do not store, 1 = store; default 1 (store). (optional)
    @param  isCounted: when the current version defines a new custom message type that older versions lack, whether an older client counts the message as unread: 0 = do not count, 1 = count; default 1 (count, unread count increases by 1). (optional)

    @return code: return code, 200 means success.
    @return errorMessage: error message.
    """

    desc = {
        "name": "CodeSuccessReslut",
        "desc": "HTTP success result",
        "fields": [{
            "name": "code",
            "type": "Integer",
            "desc": "Return code, 200 means success."
        }, {
            "name": "errorMessage",
            "type": "String",
            "desc": "Error message."
        }]
    }

    r = self.call_api(
        method=('API', 'POST', 'application/x-www-form-urlencoded'),
        action='/message/system/publish.json',
        params={
            "fromUserId": fromUserId,
            "toUserId": toUserId,
            "objectName": objectName,
            "content": content,
            "pushContent": pushContent,
            "pushData": pushData,
            "isPersisted": isPersisted,
            "isCounted": isCounted
        })
    return Response(r, desc)
def function[PublishSystem, parameter[self, fromUserId, toUserId, objectName, content, pushContent, pushData, isPersisted, isCounted]]: constant[ 发送系统消息方法(一个用户向一个或多个用户发送系统消息,单条消息最大 128k,会话类型为 SYSTEM。每秒钟最多发送 100 条消息,每次最多同时向 100 人发送,如:一次发送 100 人时,示为 100 条消息。) 方法 @param fromUserId:发送人用户 Id。(必传) @param toUserId:接收用户 Id,提供多个本参数可以实现向多人发送消息,上限为 1000 人。(必传) @param txtMessage:发送消息内容(必传) @param pushContent:如果为自定义消息,定义显示的 Push 内容,内容中定义标识通过 values 中设置的标识位内容进行替换.如消息类型为自定义不需要 Push 通知,则对应数组传空值即可。(可选) @param pushData:针对 iOS 平台为 Push 通知时附加到 payload 中,Android 客户端收到推送消息时对应字段名为 pushData。如不需要 Push 功能对应数组传空值即可。(可选) @param isPersisted:当前版本有新的自定义消息,而老版本没有该自定义消息时,老版本客户端收到消息后是否进行存储,0 表示为不存储、 1 表示为存储,默认为 1 存储消息。(可选) @param isCounted:当前版本有新的自定义消息,而老版本没有该自定义消息时,老版本客户端收到消息后是否进行未读消息计数,0 表示为不计数、 1 表示为计数,默认为 1 计数,未读消息数增加 1。(可选) @return code:返回码,200 为正常。 @return errorMessage:错误信息。 ] variable[desc] assign[=] dictionary[[<ast.Constant object at 0x7da1b1910220>, <ast.Constant object at 0x7da1b19106d0>, <ast.Constant object at 0x7da1b1910c10>], [<ast.Constant object at 0x7da1b19135e0>, <ast.Constant object at 0x7da1b19105e0>, <ast.List object at 0x7da1b1912620>]] variable[r] assign[=] call[name[self].call_api, parameter[]] return[call[name[Response], parameter[name[r], name[desc]]]]
keyword[def] identifier[PublishSystem] ( identifier[self] , identifier[fromUserId] , identifier[toUserId] , identifier[objectName] , identifier[content] , identifier[pushContent] = keyword[None] , identifier[pushData] = keyword[None] , identifier[isPersisted] = keyword[None] , identifier[isCounted] = keyword[None] ): literal[string] identifier[desc] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] :[{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] },{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }] } identifier[r] = identifier[self] . identifier[call_api] ( identifier[method] =( literal[string] , literal[string] , literal[string] ), identifier[action] = literal[string] , identifier[params] ={ literal[string] : identifier[fromUserId] , literal[string] : identifier[toUserId] , literal[string] : identifier[objectName] , literal[string] : identifier[content] , literal[string] : identifier[pushContent] , literal[string] : identifier[pushData] , literal[string] : identifier[isPersisted] , literal[string] : identifier[isCounted] }) keyword[return] identifier[Response] ( identifier[r] , identifier[desc] )
def PublishSystem(self, fromUserId, toUserId, objectName, content, pushContent=None, pushData=None, isPersisted=None, isCounted=None):
    """
    Send a system message (one user sends a system message to one or more users; a single message is at most 128k and the conversation type is SYSTEM. At most 100 messages may be sent per second, to at most 100 recipients per call; e.g. one call to 100 recipients counts as 100 messages.) method
    @param  fromUserId: sender user Id. (required)
    @param  toUserId: recipient user Id; passing several values sends the message to multiple users, up to 1000. (required)
    @param  objectName: message type. (required)
    @param  content: message payload to send. (required)
    @param  pushContent: for custom messages, the Push content to display; placeholders in the content are replaced with the values configured in `values`. If the custom message type needs no Push notification, pass an empty value. (optional)
    @param  pushData: on iOS, appended to the Push payload; on Android the client receives it in a field named pushData. If Push is not needed, pass an empty value. (optional)
    @param  isPersisted: when the current version defines a new custom message type that older versions lack, whether an older client stores the message on receipt: 0 = do not store, 1 = store; default 1 (store). (optional)
    @param  isCounted: when the current version defines a new custom message type that older versions lack, whether an older client counts the message as unread: 0 = do not count, 1 = count; default 1 (count, unread count increases by 1). (optional)

    @return code: return code, 200 means success.
    @return errorMessage: error message.
    """
    desc = {'name': 'CodeSuccessReslut', 'desc': 'HTTP success result', 'fields': [{'name': 'code', 'type': 'Integer', 'desc': 'Return code, 200 means success.'}, {'name': 'errorMessage', 'type': 'String', 'desc': 'Error message.'}]}
    r = self.call_api(method=('API', 'POST', 'application/x-www-form-urlencoded'), action='/message/system/publish.json', params={'fromUserId': fromUserId, 'toUserId': toUserId, 'objectName': objectName, 'content': content, 'pushContent': pushContent, 'pushData': pushData, 'isPersisted': isPersisted, 'isCounted': isCounted})
    return Response(r, desc)
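A hedged call sketch; `client` is whatever object carries call_api, and the text-message objectName is an assumption about the surrounding messaging API.

rep = client.PublishSystem(
    fromUserId='admin',
    toUserId='user42',
    objectName='RC:TxtMsg',                        # assumed built-in text message type
    content='{"content":"maintenance at 02:00"}',  # payload serialized by the caller
)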
def loadb(b): """Deserialize ``b`` (instance of ``bytes``) to a Python object.""" assert isinstance(b, (bytes, bytearray)) return std_json.loads(b.decode('utf-8'))
def function[loadb, parameter[b]]: constant[Deserialize ``b`` (instance of ``bytes``) to a Python object.] assert[call[name[isinstance], parameter[name[b], tuple[[<ast.Name object at 0x7da20e955330>, <ast.Name object at 0x7da20e954d30>]]]]] return[call[name[std_json].loads, parameter[call[name[b].decode, parameter[constant[utf-8]]]]]]
keyword[def] identifier[loadb] ( identifier[b] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[b] ,( identifier[bytes] , identifier[bytearray] )) keyword[return] identifier[std_json] . identifier[loads] ( identifier[b] . identifier[decode] ( literal[string] ))
def loadb(b): """Deserialize ``b`` (instance of ``bytes``) to a Python object.""" assert isinstance(b, (bytes, bytearray)) return std_json.loads(b.decode('utf-8'))
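loadb is fully self-contained, so a direct check works:

payload = loadb(b'{"ok": true, "ids": [1, 2, 3]}')
assert payload == {"ok": True, "ids": [1, 2, 3]}
# loadb("not bytes") would trip the isinstance assertion before any decoding happens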
def pcolor_axes(array, px_to_units=px_to_units):
    """
    Return axes :code:`x, y` for *array* to be used with :func:`matplotlib.pyplot.pcolor`.

    *px_to_units* is a function to convert pixels to units. By default,
    returns pixels.
    """
    # ======================================
    # Coords need to be +1 larger than array
    # ======================================
    x_size = array.shape[0]+1
    y_size = array.shape[1]+1

    x = _np.empty((x_size, y_size))
    y = _np.empty((x_size, y_size))

    for i in range(x_size):
        for j in range(y_size):
            x[i, j], y[i, j] = px_to_units(i-0.5, j-0.5)

    return x, y
def function[pcolor_axes, parameter[array, px_to_units]]: constant[ Return axes :code:`x, y` for *array* to be used with :func:`matplotlib.pyplot.color`. *px_to_units* is a function to convert pixels to units. By default, returns pixels. ] variable[x_size] assign[=] binary_operation[call[name[array].shape][constant[0]] + constant[1]] variable[y_size] assign[=] binary_operation[call[name[array].shape][constant[1]] + constant[1]] variable[x] assign[=] call[name[_np].empty, parameter[tuple[[<ast.Name object at 0x7da1afea7070>, <ast.Name object at 0x7da1afea5ab0>]]]] variable[y] assign[=] call[name[_np].empty, parameter[tuple[[<ast.Name object at 0x7da1afea4550>, <ast.Name object at 0x7da1afea73a0>]]]] for taget[name[i]] in starred[call[name[range], parameter[name[x_size]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[name[y_size]]]] begin[:] <ast.Tuple object at 0x7da1afea7c40> assign[=] call[name[px_to_units], parameter[binary_operation[name[i] - constant[0.5]], binary_operation[name[j] - constant[0.5]]]] return[tuple[[<ast.Name object at 0x7da1afea7fd0>, <ast.Name object at 0x7da1afea5f90>]]]
keyword[def] identifier[pcolor_axes] ( identifier[array] , identifier[px_to_units] = identifier[px_to_units] ): literal[string] identifier[x_size] = identifier[array] . identifier[shape] [ literal[int] ]+ literal[int] identifier[y_size] = identifier[array] . identifier[shape] [ literal[int] ]+ literal[int] identifier[x] = identifier[_np] . identifier[empty] (( identifier[x_size] , identifier[y_size] )) identifier[y] = identifier[_np] . identifier[empty] (( identifier[x_size] , identifier[y_size] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[x_size] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[y_size] ): identifier[x] [ identifier[i] , identifier[j] ], identifier[y] [ identifier[i] , identifier[j] ]= identifier[px_to_units] ( identifier[i] - literal[int] , identifier[j] - literal[int] ) keyword[return] identifier[x] , identifier[y]
def pcolor_axes(array, px_to_units=px_to_units):
    """
    Return axes :code:`x, y` for *array* to be used with :func:`matplotlib.pyplot.pcolor`.

    *px_to_units* is a function to convert pixels to units. By default,
    returns pixels.
    """
    # ======================================
    # Coords need to be +1 larger than array
    # ======================================
    x_size = array.shape[0] + 1
    y_size = array.shape[1] + 1
    x = _np.empty((x_size, y_size))
    y = _np.empty((x_size, y_size))
    for i in range(x_size):
        for j in range(y_size):
            (x[i, j], y[i, j]) = px_to_units(i - 0.5, j - 0.5) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
    return (x, y)
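A sketch pairing the returned axes with pcolormesh; the 0.1-units-per-pixel converter is illustrative.

import numpy as _np
import matplotlib.pyplot as plt

img = _np.random.rand(32, 24)
x, y = pcolor_axes(img, px_to_units=lambda i, j: (0.1 * i, 0.1 * j))
plt.pcolormesh(x, y, img)  # x and y are one larger than img along each axis, as pcolormesh expects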
def gradient(self): """Gradient operator of the functional.""" try: op_to_return = self.functional.gradient except NotImplementedError: raise NotImplementedError( '`self.functional.gradient` is not implemented for ' '`self.functional` {}'.format(self.functional)) op_to_return = op_to_return - ConstantOperator(self.subgrad) return op_to_return
def function[gradient, parameter[self]]: constant[Gradient operator of the functional.] <ast.Try object at 0x7da1b20bf5b0> variable[op_to_return] assign[=] binary_operation[name[op_to_return] - call[name[ConstantOperator], parameter[name[self].subgrad]]] return[name[op_to_return]]
keyword[def] identifier[gradient] ( identifier[self] ): literal[string] keyword[try] : identifier[op_to_return] = identifier[self] . identifier[functional] . identifier[gradient] keyword[except] identifier[NotImplementedError] : keyword[raise] identifier[NotImplementedError] ( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[functional] )) identifier[op_to_return] = identifier[op_to_return] - identifier[ConstantOperator] ( identifier[self] . identifier[subgrad] ) keyword[return] identifier[op_to_return]
def gradient(self): """Gradient operator of the functional.""" try: op_to_return = self.functional.gradient # depends on [control=['try'], data=[]] except NotImplementedError: raise NotImplementedError('`self.functional.gradient` is not implemented for `self.functional` {}'.format(self.functional)) # depends on [control=['except'], data=[]] op_to_return = op_to_return - ConstantOperator(self.subgrad) return op_to_return
def tryload(self, cfgstr=None, on_error='raise'):
    """
    Like load, but returns None if the load fails due to a cache miss.

    Args:
        on_error (str): How to handle non-IO errors. Either raise, which
            re-raises the exception, or clear which deletes the cache and
            returns None.
    """
    cfgstr = self._rectify_cfgstr(cfgstr)
    if self.enabled:
        try:
            if self.verbose > 1:
                self.log('[cacher] tryload fname={}'.format(self.fname))
            return self.load(cfgstr)
        except IOError:
            if self.verbose > 0:
                self.log('[cacher] ... {} cache miss'.format(self.fname))
        except Exception:
            if self.verbose > 0:
                self.log('[cacher] ... failed to load')
            if on_error == 'raise':
                raise
            elif on_error == 'clear':
                self.clear(cfgstr)
                return None
            else:
                raise KeyError('Unknown method on_error={}'.format(on_error))
    else:
        if self.verbose > 1:
            self.log('[cacher] ... cache disabled: fname={}'.format(self.fname))
    return None
def function[tryload, parameter[self, cfgstr, on_error]]: constant[ Like load, but returns None if the load fails due to a cache miss. Args: on_error (str): How to handle non-io errors errors. Either raise, which re-raises the exception, or clear which deletes the cache and returns None. ] variable[cfgstr] assign[=] call[name[self]._rectify_cfgstr, parameter[name[cfgstr]]] if name[self].enabled begin[:] <ast.Try object at 0x7da1b01db6a0> return[constant[None]]
keyword[def] identifier[tryload] ( identifier[self] , identifier[cfgstr] = keyword[None] , identifier[on_error] = literal[string] ): literal[string] identifier[cfgstr] = identifier[self] . identifier[_rectify_cfgstr] ( identifier[cfgstr] ) keyword[if] identifier[self] . identifier[enabled] : keyword[try] : keyword[if] identifier[self] . identifier[verbose] > literal[int] : identifier[self] . identifier[log] ( literal[string] . identifier[format] ( identifier[self] . identifier[fname] )) keyword[return] identifier[self] . identifier[load] ( identifier[cfgstr] ) keyword[except] identifier[IOError] : keyword[if] identifier[self] . identifier[verbose] > literal[int] : identifier[self] . identifier[log] ( literal[string] . identifier[format] ( identifier[self] . identifier[fname] )) keyword[except] identifier[Exception] : keyword[if] identifier[self] . identifier[verbose] > literal[int] : identifier[self] . identifier[log] ( literal[string] ) keyword[if] identifier[on_error] == literal[string] : keyword[raise] keyword[elif] identifier[on_error] == literal[string] : identifier[self] . identifier[clear] ( identifier[cfgstr] ) keyword[return] keyword[None] keyword[else] : keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[on_error] )) keyword[else] : keyword[if] identifier[self] . identifier[verbose] > literal[int] : identifier[self] . identifier[log] ( literal[string] . identifier[format] ( identifier[self] . identifier[fname] )) keyword[return] keyword[None]
def tryload(self, cfgstr=None, on_error='raise'):
    """
    Like load, but returns None if the load fails due to a cache miss.

    Args:
        on_error (str): How to handle non-IO errors. Either raise, which
            re-raises the exception, or clear which deletes the cache and
            returns None.
    """
    cfgstr = self._rectify_cfgstr(cfgstr)
    if self.enabled:
        try:
            if self.verbose > 1:
                self.log('[cacher] tryload fname={}'.format(self.fname)) # depends on [control=['if'], data=[]]
            return self.load(cfgstr) # depends on [control=['try'], data=[]]
        except IOError:
            if self.verbose > 0:
                self.log('[cacher] ... {} cache miss'.format(self.fname)) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
        except Exception:
            if self.verbose > 0:
                self.log('[cacher] ... failed to load') # depends on [control=['if'], data=[]]
            if on_error == 'raise':
                raise # depends on [control=['if'], data=[]]
            elif on_error == 'clear':
                self.clear(cfgstr)
                return None # depends on [control=['if'], data=[]]
            else:
                raise KeyError('Unknown method on_error={}'.format(on_error)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
    elif self.verbose > 1:
        self.log('[cacher] ... cache disabled: fname={}'.format(self.fname)) # depends on [control=['if'], data=[]]
    return None
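The compute-on-miss pattern this enables, assuming a ubelt-style Cacher whose constructor and save() match the surrounding API (both are assumptions here):

cacher = Cacher('my_result', cfgstr='v1')   # assumed constructor signature
data = cacher.tryload()
if data is None:                 # cache miss, disabled cache, or on_error='clear'
    data = expensive_compute()   # hypothetical workload
    cacher.save(data)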
def _query(action=None, command=None, args=None, method='GET', header_dict=None, data=None): ''' Make a web call to RallyDev. ''' token = _get_token() username = __opts__.get('rallydev', {}).get('username', None) password = __opts__.get('rallydev', {}).get('password', None) path = 'https://rally1.rallydev.com/slm/webservice/v2.0/' if action: path += action if command: path += '/{0}'.format(command) log.debug('RallyDev URL: %s', path) if not isinstance(args, dict): args = {} args['key'] = token if header_dict is None: header_dict = {'Content-type': 'application/json'} if method != 'POST': header_dict['Accept'] = 'application/json' decode = True if method == 'DELETE': decode = False return_content = None result = salt.utils.http.query( path, method, params=args, data=data, header_dict=header_dict, decode=decode, decode_type='json', text=True, status=True, username=username, password=password, cookies=True, persist_session=True, opts=__opts__, ) log.debug('RallyDev Response Status Code: %s', result['status']) if 'error' in result: log.error(result['error']) return [result['status'], result['error']] return [result['status'], result.get('dict', {})]
def function[_query, parameter[action, command, args, method, header_dict, data]]: constant[ Make a web call to RallyDev. ] variable[token] assign[=] call[name[_get_token], parameter[]] variable[username] assign[=] call[call[name[__opts__].get, parameter[constant[rallydev], dictionary[[], []]]].get, parameter[constant[username], constant[None]]] variable[password] assign[=] call[call[name[__opts__].get, parameter[constant[rallydev], dictionary[[], []]]].get, parameter[constant[password], constant[None]]] variable[path] assign[=] constant[https://rally1.rallydev.com/slm/webservice/v2.0/] if name[action] begin[:] <ast.AugAssign object at 0x7da1b2024490> if name[command] begin[:] <ast.AugAssign object at 0x7da1b2026590> call[name[log].debug, parameter[constant[RallyDev URL: %s], name[path]]] if <ast.UnaryOp object at 0x7da1b2024970> begin[:] variable[args] assign[=] dictionary[[], []] call[name[args]][constant[key]] assign[=] name[token] if compare[name[header_dict] is constant[None]] begin[:] variable[header_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b2026e00>], [<ast.Constant object at 0x7da1b2025690>]] if compare[name[method] not_equal[!=] constant[POST]] begin[:] call[name[header_dict]][constant[Accept]] assign[=] constant[application/json] variable[decode] assign[=] constant[True] if compare[name[method] equal[==] constant[DELETE]] begin[:] variable[decode] assign[=] constant[False] variable[return_content] assign[=] constant[None] variable[result] assign[=] call[name[salt].utils.http.query, parameter[name[path], name[method]]] call[name[log].debug, parameter[constant[RallyDev Response Status Code: %s], call[name[result]][constant[status]]]] if compare[constant[error] in name[result]] begin[:] call[name[log].error, parameter[call[name[result]][constant[error]]]] return[list[[<ast.Subscript object at 0x7da1b210ab30>, <ast.Subscript object at 0x7da1b210afe0>]]] return[list[[<ast.Subscript object at 0x7da1b210a230>, <ast.Call object at 0x7da1b2108ac0>]]]
keyword[def] identifier[_query] ( identifier[action] = keyword[None] , identifier[command] = keyword[None] , identifier[args] = keyword[None] , identifier[method] = literal[string] , identifier[header_dict] = keyword[None] , identifier[data] = keyword[None] ): literal[string] identifier[token] = identifier[_get_token] () identifier[username] = identifier[__opts__] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] ) identifier[password] = identifier[__opts__] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] , keyword[None] ) identifier[path] = literal[string] keyword[if] identifier[action] : identifier[path] += identifier[action] keyword[if] identifier[command] : identifier[path] += literal[string] . identifier[format] ( identifier[command] ) identifier[log] . identifier[debug] ( literal[string] , identifier[path] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[args] , identifier[dict] ): identifier[args] ={} identifier[args] [ literal[string] ]= identifier[token] keyword[if] identifier[header_dict] keyword[is] keyword[None] : identifier[header_dict] ={ literal[string] : literal[string] } keyword[if] identifier[method] != literal[string] : identifier[header_dict] [ literal[string] ]= literal[string] identifier[decode] = keyword[True] keyword[if] identifier[method] == literal[string] : identifier[decode] = keyword[False] identifier[return_content] = keyword[None] identifier[result] = identifier[salt] . identifier[utils] . identifier[http] . identifier[query] ( identifier[path] , identifier[method] , identifier[params] = identifier[args] , identifier[data] = identifier[data] , identifier[header_dict] = identifier[header_dict] , identifier[decode] = identifier[decode] , identifier[decode_type] = literal[string] , identifier[text] = keyword[True] , identifier[status] = keyword[True] , identifier[username] = identifier[username] , identifier[password] = identifier[password] , identifier[cookies] = keyword[True] , identifier[persist_session] = keyword[True] , identifier[opts] = identifier[__opts__] , ) identifier[log] . identifier[debug] ( literal[string] , identifier[result] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[result] : identifier[log] . identifier[error] ( identifier[result] [ literal[string] ]) keyword[return] [ identifier[result] [ literal[string] ], identifier[result] [ literal[string] ]] keyword[return] [ identifier[result] [ literal[string] ], identifier[result] . identifier[get] ( literal[string] ,{})]
def _query(action=None, command=None, args=None, method='GET', header_dict=None, data=None): """ Make a web call to RallyDev. """ token = _get_token() username = __opts__.get('rallydev', {}).get('username', None) password = __opts__.get('rallydev', {}).get('password', None) path = 'https://rally1.rallydev.com/slm/webservice/v2.0/' if action: path += action # depends on [control=['if'], data=[]] if command: path += '/{0}'.format(command) # depends on [control=['if'], data=[]] log.debug('RallyDev URL: %s', path) if not isinstance(args, dict): args = {} # depends on [control=['if'], data=[]] args['key'] = token if header_dict is None: header_dict = {'Content-type': 'application/json'} # depends on [control=['if'], data=['header_dict']] if method != 'POST': header_dict['Accept'] = 'application/json' # depends on [control=['if'], data=[]] decode = True if method == 'DELETE': decode = False # depends on [control=['if'], data=[]] return_content = None result = salt.utils.http.query(path, method, params=args, data=data, header_dict=header_dict, decode=decode, decode_type='json', text=True, status=True, username=username, password=password, cookies=True, persist_session=True, opts=__opts__) log.debug('RallyDev Response Status Code: %s', result['status']) if 'error' in result: log.error(result['error']) return [result['status'], result['error']] # depends on [control=['if'], data=['result']] return [result['status'], result.get('dict', {})]
def gaussian_deconvolve (smaj, smin, spa, bmaj, bmin, bpa): """Deconvolve two Gaussians analytically. Given the shapes of 2-dimensional “source” and “beam” Gaussians, this returns a deconvolved “result” Gaussian such that the convolution of “beam” and “result” is “source”. Arguments: smaj Major axis of source Gaussian. smin Minor axis of source Gaussian. spa Orientation angle of source Gaussian, in radians. bmaj Major axis of beam Gaussian. bmin Minor axis of beam Gaussian. bpa Orientation angle of beam Gaussian, in radians. The return value is ``(rmaj, rmin, rpa, status)``. The first three values have the same format as the input arguments. The *status* result is one of "ok", "pointlike", or "fail". A "pointlike" status indicates that the source and beam shapes are difficult to distinguish; a "fail" status indicates that the two shapes seem to be mutually incompatible (e.g., source and beam are very narrow and orthogonal). The axes can be measured in any units, so long as they're consistent. Ideally if:: rmaj, rmin, rpa, status = gaussian_deconvolve (smaj, smin, spa, bmaj, bmin, bpa) then:: smaj, smin, spa = gaussian_convolve (rmaj, rmin, rpa, bmaj, bmin, bpa) Implementation derived from MIRIAD’s ``gaudfac``. This function currently doesn't do a great job of dealing with pointlike sources, i.e. ones where “source” and “beam” are nearly indistinguishable. """ # I've added extra code to ensure ``smaj >= bmaj``, ``smin >= bmin``, and # increased the coefficient in front of "limit" from 0.1 to 0.5. Feel a # little wary about that first change. from numpy import cos, sin, sqrt, min, abs, arctan2 if smaj < bmaj: smaj = bmaj if smin < bmin: smin = bmin alpha = ((smaj * cos (spa))**2 + (smin * sin (spa))**2 - (bmaj * cos (bpa))**2 - (bmin * sin (bpa))**2) beta = ((smaj * sin (spa))**2 + (smin * cos (spa))**2 - (bmaj * sin (bpa))**2 - (bmin * cos (bpa))**2) gamma = 2 * ((smin**2 - smaj**2) * sin (spa) * cos (spa) - (bmin**2 - bmaj**2) * sin (bpa) * cos (bpa)) s = alpha + beta t = sqrt ((alpha - beta)**2 + gamma**2) limit = 0.5 * min ([smaj, smin, bmaj, bmin])**2 status = 'ok' if alpha < 0 or beta < 0 or s < t: dmaj = dmin = dpa = 0 if 0.5 * (s - t) < limit and alpha > -limit and beta > -limit: status = 'pointlike' else: status = 'fail' else: dmaj = sqrt (0.5 * (s + t)) dmin = sqrt (0.5 * (s - t)) if abs (gamma) + abs (alpha - beta) == 0: dpa = 0 else: dpa = 0.5 * arctan2 (-gamma, alpha - beta) return dmaj, dmin, dpa, status
def function[gaussian_deconvolve, parameter[smaj, smin, spa, bmaj, bmin, bpa]]:
    constant[Deconvolve two Gaussians analytically.

    Given the shapes of 2-dimensional “source” and “beam” Gaussians, this
    returns a deconvolved “result” Gaussian such that the convolution of
    “beam” and “result” is “source”.

    Arguments:

    smaj
      Major axis of source Gaussian.
    smin
      Minor axis of source Gaussian.
    spa
      Orientation angle of source Gaussian, in radians.
    bmaj
      Major axis of beam Gaussian.
    bmin
      Minor axis of beam Gaussian.
    bpa
      Orientation angle of beam Gaussian, in radians.

    The return value is ``(rmaj, rmin, rpa, status)``. The first three values
    have the same format as the input arguments. The *status* result is one
    of "ok", "pointlike", or "fail". A "pointlike" status indicates that the
    source and beam shapes are difficult to distinguish; a "fail" status
    indicates that the two shapes seem to be mutually incompatible (e.g.,
    source and beam are very narrow and orthogonal).

    The axes can be measured in any units, so long as they're consistent.

    Ideally if::

      rmaj, rmin, rpa, status = gaussian_deconvolve (smaj, smin, spa, bmaj, bmin, bpa)

    then::

      smaj, smin, spa = gaussian_convolve (rmaj, rmin, rpa, bmaj, bmin, bpa)

    Implementation derived from MIRIAD’s ``gaudfac``. This function currently
    doesn't do a great job of dealing with pointlike sources, i.e. ones where
    “source” and “beam” are nearly indistinguishable.

    ]
    from relative_module[numpy] import module[cos], module[sin], module[sqrt], module[min], module[abs], module[arctan2]
    if compare[name[smaj] less[<] name[bmaj]] begin[:]
    variable[smaj] assign[=] name[bmaj]
    if compare[name[smin] less[<] name[bmin]] begin[:]
    variable[smin] assign[=] name[bmin]
    variable[alpha] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[smaj] * call[name[cos], parameter[name[spa]]]] ** constant[2]] + binary_operation[binary_operation[name[smin] * call[name[sin], parameter[name[spa]]]] ** constant[2]]] - binary_operation[binary_operation[name[bmaj] * call[name[cos], parameter[name[bpa]]]] ** constant[2]]] - binary_operation[binary_operation[name[bmin] * call[name[sin], parameter[name[bpa]]]] ** constant[2]]]
    variable[beta] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[smaj] * call[name[sin], parameter[name[spa]]]] ** constant[2]] + binary_operation[binary_operation[name[smin] * call[name[cos], parameter[name[spa]]]] ** constant[2]]] - binary_operation[binary_operation[name[bmaj] * call[name[sin], parameter[name[bpa]]]] ** constant[2]]] - binary_operation[binary_operation[name[bmin] * call[name[cos], parameter[name[bpa]]]] ** constant[2]]]
    variable[gamma] assign[=] binary_operation[constant[2] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[smin] ** constant[2]] - binary_operation[name[smaj] ** constant[2]]] * call[name[sin], parameter[name[spa]]]] * call[name[cos], parameter[name[spa]]]] - binary_operation[binary_operation[binary_operation[binary_operation[name[bmin] ** constant[2]] - binary_operation[name[bmaj] ** constant[2]]] * call[name[sin], parameter[name[bpa]]]] * call[name[cos], parameter[name[bpa]]]]]]
    variable[s] assign[=] binary_operation[name[alpha] + name[beta]]
    variable[t] assign[=] call[name[sqrt], parameter[binary_operation[binary_operation[binary_operation[name[alpha] - name[beta]] ** constant[2]] + binary_operation[name[gamma] ** constant[2]]]]]
    variable[limit] assign[=] binary_operation[constant[0.5] * binary_operation[call[name[min], parameter[list[[<ast.Name object at 0x7da1b2644850>, <ast.Name object at 0x7da1b2644e80>, <ast.Name object at 0x7da1b2645c60>, <ast.Name object at 0x7da1b2646ce0>]]]] ** constant[2]]]
    variable[status] assign[=] constant[ok]
    if <ast.BoolOp object at 0x7da1b2646bc0> begin[:]
    variable[dmaj] assign[=] constant[0]
    if <ast.BoolOp object at 0x7da1b2647730> begin[:]
    variable[status] assign[=] constant[pointlike]
    return[tuple[[<ast.Name object at 0x7da1b26479d0>, <ast.Name object at 0x7da1b26479a0>, <ast.Name object at 0x7da1b2647850>, <ast.Name object at 0x7da1b27bb850>]]]
keyword[def] identifier[gaussian_deconvolve] ( identifier[smaj] , identifier[smin] , identifier[spa] , identifier[bmaj] , identifier[bmin] , identifier[bpa] ): literal[string] keyword[from] identifier[numpy] keyword[import] identifier[cos] , identifier[sin] , identifier[sqrt] , identifier[min] , identifier[abs] , identifier[arctan2] keyword[if] identifier[smaj] < identifier[bmaj] : identifier[smaj] = identifier[bmaj] keyword[if] identifier[smin] < identifier[bmin] : identifier[smin] = identifier[bmin] identifier[alpha] =(( identifier[smaj] * identifier[cos] ( identifier[spa] ))** literal[int] +( identifier[smin] * identifier[sin] ( identifier[spa] ))** literal[int] - ( identifier[bmaj] * identifier[cos] ( identifier[bpa] ))** literal[int] -( identifier[bmin] * identifier[sin] ( identifier[bpa] ))** literal[int] ) identifier[beta] =(( identifier[smaj] * identifier[sin] ( identifier[spa] ))** literal[int] +( identifier[smin] * identifier[cos] ( identifier[spa] ))** literal[int] - ( identifier[bmaj] * identifier[sin] ( identifier[bpa] ))** literal[int] -( identifier[bmin] * identifier[cos] ( identifier[bpa] ))** literal[int] ) identifier[gamma] = literal[int] *(( identifier[smin] ** literal[int] - identifier[smaj] ** literal[int] )* identifier[sin] ( identifier[spa] )* identifier[cos] ( identifier[spa] )- ( identifier[bmin] ** literal[int] - identifier[bmaj] ** literal[int] )* identifier[sin] ( identifier[bpa] )* identifier[cos] ( identifier[bpa] )) identifier[s] = identifier[alpha] + identifier[beta] identifier[t] = identifier[sqrt] (( identifier[alpha] - identifier[beta] )** literal[int] + identifier[gamma] ** literal[int] ) identifier[limit] = literal[int] * identifier[min] ([ identifier[smaj] , identifier[smin] , identifier[bmaj] , identifier[bmin] ])** literal[int] identifier[status] = literal[string] keyword[if] identifier[alpha] < literal[int] keyword[or] identifier[beta] < literal[int] keyword[or] identifier[s] < identifier[t] : identifier[dmaj] = identifier[dmin] = identifier[dpa] = literal[int] keyword[if] literal[int] *( identifier[s] - identifier[t] )< identifier[limit] keyword[and] identifier[alpha] >- identifier[limit] keyword[and] identifier[beta] >- identifier[limit] : identifier[status] = literal[string] keyword[else] : identifier[status] = literal[string] keyword[else] : identifier[dmaj] = identifier[sqrt] ( literal[int] *( identifier[s] + identifier[t] )) identifier[dmin] = identifier[sqrt] ( literal[int] *( identifier[s] - identifier[t] )) keyword[if] identifier[abs] ( identifier[gamma] )+ identifier[abs] ( identifier[alpha] - identifier[beta] )== literal[int] : identifier[dpa] = literal[int] keyword[else] : identifier[dpa] = literal[int] * identifier[arctan2] (- identifier[gamma] , identifier[alpha] - identifier[beta] ) keyword[return] identifier[dmaj] , identifier[dmin] , identifier[dpa] , identifier[status]
def gaussian_deconvolve(smaj, smin, spa, bmaj, bmin, bpa): """Deconvolve two Gaussians analytically. Given the shapes of 2-dimensional “source” and “beam” Gaussians, this returns a deconvolved “result” Gaussian such that the convolution of “beam” and “result” is “source”. Arguments: smaj Major axis of source Gaussian. smin Minor axis of source Gaussian. spa Orientation angle of source Gaussian, in radians. bmaj Major axis of beam Gaussian. bmin Minor axis of beam Gaussian. bpa Orientation angle of beam Gaussian, in radians. The return value is ``(rmaj, rmin, rpa, status)``. The first three values have the same format as the input arguments. The *status* result is one of "ok", "pointlike", or "fail". A "pointlike" status indicates that the source and beam shapes are difficult to distinguish; a "fail" status indicates that the two shapes seem to be mutually incompatible (e.g., source and beam are very narrow and orthogonal). The axes can be measured in any units, so long as they're consistent. Ideally if:: rmaj, rmin, rpa, status = gaussian_deconvolve (smaj, smin, spa, bmaj, bmin, bpa) then:: smaj, smin, spa = gaussian_convolve (rmaj, rmin, rpa, bmaj, bmin, bpa) Implementation derived from MIRIAD’s ``gaudfac``. This function currently doesn't do a great job of dealing with pointlike sources, i.e. ones where “source” and “beam” are nearly indistinguishable. """ # I've added extra code to ensure ``smaj >= bmaj``, ``smin >= bmin``, and # increased the coefficient in front of "limit" from 0.1 to 0.5. Feel a # little wary about that first change. from numpy import cos, sin, sqrt, min, abs, arctan2 if smaj < bmaj: smaj = bmaj # depends on [control=['if'], data=['smaj', 'bmaj']] if smin < bmin: smin = bmin # depends on [control=['if'], data=['smin', 'bmin']] alpha = (smaj * cos(spa)) ** 2 + (smin * sin(spa)) ** 2 - (bmaj * cos(bpa)) ** 2 - (bmin * sin(bpa)) ** 2 beta = (smaj * sin(spa)) ** 2 + (smin * cos(spa)) ** 2 - (bmaj * sin(bpa)) ** 2 - (bmin * cos(bpa)) ** 2 gamma = 2 * ((smin ** 2 - smaj ** 2) * sin(spa) * cos(spa) - (bmin ** 2 - bmaj ** 2) * sin(bpa) * cos(bpa)) s = alpha + beta t = sqrt((alpha - beta) ** 2 + gamma ** 2) limit = 0.5 * min([smaj, smin, bmaj, bmin]) ** 2 status = 'ok' if alpha < 0 or beta < 0 or s < t: dmaj = dmin = dpa = 0 if 0.5 * (s - t) < limit and alpha > -limit and (beta > -limit): status = 'pointlike' # depends on [control=['if'], data=[]] else: status = 'fail' # depends on [control=['if'], data=[]] else: dmaj = sqrt(0.5 * (s + t)) dmin = sqrt(0.5 * (s - t)) if abs(gamma) + abs(alpha - beta) == 0: dpa = 0 # depends on [control=['if'], data=[]] else: dpa = 0.5 * arctan2(-gamma, alpha - beta) return (dmaj, dmin, dpa, status)
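A worked round trip may help here; with both position angles at zero the quadratic forms decouple and deconvolution reduces to subtracting axes in quadrature. The numbers below were chosen so the expected answer is exact (an illustrative sketch, not a test from the original module):

from numpy import sqrt

# source (5, sqrt(5), 0) is beam (4, 2, 0) convolved with (3, 1, 0)
rmaj, rmin, rpa, status = gaussian_deconvolve(5.0, sqrt(5.0), 0.0, 4.0, 2.0, 0.0)
assert status == 'ok'
assert abs(rmaj - 3.0) < 1e-9 and abs(rmin - 1.0) < 1e-9 and abs(rpa) < 1e-9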
def _encode(values, salt, min_length, alphabet, separators, guards): """Helper function that does the hash building without argument checks.""" len_alphabet = len(alphabet) len_separators = len(separators) values_hash = sum(x % (i + 100) for i, x in enumerate(values)) encoded = lottery = alphabet[values_hash % len(alphabet)] for i, value in enumerate(values): alphabet_salt = (lottery + salt + alphabet)[:len_alphabet] alphabet = _reorder(alphabet, alphabet_salt) last = _hash(value, alphabet) encoded += last value %= ord(last[0]) + i encoded += separators[value % len_separators] encoded = encoded[:-1] # cut off last separator return (encoded if len(encoded) >= min_length else _ensure_length(encoded, min_length, alphabet, guards, values_hash))
def function[_encode, parameter[values, salt, min_length, alphabet, separators, guards]]: constant[Helper function that does the hash building without argument checks.] variable[len_alphabet] assign[=] call[name[len], parameter[name[alphabet]]] variable[len_separators] assign[=] call[name[len], parameter[name[separators]]] variable[values_hash] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18dc9b010>]] variable[encoded] assign[=] call[name[alphabet]][binary_operation[name[values_hash] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[alphabet]]]]] for taget[tuple[[<ast.Name object at 0x7da18dc99330>, <ast.Name object at 0x7da18dc9ab00>]]] in starred[call[name[enumerate], parameter[name[values]]]] begin[:] variable[alphabet_salt] assign[=] call[binary_operation[binary_operation[name[lottery] + name[salt]] + name[alphabet]]][<ast.Slice object at 0x7da18dc99060>] variable[alphabet] assign[=] call[name[_reorder], parameter[name[alphabet], name[alphabet_salt]]] variable[last] assign[=] call[name[_hash], parameter[name[value], name[alphabet]]] <ast.AugAssign object at 0x7da18eb570a0> <ast.AugAssign object at 0x7da18eb56ce0> <ast.AugAssign object at 0x7da18eb56050> variable[encoded] assign[=] call[name[encoded]][<ast.Slice object at 0x7da18eb56800>] return[<ast.IfExp object at 0x7da18eb55420>]
keyword[def] identifier[_encode] ( identifier[values] , identifier[salt] , identifier[min_length] , identifier[alphabet] , identifier[separators] , identifier[guards] ): literal[string] identifier[len_alphabet] = identifier[len] ( identifier[alphabet] ) identifier[len_separators] = identifier[len] ( identifier[separators] ) identifier[values_hash] = identifier[sum] ( identifier[x] %( identifier[i] + literal[int] ) keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[values] )) identifier[encoded] = identifier[lottery] = identifier[alphabet] [ identifier[values_hash] % identifier[len] ( identifier[alphabet] )] keyword[for] identifier[i] , identifier[value] keyword[in] identifier[enumerate] ( identifier[values] ): identifier[alphabet_salt] =( identifier[lottery] + identifier[salt] + identifier[alphabet] )[: identifier[len_alphabet] ] identifier[alphabet] = identifier[_reorder] ( identifier[alphabet] , identifier[alphabet_salt] ) identifier[last] = identifier[_hash] ( identifier[value] , identifier[alphabet] ) identifier[encoded] += identifier[last] identifier[value] %= identifier[ord] ( identifier[last] [ literal[int] ])+ identifier[i] identifier[encoded] += identifier[separators] [ identifier[value] % identifier[len_separators] ] identifier[encoded] = identifier[encoded] [:- literal[int] ] keyword[return] ( identifier[encoded] keyword[if] identifier[len] ( identifier[encoded] )>= identifier[min_length] keyword[else] identifier[_ensure_length] ( identifier[encoded] , identifier[min_length] , identifier[alphabet] , identifier[guards] , identifier[values_hash] ))
def _encode(values, salt, min_length, alphabet, separators, guards): """Helper function that does the hash building without argument checks.""" len_alphabet = len(alphabet) len_separators = len(separators) values_hash = sum((x % (i + 100) for (i, x) in enumerate(values))) encoded = lottery = alphabet[values_hash % len(alphabet)] for (i, value) in enumerate(values): alphabet_salt = (lottery + salt + alphabet)[:len_alphabet] alphabet = _reorder(alphabet, alphabet_salt) last = _hash(value, alphabet) encoded += last value %= ord(last[0]) + i encoded += separators[value % len_separators] # depends on [control=['for'], data=[]] encoded = encoded[:-1] # cut off last separator return encoded if len(encoded) >= min_length else _ensure_length(encoded, min_length, alphabet, guards, values_hash)
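For context, this is the core loop of a hashids-style encoder; the private helpers (_reorder, _hash, _ensure_length) live elsewhere in the module. Rather than re-implement them, the sketch below shows the equivalent behaviour through the public hashids package (an assumption that the package is installed; the salt and values are placeholders):

from hashids import Hashids

h = Hashids(salt='this is my salt', min_length=8)
hashid = h.encode(123, 456)            # an obfuscated, reversible string id
assert h.decode(hashid) == (123, 456)  # decoding recovers the original values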
def set(obj, path, value, create_missing=True, afilter=None):
    """Set the value of the given path in the object. Path
    must be a list of specific path elements, not a glob. You can
    use dpath.util.set for globs, but the paths must already exist.

    If create_missing is True (the default behavior), then any
    missing path components in the dictionary are made silently.
    Otherwise, if False, an exception is thrown if path
    components are missing.
    """
    cur = obj
    traversed = []

    def _presence_test_dict(obj, elem):
        return (elem[0] in obj)

    def _create_missing_dict(obj, elem):
        obj[elem[0]] = elem[1]()

    def _presence_test_list(obj, elem):
        return (int(str(elem[0])) < len(obj))

    def _create_missing_list(obj, elem):
        idx = int(str(elem[0]))
        while (len(obj)-1) < idx:
            obj.append(None)

    def _accessor_dict(obj, elem):
        return obj[elem[0]]

    def _accessor_list(obj, elem):
        return obj[int(str(elem[0]))]

    def _assigner_dict(obj, elem, value):
        obj[elem[0]] = value

    def _assigner_list(obj, elem, value):
        obj[int(str(elem[0]))] = value

    elem = None
    for elem in path:
        elem_value = elem[0]
        elem_type = elem[1]
        tester = None
        creator = None
        accessor = None
        assigner = None
        if issubclass(obj.__class__, (MutableMapping)):
            tester = _presence_test_dict
            creator = _create_missing_dict
            accessor = _accessor_dict
            assigner = _assigner_dict
        elif issubclass(obj.__class__, MutableSequence):
            if not str(elem_value).isdigit():
                raise TypeError("Can only create integer indexes in lists, "
                                "not {}, in {}".format(type(obj),
                                                       traversed
                                                       )
                                )
            tester = _presence_test_list
            creator = _create_missing_list
            accessor = _accessor_list
            assigner = _assigner_list
        else:
            raise TypeError("Unable to path into elements of type {} "
                            "at {}".format(obj, traversed))

        if (not tester(obj, elem)) and (create_missing):
            creator(obj, elem)
        elif (not tester(obj, elem)):
            raise dpath.exceptions.PathNotFound(
                "{} does not exist in {}".format(
                    elem,
                    traversed
                )
            )
        traversed.append(elem_value)
        if len(traversed) < len(path):
            obj = accessor(obj, elem)

    if elem is None:
        return
    if (afilter and afilter(accessor(obj, elem))) or (not afilter):
        assigner(obj, elem, value)
def function[set, parameter[obj, path, value, create_missing, afilter]]:
    constant[Set the value of the given path in the object. Path
    must be a list of specific path elements, not a glob. You can
    use dpath.util.set for globs, but the paths must already exist.

    If create_missing is True (the default behavior), then any
    missing path components in the dictionary are made silently.
    Otherwise, if False, an exception is thrown if path
    components are missing.
    ]
    variable[cur] assign[=] name[obj]
    variable[traversed] assign[=] list[[]]
    def function[_presence_test_dict, parameter[obj, elem]]:
    return[compare[call[name[elem]][constant[0]] in name[obj]]]
    def function[_create_missing_dict, parameter[obj, elem]]:
    call[name[obj]][call[name[elem]][constant[0]]] assign[=] call[call[name[elem]][constant[1]], parameter[]]
    def function[_presence_test_list, parameter[obj, elem]]:
    return[compare[call[name[int], parameter[call[name[str], parameter[call[name[elem]][constant[0]]]]]] less[<] call[name[len], parameter[name[obj]]]]]
    def function[_create_missing_list, parameter[obj, elem]]:
    variable[idx] assign[=] call[name[int], parameter[call[name[str], parameter[call[name[elem]][constant[0]]]]]]
    while compare[binary_operation[call[name[len], parameter[name[obj]]] - constant[1]] less[<] name[idx]] begin[:]
    call[name[obj].append, parameter[constant[None]]]
    def function[_accessor_dict, parameter[obj, elem]]:
    return[call[name[obj]][call[name[elem]][constant[0]]]]
    def function[_accessor_list, parameter[obj, elem]]:
    return[call[name[obj]][call[name[int], parameter[call[name[str], parameter[call[name[elem]][constant[0]]]]]]]]
    def function[_assigner_dict, parameter[obj, elem, value]]:
    call[name[obj]][call[name[elem]][constant[0]]] assign[=] name[value]
    def function[_assigner_list, parameter[obj, elem, value]]:
    call[name[obj]][call[name[int], parameter[call[name[str], parameter[call[name[elem]][constant[0]]]]]]] assign[=] name[value]
    variable[elem] assign[=] constant[None]
    for taget[name[elem]] in starred[name[path]] begin[:]
    variable[elem_value] assign[=] call[name[elem]][constant[0]]
    variable[elem_type] assign[=] call[name[elem]][constant[1]]
    variable[tester] assign[=] constant[None]
    variable[creator] assign[=] constant[None]
    variable[accessor] assign[=] constant[None]
    variable[assigner] assign[=] constant[None]
    if call[name[issubclass], parameter[name[obj].__class__, name[MutableMapping]]] begin[:]
    variable[tester] assign[=] name[_presence_test_dict]
    variable[creator] assign[=] name[_create_missing_dict]
    variable[accessor] assign[=] name[_accessor_dict]
    variable[assigner] assign[=] name[_assigner_dict]
    if <ast.BoolOp object at 0x7da18dc98400> begin[:]
    call[name[creator], parameter[name[obj], name[elem]]]
    call[name[traversed].append, parameter[name[elem_value]]]
    if compare[call[name[len], parameter[name[traversed]]] less[<] call[name[len], parameter[name[path]]]] begin[:]
    variable[obj] assign[=] call[name[accessor], parameter[name[obj], name[elem]]]
    if compare[name[elem] is constant[None]] begin[:]
    return[None]
    if <ast.BoolOp object at 0x7da18dc98220> begin[:]
    call[name[assigner], parameter[name[obj], name[elem], name[value]]]
keyword[def] identifier[set] ( identifier[obj] , identifier[path] , identifier[value] , identifier[create_missing] = keyword[True] , identifier[afilter] = keyword[None] ): literal[string] identifier[cur] = identifier[obj] identifier[traversed] =[] keyword[def] identifier[_presence_test_dict] ( identifier[obj] , identifier[elem] ): keyword[return] ( identifier[elem] [ literal[int] ] keyword[in] identifier[obj] ) keyword[def] identifier[_create_missing_dict] ( identifier[obj] , identifier[elem] ): identifier[obj] [ identifier[elem] [ literal[int] ]]= identifier[elem] [ literal[int] ]() keyword[def] identifier[_presence_test_list] ( identifier[obj] , identifier[elem] ): keyword[return] ( identifier[int] ( identifier[str] ( identifier[elem] [ literal[int] ]))< identifier[len] ( identifier[obj] )) keyword[def] identifier[_create_missing_list] ( identifier[obj] , identifier[elem] ): identifier[idx] = identifier[int] ( identifier[str] ( identifier[elem] [ literal[int] ])) keyword[while] ( identifier[len] ( identifier[obj] )- literal[int] )< identifier[idx] : identifier[obj] . identifier[append] ( keyword[None] ) keyword[def] identifier[_accessor_dict] ( identifier[obj] , identifier[elem] ): keyword[return] identifier[obj] [ identifier[elem] [ literal[int] ]] keyword[def] identifier[_accessor_list] ( identifier[obj] , identifier[elem] ): keyword[return] identifier[obj] [ identifier[int] ( identifier[str] ( identifier[elem] [ literal[int] ]))] keyword[def] identifier[_assigner_dict] ( identifier[obj] , identifier[elem] , identifier[value] ): identifier[obj] [ identifier[elem] [ literal[int] ]]= identifier[value] keyword[def] identifier[_assigner_list] ( identifier[obj] , identifier[elem] , identifier[value] ): identifier[obj] [ identifier[int] ( identifier[str] ( identifier[elem] [ literal[int] ]))]= identifier[value] identifier[elem] = keyword[None] keyword[for] identifier[elem] keyword[in] identifier[path] : identifier[elem_value] = identifier[elem] [ literal[int] ] identifier[elem_type] = identifier[elem] [ literal[int] ] identifier[tester] = keyword[None] identifier[creator] = keyword[None] identifier[accessor] = keyword[None] identifier[assigner] = keyword[None] keyword[if] identifier[issubclass] ( identifier[obj] . identifier[__class__] ,( identifier[MutableMapping] )): identifier[tester] = identifier[_presence_test_dict] identifier[creator] = identifier[_create_missing_dict] identifier[accessor] = identifier[_accessor_dict] identifier[assigner] = identifier[_assigner_dict] keyword[elif] identifier[issubclass] ( identifier[obj] . identifier[__class__] , identifier[MutableSequence] ): keyword[if] keyword[not] identifier[str] ( identifier[elem_value] ). identifier[isdigit] (): keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[obj] ), identifier[traversed] ) ) identifier[tester] = identifier[_presence_test_list] identifier[creator] = identifier[_create_missing_list] identifier[accessor] = identifier[_accessor_list] identifier[assigner] = identifier[_assigner_list] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] . identifier[format] ( identifier[obj] , identifier[traversed] )) keyword[if] ( keyword[not] identifier[tester] ( identifier[obj] , identifier[elem] )) keyword[and] ( identifier[create_missing] ): identifier[creator] ( identifier[obj] , identifier[elem] ) keyword[elif] ( keyword[not] identifier[tester] ( identifier[obj] , identifier[elem] )): keyword[raise] identifier[dpath] . identifier[exceptions] . identifier[PathNotFound] ( literal[string] . identifier[format] ( identifier[elem] , identifier[traversed] ) ) identifier[traversed] . identifier[append] ( identifier[elem_value] ) keyword[if] identifier[len] ( identifier[traversed] )< identifier[len] ( identifier[path] ): identifier[obj] = identifier[accessor] ( identifier[obj] , identifier[elem] ) keyword[if] identifier[elem] keyword[is] keyword[None] : keyword[return] keyword[if] ( identifier[afilter] keyword[and] identifier[afilter] ( identifier[accessor] ( identifier[obj] , identifier[elem] ))) keyword[or] ( keyword[not] identifier[afilter] ): identifier[assigner] ( identifier[obj] , identifier[elem] , identifier[value] )
def set(obj, path, value, create_missing=True, afilter=None): """Set the value of the given path in the object. Path must be a list of specific path elements, not a glob. You can use dpath.util.set for globs, but the paths must already exist. If create_missing is True (the default behavior), then any missing path components in the dictionary are made silently. Otherwise, if False, an exception is thrown if path components are missing. """ cur = obj traversed = [] def _presence_test_dict(obj, elem): return elem[0] in obj def _create_missing_dict(obj, elem): obj[elem[0]] = elem[1]() def _presence_test_list(obj, elem): return int(str(elem[0])) < len(obj) def _create_missing_list(obj, elem): idx = int(str(elem[0])) while len(obj) - 1 < idx: obj.append(None) # depends on [control=['while'], data=[]] def _accessor_dict(obj, elem): return obj[elem[0]] def _accessor_list(obj, elem): return obj[int(str(elem[0]))] def _assigner_dict(obj, elem, value): obj[elem[0]] = value def _assigner_list(obj, elem, value): obj[int(str(elem[0]))] = value elem = None for elem in path: elem_value = elem[0] elem_type = elem[1] tester = None creator = None accessor = None assigner = None if issubclass(obj.__class__, MutableMapping): tester = _presence_test_dict creator = _create_missing_dict accessor = _accessor_dict assigner = _assigner_dict # depends on [control=['if'], data=[]] elif issubclass(obj.__class__, MutableSequence): if not str(elem_value).isdigit(): raise TypeError('Can only create integer indexes in lists, not {}, in {}'.format(type(obj), traversed)) # depends on [control=['if'], data=[]] tester = _presence_test_list creator = _create_missing_list accessor = _accessor_list assigner = _assigner_list # depends on [control=['if'], data=[]] else: raise TypeError('Unable to path into elements of type {} at {}'.format(obj, traversed)) if not tester(obj, elem) and create_missing: creator(obj, elem) # depends on [control=['if'], data=[]] elif not tester(obj, elem): raise dpath.exceptions.PathNotFound('{} does not exist in {}'.format(elem, traversed)) # depends on [control=['if'], data=[]] traversed.append(elem_value) if len(traversed) < len(path): obj = accessor(obj, elem) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['elem']] if elem is None: return # depends on [control=['if'], data=[]] if afilter and afilter(accessor(obj, elem)) or not afilter: assigner(obj, elem, value) # depends on [control=['if'], data=[]]
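The path-element format deserves a concrete example: each element is a (key, default_factory) pair, where the factory builds the container to create when a component is missing (missing list slots are padded with None instead). The calls below are a sketch inferred from the function body above:

target = {}
set(target, [('pkg', dict), ('apache', dict)], 'httpd')
assert target == {'pkg': {'apache': 'httpd'}}

doc = {'servers': []}
set(doc, [('servers', dict), ('2', dict)], 'web-03')   # integer index as a path key
assert doc == {'servers': [None, None, 'web-03']}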
def delete_entitlement(owner, repo, identifier): """Delete an entitlement from a repository.""" client = get_entitlements_api() with catch_raise_api_exception(): _, _, headers = client.entitlements_delete_with_http_info( owner=owner, repo=repo, identifier=identifier ) ratelimits.maybe_rate_limit(client, headers)
def function[delete_entitlement, parameter[owner, repo, identifier]]: constant[Delete an entitlement from a repository.] variable[client] assign[=] call[name[get_entitlements_api], parameter[]] with call[name[catch_raise_api_exception], parameter[]] begin[:] <ast.Tuple object at 0x7da1b198c5b0> assign[=] call[name[client].entitlements_delete_with_http_info, parameter[]] call[name[ratelimits].maybe_rate_limit, parameter[name[client], name[headers]]]
keyword[def] identifier[delete_entitlement] ( identifier[owner] , identifier[repo] , identifier[identifier] ): literal[string] identifier[client] = identifier[get_entitlements_api] () keyword[with] identifier[catch_raise_api_exception] (): identifier[_] , identifier[_] , identifier[headers] = identifier[client] . identifier[entitlements_delete_with_http_info] ( identifier[owner] = identifier[owner] , identifier[repo] = identifier[repo] , identifier[identifier] = identifier[identifier] ) identifier[ratelimits] . identifier[maybe_rate_limit] ( identifier[client] , identifier[headers] )
def delete_entitlement(owner, repo, identifier): """Delete an entitlement from a repository.""" client = get_entitlements_api() with catch_raise_api_exception(): (_, _, headers) = client.entitlements_delete_with_http_info(owner=owner, repo=repo, identifier=identifier) # depends on [control=['with'], data=[]] ratelimits.maybe_rate_limit(client, headers)
def clean_weight_files(cls): """ Cleans existing weight files. """ deleted = [] for f in cls._files: try: os.remove(f) deleted.append(f) except FileNotFoundError: pass print('Deleted %d weight files' % len(deleted)) cls._files = []
def function[clean_weight_files, parameter[cls]]: constant[ Cleans existing weight files. ] variable[deleted] assign[=] list[[]] for taget[name[f]] in starred[name[cls]._files] begin[:] <ast.Try object at 0x7da1b079b400> call[name[print], parameter[binary_operation[constant[Deleted %d weight files] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[deleted]]]]]] name[cls]._files assign[=] list[[]]
keyword[def] identifier[clean_weight_files] ( identifier[cls] ): literal[string] identifier[deleted] =[] keyword[for] identifier[f] keyword[in] identifier[cls] . identifier[_files] : keyword[try] : identifier[os] . identifier[remove] ( identifier[f] ) identifier[deleted] . identifier[append] ( identifier[f] ) keyword[except] identifier[FileNotFoundError] : keyword[pass] identifier[print] ( literal[string] % identifier[len] ( identifier[deleted] )) identifier[cls] . identifier[_files] =[]
def clean_weight_files(cls): """ Cleans existing weight files. """ deleted = [] for f in cls._files: try: os.remove(f) deleted.append(f) # depends on [control=['try'], data=[]] except FileNotFoundError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['f']] print('Deleted %d weight files' % len(deleted)) cls._files = []
def replace_countries_geo_zone_by_id(cls, countries_geo_zone_id, countries_geo_zone, **kwargs): """Replace CountriesGeoZone Replace all attributes of CountriesGeoZone This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_countries_geo_zone_by_id(countries_geo_zone_id, countries_geo_zone, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to replace (required) :param CountriesGeoZone countries_geo_zone: Attributes of countriesGeoZone to replace (required) :return: CountriesGeoZone If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._replace_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, countries_geo_zone, **kwargs) else: (data) = cls._replace_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, countries_geo_zone, **kwargs) return data
def function[replace_countries_geo_zone_by_id, parameter[cls, countries_geo_zone_id, countries_geo_zone]]: constant[Replace CountriesGeoZone Replace all attributes of CountriesGeoZone This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_countries_geo_zone_by_id(countries_geo_zone_id, countries_geo_zone, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to replace (required) :param CountriesGeoZone countries_geo_zone: Attributes of countriesGeoZone to replace (required) :return: CountriesGeoZone If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async]]] begin[:] return[call[name[cls]._replace_countries_geo_zone_by_id_with_http_info, parameter[name[countries_geo_zone_id], name[countries_geo_zone]]]]
keyword[def] identifier[replace_countries_geo_zone_by_id] ( identifier[cls] , identifier[countries_geo_zone_id] , identifier[countries_geo_zone] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[cls] . identifier[_replace_countries_geo_zone_by_id_with_http_info] ( identifier[countries_geo_zone_id] , identifier[countries_geo_zone] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[cls] . identifier[_replace_countries_geo_zone_by_id_with_http_info] ( identifier[countries_geo_zone_id] , identifier[countries_geo_zone] ,** identifier[kwargs] ) keyword[return] identifier[data]
def replace_countries_geo_zone_by_id(cls, countries_geo_zone_id, countries_geo_zone, **kwargs): """Replace CountriesGeoZone Replace all attributes of CountriesGeoZone This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.replace_countries_geo_zone_by_id(countries_geo_zone_id, countries_geo_zone, async=True) >>> result = thread.get() :param async bool :param str countries_geo_zone_id: ID of countriesGeoZone to replace (required) :param CountriesGeoZone countries_geo_zone: Attributes of countriesGeoZone to replace (required) :return: CountriesGeoZone If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._replace_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, countries_geo_zone, **kwargs) # depends on [control=['if'], data=[]] else: data = cls._replace_countries_geo_zone_by_id_with_http_info(countries_geo_zone_id, countries_geo_zone, **kwargs) return data
def add_model_file(self, model_fpath, position=1, file_id=None): """Add a kappa model from a file at given path to the project.""" if file_id is None: file_id = self.make_unique_id('file_input') ret_data = self.file_create(File.from_file(model_fpath, position, file_id)) return ret_data
def function[add_model_file, parameter[self, model_fpath, position, file_id]]: constant[Add a kappa model from a file at given path to the project.] if compare[name[file_id] is constant[None]] begin[:] variable[file_id] assign[=] call[name[self].make_unique_id, parameter[constant[file_input]]] variable[ret_data] assign[=] call[name[self].file_create, parameter[call[name[File].from_file, parameter[name[model_fpath], name[position], name[file_id]]]]] return[name[ret_data]]
keyword[def] identifier[add_model_file] ( identifier[self] , identifier[model_fpath] , identifier[position] = literal[int] , identifier[file_id] = keyword[None] ): literal[string] keyword[if] identifier[file_id] keyword[is] keyword[None] : identifier[file_id] = identifier[self] . identifier[make_unique_id] ( literal[string] ) identifier[ret_data] = identifier[self] . identifier[file_create] ( identifier[File] . identifier[from_file] ( identifier[model_fpath] , identifier[position] , identifier[file_id] )) keyword[return] identifier[ret_data]
def add_model_file(self, model_fpath, position=1, file_id=None): """Add a kappa model from a file at given path to the project.""" if file_id is None: file_id = self.make_unique_id('file_input') # depends on [control=['if'], data=['file_id']] ret_data = self.file_create(File.from_file(model_fpath, position, file_id)) return ret_data
def field_columns(self, well_row, well_column): """Field columns for given well. Equivalent to --X in files. Parameters ---------- well_row : int Starts at 0. Same as --V in files. well_column : int Starts at 0. Same as --U in files. Returns ------- list of ints Columns found for specified well. """ imgs = self.well_images(well_row, well_column) return list(set([attribute(img, 'x') for img in imgs]))
def function[field_columns, parameter[self, well_row, well_column]]: constant[Field columns for given well. Equivalent to --X in files. Parameters ---------- well_row : int Starts at 0. Same as --V in files. well_column : int Starts at 0. Same as --U in files. Returns ------- list of ints Columns found for specified well. ] variable[imgs] assign[=] call[name[self].well_images, parameter[name[well_row], name[well_column]]] return[call[name[list], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b00890f0>]]]]]
keyword[def] identifier[field_columns] ( identifier[self] , identifier[well_row] , identifier[well_column] ): literal[string] identifier[imgs] = identifier[self] . identifier[well_images] ( identifier[well_row] , identifier[well_column] ) keyword[return] identifier[list] ( identifier[set] ([ identifier[attribute] ( identifier[img] , literal[string] ) keyword[for] identifier[img] keyword[in] identifier[imgs] ]))
def field_columns(self, well_row, well_column): """Field columns for given well. Equivalent to --X in files. Parameters ---------- well_row : int Starts at 0. Same as --V in files. well_column : int Starts at 0. Same as --U in files. Returns ------- list of ints Columns found for specified well. """ imgs = self.well_images(well_row, well_column) return list(set([attribute(img, 'x') for img in imgs]))
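Hypothetical usage, assuming the enclosing class is a leicaexperiment-style Experiment whose well_images() yields filenames carrying --U/--V/--X/--Y coordinates:

scan = Experiment('/path/to/experiment')              # hypothetical constructor and path
cols = scan.field_columns(well_row=0, well_column=1)
# e.g. [0, 1, 2] -- one index per field column imaged in that well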
def _attempt_connect(host, port, timeout, verify, **kwargs):
    """
    Internal function to attempt a RethinkDB connection.

    :param host: <str> "localhost" or IPAddress
    :param port: <int>
    :param timeout: <int>
    :param verify: <bool>
    :param kwargs: <**dict> rethinkdb keyword args
    :return: <connection> or <NoneType>
    """
    try:
        connection = rethinkdb.connect(host, port, timeout=timeout, **kwargs)
        if verify:
            brain_post(connection)
    except (rethinkdb.errors.ReqlDriverError,
            rethinkdb.errors.ReqlOpFailedError,
            AssertionError):
        connection = None
    return connection
def function[_attempt_connect, parameter[host, port, timeout, verify]]:
    constant[
    Internal function to attempt a RethinkDB connection.

    :param host: <str> "localhost" or IPAddress
    :param port: <int>
    :param timeout: <int>
    :param verify: <bool>
    :param kwargs: <**dict> rethinkdb keyword args
    :return: <connection> or <NoneType>
    ]
    <ast.Try object at 0x7da18f09f9a0>
    return[name[connection]]
keyword[def] identifier[_attempt_connect] ( identifier[host] , identifier[port] , identifier[timeout] , identifier[verify] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[connection] = identifier[rethinkdb] . identifier[connect] ( identifier[host] , identifier[port] , identifier[timeout] = identifier[timeout] , ** identifier[kwargs] ) keyword[if] identifier[verify] : identifier[brain_post] ( identifier[connection] ) keyword[except] ( identifier[rethinkdb] . identifier[errors] . identifier[ReqlDriverError] , identifier[rethinkdb] . identifier[errors] . identifier[ReqlOpFailedError] , identifier[AssertionError] ): identifier[connection] = keyword[None] keyword[return] identifier[connection]
def _attempt_connect(host, port, timeout, verify, **kwargs): """ Internal function to attempt a RethinkDB connection. :param host: <str> "localhost" or IPAddress :param port: <int> :param timeout: <int> :param verify: <bool> :param kwargs: <**dict> rethinkdb keyword args :return: <connection> or <NoneType> """ try: connection = rethinkdb.connect(host, port, timeout=timeout, **kwargs) if verify: brain_post(connection) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except (rethinkdb.errors.ReqlDriverError, rethinkdb.errors.ReqlOpFailedError, AssertionError): connection = None # depends on [control=['except'], data=[]] return connection
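A typical call site would look like this sketch; 28015 is the stock RethinkDB driver port, and verify=True routes the new connection through the module's brain_post check:

conn = _attempt_connect('localhost', 28015, timeout=20, verify=True)
if conn is None:
    # driver error, op failure, or failed verification
    raise RuntimeError('could not reach RethinkDB')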
def get(key, default='', delimiter=DEFAULT_TARGET_DELIM, ordered=True): ''' Attempt to retrieve the named value from grains, if the named value is not available return the passed default. The default return is an empty string. The value can also represent a value in a nested dict using a ":" delimiter for the dict. This means that if a dict in grains looks like this:: {'pkg': {'apache': 'httpd'}} To retrieve the value associated with the apache key in the pkg dict this key can be passed:: pkg:apache :param delimiter: Specify an alternate delimiter to use when traversing a nested dict. This is useful for when the desired key contains a colon. See CLI example below for usage. .. versionadded:: 2014.7.0 :param ordered: Outputs an ordered dict if applicable (default: True) .. versionadded:: 2016.11.0 CLI Example: .. code-block:: bash salt '*' grains.get pkg:apache salt '*' grains.get abc::def|ghi delimiter='|' ''' if ordered is True: grains = __grains__ else: grains = salt.utils.json.loads(salt.utils.json.dumps(__grains__)) return salt.utils.data.traverse_dict_and_list( grains, key, default, delimiter)
def function[get, parameter[key, default, delimiter, ordered]]: constant[ Attempt to retrieve the named value from grains, if the named value is not available return the passed default. The default return is an empty string. The value can also represent a value in a nested dict using a ":" delimiter for the dict. This means that if a dict in grains looks like this:: {'pkg': {'apache': 'httpd'}} To retrieve the value associated with the apache key in the pkg dict this key can be passed:: pkg:apache :param delimiter: Specify an alternate delimiter to use when traversing a nested dict. This is useful for when the desired key contains a colon. See CLI example below for usage. .. versionadded:: 2014.7.0 :param ordered: Outputs an ordered dict if applicable (default: True) .. versionadded:: 2016.11.0 CLI Example: .. code-block:: bash salt '*' grains.get pkg:apache salt '*' grains.get abc::def|ghi delimiter='|' ] if compare[name[ordered] is constant[True]] begin[:] variable[grains] assign[=] name[__grains__] return[call[name[salt].utils.data.traverse_dict_and_list, parameter[name[grains], name[key], name[default], name[delimiter]]]]
keyword[def] identifier[get] ( identifier[key] , identifier[default] = literal[string] , identifier[delimiter] = identifier[DEFAULT_TARGET_DELIM] , identifier[ordered] = keyword[True] ): literal[string] keyword[if] identifier[ordered] keyword[is] keyword[True] : identifier[grains] = identifier[__grains__] keyword[else] : identifier[grains] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[__grains__] )) keyword[return] identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] ( identifier[grains] , identifier[key] , identifier[default] , identifier[delimiter] )
def get(key, default='', delimiter=DEFAULT_TARGET_DELIM, ordered=True): """ Attempt to retrieve the named value from grains, if the named value is not available return the passed default. The default return is an empty string. The value can also represent a value in a nested dict using a ":" delimiter for the dict. This means that if a dict in grains looks like this:: {'pkg': {'apache': 'httpd'}} To retrieve the value associated with the apache key in the pkg dict this key can be passed:: pkg:apache :param delimiter: Specify an alternate delimiter to use when traversing a nested dict. This is useful for when the desired key contains a colon. See CLI example below for usage. .. versionadded:: 2014.7.0 :param ordered: Outputs an ordered dict if applicable (default: True) .. versionadded:: 2016.11.0 CLI Example: .. code-block:: bash salt '*' grains.get pkg:apache salt '*' grains.get abc::def|ghi delimiter='|' """ if ordered is True: grains = __grains__ # depends on [control=['if'], data=[]] else: grains = salt.utils.json.loads(salt.utils.json.dumps(__grains__)) return salt.utils.data.traverse_dict_and_list(grains, key, default, delimiter)
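The heavy lifting happens in salt.utils.data.traverse_dict_and_list. A simplified stand-in for the colon-delimited lookup (dicts only; the real helper also descends into lists) looks like this:

def _traverse(data, key, default='', delimiter=':'):
    # Walk one path component at a time, bailing out to the
    # default as soon as a component is missing.
    for part in key.split(delimiter):
        if isinstance(data, dict) and part in data:
            data = data[part]
        else:
            return default
    return data

assert _traverse({'pkg': {'apache': 'httpd'}}, 'pkg:apache') == 'httpd'
assert _traverse({'pkg': {}}, 'pkg:nginx', default='n/a') == 'n/a'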
def removeEventListener(self, event: str, listener: _EventListenerType ) -> None: """Remove an event listener of this node. The listener is removed only when both event type and listener is matched. """ self._remove_event_listener(event, listener)
def function[removeEventListener, parameter[self, event, listener]]: constant[Remove an event listener of this node. The listener is removed only when both event type and listener is matched. ] call[name[self]._remove_event_listener, parameter[name[event], name[listener]]]
keyword[def] identifier[removeEventListener] ( identifier[self] , identifier[event] : identifier[str] , identifier[listener] : identifier[_EventListenerType] )-> keyword[None] : literal[string] identifier[self] . identifier[_remove_event_listener] ( identifier[event] , identifier[listener] )
def removeEventListener(self, event: str, listener: _EventListenerType) -> None: """Remove an event listener of this node. The listener is removed only when both event type and listener is matched. """ self._remove_event_listener(event, listener)
def create_cross_sectional_bootstrap_samples(obs_id_array,
                                             alt_id_array,
                                             choice_array,
                                             num_samples,
                                             seed=None):
    """
    Determines the unique observations that will be present in each bootstrap
    sample. This function DOES NOT create the new design matrices or a new
    long-format dataframe for each bootstrap sample. Note that these will be
    correct bootstrap samples for cross-sectional datasets. This function will
    not work correctly for panel datasets.

    Parameters
    ----------
    obs_id_array : 1D ndarray of ints.
        Each element should denote a unique observation id for the
        corresponding row of the long format array.
    alt_id_array : 1D ndarray of ints.
        Each element should denote a unique alternative id for the
        corresponding row of the long format array.
    choice_array : 1D ndarray of ints.
        Each element should be a one or a zero. The values should denote
        whether or not the corresponding alternative in `alt_id_array` was
        chosen by the observational unit in the corresponding row of
        `obs_id_array`.
    num_samples : int.
        Denotes the number of bootstrap samples that need to be drawn.
    seed : non-negative int or None, optional.
        Denotes the random seed to be used in order to ensure
        reproducibility of the bootstrap sample generation. Default is None.
        If None, no seed will be used and the generation of the bootstrap
        samples will (in general) not be reproducible.

    Returns
    -------
    ids_per_sample : 2D ndarray.
        Each row represents a complete bootstrap sample. Each column denotes
        a selected bootstrap observation that comprises the bootstrap sample.
        The elements of the array denote the observation ids of the chosen
        observational units.
    """
    # Determine the units of observation that chose each alternative.
    chosen_alts_to_obs_ids =\
        relate_obs_ids_to_chosen_alts(obs_id_array, alt_id_array, choice_array)

    # Determine the number of unique units of observation per group and overall
    num_obs_per_group, tot_num_obs =\
        get_num_obs_choosing_each_alternative(chosen_alts_to_obs_ids)

    # Initialize the array that will store the observation ids for each sample
    ids_per_sample = np.empty((num_samples, tot_num_obs), dtype=float)

    if seed is not None:
        # Check the validity of the seed argument.
        if not isinstance(seed, int):
            msg = "`boot_seed` MUST be an int."
            raise ValueError(msg)

        # If desiring reproducibility, set the random seed within numpy
        np.random.seed(seed)

    # Initialize a variable to keep track of what column we're on.
    col_idx = 0
    for alt_id in num_obs_per_group:
        # Get the set of observations that chose the current alternative.
        relevant_ids = chosen_alts_to_obs_ids[alt_id]
        # Determine the number of needed resampled ids.
        resample_size = num_obs_per_group[alt_id]
        # Resample, with replacement, observations who chose this alternative.
        current_ids = (np.random.choice(relevant_ids,
                                        size=resample_size * num_samples,
                                        replace=True)
                         .reshape((num_samples, resample_size)))
        # Determine the last column index to use when storing the resampled ids
        end_col = col_idx + resample_size
        # Assign the sampled ids to the correct columns of ids_per_sample
        ids_per_sample[:, col_idx:end_col] = current_ids
        # Update the column index
        col_idx += resample_size
    # Return the resampled observation ids.
    return ids_per_sample
def function[create_cross_sectional_bootstrap_samples, parameter[obs_id_array, alt_id_array, choice_array, num_samples, seed]]:
    constant[
    Determines the unique observations that will be present in each bootstrap
    sample. This function DOES NOT create the new design matrices or a new
    long-format dataframe for each bootstrap sample. Note that these will be
    correct bootstrap samples for cross-sectional datasets. This function will
    not work correctly for panel datasets.

    Parameters
    ----------
    obs_id_array : 1D ndarray of ints.
        Each element should denote a unique observation id for the
        corresponding row of the long format array.
    alt_id_array : 1D ndarray of ints.
        Each element should denote a unique alternative id for the
        corresponding row of the long format array.
    choice_array : 1D ndarray of ints.
        Each element should be a one or a zero. The values should denote
        whether or not the corresponding alternative in `alt_id_array` was
        chosen by the observational unit in the corresponding row of
        `obs_id_array`.
    num_samples : int.
        Denotes the number of bootstrap samples that need to be drawn.
    seed : non-negative int or None, optional.
        Denotes the random seed to be used in order to ensure
        reproducibility of the bootstrap sample generation. Default is None.
        If None, no seed will be used and the generation of the bootstrap
        samples will (in general) not be reproducible.

    Returns
    -------
    ids_per_sample : 2D ndarray.
        Each row represents a complete bootstrap sample. Each column denotes
        a selected bootstrap observation that comprises the bootstrap sample.
        The elements of the array denote the observation ids of the chosen
        observational units.
    ]
    variable[chosen_alts_to_obs_ids] assign[=] call[name[relate_obs_ids_to_chosen_alts], parameter[name[obs_id_array], name[alt_id_array], name[choice_array]]]
    <ast.Tuple object at 0x7da18eb55060> assign[=] call[name[get_num_obs_choosing_each_alternative], parameter[name[chosen_alts_to_obs_ids]]]
    variable[ids_per_sample] assign[=] call[name[np].empty, parameter[tuple[[<ast.Name object at 0x7da18eb55360>, <ast.Name object at 0x7da18eb543a0>]]]]
    if compare[name[seed] is_not constant[None]] begin[:]
    if <ast.UnaryOp object at 0x7da18eb57340> begin[:]
    variable[msg] assign[=] constant[`boot_seed` MUST be an int.]
    <ast.Raise object at 0x7da18eb56530>
    call[name[np].random.seed, parameter[name[seed]]]
    variable[col_idx] assign[=] constant[0]
    for taget[name[alt_id]] in starred[name[num_obs_per_group]] begin[:]
    variable[relevant_ids] assign[=] call[name[chosen_alts_to_obs_ids]][name[alt_id]]
    variable[resample_size] assign[=] call[name[num_obs_per_group]][name[alt_id]]
    variable[current_ids] assign[=] call[call[name[np].random.choice, parameter[name[relevant_ids]]].reshape, parameter[tuple[[<ast.Name object at 0x7da18ede5960>, <ast.Name object at 0x7da18ede7430>]]]]
    variable[end_col] assign[=] binary_operation[name[col_idx] + name[resample_size]]
    call[name[ids_per_sample]][tuple[[<ast.Slice object at 0x7da18ede4820>, <ast.Slice object at 0x7da18ede46a0>]]] assign[=] name[current_ids]
    <ast.AugAssign object at 0x7da18ede7100>
    return[name[ids_per_sample]]
keyword[def] identifier[create_cross_sectional_bootstrap_samples] ( identifier[obs_id_array] , identifier[alt_id_array] , identifier[choice_array] , identifier[num_samples] , identifier[seed] = keyword[None] ): literal[string] identifier[chosen_alts_to_obs_ids] = identifier[relate_obs_ids_to_chosen_alts] ( identifier[obs_id_array] , identifier[alt_id_array] , identifier[choice_array] ) identifier[num_obs_per_group] , identifier[tot_num_obs] = identifier[get_num_obs_choosing_each_alternative] ( identifier[chosen_alts_to_obs_ids] ) identifier[ids_per_sample] = identifier[np] . identifier[empty] (( identifier[num_samples] , identifier[tot_num_obs] ), identifier[dtype] = identifier[float] ) keyword[if] identifier[seed] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] identifier[isinstance] ( identifier[seed] , identifier[int] ): identifier[msg] = literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] ) identifier[np] . identifier[random] . identifier[seed] ( identifier[seed] ) identifier[col_idx] = literal[int] keyword[for] identifier[alt_id] keyword[in] identifier[num_obs_per_group] : identifier[relevant_ids] = identifier[chosen_alts_to_obs_ids] [ identifier[alt_id] ] identifier[resample_size] = identifier[num_obs_per_group] [ identifier[alt_id] ] identifier[current_ids] =( identifier[np] . identifier[random] . identifier[choice] ( identifier[relevant_ids] , identifier[size] = identifier[resample_size] * identifier[num_samples] , identifier[replace] = keyword[True] ) . identifier[reshape] (( identifier[num_samples] , identifier[resample_size] ))) identifier[end_col] = identifier[col_idx] + identifier[resample_size] identifier[ids_per_sample] [:, identifier[col_idx] : identifier[end_col] ]= identifier[current_ids] identifier[col_idx] += identifier[resample_size] keyword[return] identifier[ids_per_sample]
def create_cross_sectional_bootstrap_samples(obs_id_array, alt_id_array, choice_array, num_samples, seed=None): """ Determines the unique observations that will be present in each bootstrap sample. This function DOES NOT create the new design matrices or a new long-format dataframe for each bootstrap sample. Note that these will be correct bootstrap samples for cross-sectional datasets. This function will not work correctly for panel datasets. Parameters ---------- obs_id_array : 1D ndarray of ints. Each element should denote a unique observation id for the corresponding row of the long format array. alt_id_array : 1D ndarray of ints. Each element should denote a unique alternative id for the corresponding row of the long format array. choice_array : 1D ndarray of ints. Each element should be a one or a zero. The values should denote whether or not the corresponding alternative in `alt_id_array` was chosen by the observational unit in the corresponding row of `obs_id_array`. num_samples : int. Denotes the number of bootstrap samples that need to be drawn. seed : non-negative int or None, optional. Denotes the random seed to be used in order to ensure reproducibility of the bootstrap sample generation. Default is None. If None, no seed will be used and the generation of the bootstrap samples will (in general) not be reproducible. Returns ------- ids_per_sample : 2D ndarray. Each row represents a complete bootstrap sample. Each column denotes a selected bootstrap observation that comprises the bootstrap sample. The elements of the array denote the observation ids of the chosen observational units. """ # Determine the units of observation that chose each alternative. chosen_alts_to_obs_ids = relate_obs_ids_to_chosen_alts(obs_id_array, alt_id_array, choice_array) # Determine the number of unique units of observation per group and overall (num_obs_per_group, tot_num_obs) = get_num_obs_choosing_each_alternative(chosen_alts_to_obs_ids) # Initialize the array that will store the observation ids for each sample ids_per_sample = np.empty((num_samples, tot_num_obs), dtype=float) if seed is not None: # Check the validity of the seed argument. if not isinstance(seed, int): msg = '`boot_seed` MUST be an int.' raise ValueError(msg) # depends on [control=['if'], data=[]] # If desiring reproducibility, set the random seed within numpy np.random.seed(seed) # depends on [control=['if'], data=['seed']] # Initialize a variable to keep track of what column we're on. col_idx = 0 for alt_id in num_obs_per_group: # Get the set of observations that chose the current alternative. relevant_ids = chosen_alts_to_obs_ids[alt_id] # Determine the number of needed resampled ids. resample_size = num_obs_per_group[alt_id] # Resample, with replacement, observations who chose this alternative. current_ids = np.random.choice(relevant_ids, size=resample_size * num_samples, replace=True).reshape((num_samples, resample_size)) # Determine the last column index to use when storing the resampled ids end_col = col_idx + resample_size # Assign the sampled ids to the correct columns of ids_per_sample ids_per_sample[:, col_idx:end_col] = current_ids # Update the column index col_idx += resample_size # depends on [control=['for'], data=['alt_id']] # Return the resampled observation ids. return ids_per_sample
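In use, three observations split across two alternatives yield a (num_samples, 3) array of resampled observation ids, reproducible via the seed. This sketch assumes the module-level helpers (relate_obs_ids_to_chosen_alts, get_num_obs_choosing_each_alternative) are importable alongside the function:

import numpy as np

obs_ids = np.array([1, 1, 2, 2, 3, 3])
alt_ids = np.array([10, 20, 10, 20, 10, 20])
choices = np.array([1, 0, 0, 1, 1, 0])    # obs 1 and 3 chose alt 10, obs 2 chose alt 20

samples = create_cross_sectional_bootstrap_samples(
    obs_ids, alt_ids, choices, num_samples=4, seed=0)
assert samples.shape == (4, 3)            # one full resampled dataset per row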
def color_scale(color, level): """ Scale RGB tuple by level, 0 - 256 """ return tuple([int(i * level) >> 8 for i in list(color)])
def function[color_scale, parameter[color, level]]: constant[ Scale RGB tuple by level, 0 - 256 ] return[call[name[tuple], parameter[<ast.ListComp object at 0x7da1b010ae00>]]]
keyword[def] identifier[color_scale] ( identifier[color] , identifier[level] ): literal[string] keyword[return] identifier[tuple] ([ identifier[int] ( identifier[i] * identifier[level] )>> literal[int] keyword[for] identifier[i] keyword[in] identifier[list] ( identifier[color] )])
def color_scale(color, level): """ Scale RGB tuple by level, 0 - 256 """ return tuple([int(i * level) >> 8 for i in list(color)])
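The right shift by eight makes this an integer approximation of i * level / 256, so level 256 is the identity and 128 is roughly half brightness. A quick check:

assert color_scale((255, 128, 0), 128) == (127, 64, 0)
assert color_scale((255, 128, 0), 256) == (255, 128, 0)
assert color_scale((255, 128, 0), 0) == (0, 0, 0)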
def from_dict(cls, d):
    """
    Reconstruct a PourbaixEntry from its dict representation.
    """
    entry_type = d["entry_type"]
    if entry_type == "Ion":
        entry = IonEntry.from_dict(d["entry"])
    else:
        entry = PDEntry.from_dict(d["entry"])
    entry_id = d["entry_id"]
    concentration = d["concentration"]
    return PourbaixEntry(entry, entry_id, concentration)
def function[from_dict, parameter[cls, d]]:
    constant[
    Reconstruct a PourbaixEntry from its dict representation.
    ]
    variable[entry_type] assign[=] call[name[d]][constant[entry_type]]
    if compare[name[entry_type] equal[==] constant[Ion]] begin[:]
    variable[entry] assign[=] call[name[IonEntry].from_dict, parameter[call[name[d]][constant[entry]]]]
    variable[entry_id] assign[=] call[name[d]][constant[entry_id]]
    variable[concentration] assign[=] call[name[d]][constant[concentration]]
    return[call[name[PourbaixEntry], parameter[name[entry], name[entry_id], name[concentration]]]]
keyword[def] identifier[from_dict] ( identifier[cls] , identifier[d] ): literal[string] identifier[entry_type] = identifier[d] [ literal[string] ] keyword[if] identifier[entry_type] == literal[string] : identifier[entry] = identifier[IonEntry] . identifier[from_dict] ( identifier[d] [ literal[string] ]) keyword[else] : identifier[entry] = identifier[PDEntry] . identifier[from_dict] ( identifier[d] [ literal[string] ]) identifier[entry_id] = identifier[d] [ literal[string] ] identifier[concentration] = identifier[d] [ literal[string] ] keyword[return] identifier[PourbaixEntry] ( identifier[entry] , identifier[entry_id] , identifier[concentration] )
def from_dict(cls, d): """ Invokes a PourbaixEntry from a dict representation. """ entry_type = d['entry_type'] if entry_type == 'Ion': entry = IonEntry.from_dict(d['entry']) # depends on [control=['if'], data=[]] else: entry = PDEntry.from_dict(d['entry']) entry_id = d['entry_id'] concentration = d['concentration'] return PourbaixEntry(entry, entry_id, concentration)
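The dict layout the method reads can be sketched from the keys in its body; the values below are made up, and the nested 'entry' payload is whatever IonEntry/PDEntry serialize to.

d = {
    'entry_type': 'Ion',    # 'Ion' routes to IonEntry, anything else to PDEntry
    'entry': {...},         # serialized entry dict, elided here
    'entry_id': 'ion-1',    # hypothetical id
    'concentration': 1e-6,  # hypothetical concentration
}
# entry = PourbaixEntry.from_dict(d)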
def send(self, command, payload):
    """
    Send a WorkRequest containing a command and payload to the queue specified in config.
    :param command: str: name of the command we want run by WorkQueueProcessor
    :param payload: object: picklable data to be used when running the command
    """
    request = WorkRequest(command, payload)
    logging.info("Sending {} message to queue {}.".format(request.command, self.queue_name))
    # setting protocol to version 2 to be compatible with python2
    self.connection.send_durable_message(self.queue_name, pickle.dumps(request, protocol=2))
    logging.info("Sent {} message to queue {}.".format(request.command, self.queue_name))
def function[send, parameter[self, command, payload]]: constant[ Send a WorkRequest containing a command and payload to the queue specified in config. :param command: str: name of the command we want run by WorkQueueProcessor :param payload: object: picklable data to be used when running the command ] variable[request] assign[=] call[name[WorkRequest], parameter[name[command], name[payload]]] call[name[logging].info, parameter[call[constant[Sending {} message to queue {}.].format, parameter[name[request].command, name[self].queue_name]]]] call[name[self].connection.send_durable_message, parameter[name[self].queue_name, call[name[pickle].dumps, parameter[name[request]]]]] call[name[logging].info, parameter[call[constant[Sent {} message to queue {}.].format, parameter[name[request].command, name[self].queue_name]]]]
keyword[def] identifier[send] ( identifier[self] , identifier[command] , identifier[payload] ): literal[string] identifier[request] = identifier[WorkRequest] ( identifier[command] , identifier[payload] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[request] . identifier[command] , identifier[self] . identifier[queue_name] )) identifier[self] . identifier[connection] . identifier[send_durable_message] ( identifier[self] . identifier[queue_name] , identifier[pickle] . identifier[dumps] ( identifier[request] , identifier[protocol] = literal[int] )) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[request] . identifier[command] , identifier[self] . identifier[queue_name] ))
def send(self, command, payload):
    """
    Send a WorkRequest containing a command and payload to the queue specified in config.
    :param command: str: name of the command we want run by WorkQueueProcessor
    :param payload: object: picklable data to be used when running the command
    """
    request = WorkRequest(command, payload)
    logging.info('Sending {} message to queue {}.'.format(request.command, self.queue_name))
    # setting protocol to version 2 to be compatible with python2
    self.connection.send_durable_message(self.queue_name, pickle.dumps(request, protocol=2))
    logging.info('Sent {} message to queue {}.'.format(request.command, self.queue_name))
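A sketch of what lands on the queue: the payload is just a pickled WorkRequest, so a consumer on the other side reverses the dumps() call. The command name and payload dict are hypothetical; WorkRequest and the queue plumbing are assumed from the surrounding module.

import pickle

raw = pickle.dumps(WorkRequest('stage_data', {'file_id': 123}), protocol=2)
request = pickle.loads(raw)
print(request.command)  # -> 'stage_data'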
def abup_se_plot(mod,species):

    """ plot species from one ABUPP file and the se file. You must use this
    function in the directory where the ABUPP files are, and an ABUPP file
    for model mod must exist.

    Parameters
    ----------
    mod : integer
        Model to plot, you need to have an ABUPP file for that model.
    species : string
        The species to plot.

    Notes
    -----
    The species is set to 'C-12'.

    """

    # Marco, you have already implemented finding headers and columns in
    # ABUPP files. You may want to transplant that into here?
    species='C-12'

    filename = 'ABUPP%07d0000.DAT' % mod
    print(filename)
    mass,c12=np.loadtxt(filename,skiprows=4,usecols=[1,18],unpack=True)
    c12_se=self.se.get(mod,'iso_massf','C-12')
    mass_se=self.se.get(mod,'mass')

    pyl.plot(mass,c12)
    pyl.plot(mass_se,c12_se,'o',label='cycle '+str(mod))
    pyl.legend()
def function[abup_se_plot, parameter[mod, species]]: constant[ plot species from one ABUPP file and the se file. You must use this function in the directory where the ABUPP files are, and an ABUPP file for model mod must exist. Parameters ---------- mod : integer Model to plot, you need to have an ABUPP file for that model. species : string The species to plot. Notes ----- The species is set to 'C-12'. ] variable[species] assign[=] constant[C-12] variable[filename] assign[=] binary_operation[constant[ABUPP%07d0000.DAT] <ast.Mod object at 0x7da2590d6920> name[mod]] call[name[print], parameter[name[filename]]] <ast.Tuple object at 0x7da204346350> assign[=] call[name[np].loadtxt, parameter[name[filename]]] variable[c12_se] assign[=] call[name[self].se.get, parameter[name[mod], constant[iso_massf], constant[C-12]]] variable[mass_se] assign[=] call[name[self].se.get, parameter[name[mod], constant[mass]]] call[name[pyl].plot, parameter[name[mass], name[c12]]] call[name[pyl].plot, parameter[name[mass_se], name[c12_se], constant[o]]] call[name[pyl].legend, parameter[]]
keyword[def] identifier[abup_se_plot] ( identifier[mod] , identifier[species] ): literal[string] identifier[species] = literal[string] identifier[filename] = literal[string] % identifier[mod] identifier[print] ( identifier[filename] ) identifier[mass] , identifier[c12] = identifier[np] . identifier[loadtxt] ( identifier[filename] , identifier[skiprows] = literal[int] , identifier[usecols] =[ literal[int] , literal[int] ], identifier[unpack] = keyword[True] ) identifier[c12_se] = identifier[self] . identifier[se] . identifier[get] ( identifier[mod] , literal[string] , literal[string] ) identifier[mass_se] = identifier[self] . identifier[se] . identifier[get] ( identifier[mod] , literal[string] ) identifier[pyl] . identifier[plot] ( identifier[mass] , identifier[c12] ) identifier[pyl] . identifier[plot] ( identifier[mass_se] , identifier[c12_se] , literal[string] , identifier[label] = literal[string] + identifier[str] ( identifier[mod] )) identifier[pyl] . identifier[legend] ()
def abup_se_plot(mod, species):
    """ plot species from one ABUPP file and the se file. You must use this
    function in the directory where the ABUPP files are, and an ABUPP file
    for model mod must exist.

    Parameters
    ----------
    mod : integer
        Model to plot, you need to have an ABUPP file for that model.
    species : string
        The species to plot.

    Notes
    -----
    The species is set to 'C-12'.

    """
    # Marco, you have already implemented finding headers and columns in
    # ABUPP files. You may want to transplant that into here?
    species = 'C-12'
    filename = 'ABUPP%07d0000.DAT' % mod
    print(filename)
    (mass, c12) = np.loadtxt(filename, skiprows=4, usecols=[1, 18], unpack=True)
    c12_se = self.se.get(mod, 'iso_massf', 'C-12')
    mass_se = self.se.get(mod, 'mass')
    pyl.plot(mass, c12)
    pyl.plot(mass_se, c12_se, 'o', label='cycle ' + str(mod))
    pyl.legend()
def nominatim_request(params, type="search", pause_duration=1, timeout=30, error_pause_duration=180):
    """
    Send a request to the Nominatim API via HTTP GET and return the JSON
    response.

    Parameters
    ----------
    params : dict or OrderedDict
        key-value pairs of parameters
    type : string
        Type of Nominatim query. One of the following: search, reverse or lookup
    pause_duration : int
        how long to pause before requests, in seconds
    timeout : int
        the timeout interval for the requests library
    error_pause_duration : int
        how long to pause in seconds before re-trying requests if error

    Returns
    -------
    response_json : dict
    """

    known_requests = {"search", "reverse", "lookup"}
    if type not in known_requests:
        raise ValueError("The type of Nominatim request is invalid. Please choose one of {'search', 'reverse', 'lookup'}")

    # prepare the Nominatim API URL and see if request already exists in the
    # cache
    url = 'https://nominatim.openstreetmap.org/{}'.format(type)
    prepared_url = requests.Request('GET', url, params=params).prepare().url
    cached_response_json = get_from_cache(prepared_url)

    if cached_response_json is not None:
        # found this request in the cache, just return it instead of making a
        # new HTTP call
        return cached_response_json

    else:
        # if this URL is not already in the cache, pause, then request it
        log('Pausing {:,.2f} seconds before making API GET request'.format(pause_duration))
        time.sleep(pause_duration)
        start_time = time.time()
        log('Requesting {} with timeout={}'.format(prepared_url, timeout))
        response = requests.get(url, params=params, timeout=timeout, headers=get_http_headers())

        # get the response size and the domain, log result
        size_kb = len(response.content) / 1000.
        domain = re.findall(r'(?s)//(.*?)/', url)[0]
        log('Downloaded {:,.1f}KB from {} in {:,.2f} seconds'.format(size_kb, domain, time.time()-start_time))

        try:
            response_json = response.json()
            save_to_cache(prepared_url, response_json)
        except Exception:
            #429 is 'too many requests' and 504 is 'gateway timeout' from server
            # overload - handle these errors by recursively calling
            # nominatim_request until we get a valid response
            if response.status_code in [429, 504]:
                # pause for error_pause_duration seconds before re-trying request
                log('Server at {} returned status code {} and no JSON data. Re-trying request in {:.2f} seconds.'.format(domain, response.status_code, error_pause_duration), level=lg.WARNING)
                time.sleep(error_pause_duration)
                response_json = nominatim_request(params=params, pause_duration=pause_duration, timeout=timeout)

            # else, this was an unhandled status_code, throw an exception
            else:
                log('Server at {} returned status code {} and no JSON data'.format(domain, response.status_code), level=lg.ERROR)
                raise Exception('Server returned no JSON data.\n{} {}\n{}'.format(response, response.reason, response.text))

        return response_json
def function[nominatim_request, parameter[params, type, pause_duration, timeout, error_pause_duration]]: constant[ Send a request to the Nominatim API via HTTP GET and return the JSON response. Parameters ---------- params : dict or OrderedDict key-value pairs of parameters type : string Type of Nominatim query. One of the following: search, reverse or lookup pause_duration : int how long to pause before requests, in seconds timeout : int the timeout interval for the requests library error_pause_duration : int how long to pause in seconds before re-trying requests if error Returns ------- response_json : dict ] variable[known_requests] assign[=] <ast.Set object at 0x7da1b1b69180> if compare[name[type] <ast.NotIn object at 0x7da2590d7190> name[known_requests]] begin[:] <ast.Raise object at 0x7da1b1b6a050> variable[url] assign[=] call[constant[https://nominatim.openstreetmap.org/{}].format, parameter[name[type]]] variable[prepared_url] assign[=] call[call[name[requests].Request, parameter[constant[GET], name[url]]].prepare, parameter[]].url variable[cached_response_json] assign[=] call[name[get_from_cache], parameter[name[prepared_url]]] if compare[name[cached_response_json] is_not constant[None]] begin[:] return[name[cached_response_json]]
keyword[def] identifier[nominatim_request] ( identifier[params] , identifier[type] = literal[string] , identifier[pause_duration] = literal[int] , identifier[timeout] = literal[int] , identifier[error_pause_duration] = literal[int] ): literal[string] identifier[known_requests] ={ literal[string] , literal[string] , literal[string] } keyword[if] identifier[type] keyword[not] keyword[in] identifier[known_requests] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[url] = literal[string] . identifier[format] ( identifier[type] ) identifier[prepared_url] = identifier[requests] . identifier[Request] ( literal[string] , identifier[url] , identifier[params] = identifier[params] ). identifier[prepare] (). identifier[url] identifier[cached_response_json] = identifier[get_from_cache] ( identifier[prepared_url] ) keyword[if] identifier[cached_response_json] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[cached_response_json] keyword[else] : identifier[log] ( literal[string] . identifier[format] ( identifier[pause_duration] )) identifier[time] . identifier[sleep] ( identifier[pause_duration] ) identifier[start_time] = identifier[time] . identifier[time] () identifier[log] ( literal[string] . identifier[format] ( identifier[prepared_url] , identifier[timeout] )) identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[timeout] = identifier[timeout] , identifier[headers] = identifier[get_http_headers] ()) identifier[size_kb] = identifier[len] ( identifier[response] . identifier[content] )/ literal[int] identifier[domain] = identifier[re] . identifier[findall] ( literal[string] , identifier[url] )[ literal[int] ] identifier[log] ( literal[string] . identifier[format] ( identifier[size_kb] , identifier[domain] , identifier[time] . identifier[time] ()- identifier[start_time] )) keyword[try] : identifier[response_json] = identifier[response] . identifier[json] () identifier[save_to_cache] ( identifier[prepared_url] , identifier[response_json] ) keyword[except] identifier[Exception] : keyword[if] identifier[response] . identifier[status_code] keyword[in] [ literal[int] , literal[int] ]: identifier[log] ( literal[string] . identifier[format] ( identifier[domain] , identifier[response] . identifier[status_code] , identifier[error_pause_duration] ), identifier[level] = identifier[lg] . identifier[WARNING] ) identifier[time] . identifier[sleep] ( identifier[error_pause_duration] ) identifier[response_json] = identifier[nominatim_request] ( identifier[params] = identifier[params] , identifier[pause_duration] = identifier[pause_duration] , identifier[timeout] = identifier[timeout] ) keyword[else] : identifier[log] ( literal[string] . identifier[format] ( identifier[domain] , identifier[response] . identifier[status_code] ), identifier[level] = identifier[lg] . identifier[ERROR] ) keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[response] , identifier[response] . identifier[reason] , identifier[response] . identifier[text] )) keyword[return] identifier[response_json]
def nominatim_request(params, type='search', pause_duration=1, timeout=30, error_pause_duration=180):
    """
    Send a request to the Nominatim API via HTTP GET and return the JSON
    response.

    Parameters
    ----------
    params : dict or OrderedDict
        key-value pairs of parameters
    type : string
        Type of Nominatim query. One of the following: search, reverse or lookup
    pause_duration : int
        how long to pause before requests, in seconds
    timeout : int
        the timeout interval for the requests library
    error_pause_duration : int
        how long to pause in seconds before re-trying requests if error

    Returns
    -------
    response_json : dict
    """
    known_requests = {'search', 'reverse', 'lookup'}
    if type not in known_requests:
        raise ValueError("The type of Nominatim request is invalid. Please choose one of {'search', 'reverse', 'lookup'}") # depends on [control=['if'], data=[]]
    # prepare the Nominatim API URL and see if request already exists in the
    # cache
    url = 'https://nominatim.openstreetmap.org/{}'.format(type)
    prepared_url = requests.Request('GET', url, params=params).prepare().url
    cached_response_json = get_from_cache(prepared_url)
    if cached_response_json is not None:
        # found this request in the cache, just return it instead of making a
        # new HTTP call
        return cached_response_json # depends on [control=['if'], data=['cached_response_json']]
    else:
        # if this URL is not already in the cache, pause, then request it
        log('Pausing {:,.2f} seconds before making API GET request'.format(pause_duration))
        time.sleep(pause_duration)
        start_time = time.time()
        log('Requesting {} with timeout={}'.format(prepared_url, timeout))
        response = requests.get(url, params=params, timeout=timeout, headers=get_http_headers())
        # get the response size and the domain, log result
        size_kb = len(response.content) / 1000.0
        domain = re.findall('(?s)//(.*?)/', url)[0]
        log('Downloaded {:,.1f}KB from {} in {:,.2f} seconds'.format(size_kb, domain, time.time() - start_time))
        try:
            response_json = response.json()
            save_to_cache(prepared_url, response_json) # depends on [control=['try'], data=[]]
        except Exception:
            #429 is 'too many requests' and 504 is 'gateway timeout' from server
            # overload - handle these errors by recursively calling
            # nominatim_request until we get a valid response
            if response.status_code in [429, 504]:
                # pause for error_pause_duration seconds before re-trying request
                log('Server at {} returned status code {} and no JSON data. Re-trying request in {:.2f} seconds.'.format(domain, response.status_code, error_pause_duration), level=lg.WARNING)
                time.sleep(error_pause_duration)
                response_json = nominatim_request(params=params, pause_duration=pause_duration, timeout=timeout) # depends on [control=['if'], data=[]]
            else:
                # else, this was an unhandled status_code, throw an exception
                log('Server at {} returned status code {} and no JSON data'.format(domain, response.status_code), level=lg.ERROR)
                raise Exception('Server returned no JSON data.\n{} {}\n{}'.format(response, response.reason, response.text)) # depends on [control=['except'], data=[]]
        return response_json
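A hypothetical search call. 'q', 'format', and 'limit' are standard Nominatim query parameters; the caching and logging helpers the function relies on are assumed to be available, and pause/timeout fall back to the defaults above.

params = {'q': 'Berlin, Germany', 'format': 'json', 'limit': 1}
result = nominatim_request(params, type='search')  # parsed JSON (a list for searches)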
def _watchdog_time(self):
    """ Display the time in the title """
    while not self.quit:
        self.data.time = self.player.time_pos
        self.view.display()
        time.sleep(1)
def function[_watchdog_time, parameter[self]]:
    constant[ Display the time in the title ]
    while <ast.UnaryOp object at 0x7da18fe92950> begin[:]
        name[self].data.time assign[=] name[self].player.time_pos
        call[name[self].view.display, parameter[]]
        call[name[time].sleep, parameter[constant[1]]]
keyword[def] identifier[_watchdog_time] ( identifier[self] ): literal[string] keyword[while] keyword[not] identifier[self] . identifier[quit] : identifier[self] . identifier[data] . identifier[time] = identifier[self] . identifier[player] . identifier[time_pos] identifier[self] . identifier[view] . identifier[display] () identifier[time] . identifier[sleep] ( literal[int] )
def _watchdog_time(self):
    """ Display the time in the title """
    while not self.quit:
        self.data.time = self.player.time_pos
        self.view.display()
        time.sleep(1) # depends on [control=['while'], data=[]]
def getCertificateExpireDate(self, CorpNum):
    """ Check the public certificate's expiration date; can also be used to check whether a certificate is registered.
        args
            CorpNum : business registration number of the member to check
        return
            expiration date if a certificate is registered; raises the corresponding PopbillException if not registered.
        raise
            PopbillException
    """
    result = self._httpget('/Taxinvoice?cfg=CERT', CorpNum)
    return datetime.strptime(result.certificateExpiration, '%Y%m%d%H%M%S')
def function[getCertificateExpireDate, parameter[self, CorpNum]]:
    constant[ Check the public certificate's expiration date; can also be used to check whether a certificate is registered. args CorpNum : business registration number of the member to check return expiration date if a certificate is registered; raises the corresponding PopbillException if not registered. raise PopbillException ]
    variable[result] assign[=] call[name[self]._httpget, parameter[constant[/Taxinvoice?cfg=CERT], name[CorpNum]]]
    return[call[name[datetime].strptime, parameter[name[result].certificateExpiration, constant[%Y%m%d%H%M%S]]]]
keyword[def] identifier[getCertificateExpireDate] ( identifier[self] , identifier[CorpNum] ): literal[string] identifier[result] = identifier[self] . identifier[_httpget] ( literal[string] , identifier[CorpNum] ) keyword[return] identifier[datetime] . identifier[strptime] ( identifier[result] . identifier[certificateExpiration] , literal[string] )
def getCertificateExpireDate(self, CorpNum):
    """ Check the public certificate's expiration date; can also be used to check whether a certificate is registered.
        args
            CorpNum : business registration number of the member to check
        return
            expiration date if a certificate is registered; raises the corresponding PopbillException if not registered.
        raise
            PopbillException
    """
    result = self._httpget('/Taxinvoice?cfg=CERT', CorpNum)
    return datetime.strptime(result.certificateExpiration, '%Y%m%d%H%M%S')
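The API returns the expiration as a 14-digit timestamp string; the parse in the method body is equivalent to this (the value here is made up).

from datetime import datetime

expires = datetime.strptime('20251231235959', '%Y%m%d%H%M%S')
print(expires)  # -> 2025-12-31 23:59:59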
def returnTradeHistoryPublic(self, currencyPair, start=None, end=None): """Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.""" return super(Poloniex, self).returnTradeHistory(currencyPair, start, end)
def function[returnTradeHistoryPublic, parameter[self, currencyPair, start, end]]: constant[Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.] return[call[call[name[super], parameter[name[Poloniex], name[self]]].returnTradeHistory, parameter[name[currencyPair], name[start], name[end]]]]
keyword[def] identifier[returnTradeHistoryPublic] ( identifier[self] , identifier[currencyPair] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ): literal[string] keyword[return] identifier[super] ( identifier[Poloniex] , identifier[self] ). identifier[returnTradeHistory] ( identifier[currencyPair] , identifier[start] , identifier[end] )
def returnTradeHistoryPublic(self, currencyPair, start=None, end=None): """Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.""" return super(Poloniex, self).returnTradeHistory(currencyPair, start, end)
def spmatrix(self, reordered = True, symmetric = False): """ Converts the :py:class:`cspmatrix` :math:`A` to a sparse matrix. A reordered matrix is returned if the optional argument `reordered` is `True` (default), and otherwise the inverse permutation is applied. Only the default options are allowed if the :py:class:`cspmatrix` :math:`A` represents a Cholesky factor. :param reordered: boolean (default: True) :param symmetric: boolean (default: False) """ n = self.symb.n snptr = self.symb.snptr snode = self.symb.snode relptr = self.symb.relptr snrowidx = self.symb.snrowidx sncolptr = self.symb.sncolptr blkptr = self.symb.blkptr blkval = self.blkval if self.is_factor: if symmetric: raise ValueError("'symmetric = True' not implemented for Cholesky factors") if not reordered: raise ValueError("'reordered = False' not implemented for Cholesky factors") snpost = self.symb.snpost blkval = +blkval for k in snpost: j = snode[snptr[k]] # representative vertex nn = snptr[k+1]-snptr[k] # |Nk| na = relptr[k+1]-relptr[k] # |Ak| if na == 0: continue nj = na + nn if nn == 1: blas.scal(blkval[blkptr[k]],blkval,offset = blkptr[k]+1,n=na) else: blas.trmm(blkval,blkval, transA = "N", diag = "N", side = "R",uplo = "L", \ m = na, n = nn, ldA = nj, ldB = nj, \ offsetA = blkptr[k],offsetB = blkptr[k] + nn) cc = matrix(0,(n,1)) # count number of nonzeros in each col for k in range(self.symb.Nsn): nn = snptr[k+1]-snptr[k] na = relptr[k+1]-relptr[k] nj = nn + na for i in range(nn): j = snode[snptr[k]+i] cc[j] = nj - i # build col. ptr cp = [0] for i in range(n): cp.append(cp[-1] + cc[i]) cp = matrix(cp) # copy data and row indices val = matrix(0.0, (cp[-1],1)) ri = matrix(0, (cp[-1],1)) for k in range(self.symb.Nsn): nn = snptr[k+1]-snptr[k] na = relptr[k+1]-relptr[k] nj = nn + na for i in range(nn): j = snode[snptr[k]+i] blas.copy(blkval, val, offsetx = blkptr[k]+nj*i+i, offsety = cp[j], n = nj-i) ri[cp[j]:cp[j+1]] = snrowidx[sncolptr[k]+i:sncolptr[k+1]] I = []; J = [] for i in range(n): I += list(ri[cp[i]:cp[i+1]]) J += (cp[i+1]-cp[i])*[i] tmp = spmatrix(val, I, J, (n,n)) # tmp is reordered and lower tril. if reordered or self.symb.p is None: # reordered matrix (do not apply inverse permutation) if not symmetric: return tmp else: return symmetrize(tmp) else: # apply inverse permutation tmp = perm(symmetrize(tmp), self.symb.ip) if symmetric: return tmp else: return tril(tmp)
def function[spmatrix, parameter[self, reordered, symmetric]]: constant[ Converts the :py:class:`cspmatrix` :math:`A` to a sparse matrix. A reordered matrix is returned if the optional argument `reordered` is `True` (default), and otherwise the inverse permutation is applied. Only the default options are allowed if the :py:class:`cspmatrix` :math:`A` represents a Cholesky factor. :param reordered: boolean (default: True) :param symmetric: boolean (default: False) ] variable[n] assign[=] name[self].symb.n variable[snptr] assign[=] name[self].symb.snptr variable[snode] assign[=] name[self].symb.snode variable[relptr] assign[=] name[self].symb.relptr variable[snrowidx] assign[=] name[self].symb.snrowidx variable[sncolptr] assign[=] name[self].symb.sncolptr variable[blkptr] assign[=] name[self].symb.blkptr variable[blkval] assign[=] name[self].blkval if name[self].is_factor begin[:] if name[symmetric] begin[:] <ast.Raise object at 0x7da204347580> if <ast.UnaryOp object at 0x7da204346590> begin[:] <ast.Raise object at 0x7da204347fd0> variable[snpost] assign[=] name[self].symb.snpost variable[blkval] assign[=] <ast.UnaryOp object at 0x7da2043442e0> for taget[name[k]] in starred[name[snpost]] begin[:] variable[j] assign[=] call[name[snode]][call[name[snptr]][name[k]]] variable[nn] assign[=] binary_operation[call[name[snptr]][binary_operation[name[k] + constant[1]]] - call[name[snptr]][name[k]]] variable[na] assign[=] binary_operation[call[name[relptr]][binary_operation[name[k] + constant[1]]] - call[name[relptr]][name[k]]] if compare[name[na] equal[==] constant[0]] begin[:] continue variable[nj] assign[=] binary_operation[name[na] + name[nn]] if compare[name[nn] equal[==] constant[1]] begin[:] call[name[blas].scal, parameter[call[name[blkval]][call[name[blkptr]][name[k]]], name[blkval]]] variable[cc] assign[=] call[name[matrix], parameter[constant[0], tuple[[<ast.Name object at 0x7da204346c50>, <ast.Constant object at 0x7da204347610>]]]] for taget[name[k]] in starred[call[name[range], parameter[name[self].symb.Nsn]]] begin[:] variable[nn] assign[=] binary_operation[call[name[snptr]][binary_operation[name[k] + constant[1]]] - call[name[snptr]][name[k]]] variable[na] assign[=] binary_operation[call[name[relptr]][binary_operation[name[k] + constant[1]]] - call[name[relptr]][name[k]]] variable[nj] assign[=] binary_operation[name[nn] + name[na]] for taget[name[i]] in starred[call[name[range], parameter[name[nn]]]] begin[:] variable[j] assign[=] call[name[snode]][binary_operation[call[name[snptr]][name[k]] + name[i]]] call[name[cc]][name[j]] assign[=] binary_operation[name[nj] - name[i]] variable[cp] assign[=] list[[<ast.Constant object at 0x7da2043463e0>]] for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:] call[name[cp].append, parameter[binary_operation[call[name[cp]][<ast.UnaryOp object at 0x7da204345390>] + call[name[cc]][name[i]]]]] variable[cp] assign[=] call[name[matrix], parameter[name[cp]]] variable[val] assign[=] call[name[matrix], parameter[constant[0.0], tuple[[<ast.Subscript object at 0x7da204344eb0>, <ast.Constant object at 0x7da204345bd0>]]]] variable[ri] assign[=] call[name[matrix], parameter[constant[0], tuple[[<ast.Subscript object at 0x7da204345b10>, <ast.Constant object at 0x7da2043450c0>]]]] for taget[name[k]] in starred[call[name[range], parameter[name[self].symb.Nsn]]] begin[:] variable[nn] assign[=] binary_operation[call[name[snptr]][binary_operation[name[k] + constant[1]]] - call[name[snptr]][name[k]]] variable[na] assign[=] binary_operation[call[name[relptr]][binary_operation[name[k] + constant[1]]] - call[name[relptr]][name[k]]] variable[nj] assign[=] binary_operation[name[nn] + name[na]] for taget[name[i]] in starred[call[name[range], parameter[name[nn]]]] begin[:] variable[j] assign[=] call[name[snode]][binary_operation[call[name[snptr]][name[k]] + name[i]]] call[name[blas].copy, parameter[name[blkval], name[val]]] call[name[ri]][<ast.Slice object at 0x7da18f00c160>] assign[=] call[name[snrowidx]][<ast.Slice object at 0x7da20cabdb70>] variable[I] assign[=] list[[]] variable[J] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:] <ast.AugAssign object at 0x7da20cabdfc0> <ast.AugAssign object at 0x7da20cabf700> variable[tmp] assign[=] call[name[spmatrix], parameter[name[val], name[I], name[J], tuple[[<ast.Name object at 0x7da20cabea40>, <ast.Name object at 0x7da20cabfb50>]]]] if <ast.BoolOp object at 0x7da20cabfb80> begin[:] if <ast.UnaryOp object at 0x7da20cabd8a0> begin[:] return[name[tmp]]
keyword[def] identifier[spmatrix] ( identifier[self] , identifier[reordered] = keyword[True] , identifier[symmetric] = keyword[False] ): literal[string] identifier[n] = identifier[self] . identifier[symb] . identifier[n] identifier[snptr] = identifier[self] . identifier[symb] . identifier[snptr] identifier[snode] = identifier[self] . identifier[symb] . identifier[snode] identifier[relptr] = identifier[self] . identifier[symb] . identifier[relptr] identifier[snrowidx] = identifier[self] . identifier[symb] . identifier[snrowidx] identifier[sncolptr] = identifier[self] . identifier[symb] . identifier[sncolptr] identifier[blkptr] = identifier[self] . identifier[symb] . identifier[blkptr] identifier[blkval] = identifier[self] . identifier[blkval] keyword[if] identifier[self] . identifier[is_factor] : keyword[if] identifier[symmetric] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[reordered] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[snpost] = identifier[self] . identifier[symb] . identifier[snpost] identifier[blkval] =+ identifier[blkval] keyword[for] identifier[k] keyword[in] identifier[snpost] : identifier[j] = identifier[snode] [ identifier[snptr] [ identifier[k] ]] identifier[nn] = identifier[snptr] [ identifier[k] + literal[int] ]- identifier[snptr] [ identifier[k] ] identifier[na] = identifier[relptr] [ identifier[k] + literal[int] ]- identifier[relptr] [ identifier[k] ] keyword[if] identifier[na] == literal[int] : keyword[continue] identifier[nj] = identifier[na] + identifier[nn] keyword[if] identifier[nn] == literal[int] : identifier[blas] . identifier[scal] ( identifier[blkval] [ identifier[blkptr] [ identifier[k] ]], identifier[blkval] , identifier[offset] = identifier[blkptr] [ identifier[k] ]+ literal[int] , identifier[n] = identifier[na] ) keyword[else] : identifier[blas] . identifier[trmm] ( identifier[blkval] , identifier[blkval] , identifier[transA] = literal[string] , identifier[diag] = literal[string] , identifier[side] = literal[string] , identifier[uplo] = literal[string] , identifier[m] = identifier[na] , identifier[n] = identifier[nn] , identifier[ldA] = identifier[nj] , identifier[ldB] = identifier[nj] , identifier[offsetA] = identifier[blkptr] [ identifier[k] ], identifier[offsetB] = identifier[blkptr] [ identifier[k] ]+ identifier[nn] ) identifier[cc] = identifier[matrix] ( literal[int] ,( identifier[n] , literal[int] )) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[self] . identifier[symb] . identifier[Nsn] ): identifier[nn] = identifier[snptr] [ identifier[k] + literal[int] ]- identifier[snptr] [ identifier[k] ] identifier[na] = identifier[relptr] [ identifier[k] + literal[int] ]- identifier[relptr] [ identifier[k] ] identifier[nj] = identifier[nn] + identifier[na] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nn] ): identifier[j] = identifier[snode] [ identifier[snptr] [ identifier[k] ]+ identifier[i] ] identifier[cc] [ identifier[j] ]= identifier[nj] - identifier[i] identifier[cp] =[ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ): identifier[cp] . identifier[append] ( identifier[cp] [- literal[int] ]+ identifier[cc] [ identifier[i] ]) identifier[cp] = identifier[matrix] ( identifier[cp] ) identifier[val] = identifier[matrix] ( literal[int] ,( identifier[cp] [- literal[int] ], literal[int] )) identifier[ri] = identifier[matrix] ( literal[int] ,( identifier[cp] [- literal[int] ], literal[int] )) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[self] . identifier[symb] . identifier[Nsn] ): identifier[nn] = identifier[snptr] [ identifier[k] + literal[int] ]- identifier[snptr] [ identifier[k] ] identifier[na] = identifier[relptr] [ identifier[k] + literal[int] ]- identifier[relptr] [ identifier[k] ] identifier[nj] = identifier[nn] + identifier[na] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nn] ): identifier[j] = identifier[snode] [ identifier[snptr] [ identifier[k] ]+ identifier[i] ] identifier[blas] . identifier[copy] ( identifier[blkval] , identifier[val] , identifier[offsetx] = identifier[blkptr] [ identifier[k] ]+ identifier[nj] * identifier[i] + identifier[i] , identifier[offsety] = identifier[cp] [ identifier[j] ], identifier[n] = identifier[nj] - identifier[i] ) identifier[ri] [ identifier[cp] [ identifier[j] ]: identifier[cp] [ identifier[j] + literal[int] ]]= identifier[snrowidx] [ identifier[sncolptr] [ identifier[k] ]+ identifier[i] : identifier[sncolptr] [ identifier[k] + literal[int] ]] identifier[I] =[]; identifier[J] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ): identifier[I] += identifier[list] ( identifier[ri] [ identifier[cp] [ identifier[i] ]: identifier[cp] [ identifier[i] + literal[int] ]]) identifier[J] +=( identifier[cp] [ identifier[i] + literal[int] ]- identifier[cp] [ identifier[i] ])*[ identifier[i] ] identifier[tmp] = identifier[spmatrix] ( identifier[val] , identifier[I] , identifier[J] ,( identifier[n] , identifier[n] )) keyword[if] identifier[reordered] keyword[or] identifier[self] . identifier[symb] . identifier[p] keyword[is] keyword[None] : keyword[if] keyword[not] identifier[symmetric] : keyword[return] identifier[tmp] keyword[else] : keyword[return] identifier[symmetrize] ( identifier[tmp] ) keyword[else] : identifier[tmp] = identifier[perm] ( identifier[symmetrize] ( identifier[tmp] ), identifier[self] . identifier[symb] . identifier[ip] ) keyword[if] identifier[symmetric] : keyword[return] identifier[tmp] keyword[else] : keyword[return] identifier[tril] ( identifier[tmp] )
def spmatrix(self, reordered=True, symmetric=False): """ Converts the :py:class:`cspmatrix` :math:`A` to a sparse matrix. A reordered matrix is returned if the optional argument `reordered` is `True` (default), and otherwise the inverse permutation is applied. Only the default options are allowed if the :py:class:`cspmatrix` :math:`A` represents a Cholesky factor. :param reordered: boolean (default: True) :param symmetric: boolean (default: False) """ n = self.symb.n snptr = self.symb.snptr snode = self.symb.snode relptr = self.symb.relptr snrowidx = self.symb.snrowidx sncolptr = self.symb.sncolptr blkptr = self.symb.blkptr blkval = self.blkval if self.is_factor: if symmetric: raise ValueError("'symmetric = True' not implemented for Cholesky factors") # depends on [control=['if'], data=[]] if not reordered: raise ValueError("'reordered = False' not implemented for Cholesky factors") # depends on [control=['if'], data=[]] snpost = self.symb.snpost blkval = +blkval for k in snpost: j = snode[snptr[k]] # representative vertex nn = snptr[k + 1] - snptr[k] # |Nk| na = relptr[k + 1] - relptr[k] # |Ak| if na == 0: continue # depends on [control=['if'], data=[]] nj = na + nn if nn == 1: blas.scal(blkval[blkptr[k]], blkval, offset=blkptr[k] + 1, n=na) # depends on [control=['if'], data=[]] else: blas.trmm(blkval, blkval, transA='N', diag='N', side='R', uplo='L', m=na, n=nn, ldA=nj, ldB=nj, offsetA=blkptr[k], offsetB=blkptr[k] + nn) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] cc = matrix(0, (n, 1)) # count number of nonzeros in each col for k in range(self.symb.Nsn): nn = snptr[k + 1] - snptr[k] na = relptr[k + 1] - relptr[k] nj = nn + na for i in range(nn): j = snode[snptr[k] + i] cc[j] = nj - i # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['k']] # build col. ptr cp = [0] for i in range(n): cp.append(cp[-1] + cc[i]) # depends on [control=['for'], data=['i']] cp = matrix(cp) # copy data and row indices val = matrix(0.0, (cp[-1], 1)) ri = matrix(0, (cp[-1], 1)) for k in range(self.symb.Nsn): nn = snptr[k + 1] - snptr[k] na = relptr[k + 1] - relptr[k] nj = nn + na for i in range(nn): j = snode[snptr[k] + i] blas.copy(blkval, val, offsetx=blkptr[k] + nj * i + i, offsety=cp[j], n=nj - i) ri[cp[j]:cp[j + 1]] = snrowidx[sncolptr[k] + i:sncolptr[k + 1]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['k']] I = [] J = [] for i in range(n): I += list(ri[cp[i]:cp[i + 1]]) J += (cp[i + 1] - cp[i]) * [i] # depends on [control=['for'], data=['i']] tmp = spmatrix(val, I, J, (n, n)) # tmp is reordered and lower tril. if reordered or self.symb.p is None: # reordered matrix (do not apply inverse permutation) if not symmetric: return tmp # depends on [control=['if'], data=[]] else: return symmetrize(tmp) # depends on [control=['if'], data=[]] else: # apply inverse permutation tmp = perm(symmetrize(tmp), self.symb.ip) if symmetric: return tmp # depends on [control=['if'], data=[]] else: return tril(tmp)
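The column pointer built inside spmatrix() is a plain prefix sum over the per-column nonzero counts; a standalone sketch of that step with hypothetical counts:

counts = [2, 0, 3, 1]   # hypothetical nonzeros per column
cp = [0]
for c in counts:
    cp.append(cp[-1] + c)
print(cp)               # -> [0, 2, 2, 5, 6]; column j occupies val[cp[j]:cp[j+1]]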
def op_list_venvs(self):
    """Prints out and returns a list of known virtual environments.

    :rtype: list
    :return: list of virtual environments

    """
    self.logger.info('Listing known virtual environments ...')

    venvs = self.get_venvs()

    for venv in venvs:
        self.logger.info('Found `%s`' % venv)

    if not venvs:
        self.logger.info('No virtual environments found in `%s` directory.' % VENVS_DIRNAME)

    return venvs
def function[op_list_venvs, parameter[self]]: constant[Prints out and returns a list of known virtual environments. :rtype: list :return: list of virtual environments ] call[name[self].logger.info, parameter[constant[Listing known virtual environments ...]]] variable[venvs] assign[=] call[name[self].get_venvs, parameter[]] for taget[name[venv]] in starred[name[venvs]] begin[:] call[name[self].logger.info, parameter[binary_operation[constant[Found `%s`] <ast.Mod object at 0x7da2590d6920> name[venv]]]] return[name[venvs]]
keyword[def] identifier[op_list_venvs] ( identifier[self] ): literal[string] identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) identifier[venvs] = identifier[self] . identifier[get_venvs] () keyword[for] identifier[venv] keyword[in] identifier[venvs] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[venv] ) keyword[else] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[VENVS_DIRNAME] ) keyword[return] identifier[venvs]
def op_list_venvs(self):
    """Prints out and returns a list of known virtual environments.

    :rtype: list
    :return: list of virtual environments

    """
    self.logger.info('Listing known virtual environments ...')
    venvs = self.get_venvs()
    for venv in venvs:
        self.logger.info('Found `%s`' % venv) # depends on [control=['for'], data=['venv']]
    if not venvs:
        self.logger.info('No virtual environments found in `%s` directory.' % VENVS_DIRNAME) # depends on [control=['if'], data=[]]
    return venvs
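A quick sketch of the two logging paths, assuming `manager` is an instance of the surrounding class:

venvs = manager.op_list_venvs()
# get_venvs() -> ['env1', 'env2']: logs "Found `env1`" and "Found `env2`"
# get_venvs() -> []: logs the "No virtual environments found ..." notice
print(venvs)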
def Bezier(points, at): """Build Bézier curve from points. Deprecated. CatmulClark builds nicer splines """ at = np.asarray(at) at_flat = at.ravel() N = len(points) curve = np.zeros((at_flat.shape[0], 2)) for ii in range(N): curve += np.outer(Bernstein(N - 1, ii)(at_flat), points[ii]) return curve.reshape(at.shape + (2,))
def function[Bezier, parameter[points, at]]: constant[Build Bézier curve from points. Deprecated. CatmulClark builds nicer splines ] variable[at] assign[=] call[name[np].asarray, parameter[name[at]]] variable[at_flat] assign[=] call[name[at].ravel, parameter[]] variable[N] assign[=] call[name[len], parameter[name[points]]] variable[curve] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da18f58e710>, <ast.Constant object at 0x7da18f58d960>]]]] for taget[name[ii]] in starred[call[name[range], parameter[name[N]]]] begin[:] <ast.AugAssign object at 0x7da18f58ceb0> return[call[name[curve].reshape, parameter[binary_operation[name[at].shape + tuple[[<ast.Constant object at 0x7da18f58e350>]]]]]]
keyword[def] identifier[Bezier] ( identifier[points] , identifier[at] ): literal[string] identifier[at] = identifier[np] . identifier[asarray] ( identifier[at] ) identifier[at_flat] = identifier[at] . identifier[ravel] () identifier[N] = identifier[len] ( identifier[points] ) identifier[curve] = identifier[np] . identifier[zeros] (( identifier[at_flat] . identifier[shape] [ literal[int] ], literal[int] )) keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[N] ): identifier[curve] += identifier[np] . identifier[outer] ( identifier[Bernstein] ( identifier[N] - literal[int] , identifier[ii] )( identifier[at_flat] ), identifier[points] [ identifier[ii] ]) keyword[return] identifier[curve] . identifier[reshape] ( identifier[at] . identifier[shape] +( literal[int] ,))
def Bezier(points, at): """Build Bézier curve from points. Deprecated. CatmulClark builds nicer splines """ at = np.asarray(at) at_flat = at.ravel() N = len(points) curve = np.zeros((at_flat.shape[0], 2)) for ii in range(N): curve += np.outer(Bernstein(N - 1, ii)(at_flat), points[ii]) # depends on [control=['for'], data=['ii']] return curve.reshape(at.shape + (2,))
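Bezier() above assumes a Bernstein(n, k) factory returning a callable basis polynomial; a minimal stand-in using the textbook definition B(n, k)(t) = C(n, k) t^k (1 - t)^(n - k), plus a quadratic example:

import numpy as np
from scipy.special import comb

def Bernstein(n, k):
    # k-th Bernstein basis polynomial of degree n
    return lambda t: comb(n, k) * t**k * (1.0 - t)**(n - k)

points = np.array([[0.0, 0.0], [0.5, 1.0], [1.0, 0.0]])  # control polygon
curve = Bezier(points, np.linspace(0.0, 1.0, 5))
print(curve)  # endpoints coincide with the first and last control points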
def logdebug(logger, msg, *args, **kwargs):
    '''
    Logs messages as DEBUG, unless esgfpid.defaults.LOG_DEBUG_TO_INFO
    is True (then it logs messages as INFO).
    '''
    if esgfpid.defaults.LOG_DEBUG_TO_INFO:
        logger.info('DEBUG %s ' % msg, *args, **kwargs)
    else:
        logger.debug(msg, *args, **kwargs)
def function[logdebug, parameter[logger, msg]]:
    constant[ Logs messages as DEBUG, unless esgfpid.defaults.LOG_DEBUG_TO_INFO is True (then it logs messages as INFO). ]
    if name[esgfpid].defaults.LOG_DEBUG_TO_INFO begin[:]
        call[name[logger].info, parameter[binary_operation[constant[DEBUG %s ] <ast.Mod object at 0x7da2590d6920> name[msg]], <ast.Starred object at 0x7da1b1e5bd60>]]
keyword[def] identifier[logdebug] ( identifier[logger] , identifier[msg] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[esgfpid] . identifier[defaults] . identifier[LOG_DEBUG_TO_INFO] : identifier[logger] . identifier[info] ( literal[string] % identifier[msg] ,* identifier[args] ,** identifier[kwargs] ) keyword[else] : identifier[logger] . identifier[debug] ( identifier[msg] ,* identifier[args] ,** identifier[kwargs] )
def logdebug(logger, msg, *args, **kwargs):
    """
    Logs messages as DEBUG, unless esgfpid.defaults.LOG_DEBUG_TO_INFO
    is True (then it logs messages as INFO).
    """
    if esgfpid.defaults.LOG_DEBUG_TO_INFO:
        logger.info('DEBUG %s ' % msg, *args, **kwargs) # depends on [control=['if'], data=[]]
    else:
        logger.debug(msg, *args, **kwargs)
def _input_handler_decorator(self, data): """Adds positional parameters to selected input_handler's results. """ input_handler = getattr(self, self.__InputHandler) input_parts = [ self.Parameters['taxonomy_file'], input_handler(data), self.Parameters['training_set_id'], self.Parameters['taxonomy_version'], self.Parameters['modification_info'], self.ModelDir, ] return self._commandline_join(input_parts)
def function[_input_handler_decorator, parameter[self, data]]: constant[Adds positional parameters to selected input_handler's results. ] variable[input_handler] assign[=] call[name[getattr], parameter[name[self], name[self].__InputHandler]] variable[input_parts] assign[=] list[[<ast.Subscript object at 0x7da1b0bd9900>, <ast.Call object at 0x7da1b0bdaf50>, <ast.Subscript object at 0x7da1b0bd8cd0>, <ast.Subscript object at 0x7da1b0bdad10>, <ast.Subscript object at 0x7da1b0bd91e0>, <ast.Attribute object at 0x7da1b0bdb5b0>]] return[call[name[self]._commandline_join, parameter[name[input_parts]]]]
keyword[def] identifier[_input_handler_decorator] ( identifier[self] , identifier[data] ): literal[string] identifier[input_handler] = identifier[getattr] ( identifier[self] , identifier[self] . identifier[__InputHandler] ) identifier[input_parts] =[ identifier[self] . identifier[Parameters] [ literal[string] ], identifier[input_handler] ( identifier[data] ), identifier[self] . identifier[Parameters] [ literal[string] ], identifier[self] . identifier[Parameters] [ literal[string] ], identifier[self] . identifier[Parameters] [ literal[string] ], identifier[self] . identifier[ModelDir] , ] keyword[return] identifier[self] . identifier[_commandline_join] ( identifier[input_parts] )
def _input_handler_decorator(self, data): """Adds positional parameters to selected input_handler's results. """ input_handler = getattr(self, self.__InputHandler) input_parts = [self.Parameters['taxonomy_file'], input_handler(data), self.Parameters['training_set_id'], self.Parameters['taxonomy_version'], self.Parameters['modification_info'], self.ModelDir] return self._commandline_join(input_parts)
def radpress_get_markup_descriptions():
    """
    Provides markup options. It is used for adding descriptions in admin and
    zen mode.

    :return: list
    """
    result = []
    for markup in get_markup_choices():
        markup_name = markup[0]
        result.append({
            'name': markup_name,
            'title': markup[1],
            'description': trim(get_reader(markup=markup_name).description)
        })

    return result
def function[radpress_get_markup_descriptions, parameter[]]:
    constant[ Provides markup options. It is used for adding descriptions in admin and zen mode. :return: list ]
    variable[result] assign[=] list[[]]
    for taget[name[markup]] in starred[call[name[get_markup_choices], parameter[]]] begin[:]
        variable[markup_name] assign[=] call[name[markup]][constant[0]]
        call[name[result].append, parameter[dictionary[[<ast.Constant object at 0x7da204345ed0>, <ast.Constant object at 0x7da204344970>, <ast.Constant object at 0x7da2043453c0>], [<ast.Name object at 0x7da204346a70>, <ast.Subscript object at 0x7da204347f40>, <ast.Call object at 0x7da204344fd0>]]]]
    return[name[result]]
keyword[def] identifier[radpress_get_markup_descriptions] (): literal[string] identifier[result] =[] keyword[for] identifier[markup] keyword[in] identifier[get_markup_choices] (): identifier[markup_name] = identifier[markup] [ literal[int] ] identifier[result] . identifier[append] ({ literal[string] : identifier[markup_name] , literal[string] : identifier[markup] [ literal[int] ], literal[string] : identifier[trim] ( identifier[get_reader] ( identifier[markup] = identifier[markup_name] ). identifier[description] ) }) keyword[return] identifier[result]
def radpress_get_markup_descriptions():
    """
    Provides markup options. It is used for adding descriptions in admin and
    zen mode.

    :return: list
    """
    result = []
    for markup in get_markup_choices():
        markup_name = markup[0]
        result.append({'name': markup_name, 'title': markup[1], 'description': trim(get_reader(markup=markup_name).description)}) # depends on [control=['for'], data=['markup']]
    return result
def range_for_length(self, length): """If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`. """ if self.units != "bytes" or length is None or len(self.ranges) != 1: return None start, end = self.ranges[0] if end is None: end = length if start < 0: start += length if is_byte_range_valid(start, end, length): return start, min(end, length)
def function[range_for_length, parameter[self, length]]: constant[If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`. ] if <ast.BoolOp object at 0x7da20c6a87c0> begin[:] return[constant[None]] <ast.Tuple object at 0x7da18f8115d0> assign[=] call[name[self].ranges][constant[0]] if compare[name[end] is constant[None]] begin[:] variable[end] assign[=] name[length] if compare[name[start] less[<] constant[0]] begin[:] <ast.AugAssign object at 0x7da18f8101c0> if call[name[is_byte_range_valid], parameter[name[start], name[end], name[length]]] begin[:] return[tuple[[<ast.Name object at 0x7da18f8139a0>, <ast.Call object at 0x7da18f810a00>]]]
keyword[def] identifier[range_for_length] ( identifier[self] , identifier[length] ): literal[string] keyword[if] identifier[self] . identifier[units] != literal[string] keyword[or] identifier[length] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[self] . identifier[ranges] )!= literal[int] : keyword[return] keyword[None] identifier[start] , identifier[end] = identifier[self] . identifier[ranges] [ literal[int] ] keyword[if] identifier[end] keyword[is] keyword[None] : identifier[end] = identifier[length] keyword[if] identifier[start] < literal[int] : identifier[start] += identifier[length] keyword[if] identifier[is_byte_range_valid] ( identifier[start] , identifier[end] , identifier[length] ): keyword[return] identifier[start] , identifier[min] ( identifier[end] , identifier[length] )
def range_for_length(self, length): """If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`. """ if self.units != 'bytes' or length is None or len(self.ranges) != 1: return None # depends on [control=['if'], data=[]] (start, end) = self.ranges[0] if end is None: end = length if start < 0: start += length # depends on [control=['if'], data=['start']] # depends on [control=['if'], data=['end']] if is_byte_range_valid(start, end, length): return (start, min(end, length)) # depends on [control=['if'], data=[]]
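This method reads like werkzeug's Range datastructure; assuming that is its home, the common range shapes resolve against a 1000-byte resource like this:

from werkzeug.datastructures import Range

print(Range('bytes', [(0, 500)]).range_for_length(1000))     # (0, 500): explicit range
print(Range('bytes', [(-500, None)]).range_for_length(1000)) # (500, 1000): suffix range, start counted from the end
print(Range('bytes', [(600, None)]).range_for_length(1000))  # (600, 1000): open-ended range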
def set_many(self, block, update_dict): """ Update many fields on an XBlock simultaneously. :param block: the block to update :type block: :class:`~xblock.core.XBlock` :param update_dict: A map of field names to their new values :type update_dict: dict """ for key, value in six.iteritems(update_dict): self.set(block, key, value)
def function[set_many, parameter[self, block, update_dict]]: constant[ Update many fields on an XBlock simultaneously. :param block: the block to update :type block: :class:`~xblock.core.XBlock` :param update_dict: A map of field names to their new values :type update_dict: dict ] for taget[tuple[[<ast.Name object at 0x7da18f58d390>, <ast.Name object at 0x7da18f58d7e0>]]] in starred[call[name[six].iteritems, parameter[name[update_dict]]]] begin[:] call[name[self].set, parameter[name[block], name[key], name[value]]]
keyword[def] identifier[set_many] ( identifier[self] , identifier[block] , identifier[update_dict] ): literal[string] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[update_dict] ): identifier[self] . identifier[set] ( identifier[block] , identifier[key] , identifier[value] )
def set_many(self, block, update_dict): """ Update many fields on an XBlock simultaneously. :param block: the block to update :type block: :class:`~xblock.core.XBlock` :param update_dict: A map of field names to their new values :type update_dict: dict """ for (key, value) in six.iteritems(update_dict): self.set(block, key, value) # depends on [control=['for'], data=[]]
def get_amount_normal(self, billing_cycle):
    """Get the amount due on the given billing cycle

    For regular recurring costs this is simply `fixed_amount`. For one-off
    costs this is the portion of `fixed_amount` for the given billing_cycle.
    """
    if self.is_one_off():
        billing_cycle_number = self._get_billing_cycle_number(billing_cycle)
        if billing_cycle_number > self.total_billing_cycles:
            # A future billing cycle after this one has ended
            return Decimal('0')
        else:
            # This is a current cycle. Split the amount into
            # equal parts then return the part for this cycle
            splits = ratio_split(
                amount=self.fixed_amount,
                ratios=[Decimal('1')] * self.total_billing_cycles,
            )
            return splits[billing_cycle_number - 1]
    else:
        # This is a non-one-off recurring cost, so the logic is simple
        return self.fixed_amount
def function[get_amount_normal, parameter[self, billing_cycle]]: constant[Get the amount due on the given billing cycle For regular recurring costs this is simply `fixed_amount`. For one-off costs this is the portion of `fixed_amount` for the given billing_cycle. ] if call[name[self].is_one_off, parameter[]] begin[:] variable[billing_cycle_number] assign[=] call[name[self]._get_billing_cycle_number, parameter[name[billing_cycle]]] if compare[name[billing_cycle_number] greater[>] name[self].total_billing_cycles] begin[:] return[call[name[Decimal], parameter[constant[0]]]]
keyword[def] identifier[get_amount_normal] ( identifier[self] , identifier[billing_cycle] ): literal[string] keyword[if] identifier[self] . identifier[is_one_off] (): identifier[billing_cycle_number] = identifier[self] . identifier[_get_billing_cycle_number] ( identifier[billing_cycle] ) keyword[if] identifier[billing_cycle_number] > identifier[self] . identifier[total_billing_cycles] : keyword[return] identifier[Decimal] ( literal[string] ) keyword[else] : identifier[splits] = identifier[ratio_split] ( identifier[amount] = identifier[self] . identifier[fixed_amount] , identifier[ratios] =[ identifier[Decimal] ( literal[string] )]* identifier[self] . identifier[total_billing_cycles] , ) keyword[return] identifier[splits] [ identifier[billing_cycle_number] - literal[int] ] keyword[else] : keyword[return] identifier[self] . identifier[fixed_amount]
def get_amount_normal(self, billing_cycle):
    """Get the amount due on the given billing cycle

    For regular recurring costs this is simply `fixed_amount`. For one-off
    costs this is the portion of `fixed_amount` for the given billing_cycle.
    """
    if self.is_one_off():
        billing_cycle_number = self._get_billing_cycle_number(billing_cycle)
        if billing_cycle_number > self.total_billing_cycles:
            # A future billing cycle after this one has ended
            return Decimal('0') # depends on [control=['if'], data=[]]
        else:
            # This is a current cycle. Split the amount into
            # equal parts then return the part for this cycle
            splits = ratio_split(amount=self.fixed_amount, ratios=[Decimal('1')] * self.total_billing_cycles)
            return splits[billing_cycle_number - 1] # depends on [control=['if'], data=[]]
    else:
        # This is a non-one-off recurring cost, so the logic is simple
        return self.fixed_amount
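A minimal sketch of the ratio_split() behaviour the method relies on: split a Decimal amount into len(ratios) weighted parts that sum back exactly, pushing rounding drift into the last part. ratio_split_sketch is a hypothetical stand-in, not the library's implementation.

from decimal import Decimal

def ratio_split_sketch(amount, ratios):
    total = sum(ratios)
    parts = [(amount * r / total).quantize(Decimal('0.01')) for r in ratios]
    parts[-1] += amount - sum(parts)  # absorb rounding drift in the last part
    return parts

print(ratio_split_sketch(Decimal('100.00'), [Decimal('1')] * 3))
# -> [Decimal('33.33'), Decimal('33.33'), Decimal('33.34')]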
def faster_minimum_distance2(hull_a, center_a, hull_b, center_b): '''Do the minimum distance using the bimodal property of hull ordering ''' # # Find the farthest vertex in b from some point within A. Find the # vertices within A visible from this point in B. If the point in A # is within B or the farthest vertex in B is within A, then the objects # intersect. # if within_hull(center_a, hull_b): return 0 farthest_b = find_farthest(center_a, hull_b) if within_hull(hull_b[farthest_b,:], hull_a): return 0 visible_b = find_visible(hull_b, center_a, farthest_b) # # Do the same for B if within_hull(center_b, hull_a): return 0 farthest_a = find_farthest(center_b, hull_a) if within_hull(hull_a[farthest_a,:], hull_b): return 0 visible_a = find_visible(hull_a, center_b, farthest_a) # # Now go from the first in A and last in B measuring distances # which should decrease as we move toward the best # i = visible_a[0] i_next = (i+1) % hull_a.shape[0] j = visible_b[1] j_next = (j+hull_b.shape[0]-1) % hull_b.shape[0] a = hull_a[i,:] a_next = hull_a[i_next,:] b = hull_b[j,:] b_next = hull_b[j_next,:] d2_min = np.sum((a-b)**2) while i != visible_a[1] and j != visible_b[0]: if lines_intersect(a, a_next, b, b_next): return 0 if (np.dot(b-b_next,a-b_next) > 0 and np.dot(b_next-b,a-b) > 0): # do the edge if better than the vertex d2a = distance2_to_line(b, a, a_next) else: # try the next vertex of a d2a = np.sum((a_next-b)**2) if (np.dot(a-a_next,b-a_next) > 0 and np.dot(a_next-a,b-a) > 0): d2b = distance2_to_line(a, b, b_next) else: d2b = np.sum((b_next-a)**2) if d2a < d2_min and d2a < d2b: # The edge of A is closer than the best or the b-edge # Take it and advance A d2_min = d2a a = a_next i = i_next i_next = (i+1) % hull_a.shape[0] a_next = hull_a[i_next,:] elif d2b < d2_min: # B is better. Take it and advance d2_min = d2b b = b_next j = j_next j_next = (j+hull_b.shape[0]-1) % hull_b.shape[0] b_next = hull_b[j_next,:] else: return d2_min # # Some more to do... either one more i or one more j # while i != visible_a[1]: d2_min = min(d2_min, np.sum((a_next-b)**2)) a = a_next i = i_next i_next = (i+1) % hull_a.shape[0] a_next = hull_a[i_next,:] while j != visible_b[0]: d2_min = min(d2_min, np.sum((b_next-a)**2)) b = b_next j = j_next j_next = (j+ hull_b.shape[0]-1) % hull_b.shape[0] b_next = hull_b[j_next,:] return d2_min
def function[faster_minimum_distance2, parameter[hull_a, center_a, hull_b, center_b]]: constant[Do the minimum distance using the bimodal property of hull ordering ] if call[name[within_hull], parameter[name[center_a], name[hull_b]]] begin[:] return[constant[0]] variable[farthest_b] assign[=] call[name[find_farthest], parameter[name[center_a], name[hull_b]]] if call[name[within_hull], parameter[call[name[hull_b]][tuple[[<ast.Name object at 0x7da20c7c9510>, <ast.Slice object at 0x7da20c7c9ea0>]]], name[hull_a]]] begin[:] return[constant[0]] variable[visible_b] assign[=] call[name[find_visible], parameter[name[hull_b], name[center_a], name[farthest_b]]] if call[name[within_hull], parameter[name[center_b], name[hull_a]]] begin[:] return[constant[0]] variable[farthest_a] assign[=] call[name[find_farthest], parameter[name[center_b], name[hull_a]]] if call[name[within_hull], parameter[call[name[hull_a]][tuple[[<ast.Name object at 0x7da20c7c88b0>, <ast.Slice object at 0x7da20c7c84c0>]]], name[hull_b]]] begin[:] return[constant[0]] variable[visible_a] assign[=] call[name[find_visible], parameter[name[hull_a], name[center_b], name[farthest_a]]] variable[i] assign[=] call[name[visible_a]][constant[0]] variable[i_next] assign[=] binary_operation[binary_operation[name[i] + constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[hull_a].shape][constant[0]]] variable[j] assign[=] call[name[visible_b]][constant[1]] variable[j_next] assign[=] binary_operation[binary_operation[binary_operation[name[j] + call[name[hull_b].shape][constant[0]]] - constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[hull_b].shape][constant[0]]] variable[a] assign[=] call[name[hull_a]][tuple[[<ast.Name object at 0x7da1b23451b0>, <ast.Slice object at 0x7da1b2347610>]]] variable[a_next] assign[=] call[name[hull_a]][tuple[[<ast.Name object at 0x7da18bccb970>, <ast.Slice object at 0x7da18bcc9b40>]]] variable[b] assign[=] call[name[hull_b]][tuple[[<ast.Name object at 0x7da18bccada0>, <ast.Slice object at 0x7da18bccb3a0>]]] variable[b_next] assign[=] call[name[hull_b]][tuple[[<ast.Name object at 0x7da18bcca2c0>, <ast.Slice object at 0x7da18bcca680>]]] variable[d2_min] assign[=] call[name[np].sum, parameter[binary_operation[binary_operation[name[a] - name[b]] ** constant[2]]]] while <ast.BoolOp object at 0x7da18bccbdf0> begin[:] if call[name[lines_intersect], parameter[name[a], name[a_next], name[b], name[b_next]]] begin[:] return[constant[0]] if <ast.BoolOp object at 0x7da18bcc9d80> begin[:] variable[d2a] assign[=] call[name[distance2_to_line], parameter[name[b], name[a], name[a_next]]] if <ast.BoolOp object at 0x7da20c7c8c10> begin[:] variable[d2b] assign[=] call[name[distance2_to_line], parameter[name[a], name[b], name[b_next]]] if <ast.BoolOp object at 0x7da20c7cb040> begin[:] variable[d2_min] assign[=] name[d2a] variable[a] assign[=] name[a_next] variable[i] assign[=] name[i_next] variable[i_next] assign[=] binary_operation[binary_operation[name[i] + constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[hull_a].shape][constant[0]]] variable[a_next] assign[=] call[name[hull_a]][tuple[[<ast.Name object at 0x7da20c7c8ca0>, <ast.Slice object at 0x7da20c7ca200>]]] while compare[name[i] not_equal[!=] call[name[visible_a]][constant[1]]] begin[:] variable[d2_min] assign[=] call[name[min], parameter[name[d2_min], call[name[np].sum, parameter[binary_operation[binary_operation[name[a_next] - name[b]] ** constant[2]]]]]] variable[a] assign[=] name[a_next] variable[i] assign[=] name[i_next] variable[i_next] assign[=] binary_operation[binary_operation[name[i] + constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[hull_a].shape][constant[0]]] variable[a_next] assign[=] call[name[hull_a]][tuple[[<ast.Name object at 0x7da20c7cbdc0>, <ast.Slice object at 0x7da20c7ca920>]]] while compare[name[j] not_equal[!=] call[name[visible_b]][constant[0]]] begin[:] variable[d2_min] assign[=] call[name[min], parameter[name[d2_min], call[name[np].sum, parameter[binary_operation[binary_operation[name[b_next] - name[a]] ** constant[2]]]]]] variable[b] assign[=] name[b_next] variable[j] assign[=] name[j_next] variable[j_next] assign[=] binary_operation[binary_operation[binary_operation[name[j] + call[name[hull_b].shape][constant[0]]] - constant[1]] <ast.Mod object at 0x7da2590d6920> call[name[hull_b].shape][constant[0]]] variable[b_next] assign[=] call[name[hull_b]][tuple[[<ast.Name object at 0x7da18f09c880>, <ast.Slice object at 0x7da18f09e710>]]] return[name[d2_min]]
keyword[def] identifier[faster_minimum_distance2] ( identifier[hull_a] , identifier[center_a] , identifier[hull_b] , identifier[center_b] ): literal[string] keyword[if] identifier[within_hull] ( identifier[center_a] , identifier[hull_b] ): keyword[return] literal[int] identifier[farthest_b] = identifier[find_farthest] ( identifier[center_a] , identifier[hull_b] ) keyword[if] identifier[within_hull] ( identifier[hull_b] [ identifier[farthest_b] ,:], identifier[hull_a] ): keyword[return] literal[int] identifier[visible_b] = identifier[find_visible] ( identifier[hull_b] , identifier[center_a] , identifier[farthest_b] ) keyword[if] identifier[within_hull] ( identifier[center_b] , identifier[hull_a] ): keyword[return] literal[int] identifier[farthest_a] = identifier[find_farthest] ( identifier[center_b] , identifier[hull_a] ) keyword[if] identifier[within_hull] ( identifier[hull_a] [ identifier[farthest_a] ,:], identifier[hull_b] ): keyword[return] literal[int] identifier[visible_a] = identifier[find_visible] ( identifier[hull_a] , identifier[center_b] , identifier[farthest_a] ) identifier[i] = identifier[visible_a] [ literal[int] ] identifier[i_next] =( identifier[i] + literal[int] )% identifier[hull_a] . identifier[shape] [ literal[int] ] identifier[j] = identifier[visible_b] [ literal[int] ] identifier[j_next] =( identifier[j] + identifier[hull_b] . identifier[shape] [ literal[int] ]- literal[int] )% identifier[hull_b] . identifier[shape] [ literal[int] ] identifier[a] = identifier[hull_a] [ identifier[i] ,:] identifier[a_next] = identifier[hull_a] [ identifier[i_next] ,:] identifier[b] = identifier[hull_b] [ identifier[j] ,:] identifier[b_next] = identifier[hull_b] [ identifier[j_next] ,:] identifier[d2_min] = identifier[np] . identifier[sum] (( identifier[a] - identifier[b] )** literal[int] ) keyword[while] identifier[i] != identifier[visible_a] [ literal[int] ] keyword[and] identifier[j] != identifier[visible_b] [ literal[int] ]: keyword[if] identifier[lines_intersect] ( identifier[a] , identifier[a_next] , identifier[b] , identifier[b_next] ): keyword[return] literal[int] keyword[if] ( identifier[np] . identifier[dot] ( identifier[b] - identifier[b_next] , identifier[a] - identifier[b_next] )> literal[int] keyword[and] identifier[np] . identifier[dot] ( identifier[b_next] - identifier[b] , identifier[a] - identifier[b] )> literal[int] ): identifier[d2a] = identifier[distance2_to_line] ( identifier[b] , identifier[a] , identifier[a_next] ) keyword[else] : identifier[d2a] = identifier[np] . identifier[sum] (( identifier[a_next] - identifier[b] )** literal[int] ) keyword[if] ( identifier[np] . identifier[dot] ( identifier[a] - identifier[a_next] , identifier[b] - identifier[a_next] )> literal[int] keyword[and] identifier[np] . identifier[dot] ( identifier[a_next] - identifier[a] , identifier[b] - identifier[a] )> literal[int] ): identifier[d2b] = identifier[distance2_to_line] ( identifier[a] , identifier[b] , identifier[b_next] ) keyword[else] : identifier[d2b] = identifier[np] . identifier[sum] (( identifier[b_next] - identifier[a] )** literal[int] ) keyword[if] identifier[d2a] < identifier[d2_min] keyword[and] identifier[d2a] < identifier[d2b] : identifier[d2_min] = identifier[d2a] identifier[a] = identifier[a_next] identifier[i] = identifier[i_next] identifier[i_next] =( identifier[i] + literal[int] )% identifier[hull_a] . 
identifier[shape] [ literal[int] ] identifier[a_next] = identifier[hull_a] [ identifier[i_next] ,:] keyword[elif] identifier[d2b] < identifier[d2_min] : identifier[d2_min] = identifier[d2b] identifier[b] = identifier[b_next] identifier[j] = identifier[j_next] identifier[j_next] =( identifier[j] + identifier[hull_b] . identifier[shape] [ literal[int] ]- literal[int] )% identifier[hull_b] . identifier[shape] [ literal[int] ] identifier[b_next] = identifier[hull_b] [ identifier[j_next] ,:] keyword[else] : keyword[return] identifier[d2_min] keyword[while] identifier[i] != identifier[visible_a] [ literal[int] ]: identifier[d2_min] = identifier[min] ( identifier[d2_min] , identifier[np] . identifier[sum] (( identifier[a_next] - identifier[b] )** literal[int] )) identifier[a] = identifier[a_next] identifier[i] = identifier[i_next] identifier[i_next] =( identifier[i] + literal[int] )% identifier[hull_a] . identifier[shape] [ literal[int] ] identifier[a_next] = identifier[hull_a] [ identifier[i_next] ,:] keyword[while] identifier[j] != identifier[visible_b] [ literal[int] ]: identifier[d2_min] = identifier[min] ( identifier[d2_min] , identifier[np] . identifier[sum] (( identifier[b_next] - identifier[a] )** literal[int] )) identifier[b] = identifier[b_next] identifier[j] = identifier[j_next] identifier[j_next] =( identifier[j] + identifier[hull_b] . identifier[shape] [ literal[int] ]- literal[int] )% identifier[hull_b] . identifier[shape] [ literal[int] ] identifier[b_next] = identifier[hull_b] [ identifier[j_next] ,:] keyword[return] identifier[d2_min]
def faster_minimum_distance2(hull_a, center_a, hull_b, center_b): """Do the minimum distance using the bimodal property of hull ordering """ # # Find the farthest vertex in b from some point within A. Find the # vertices within A visible from this point in B. If the point in A # is within B or the farthest vertex in B is within A, then the objects # intersect. # if within_hull(center_a, hull_b): return 0 # depends on [control=['if'], data=[]] farthest_b = find_farthest(center_a, hull_b) if within_hull(hull_b[farthest_b, :], hull_a): return 0 # depends on [control=['if'], data=[]] visible_b = find_visible(hull_b, center_a, farthest_b) # # Do the same for B if within_hull(center_b, hull_a): return 0 # depends on [control=['if'], data=[]] farthest_a = find_farthest(center_b, hull_a) if within_hull(hull_a[farthest_a, :], hull_b): return 0 # depends on [control=['if'], data=[]] visible_a = find_visible(hull_a, center_b, farthest_a) # # Now go from the first in A and last in B measuring distances # which should decrease as we move toward the best # i = visible_a[0] i_next = (i + 1) % hull_a.shape[0] j = visible_b[1] j_next = (j + hull_b.shape[0] - 1) % hull_b.shape[0] a = hull_a[i, :] a_next = hull_a[i_next, :] b = hull_b[j, :] b_next = hull_b[j_next, :] d2_min = np.sum((a - b) ** 2) while i != visible_a[1] and j != visible_b[0]: if lines_intersect(a, a_next, b, b_next): return 0 # depends on [control=['if'], data=[]] if np.dot(b - b_next, a - b_next) > 0 and np.dot(b_next - b, a - b) > 0: # do the edge if better than the vertex d2a = distance2_to_line(b, a, a_next) # depends on [control=['if'], data=[]] else: # try the next vertex of a d2a = np.sum((a_next - b) ** 2) if np.dot(a - a_next, b - a_next) > 0 and np.dot(a_next - a, b - a) > 0: d2b = distance2_to_line(a, b, b_next) # depends on [control=['if'], data=[]] else: d2b = np.sum((b_next - a) ** 2) if d2a < d2_min and d2a < d2b: # The edge of A is closer than the best or the b-edge # Take it and advance A d2_min = d2a a = a_next i = i_next i_next = (i + 1) % hull_a.shape[0] a_next = hull_a[i_next, :] # depends on [control=['if'], data=[]] elif d2b < d2_min: # B is better. Take it and advance d2_min = d2b b = b_next j = j_next j_next = (j + hull_b.shape[0] - 1) % hull_b.shape[0] b_next = hull_b[j_next, :] # depends on [control=['if'], data=['d2b', 'd2_min']] else: return d2_min # depends on [control=['while'], data=[]] # # Some more to do... either one more i or one more j # while i != visible_a[1]: d2_min = min(d2_min, np.sum((a_next - b) ** 2)) a = a_next i = i_next i_next = (i + 1) % hull_a.shape[0] a_next = hull_a[i_next, :] # depends on [control=['while'], data=['i']] while j != visible_b[0]: d2_min = min(d2_min, np.sum((b_next - a) ** 2)) b = b_next j = j_next j_next = (j + hull_b.shape[0] - 1) % hull_b.shape[0] b_next = hull_b[j_next, :] # depends on [control=['while'], data=['j']] return d2_min
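A minimal usage sketch for the function above. It returns the squared minimum distance (0 if the hulls intersect), and it assumes the helper functions it calls (within_hull, find_farthest, find_visible, lines_intersect, distance2_to_line) are defined elsewhere in the same module; the hulls below are hypothetical vertex arrays in a consistent winding order.

import numpy as np

# Two hypothetical unit squares, 2 units apart along x, with centroids
# used as interior reference points for each hull.
hull_a = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
hull_b = np.array([[3.0, 0.0], [4.0, 0.0], [4.0, 1.0], [3.0, 1.0]])
center_a = hull_a.mean(axis=0)
center_b = hull_b.mean(axis=0)

d2 = faster_minimum_distance2(hull_a, center_a, hull_b, center_b)
print(np.sqrt(d2))  # expected 2.0 for these squares, assuming the helpers behave as named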
def run(self): """ Run sdist, then 'rpmbuild' the tar.gz """ os.system("cp python-bugzilla.spec /tmp") try: os.system("rm -rf python-bugzilla-%s" % get_version()) self.run_command('sdist') os.system('rpmbuild -ta --clean dist/python-bugzilla-%s.tar.gz' % get_version()) finally: os.system("mv /tmp/python-bugzilla.spec .")
def function[run, parameter[self]]: constant[ Run sdist, then 'rpmbuild' the tar.gz ] call[name[os].system, parameter[constant[cp python-bugzilla.spec /tmp]]] <ast.Try object at 0x7da1b0dbe0e0>
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[os] . identifier[system] ( literal[string] ) keyword[try] : identifier[os] . identifier[system] ( literal[string] % identifier[get_version] ()) identifier[self] . identifier[run_command] ( literal[string] ) identifier[os] . identifier[system] ( literal[string] % identifier[get_version] ()) keyword[finally] : identifier[os] . identifier[system] ( literal[string] )
def run(self): """ Run sdist, then 'rpmbuild' the tar.gz """ os.system('cp python-bugzilla.spec /tmp') try: os.system('rm -rf python-bugzilla-%s' % get_version()) self.run_command('sdist') os.system('rpmbuild -ta --clean dist/python-bugzilla-%s.tar.gz' % get_version()) # depends on [control=['try'], data=[]] finally: os.system('mv /tmp/python-bugzilla.spec .')
def cancel(self, contacts): """Wrapper to call raise_cancel_downtime_log_entry for ref (host/service) set can_be_deleted to True set is_in_effect to False :return: None """ self.is_in_effect = False contact = contacts[self.ref] contact.raise_cancel_downtime_log_entry() self.can_be_deleted = True
def function[cancel, parameter[self, contacts]]: constant[Wrapper to call raise_cancel_downtime_log_entry for ref (host/service) set can_be_deleted to True set is_in_effect to False :return: None ] name[self].is_in_effect assign[=] constant[False] variable[contact] assign[=] call[name[contacts]][name[self].ref] call[name[contact].raise_cancel_downtime_log_entry, parameter[]] name[self].can_be_deleted assign[=] constant[True]
keyword[def] identifier[cancel] ( identifier[self] , identifier[contacts] ): literal[string] identifier[self] . identifier[is_in_effect] = keyword[False] identifier[contact] = identifier[contacts] [ identifier[self] . identifier[ref] ] identifier[contact] . identifier[raise_cancel_downtime_log_entry] () identifier[self] . identifier[can_be_deleted] = keyword[True]
def cancel(self, contacts): """Wrapper to call raise_cancel_downtime_log_entry for ref (host/service) set can_be_deleted to True set is_in_effect to False :return: None """ self.is_in_effect = False contact = contacts[self.ref] contact.raise_cancel_downtime_log_entry() self.can_be_deleted = True
def create_data_file_by_format(directory_path=None):
    """
    Browse subdirectories to extract stata and sas files
    """
    stata_files = []
    sas_files = []
    for root, subdirs, files in os.walk(directory_path):
        for file_name in files:
            file_path = os.path.join(root, file_name)
            if os.path.basename(file_name).endswith(".dta"):
                log.info("Found stata file {}".format(file_path))
                stata_files.append(file_path)
            if os.path.basename(file_name).endswith(".sas7bdat"):
                log.info("Found sas file {}".format(file_path))
                sas_files.append(file_path)

    return {'stata': stata_files, 'sas': sas_files}
def function[create_data_file_by_format, parameter[directory_path]]: constant[ Browse subdirectories to extract stata and sas files ] variable[stata_files] assign[=] list[[]] variable[sas_files] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18dc994e0>, <ast.Name object at 0x7da18dc9aad0>, <ast.Name object at 0x7da18dc9bac0>]]] in starred[call[name[os].walk, parameter[name[directory_path]]]] begin[:] for taget[name[file_name]] in starred[name[files]] begin[:] variable[file_path] assign[=] call[name[os].path.join, parameter[name[root], name[file_name]]] if call[call[name[os].path.basename, parameter[name[file_name]]].endswith, parameter[constant[.dta]]] begin[:] call[name[log].info, parameter[call[constant[Found stata file {}].format, parameter[name[file_path]]]]] call[name[stata_files].append, parameter[name[file_path]]] if call[call[name[os].path.basename, parameter[name[file_name]]].endswith, parameter[constant[.sas7bdat]]] begin[:] call[name[log].info, parameter[call[constant[Found sas file {}].format, parameter[name[file_path]]]]] call[name[sas_files].append, parameter[name[file_path]]] return[dictionary[[<ast.Constant object at 0x7da18dc9aaa0>, <ast.Constant object at 0x7da18dc99e10>], [<ast.Name object at 0x7da18dc99fc0>, <ast.Name object at 0x7da18dc9b7f0>]]]
keyword[def] identifier[create_data_file_by_format] ( identifier[directory_path] = keyword[None] ): literal[string] identifier[stata_files] =[] identifier[sas_files] =[] keyword[for] identifier[root] , identifier[subdirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[directory_path] ): keyword[for] identifier[file_name] keyword[in] identifier[files] : identifier[file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[file_name] ) keyword[if] identifier[os] . identifier[path] . identifier[basename] ( identifier[file_name] ). identifier[endswith] ( literal[string] ): identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[file_path] )) identifier[stata_files] . identifier[append] ( identifier[file_path] ) keyword[if] identifier[os] . identifier[path] . identifier[basename] ( identifier[file_name] ). identifier[endswith] ( literal[string] ): identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[file_path] )) identifier[sas_files] . identifier[append] ( identifier[file_path] ) keyword[return] { literal[string] : identifier[stata_files] , literal[string] : identifier[sas_files] }
def create_data_file_by_format(directory_path=None): """ Browse subdirectories to extract stata and sas files """ stata_files = [] sas_files = [] for (root, subdirs, files) in os.walk(directory_path): for file_name in files: file_path = os.path.join(root, file_name) if os.path.basename(file_name).endswith('.dta'): log.info('Found stata file {}'.format(file_path)) stata_files.append(file_path) # depends on [control=['if'], data=[]] if os.path.basename(file_name).endswith('.sas7bdat'): log.info('Found sas file {}'.format(file_path)) sas_files.append(file_path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file_name']] # depends on [control=['for'], data=[]] return {'stata': stata_files, 'sas': sas_files}
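A short usage sketch, assuming the function above is importable with its module-level `os` and `log` dependencies already in place; the directory path is hypothetical.

files_by_format = create_data_file_by_format(directory_path='/data/surveys')  # hypothetical path
print('{} stata, {} sas'.format(len(files_by_format['stata']), len(files_by_format['sas'])))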
def _read_depth_images(self, num_images): """ Reads depth images from the device """ depth_images = self._ros_read_images(self._depth_image_buffer, num_images, self.staleness_limit) for i in range(0, num_images): depth_images[i] = depth_images[i] * MM_TO_METERS # convert to meters if self._flip_images: depth_images[i] = np.flipud(depth_images[i]) depth_images[i] = np.fliplr(depth_images[i]) depth_images[i] = DepthImage(depth_images[i], frame=self._frame) return depth_images
def function[_read_depth_images, parameter[self, num_images]]: constant[ Reads depth images from the device ] variable[depth_images] assign[=] call[name[self]._ros_read_images, parameter[name[self]._depth_image_buffer, name[num_images], name[self].staleness_limit]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[num_images]]]] begin[:] call[name[depth_images]][name[i]] assign[=] binary_operation[call[name[depth_images]][name[i]] * name[MM_TO_METERS]] if name[self]._flip_images begin[:] call[name[depth_images]][name[i]] assign[=] call[name[np].flipud, parameter[call[name[depth_images]][name[i]]]] call[name[depth_images]][name[i]] assign[=] call[name[np].fliplr, parameter[call[name[depth_images]][name[i]]]] call[name[depth_images]][name[i]] assign[=] call[name[DepthImage], parameter[call[name[depth_images]][name[i]]]] return[name[depth_images]]
keyword[def] identifier[_read_depth_images] ( identifier[self] , identifier[num_images] ): literal[string] identifier[depth_images] = identifier[self] . identifier[_ros_read_images] ( identifier[self] . identifier[_depth_image_buffer] , identifier[num_images] , identifier[self] . identifier[staleness_limit] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[num_images] ): identifier[depth_images] [ identifier[i] ]= identifier[depth_images] [ identifier[i] ]* identifier[MM_TO_METERS] keyword[if] identifier[self] . identifier[_flip_images] : identifier[depth_images] [ identifier[i] ]= identifier[np] . identifier[flipud] ( identifier[depth_images] [ identifier[i] ]) identifier[depth_images] [ identifier[i] ]= identifier[np] . identifier[fliplr] ( identifier[depth_images] [ identifier[i] ]) identifier[depth_images] [ identifier[i] ]= identifier[DepthImage] ( identifier[depth_images] [ identifier[i] ], identifier[frame] = identifier[self] . identifier[_frame] ) keyword[return] identifier[depth_images]
def _read_depth_images(self, num_images): """ Reads depth images from the device """ depth_images = self._ros_read_images(self._depth_image_buffer, num_images, self.staleness_limit) for i in range(0, num_images): depth_images[i] = depth_images[i] * MM_TO_METERS # convert to meters if self._flip_images: depth_images[i] = np.flipud(depth_images[i]) depth_images[i] = np.fliplr(depth_images[i]) # depends on [control=['if'], data=[]] depth_images[i] = DepthImage(depth_images[i], frame=self._frame) # depends on [control=['for'], data=['i']] return depth_images
def subtract_weeks(self, weeks: int) -> datetime: """ Subtracts number of weeks from the current value """ self.value = self.value - timedelta(weeks=weeks) return self.value
def function[subtract_weeks, parameter[self, weeks]]: constant[ Subtracts number of weeks from the current value ] name[self].value assign[=] binary_operation[name[self].value - call[name[timedelta], parameter[]]] return[name[self].value]
keyword[def] identifier[subtract_weeks] ( identifier[self] , identifier[weeks] : identifier[int] )-> identifier[datetime] : literal[string] identifier[self] . identifier[value] = identifier[self] . identifier[value] - identifier[timedelta] ( identifier[weeks] = identifier[weeks] ) keyword[return] identifier[self] . identifier[value]
def subtract_weeks(self, weeks: int) -> datetime: """ Subtracts number of weeks from the current value """ self.value = self.value - timedelta(weeks=weeks) return self.value
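A self-contained sketch showing the method's contract on a hypothetical holder class whose `value` attribute is a datetime; the class is an assumption for illustration, not part of the source.

from datetime import datetime, timedelta

class DateHolder:
    """Hypothetical minimal host for subtract_weeks."""
    def __init__(self, value: datetime):
        self.value = value

    def subtract_weeks(self, weeks: int) -> datetime:
        """ Subtracts number of weeks from the current value """
        self.value = self.value - timedelta(weeks=weeks)
        return self.value

print(DateHolder(datetime(2020, 1, 29)).subtract_weeks(4))  # 2020-01-01 00:00:00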
def _rebuild_all_command_chains(self):
    """
    Rebuilds execution chain for all registered commands.
    This method is typically called when interceptors are changed.
    Because of that it is more efficient to register interceptors
    before registering commands (typically it will be done
    in abstract classes). However, that performance penalty
    will be incurred only once, during creation time.
    """
    self._commands_by_name = {}

    for command in self._commands:
        self._build_command_chain(command)
def function[_rebuild_all_command_chains, parameter[self]]: constant[ Rebuilds execution chain for all registered commands. This method is typically called when interceptors are changed. Because of that it is more efficient to register interceptors before registering commands (typically it will be done in abstract classes). However, that performance penalty will be incurred only once, during creation time. ] name[self]._commands_by_name assign[=] dictionary[[], []] for taget[name[command]] in starred[name[self]._commands] begin[:] call[name[self]._build_command_chain, parameter[name[command]]]
keyword[def] identifier[_rebuild_all_command_chains] ( identifier[self] ): literal[string] identifier[self] . identifier[_commands_by_name] ={} keyword[for] identifier[command] keyword[in] identifier[self] . identifier[_commands] : identifier[self] . identifier[_build_command_chain] ( identifier[command] )
def _rebuild_all_command_chains(self):
    """
    Rebuilds execution chain for all registered commands.
    This method is typically called when interceptors are changed.
    Because of that it is more efficient to register interceptors
    before registering commands (typically it will be done
    in abstract classes). However, that performance penalty
    will be incurred only once, during creation time.
    """
    self._commands_by_name = {}
    for command in self._commands:
        self._build_command_chain(command) # depends on [control=['for'], data=['command']]
def imfrombytes(content, flag='color'):
    """Read an image from bytes.

    Args:
        content (bytes): Image bytes obtained from files or other streams.
        flag (str): Same as :func:`imread`.

    Returns:
        ndarray: Loaded image array.
    """
    img_np = np.frombuffer(content, np.uint8)
    flag = imread_flags[flag] if is_str(flag) else flag
    img = cv2.imdecode(img_np, flag)
    return img
def function[imfrombytes, parameter[content, flag]]: constant[Read an image from bytes. Args: content (bytes): Image bytes obtained from files or other streams. flag (str): Same as :func:`imread`. Returns: ndarray: Loaded image array. ] variable[img_np] assign[=] call[name[np].frombuffer, parameter[name[content], name[np].uint8]] variable[flag] assign[=] <ast.IfExp object at 0x7da1b0531a80> variable[img] assign[=] call[name[cv2].imdecode, parameter[name[img_np], name[flag]]] return[name[img]]
keyword[def] identifier[imfrombytes] ( identifier[content] , identifier[flag] = literal[string] ): literal[string] identifier[img_np] = identifier[np] . identifier[frombuffer] ( identifier[content] , identifier[np] . identifier[uint8] ) identifier[flag] = identifier[imread_flags] [ identifier[flag] ] keyword[if] identifier[is_str] ( identifier[flag] ) keyword[else] identifier[flag] identifier[img] = identifier[cv2] . identifier[imdecode] ( identifier[img_np] , identifier[flag] ) keyword[return] identifier[img]
def imfrombytes(content, flag='color'):
    """Read an image from bytes.

    Args:
        content (bytes): Image bytes obtained from files or other streams.
        flag (str): Same as :func:`imread`.

    Returns:
        ndarray: Loaded image array.
    """
    img_np = np.frombuffer(content, np.uint8)
    flag = imread_flags[flag] if is_str(flag) else flag
    img = cv2.imdecode(img_np, flag)
    return img
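A usage sketch for the decoder above, assuming the module-level `imread_flags` mapping and `is_str` helper it references exist as in the rest of the library; the file name is hypothetical.

with open('photo.jpg', 'rb') as f:  # hypothetical image on disk
    content = f.read()

img = imfrombytes(content, flag='color')
print(img.shape)  # e.g. (height, width, 3) for a successful color decode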
def resizeEvent(self, event): """ Overloads the resize event to control if we are still editing. If we are resizing, then we are no longer editing. """ curr_item = self.currentItem() self.closePersistentEditor(curr_item) super(XMultiTagEdit, self).resizeEvent(event)
def function[resizeEvent, parameter[self, event]]: constant[ Overloads the resize event to control if we are still editing. If we are resizing, then we are no longer editing. ] variable[curr_item] assign[=] call[name[self].currentItem, parameter[]] call[name[self].closePersistentEditor, parameter[name[curr_item]]] call[call[name[super], parameter[name[XMultiTagEdit], name[self]]].resizeEvent, parameter[name[event]]]
keyword[def] identifier[resizeEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[curr_item] = identifier[self] . identifier[currentItem] () identifier[self] . identifier[closePersistentEditor] ( identifier[curr_item] ) identifier[super] ( identifier[XMultiTagEdit] , identifier[self] ). identifier[resizeEvent] ( identifier[event] )
def resizeEvent(self, event): """ Overloads the resize event to control if we are still editing. If we are resizing, then we are no longer editing. """ curr_item = self.currentItem() self.closePersistentEditor(curr_item) super(XMultiTagEdit, self).resizeEvent(event)
def get_device_name_list(): """Returns a list of device names installed.""" dev_names = ctypes.create_string_buffer(1024) pydaq.DAQmxGetSysDevNames(dev_names, len(dev_names)) return dev_names.value.split(', ')
def function[get_device_name_list, parameter[]]: constant[Returns a list of device names installed.] variable[dev_names] assign[=] call[name[ctypes].create_string_buffer, parameter[constant[1024]]] call[name[pydaq].DAQmxGetSysDevNames, parameter[name[dev_names], call[name[len], parameter[name[dev_names]]]]] return[call[name[dev_names].value.split, parameter[constant[, ]]]]
keyword[def] identifier[get_device_name_list] (): literal[string] identifier[dev_names] = identifier[ctypes] . identifier[create_string_buffer] ( literal[int] ) identifier[pydaq] . identifier[DAQmxGetSysDevNames] ( identifier[dev_names] , identifier[len] ( identifier[dev_names] )) keyword[return] identifier[dev_names] . identifier[value] . identifier[split] ( literal[string] )
def get_device_name_list(): """Returns a list of device names installed.""" dev_names = ctypes.create_string_buffer(1024) pydaq.DAQmxGetSysDevNames(dev_names, len(dev_names)) return dev_names.value.split(', ')
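A trivial usage sketch; this assumes NI-DAQmx is installed and that `pydaq` is the ctypes binding already imported at module level, as the function expects.

for name in get_device_name_list():
    print('Found DAQ device:', name)  # e.g. 'Dev1'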
def show(self): """ Pretty-print instance data """ if not self.data: return if self.data.get('continue'): return ptitle = self.params.get('title') dtitle = self.data.get('title') pageid = self.params.get('pageid') seed = dtitle or ptitle or pageid if utils.is_text(seed): seed = seed.replace('_', ' ') prettyprint(self._build_showstr(seed))
def function[show, parameter[self]]: constant[ Pretty-print instance data ] if <ast.UnaryOp object at 0x7da1b1206170> begin[:] return[None] if call[name[self].data.get, parameter[constant[continue]]] begin[:] return[None] variable[ptitle] assign[=] call[name[self].params.get, parameter[constant[title]]] variable[dtitle] assign[=] call[name[self].data.get, parameter[constant[title]]] variable[pageid] assign[=] call[name[self].params.get, parameter[constant[pageid]]] variable[seed] assign[=] <ast.BoolOp object at 0x7da1b1252080> if call[name[utils].is_text, parameter[name[seed]]] begin[:] variable[seed] assign[=] call[name[seed].replace, parameter[constant[_], constant[ ]]] call[name[prettyprint], parameter[call[name[self]._build_showstr, parameter[name[seed]]]]]
keyword[def] identifier[show] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[data] : keyword[return] keyword[if] identifier[self] . identifier[data] . identifier[get] ( literal[string] ): keyword[return] identifier[ptitle] = identifier[self] . identifier[params] . identifier[get] ( literal[string] ) identifier[dtitle] = identifier[self] . identifier[data] . identifier[get] ( literal[string] ) identifier[pageid] = identifier[self] . identifier[params] . identifier[get] ( literal[string] ) identifier[seed] = identifier[dtitle] keyword[or] identifier[ptitle] keyword[or] identifier[pageid] keyword[if] identifier[utils] . identifier[is_text] ( identifier[seed] ): identifier[seed] = identifier[seed] . identifier[replace] ( literal[string] , literal[string] ) identifier[prettyprint] ( identifier[self] . identifier[_build_showstr] ( identifier[seed] ))
def show(self): """ Pretty-print instance data """ if not self.data: return # depends on [control=['if'], data=[]] if self.data.get('continue'): return # depends on [control=['if'], data=[]] ptitle = self.params.get('title') dtitle = self.data.get('title') pageid = self.params.get('pageid') seed = dtitle or ptitle or pageid if utils.is_text(seed): seed = seed.replace('_', ' ') # depends on [control=['if'], data=[]] prettyprint(self._build_showstr(seed))
def extra_prepare(self, configuration, args_dict): """Called before the configuration.converters are activated""" aws_syncr = args_dict.pop("aws_syncr") self.configuration.update( { "$@": aws_syncr.get("extra", "") , "aws_syncr": aws_syncr , "templates": {} , "config_folder": self.configuration_folder } , source = "<args_dict>" )
def function[extra_prepare, parameter[self, configuration, args_dict]]: constant[Called before the configuration.converters are activated] variable[aws_syncr] assign[=] call[name[args_dict].pop, parameter[constant[aws_syncr]]] call[name[self].configuration.update, parameter[dictionary[[<ast.Constant object at 0x7da18fe901f0>, <ast.Constant object at 0x7da18fe92ad0>, <ast.Constant object at 0x7da18fe93940>, <ast.Constant object at 0x7da18fe92590>], [<ast.Call object at 0x7da18fe90310>, <ast.Name object at 0x7da18fe91ba0>, <ast.Dict object at 0x7da18fe912a0>, <ast.Attribute object at 0x7da18fe90b20>]]]]
keyword[def] identifier[extra_prepare] ( identifier[self] , identifier[configuration] , identifier[args_dict] ): literal[string] identifier[aws_syncr] = identifier[args_dict] . identifier[pop] ( literal[string] ) identifier[self] . identifier[configuration] . identifier[update] ( { literal[string] : identifier[aws_syncr] . identifier[get] ( literal[string] , literal[string] ) , literal[string] : identifier[aws_syncr] , literal[string] :{} , literal[string] : identifier[self] . identifier[configuration_folder] } , identifier[source] = literal[string] )
def extra_prepare(self, configuration, args_dict): """Called before the configuration.converters are activated""" aws_syncr = args_dict.pop('aws_syncr') self.configuration.update({'$@': aws_syncr.get('extra', ''), 'aws_syncr': aws_syncr, 'templates': {}, 'config_folder': self.configuration_folder}, source='<args_dict>')
def preferences_view(request, semester, targetUsername, profile=None): """ Show the user their preferences for the given semester. """ # TODO: Change template to show descriptions in tooltip / ajax show box? wprofile = get_object_or_404( WorkshiftProfile, user__username=targetUsername, ) full_management = utils.can_manage(request.user, semester=semester) if wprofile.user != request.user and \ not full_management: messages.add_message( request, messages.ERROR, MESSAGES["ADMINS_ONLY"], ) return HttpResponseRedirect(semester.get_view_url()) rating_forms = [] for wtype in WorkshiftType.objects.filter(rateable=True): try: rating = wprofile.ratings.get(workshift_type=wtype) except WorkshiftRating.DoesNotExist: rating = WorkshiftRating(workshift_type=wtype) form = WorkshiftRatingForm( data=request.POST or None, prefix="rating-{}".format(wtype.pk), instance=rating, profile=wprofile, ) rating_forms.append(form) time_formset = TimeBlockFormSet( data=request.POST or None, prefix="time", profile=wprofile, ) note_form = ProfileNoteForm( data=request.POST or None, instance=wprofile, prefix="note", ) if all(i.is_valid() for i in rating_forms) and time_formset.is_valid() and \ note_form.is_valid(): for form in rating_forms: form.save() time_formset.save() note_form.save() if wprofile.preference_save_time is None: wprofile.preference_save_time = now() wprofile.save() messages.add_message( request, messages.INFO, "Preferences saved.", ) return HttpResponseRedirect(wurl( "workshift:preferences", sem_url=semester.sem_url, targetUsername=request.user.username, )) if wprofile == profile: page_name = "My Workshift Preferences" else: page_name = "{}'s Workshift Preferences".format( wprofile.user.get_full_name(), ) return render_to_response("preferences.html", { "page_name": page_name, "profile": wprofile, "rating_forms": rating_forms, "time_formset": time_formset, "note_form": note_form, }, context_instance=RequestContext(request))
def function[preferences_view, parameter[request, semester, targetUsername, profile]]: constant[ Show the user their preferences for the given semester. ] variable[wprofile] assign[=] call[name[get_object_or_404], parameter[name[WorkshiftProfile]]] variable[full_management] assign[=] call[name[utils].can_manage, parameter[name[request].user]] if <ast.BoolOp object at 0x7da1b149c700> begin[:] call[name[messages].add_message, parameter[name[request], name[messages].ERROR, call[name[MESSAGES]][constant[ADMINS_ONLY]]]] return[call[name[HttpResponseRedirect], parameter[call[name[semester].get_view_url, parameter[]]]]] variable[rating_forms] assign[=] list[[]] for taget[name[wtype]] in starred[call[name[WorkshiftType].objects.filter, parameter[]]] begin[:] <ast.Try object at 0x7da1b149dd80> variable[form] assign[=] call[name[WorkshiftRatingForm], parameter[]] call[name[rating_forms].append, parameter[name[form]]] variable[time_formset] assign[=] call[name[TimeBlockFormSet], parameter[]] variable[note_form] assign[=] call[name[ProfileNoteForm], parameter[]] if <ast.BoolOp object at 0x7da1b1579f90> begin[:] for taget[name[form]] in starred[name[rating_forms]] begin[:] call[name[form].save, parameter[]] call[name[time_formset].save, parameter[]] call[name[note_form].save, parameter[]] if compare[name[wprofile].preference_save_time is constant[None]] begin[:] name[wprofile].preference_save_time assign[=] call[name[now], parameter[]] call[name[wprofile].save, parameter[]] call[name[messages].add_message, parameter[name[request], name[messages].INFO, constant[Preferences saved.]]] return[call[name[HttpResponseRedirect], parameter[call[name[wurl], parameter[constant[workshift:preferences]]]]]] if compare[name[wprofile] equal[==] name[profile]] begin[:] variable[page_name] assign[=] constant[My Workshift Preferences] return[call[name[render_to_response], parameter[constant[preferences.html], dictionary[[<ast.Constant object at 0x7da18bc71900>, <ast.Constant object at 0x7da18bc73010>, <ast.Constant object at 0x7da18bc713f0>, <ast.Constant object at 0x7da18bc72ce0>, <ast.Constant object at 0x7da18bc71240>], [<ast.Name object at 0x7da18bc72ef0>, <ast.Name object at 0x7da18bc732e0>, <ast.Name object at 0x7da18bc733a0>, <ast.Name object at 0x7da18bc732b0>, <ast.Name object at 0x7da18bc725c0>]]]]]
keyword[def] identifier[preferences_view] ( identifier[request] , identifier[semester] , identifier[targetUsername] , identifier[profile] = keyword[None] ): literal[string] identifier[wprofile] = identifier[get_object_or_404] ( identifier[WorkshiftProfile] , identifier[user__username] = identifier[targetUsername] , ) identifier[full_management] = identifier[utils] . identifier[can_manage] ( identifier[request] . identifier[user] , identifier[semester] = identifier[semester] ) keyword[if] identifier[wprofile] . identifier[user] != identifier[request] . identifier[user] keyword[and] keyword[not] identifier[full_management] : identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[ERROR] , identifier[MESSAGES] [ literal[string] ], ) keyword[return] identifier[HttpResponseRedirect] ( identifier[semester] . identifier[get_view_url] ()) identifier[rating_forms] =[] keyword[for] identifier[wtype] keyword[in] identifier[WorkshiftType] . identifier[objects] . identifier[filter] ( identifier[rateable] = keyword[True] ): keyword[try] : identifier[rating] = identifier[wprofile] . identifier[ratings] . identifier[get] ( identifier[workshift_type] = identifier[wtype] ) keyword[except] identifier[WorkshiftRating] . identifier[DoesNotExist] : identifier[rating] = identifier[WorkshiftRating] ( identifier[workshift_type] = identifier[wtype] ) identifier[form] = identifier[WorkshiftRatingForm] ( identifier[data] = identifier[request] . identifier[POST] keyword[or] keyword[None] , identifier[prefix] = literal[string] . identifier[format] ( identifier[wtype] . identifier[pk] ), identifier[instance] = identifier[rating] , identifier[profile] = identifier[wprofile] , ) identifier[rating_forms] . identifier[append] ( identifier[form] ) identifier[time_formset] = identifier[TimeBlockFormSet] ( identifier[data] = identifier[request] . identifier[POST] keyword[or] keyword[None] , identifier[prefix] = literal[string] , identifier[profile] = identifier[wprofile] , ) identifier[note_form] = identifier[ProfileNoteForm] ( identifier[data] = identifier[request] . identifier[POST] keyword[or] keyword[None] , identifier[instance] = identifier[wprofile] , identifier[prefix] = literal[string] , ) keyword[if] identifier[all] ( identifier[i] . identifier[is_valid] () keyword[for] identifier[i] keyword[in] identifier[rating_forms] ) keyword[and] identifier[time_formset] . identifier[is_valid] () keyword[and] identifier[note_form] . identifier[is_valid] (): keyword[for] identifier[form] keyword[in] identifier[rating_forms] : identifier[form] . identifier[save] () identifier[time_formset] . identifier[save] () identifier[note_form] . identifier[save] () keyword[if] identifier[wprofile] . identifier[preference_save_time] keyword[is] keyword[None] : identifier[wprofile] . identifier[preference_save_time] = identifier[now] () identifier[wprofile] . identifier[save] () identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[INFO] , literal[string] , ) keyword[return] identifier[HttpResponseRedirect] ( identifier[wurl] ( literal[string] , identifier[sem_url] = identifier[semester] . identifier[sem_url] , identifier[targetUsername] = identifier[request] . identifier[user] . identifier[username] , )) keyword[if] identifier[wprofile] == identifier[profile] : identifier[page_name] = literal[string] keyword[else] : identifier[page_name] = literal[string] . identifier[format] ( identifier[wprofile] . identifier[user] . 
identifier[get_full_name] (), ) keyword[return] identifier[render_to_response] ( literal[string] ,{ literal[string] : identifier[page_name] , literal[string] : identifier[wprofile] , literal[string] : identifier[rating_forms] , literal[string] : identifier[time_formset] , literal[string] : identifier[note_form] , }, identifier[context_instance] = identifier[RequestContext] ( identifier[request] ))
def preferences_view(request, semester, targetUsername, profile=None): """ Show the user their preferences for the given semester. """ # TODO: Change template to show descriptions in tooltip / ajax show box? wprofile = get_object_or_404(WorkshiftProfile, user__username=targetUsername) full_management = utils.can_manage(request.user, semester=semester) if wprofile.user != request.user and (not full_management): messages.add_message(request, messages.ERROR, MESSAGES['ADMINS_ONLY']) return HttpResponseRedirect(semester.get_view_url()) # depends on [control=['if'], data=[]] rating_forms = [] for wtype in WorkshiftType.objects.filter(rateable=True): try: rating = wprofile.ratings.get(workshift_type=wtype) # depends on [control=['try'], data=[]] except WorkshiftRating.DoesNotExist: rating = WorkshiftRating(workshift_type=wtype) # depends on [control=['except'], data=[]] form = WorkshiftRatingForm(data=request.POST or None, prefix='rating-{}'.format(wtype.pk), instance=rating, profile=wprofile) rating_forms.append(form) # depends on [control=['for'], data=['wtype']] time_formset = TimeBlockFormSet(data=request.POST or None, prefix='time', profile=wprofile) note_form = ProfileNoteForm(data=request.POST or None, instance=wprofile, prefix='note') if all((i.is_valid() for i in rating_forms)) and time_formset.is_valid() and note_form.is_valid(): for form in rating_forms: form.save() # depends on [control=['for'], data=['form']] time_formset.save() note_form.save() if wprofile.preference_save_time is None: wprofile.preference_save_time = now() wprofile.save() # depends on [control=['if'], data=[]] messages.add_message(request, messages.INFO, 'Preferences saved.') return HttpResponseRedirect(wurl('workshift:preferences', sem_url=semester.sem_url, targetUsername=request.user.username)) # depends on [control=['if'], data=[]] if wprofile == profile: page_name = 'My Workshift Preferences' # depends on [control=['if'], data=[]] else: page_name = "{}'s Workshift Preferences".format(wprofile.user.get_full_name()) return render_to_response('preferences.html', {'page_name': page_name, 'profile': wprofile, 'rating_forms': rating_forms, 'time_formset': time_formset, 'note_form': note_form}, context_instance=RequestContext(request))
def to_float(s, default=0.0, allow_nan=False): """ Return input converted into a float. If failed, then return ``default``. Note that, by default, ``allow_nan=False``, so ``to_float`` will not return ``nan``, ``inf``, or ``-inf``. Examples:: >>> to_float('1.5') 1.5 >>> to_float(1) 1.0 >>> to_float('') 0.0 >>> to_float('nan') 0.0 >>> to_float('inf') 0.0 >>> to_float('-inf', allow_nan=True) -inf >>> to_float(None) 0.0 >>> to_float(0, default='Empty') 0.0 >>> to_float(None, default='Empty') 'Empty' """ try: f = float(s) except (TypeError, ValueError): return default if not allow_nan: if f != f or f in _infs: return default return f
def function[to_float, parameter[s, default, allow_nan]]: constant[ Return input converted into a float. If failed, then return ``default``. Note that, by default, ``allow_nan=False``, so ``to_float`` will not return ``nan``, ``inf``, or ``-inf``. Examples:: >>> to_float('1.5') 1.5 >>> to_float(1) 1.0 >>> to_float('') 0.0 >>> to_float('nan') 0.0 >>> to_float('inf') 0.0 >>> to_float('-inf', allow_nan=True) -inf >>> to_float(None) 0.0 >>> to_float(0, default='Empty') 0.0 >>> to_float(None, default='Empty') 'Empty' ] <ast.Try object at 0x7da1b0f41600> if <ast.UnaryOp object at 0x7da1b0f41ab0> begin[:] if <ast.BoolOp object at 0x7da1b0f40250> begin[:] return[name[default]] return[name[f]]
keyword[def] identifier[to_float] ( identifier[s] , identifier[default] = literal[int] , identifier[allow_nan] = keyword[False] ): literal[string] keyword[try] : identifier[f] = identifier[float] ( identifier[s] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[return] identifier[default] keyword[if] keyword[not] identifier[allow_nan] : keyword[if] identifier[f] != identifier[f] keyword[or] identifier[f] keyword[in] identifier[_infs] : keyword[return] identifier[default] keyword[return] identifier[f]
def to_float(s, default=0.0, allow_nan=False): """ Return input converted into a float. If failed, then return ``default``. Note that, by default, ``allow_nan=False``, so ``to_float`` will not return ``nan``, ``inf``, or ``-inf``. Examples:: >>> to_float('1.5') 1.5 >>> to_float(1) 1.0 >>> to_float('') 0.0 >>> to_float('nan') 0.0 >>> to_float('inf') 0.0 >>> to_float('-inf', allow_nan=True) -inf >>> to_float(None) 0.0 >>> to_float(0, default='Empty') 0.0 >>> to_float(None, default='Empty') 'Empty' """ try: f = float(s) # depends on [control=['try'], data=[]] except (TypeError, ValueError): return default # depends on [control=['except'], data=[]] if not allow_nan: if f != f or f in _infs: return default # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return f
def python_sidebar(python_input): """ Create the `Layout` for the sidebar with the configurable options. """ def get_text_fragments(): tokens = [] def append_category(category): tokens.extend([ ('class:sidebar', ' '), ('class:sidebar.title', ' %-36s' % category.title), ('class:sidebar', '\n'), ]) def append(index, label, status): selected = index == python_input.selected_option_index @if_mousedown def select_item(mouse_event): python_input.selected_option_index = index @if_mousedown def goto_next(mouse_event): " Select item and go to next value. " python_input.selected_option_index = index option = python_input.selected_option option.activate_next() sel = ',selected' if selected else '' tokens.append(('class:sidebar' + sel, ' >' if selected else ' ')) tokens.append(('class:sidebar.label' + sel, '%-24s' % label, select_item)) tokens.append(('class:sidebar.status' + sel, ' ', select_item)) tokens.append(('class:sidebar.status' + sel, '%s' % status, goto_next)) if selected: tokens.append(('[SetCursorPosition]', '')) tokens.append(('class:sidebar.status' + sel, ' ' * (13 - len(status)), goto_next)) tokens.append(('class:sidebar', '<' if selected else '')) tokens.append(('class:sidebar', '\n')) i = 0 for category in python_input.options: append_category(category) for option in category.options: append(i, option.title, '%s' % option.get_current_value()) i += 1 tokens.pop() # Remove last newline. return tokens class Control(FormattedTextControl): def move_cursor_down(self): python_input.selected_option_index += 1 def move_cursor_up(self): python_input.selected_option_index -= 1 return Window( Control(get_text_fragments), style='class:sidebar', width=Dimension.exact(43), height=Dimension(min=3), scroll_offsets=ScrollOffsets(top=1, bottom=1))
def function[python_sidebar, parameter[python_input]]: constant[ Create the `Layout` for the sidebar with the configurable options. ] def function[get_text_fragments, parameter[]]: variable[tokens] assign[=] list[[]] def function[append_category, parameter[category]]: call[name[tokens].extend, parameter[list[[<ast.Tuple object at 0x7da1b0880220>, <ast.Tuple object at 0x7da1b0880fa0>, <ast.Tuple object at 0x7da1b0880610>]]]] def function[append, parameter[index, label, status]]: variable[selected] assign[=] compare[name[index] equal[==] name[python_input].selected_option_index] def function[select_item, parameter[mouse_event]]: name[python_input].selected_option_index assign[=] name[index] def function[goto_next, parameter[mouse_event]]: constant[ Select item and go to next value. ] name[python_input].selected_option_index assign[=] name[index] variable[option] assign[=] name[python_input].selected_option call[name[option].activate_next, parameter[]] variable[sel] assign[=] <ast.IfExp object at 0x7da1b0882560> call[name[tokens].append, parameter[tuple[[<ast.BinOp object at 0x7da1b0881540>, <ast.IfExp object at 0x7da1b0881f00>]]]] call[name[tokens].append, parameter[tuple[[<ast.BinOp object at 0x7da1b0882f50>, <ast.BinOp object at 0x7da1b0882410>, <ast.Name object at 0x7da1b0881ed0>]]]] call[name[tokens].append, parameter[tuple[[<ast.BinOp object at 0x7da1b0883ca0>, <ast.Constant object at 0x7da1b0880250>, <ast.Name object at 0x7da1b0882530>]]]] call[name[tokens].append, parameter[tuple[[<ast.BinOp object at 0x7da1b0883fd0>, <ast.BinOp object at 0x7da1b0883580>, <ast.Name object at 0x7da1b08811b0>]]]] if name[selected] begin[:] call[name[tokens].append, parameter[tuple[[<ast.Constant object at 0x7da1b0881bd0>, <ast.Constant object at 0x7da1b0880430>]]]] call[name[tokens].append, parameter[tuple[[<ast.BinOp object at 0x7da1b08802b0>, <ast.BinOp object at 0x7da1b0881420>, <ast.Name object at 0x7da1b0854850>]]]] call[name[tokens].append, parameter[tuple[[<ast.Constant object at 0x7da1b0854a60>, <ast.IfExp object at 0x7da1b0854a90>]]]] call[name[tokens].append, parameter[tuple[[<ast.Constant object at 0x7da1b0855720>, <ast.Constant object at 0x7da1b08557e0>]]]] variable[i] assign[=] constant[0] for taget[name[category]] in starred[name[python_input].options] begin[:] call[name[append_category], parameter[name[category]]] for taget[name[option]] in starred[name[category].options] begin[:] call[name[append], parameter[name[i], name[option].title, binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> call[name[option].get_current_value, parameter[]]]]] <ast.AugAssign object at 0x7da1b0856320> call[name[tokens].pop, parameter[]] return[name[tokens]] class class[Control, parameter[]] begin[:] def function[move_cursor_down, parameter[self]]: <ast.AugAssign object at 0x7da1b0854430> def function[move_cursor_up, parameter[self]]: <ast.AugAssign object at 0x7da1b0854580> return[call[name[Window], parameter[call[name[Control], parameter[name[get_text_fragments]]]]]]
keyword[def] identifier[python_sidebar] ( identifier[python_input] ): literal[string] keyword[def] identifier[get_text_fragments] (): identifier[tokens] =[] keyword[def] identifier[append_category] ( identifier[category] ): identifier[tokens] . identifier[extend] ([ ( literal[string] , literal[string] ), ( literal[string] , literal[string] % identifier[category] . identifier[title] ), ( literal[string] , literal[string] ), ]) keyword[def] identifier[append] ( identifier[index] , identifier[label] , identifier[status] ): identifier[selected] = identifier[index] == identifier[python_input] . identifier[selected_option_index] @ identifier[if_mousedown] keyword[def] identifier[select_item] ( identifier[mouse_event] ): identifier[python_input] . identifier[selected_option_index] = identifier[index] @ identifier[if_mousedown] keyword[def] identifier[goto_next] ( identifier[mouse_event] ): literal[string] identifier[python_input] . identifier[selected_option_index] = identifier[index] identifier[option] = identifier[python_input] . identifier[selected_option] identifier[option] . identifier[activate_next] () identifier[sel] = literal[string] keyword[if] identifier[selected] keyword[else] literal[string] identifier[tokens] . identifier[append] (( literal[string] + identifier[sel] , literal[string] keyword[if] identifier[selected] keyword[else] literal[string] )) identifier[tokens] . identifier[append] (( literal[string] + identifier[sel] , literal[string] % identifier[label] , identifier[select_item] )) identifier[tokens] . identifier[append] (( literal[string] + identifier[sel] , literal[string] , identifier[select_item] )) identifier[tokens] . identifier[append] (( literal[string] + identifier[sel] , literal[string] % identifier[status] , identifier[goto_next] )) keyword[if] identifier[selected] : identifier[tokens] . identifier[append] (( literal[string] , literal[string] )) identifier[tokens] . identifier[append] (( literal[string] + identifier[sel] , literal[string] *( literal[int] - identifier[len] ( identifier[status] )), identifier[goto_next] )) identifier[tokens] . identifier[append] (( literal[string] , literal[string] keyword[if] identifier[selected] keyword[else] literal[string] )) identifier[tokens] . identifier[append] (( literal[string] , literal[string] )) identifier[i] = literal[int] keyword[for] identifier[category] keyword[in] identifier[python_input] . identifier[options] : identifier[append_category] ( identifier[category] ) keyword[for] identifier[option] keyword[in] identifier[category] . identifier[options] : identifier[append] ( identifier[i] , identifier[option] . identifier[title] , literal[string] % identifier[option] . identifier[get_current_value] ()) identifier[i] += literal[int] identifier[tokens] . identifier[pop] () keyword[return] identifier[tokens] keyword[class] identifier[Control] ( identifier[FormattedTextControl] ): keyword[def] identifier[move_cursor_down] ( identifier[self] ): identifier[python_input] . identifier[selected_option_index] += literal[int] keyword[def] identifier[move_cursor_up] ( identifier[self] ): identifier[python_input] . identifier[selected_option_index] -= literal[int] keyword[return] identifier[Window] ( identifier[Control] ( identifier[get_text_fragments] ), identifier[style] = literal[string] , identifier[width] = identifier[Dimension] . 
identifier[exact] ( literal[int] ), identifier[height] = identifier[Dimension] ( identifier[min] = literal[int] ), identifier[scroll_offsets] = identifier[ScrollOffsets] ( identifier[top] = literal[int] , identifier[bottom] = literal[int] ))
def python_sidebar(python_input): """ Create the `Layout` for the sidebar with the configurable options. """ def get_text_fragments(): tokens = [] def append_category(category): tokens.extend([('class:sidebar', ' '), ('class:sidebar.title', ' %-36s' % category.title), ('class:sidebar', '\n')]) def append(index, label, status): selected = index == python_input.selected_option_index @if_mousedown def select_item(mouse_event): python_input.selected_option_index = index @if_mousedown def goto_next(mouse_event): """ Select item and go to next value. """ python_input.selected_option_index = index option = python_input.selected_option option.activate_next() sel = ',selected' if selected else '' tokens.append(('class:sidebar' + sel, ' >' if selected else ' ')) tokens.append(('class:sidebar.label' + sel, '%-24s' % label, select_item)) tokens.append(('class:sidebar.status' + sel, ' ', select_item)) tokens.append(('class:sidebar.status' + sel, '%s' % status, goto_next)) if selected: tokens.append(('[SetCursorPosition]', '')) # depends on [control=['if'], data=[]] tokens.append(('class:sidebar.status' + sel, ' ' * (13 - len(status)), goto_next)) tokens.append(('class:sidebar', '<' if selected else '')) tokens.append(('class:sidebar', '\n')) i = 0 for category in python_input.options: append_category(category) for option in category.options: append(i, option.title, '%s' % option.get_current_value()) i += 1 # depends on [control=['for'], data=['option']] # depends on [control=['for'], data=['category']] tokens.pop() # Remove last newline. return tokens class Control(FormattedTextControl): def move_cursor_down(self): python_input.selected_option_index += 1 def move_cursor_up(self): python_input.selected_option_index -= 1 return Window(Control(get_text_fragments), style='class:sidebar', width=Dimension.exact(43), height=Dimension(min=3), scroll_offsets=ScrollOffsets(top=1, bottom=1))
def match(self, text, noprefix=False):
    """Matches a date/datetime string against the date patterns and returns the pattern and parsed date if matched.
    It's not intended for common usage, since if successful it returns the date as an array of numbers plus the pattern that matched it

    :param text: Any human readable string
    :type text: str|unicode
    :param noprefix: If set to True, prefix-based date pattern filtering settings are not used
    :type noprefix: bool
    :return: Returns a dict with `values` as an array representing the parsed date and 'pattern' with info about the matched pattern if successful, else returns None
    :rtype: :class:`dict`."""
    n = len(text)
    if self.cachedpats is not None:
        pats = self.cachedpats
    else:
        pats = self.patterns
    if n > 5 and not noprefix:
        basekeys = self.__matchPrefix(text[:6])
    else:
        basekeys = []
    for p in pats:
        if n < p['length']['min'] or n > p['length']['max']:
            continue
        if p['right'] and len(basekeys) > 0 and p['basekey'] not in basekeys:
            continue
        try:
            r = p['pattern'].parseString(text)
            # Do sanity check
            d = r.asDict()
            if 'month' in d:
                val = int(d['month'])
                if val > 12 or val < 1:
                    continue
            if 'day' in d:
                val = int(d['day'])
                if val > 31 or val < 1:
                    continue
            return {'values': r, 'pattern': p}
        except ParseException as e:
            # print p['key'], text.encode('utf-8'), e
            pass
    return None
def function[match, parameter[self, text, noprefix]]: constant[Matches a date/datetime string against the date patterns and returns the pattern and parsed date if matched. It's not intended for common usage, since if successful it returns the date as an array of numbers plus the pattern that matched it :param text: Any human readable string :type text: str|unicode :param noprefix: If set to True, prefix-based date pattern filtering settings are not used :type noprefix: bool :return: Returns a dict with `values` as an array representing the parsed date and 'pattern' with info about the matched pattern if successful, else returns None :rtype: :class:`dict`.] variable[n] assign[=] call[name[len], parameter[name[text]]] if compare[name[self].cachedpats is_not constant[None]] begin[:] variable[pats] assign[=] name[self].cachedpats if <ast.BoolOp object at 0x7da1b092cb20> begin[:] variable[basekeys] assign[=] call[name[self].__matchPrefix, parameter[call[name[text]][<ast.Slice object at 0x7da1b092d5d0>]]] for taget[name[p]] in starred[name[pats]] begin[:] if <ast.BoolOp object at 0x7da1b092e500> begin[:] continue if <ast.BoolOp object at 0x7da1b092f160> begin[:] continue <ast.Try object at 0x7da1b092f190> return[constant[None]]
keyword[def] identifier[match] ( identifier[self] , identifier[text] , identifier[noprefix] = keyword[False] ): literal[string] identifier[n] = identifier[len] ( identifier[text] ) keyword[if] identifier[self] . identifier[cachedpats] keyword[is] keyword[not] keyword[None] : identifier[pats] = identifier[self] . identifier[cachedpats] keyword[else] : identifier[pats] = identifier[self] . identifier[patterns] keyword[if] identifier[n] > literal[int] keyword[and] keyword[not] identifier[noprefix] : identifier[basekeys] = identifier[self] . identifier[__matchPrefix] ( identifier[text] [: literal[int] ]) keyword[else] : identifier[basekeys] =[] keyword[for] identifier[p] keyword[in] identifier[pats] : keyword[if] identifier[n] < identifier[p] [ literal[string] ][ literal[string] ] keyword[or] identifier[n] > identifier[p] [ literal[string] ][ literal[string] ]: keyword[continue] keyword[if] identifier[p] [ literal[string] ] keyword[and] identifier[len] ( identifier[basekeys] )> literal[int] keyword[and] identifier[p] [ literal[string] ] keyword[not] keyword[in] identifier[basekeys] : keyword[continue] keyword[try] : identifier[r] = identifier[p] [ literal[string] ]. identifier[parseString] ( identifier[text] ) identifier[d] = identifier[r] . identifier[asDict] () keyword[if] literal[string] keyword[in] identifier[d] : identifier[val] = identifier[int] ( identifier[d] [ literal[string] ]) keyword[if] identifier[val] > literal[int] keyword[or] identifier[val] < literal[int] : keyword[continue] keyword[if] literal[string] keyword[in] identifier[d] : identifier[val] = identifier[int] ( identifier[d] [ literal[string] ]) keyword[if] identifier[val] > literal[int] keyword[or] identifier[val] < literal[int] : keyword[continue] keyword[return] { literal[string] : identifier[r] , literal[string] : identifier[p] } keyword[except] identifier[ParseException] keyword[as] identifier[e] : keyword[pass] keyword[return] keyword[None]
def match(self, text, noprefix=False): """Matches date/datetime string against date patterns and returns pattern and parsed date if matched. It's not indeded for common usage, since if successful it returns date as array of numbers and pattern that matched this date :param text: Any human readable string :type date_string: str|unicode :param noprefix: If set True than doesn't use prefix based date patterns filtering settings :type noprefix: bool :return: Returns dicts with `values` as array of representing parsed date and 'pattern' with info about matched pattern if successful, else returns None :rtype: :class:`dict`.""" n = len(text) if self.cachedpats is not None: pats = self.cachedpats # depends on [control=['if'], data=[]] else: pats = self.patterns if n > 5 and (not noprefix): basekeys = self.__matchPrefix(text[:6]) # depends on [control=['if'], data=[]] else: basekeys = [] for p in pats: if n < p['length']['min'] or n > p['length']['max']: continue # depends on [control=['if'], data=[]] if p['right'] and len(basekeys) > 0 and (p['basekey'] not in basekeys): continue # depends on [control=['if'], data=[]] try: r = p['pattern'].parseString(text) # Do sanity check d = r.asDict() if 'month' in d: val = int(d['month']) if val > 12 or val < 1: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['d']] if 'day' in d: val = int(d['day']) if val > 31 or val < 1: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['d']] return {'values': r, 'pattern': p} # depends on [control=['try'], data=[]] except ParseException as e: # print p['key'], text.encode('utf-8'), e pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['p']] return None
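A runnable sketch of the sanity check above in isolation: candidate patterns whose named month/day fields fall outside the valid ranges are skipped. The helper name `plausible` is introduced here for illustration and is not from the source:

def plausible(d):
    # month must be 1..12 and day 1..31, else the candidate pattern is skipped
    if 'month' in d and not 1 <= int(d['month']) <= 12:
        return False
    if 'day' in d and not 1 <= int(d['day']) <= 31:
        return False
    return True

assert plausible({'year': '2019', 'month': '3', 'day': '1'})
assert not plausible({'month': '13'})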
def cmd_all(args): """List everything recursively""" for penlist in penStore.data: puts(penlist) with indent(4, ' -'): for penfile in penStore.data[penlist]: puts(penfile)
def function[cmd_all, parameter[args]]: constant[List everything recursively] for taget[name[penlist]] in starred[name[penStore].data] begin[:] call[name[puts], parameter[name[penlist]]] with call[name[indent], parameter[constant[4], constant[ -]]] begin[:] for taget[name[penfile]] in starred[call[name[penStore].data][name[penlist]]] begin[:] call[name[puts], parameter[name[penfile]]]
keyword[def] identifier[cmd_all] ( identifier[args] ): literal[string] keyword[for] identifier[penlist] keyword[in] identifier[penStore] . identifier[data] : identifier[puts] ( identifier[penlist] ) keyword[with] identifier[indent] ( literal[int] , literal[string] ): keyword[for] identifier[penfile] keyword[in] identifier[penStore] . identifier[data] [ identifier[penlist] ]: identifier[puts] ( identifier[penfile] )
def cmd_all(args): """List everything recursively""" for penlist in penStore.data: puts(penlist) with indent(4, ' -'): for penfile in penStore.data[penlist]: puts(penfile) # depends on [control=['for'], data=['penfile']] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['penlist']]
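A minimal stand-in (assumed shape, not from the source) for penStore.data, showing what cmd_all prints; plain print replaces clint's puts/indent here:

data = {'work': ['todo', 'meeting-notes'], 'home': ['recipes']}  # assumed: list name -> note names
for penlist in data:
    print(penlist)
    for penfile in data[penlist]:
        print('    -', penfile)  # roughly what indent(4, ' -') renders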
def _event_funcs(self, event: str) -> Iterable[Callable]:
        """ Returns an iterable of the functions subscribed to an event.

        :param event: Name of the event.
        :type event: str
        :return: An iterable of the subscribed callables.
        :rtype: Iterable
        """
        for func in self._events[event]:
            yield func
def function[_event_funcs, parameter[self, event]]: constant[ Returns an Iterable of the functions subscribed to a event. :param event: Name of the event. :type event: str :return: A iterable to do things with. :rtype: Iterable ] for taget[name[func]] in starred[call[name[self]._events][name[event]]] begin[:] <ast.Yield object at 0x7da2044c14e0>
keyword[def] identifier[_event_funcs] ( identifier[self] , identifier[event] : identifier[str] )-> identifier[Iterable] [ identifier[Callable] ]: literal[string] keyword[for] identifier[func] keyword[in] identifier[self] . identifier[_events] [ identifier[event] ]: keyword[yield] identifier[func]
def _event_funcs(self, event: str) -> Iterable[Callable]: """ Returns an Iterable of the functions subscribed to a event. :param event: Name of the event. :type event: str :return: A iterable to do things with. :rtype: Iterable """ for func in self._events[event]: yield func # depends on [control=['for'], data=['func']]
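A self-contained sketch of how the generator is consumed; _Bus is a stand-in for the (unshown) event-bus class, with _events assumed to map event names to lists of callables:

class _Bus:
    def __init__(self):
        self._events = {'ready': [lambda: print('a'), lambda: print('b')]}

    def _event_funcs(self, event):
        for func in self._events[event]:
            yield func

bus = _Bus()
for fn in bus._event_funcs('ready'):
    fn()  # prints 'a' then 'b'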
def setattr(self, req, ino, attr, to_set, fi): """Set file attributes Valid replies: reply_attr reply_err """ self.reply_err(req, errno.EROFS)
def function[setattr, parameter[self, req, ino, attr, to_set, fi]]: constant[Set file attributes Valid replies: reply_attr reply_err ] call[name[self].reply_err, parameter[name[req], name[errno].EROFS]]
keyword[def] identifier[setattr] ( identifier[self] , identifier[req] , identifier[ino] , identifier[attr] , identifier[to_set] , identifier[fi] ): literal[string] identifier[self] . identifier[reply_err] ( identifier[req] , identifier[errno] . identifier[EROFS] )
def setattr(self, req, ino, attr, to_set, fi): """Set file attributes Valid replies: reply_attr reply_err """ self.reply_err(req, errno.EROFS)
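Answering EROFS is the standard way a read-only low-level FUSE class rejects mutation; a self-contained sketch of the same pattern (the class and the unlink signature are assumptions, mirroring the handler above):

import errno

class ReadOnlyOps:
    def reply_err(self, req, err):
        # stand-in for the real reply machinery
        print('req %r -> errno %d' % (req, err))

    def setattr(self, req, ino, attr, to_set, fi):
        self.reply_err(req, errno.EROFS)

    def unlink(self, req, parent, name):
        self.reply_err(req, errno.EROFS)

ReadOnlyOps().unlink(req=1, parent=1, name=b'x')  # prints the EROFS errno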
def _set_alignment(self, group_size, bit_offset=0, auto_align=False):
        """ Sets the alignment of the ``Decimal`` field.

        :param int group_size: size of the aligned `Field` group in bytes,
            can be between ``1`` and ``8``.
        :param int bit_offset: bit offset of the `Decimal` field within the
            aligned `Field` group, can be between ``0`` and ``63``.
        :param bool auto_align: if ``True`` the `Decimal` field aligns itself
            to the next matching byte size according to the *size* of the
            `Decimal` field.
        """
        # Field alignment offset
        field_offset = int(bit_offset)

        # Auto alignment
        if auto_align:
            # Field alignment size
            field_size, bit_offset = divmod(field_offset, 8)
            if bit_offset != 0:
                field_size += 1
            field_size = max(field_size, 1)
        # No auto alignment
        else:
            # Field alignment size
            field_size = int(group_size)

        # Field alignment
        alignment = Alignment(field_size, field_offset)

        # Invalid field alignment size (valid byte sizes are 1..8, per the docstring)
        if field_size not in range(1, 9):
            raise FieldAlignmentError(self, self.index, alignment)

        # Invalid field alignment offset
        if not (0 <= field_offset <= 63):
            raise FieldAlignmentError(self, self.index, alignment)

        # Invalid field alignment
        if field_offset >= field_size * 8:
            raise FieldAlignmentError(self, self.index, alignment)

        # Set field alignment
        self._align_to_byte_size = alignment.byte_size
        self._align_to_bit_offset = alignment.bit_offset
def function[_set_alignment, parameter[self, group_size, bit_offset, auto_align]]: constant[ Sets the alignment of the ``Decimal`` field. :param int group_size: size of the aligned `Field` group in bytes, can be between ``1`` and ``8``. :param int bit_offset: bit offset of the `Decimal` field within the aligned `Field` group, can be between ``0`` and ``63``. :param bool auto_align: if ``True`` the `Decimal` field aligns itself to the next matching byte size according to the *size* of the `Decimal` field. ] variable[field_offset] assign[=] call[name[int], parameter[name[bit_offset]]] if name[auto_align] begin[:] <ast.Tuple object at 0x7da2054a5bd0> assign[=] call[name[divmod], parameter[name[field_offset], constant[8]]] if compare[name[bit_offset] is_not constant[0]] begin[:] <ast.AugAssign object at 0x7da2054a4400> variable[field_size] assign[=] call[name[max], parameter[name[field_size], constant[1]]] variable[alignment] assign[=] call[name[Alignment], parameter[name[field_size], name[field_offset]]] if compare[name[field_size] <ast.NotIn object at 0x7da2590d7190> call[name[range], parameter[constant[1], constant[8]]]] begin[:] <ast.Raise object at 0x7da2054a72b0> if <ast.UnaryOp object at 0x7da2054a7790> begin[:] <ast.Raise object at 0x7da2054a4f40> if compare[name[field_offset] greater_or_equal[>=] binary_operation[name[field_size] * constant[8]]] begin[:] <ast.Raise object at 0x7da20e954b50> name[self]._align_to_byte_size assign[=] name[alignment].byte_size name[self]._align_to_bit_offset assign[=] name[alignment].bit_offset
keyword[def] identifier[_set_alignment] ( identifier[self] , identifier[group_size] , identifier[bit_offset] = literal[int] , identifier[auto_align] = keyword[False] ): literal[string] identifier[field_offset] = identifier[int] ( identifier[bit_offset] ) keyword[if] identifier[auto_align] : identifier[field_size] , identifier[bit_offset] = identifier[divmod] ( identifier[field_offset] , literal[int] ) keyword[if] identifier[bit_offset] keyword[is] keyword[not] literal[int] : identifier[field_size] += literal[int] identifier[field_size] = identifier[max] ( identifier[field_size] , literal[int] ) keyword[else] : identifier[field_size] = identifier[int] ( identifier[group_size] ) identifier[alignment] = identifier[Alignment] ( identifier[field_size] , identifier[field_offset] ) keyword[if] identifier[field_size] keyword[not] keyword[in] identifier[range] ( literal[int] , literal[int] ): keyword[raise] identifier[FieldAlignmentError] ( identifier[self] , identifier[self] . identifier[index] , identifier[alignment] ) keyword[if] keyword[not] ( literal[int] <= identifier[field_offset] <= literal[int] ): keyword[raise] identifier[FieldAlignmentError] ( identifier[self] , identifier[self] . identifier[index] , identifier[alignment] ) keyword[if] identifier[field_offset] >= identifier[field_size] * literal[int] : keyword[raise] identifier[FieldAlignmentError] ( identifier[self] , identifier[self] . identifier[index] , identifier[alignment] ) identifier[self] . identifier[_align_to_byte_size] = identifier[alignment] . identifier[byte_size] identifier[self] . identifier[_align_to_bit_offset] = identifier[alignment] . identifier[bit_offset]
def _set_alignment(self, group_size, bit_offset=0, auto_align=False): """ Sets the alignment of the ``Decimal`` field. :param int group_size: size of the aligned `Field` group in bytes, can be between ``1`` and ``8``. :param int bit_offset: bit offset of the `Decimal` field within the aligned `Field` group, can be between ``0`` and ``63``. :param bool auto_align: if ``True`` the `Decimal` field aligns itself to the next matching byte size according to the *size* of the `Decimal` field. """ # Field alignment offset field_offset = int(bit_offset) # Auto alignment if auto_align: # Field alignment size (field_size, bit_offset) = divmod(field_offset, 8) if bit_offset is not 0: field_size += 1 # depends on [control=['if'], data=[]] field_size = max(field_size, 1) # depends on [control=['if'], data=[]] else: # No auto alignment # Field alignment size field_size = int(group_size) # Field alignment alignment = Alignment(field_size, field_offset) # Invalid field alignment size if field_size not in range(1, 8): raise FieldAlignmentError(self, self.index, alignment) # depends on [control=['if'], data=[]] # Invalid field alignment offset if not 0 <= field_offset <= 63: raise FieldAlignmentError(self, self.index, alignment) # depends on [control=['if'], data=[]] # Invalid field alignment if field_offset >= field_size * 8: raise FieldAlignmentError(self, self.index, alignment) # depends on [control=['if'], data=[]] # Set field alignment self._align_to_byte_size = alignment.byte_size self._align_to_bit_offset = alignment.bit_offset
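The auto-align branch is just a ceiling division of the bit offset by 8; a worked example of that arithmetic (pure Python, independent of the class):

field_offset = 13                                # a field reaching bit 13
field_size, remainder = divmod(field_offset, 8)  # -> (1, 5)
if remainder != 0:
    field_size += 1                              # round up to whole bytes
field_size = max(field_size, 1)                  # at least one byte
assert field_size == 2                           # ceil(13 / 8)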
def get(url, params=None):
    """Invoke an HTTP GET request on a url

    Args:
        url (string): URL endpoint to request
        params (dict): Dictionary of url parameters

    Returns:
        dict: JSON response as a dictionary
    """
    # Avoid a mutable default argument
    params = params or {}
    request_url = url
    if len(params):
        request_url = "{}?{}".format(url, urlencode(params))
    try:
        req = Request(request_url, headers={'User-Agent': 'Mozilla/5.0'})
        response = json.loads(urlopen(req).read().decode("utf-8"))
        return response
    except HTTPError as err:
        raise MtgException(err.read())
def function[get, parameter[url, params]]: constant[Invoke an HTTP GET request on a url Args: url (string): URL endpoint to request params (dict): Dictionary of url parameters Returns: dict: JSON response as a dictionary ] variable[request_url] assign[=] name[url] if call[name[len], parameter[name[params]]] begin[:] variable[request_url] assign[=] call[constant[{}?{}].format, parameter[name[url], call[name[urlencode], parameter[name[params]]]]] <ast.Try object at 0x7da1b0b4a050>
keyword[def] identifier[get] ( identifier[url] , identifier[params] ={}): literal[string] identifier[request_url] = identifier[url] keyword[if] identifier[len] ( identifier[params] ): identifier[request_url] = literal[string] . identifier[format] ( identifier[url] , identifier[urlencode] ( identifier[params] )) keyword[try] : identifier[req] = identifier[Request] ( identifier[request_url] , identifier[headers] ={ literal[string] : literal[string] }) identifier[response] = identifier[json] . identifier[loads] ( identifier[urlopen] ( identifier[req] ). identifier[read] (). identifier[decode] ( literal[string] )) keyword[return] identifier[response] keyword[except] identifier[HTTPError] keyword[as] identifier[err] : keyword[raise] identifier[MtgException] ( identifier[err] . identifier[read] ())
def get(url, params={}): """Invoke an HTTP GET request on a url Args: url (string): URL endpoint to request params (dict): Dictionary of url parameters Returns: dict: JSON response as a dictionary """ request_url = url if len(params): request_url = '{}?{}'.format(url, urlencode(params)) # depends on [control=['if'], data=[]] try: req = Request(request_url, headers={'User-Agent': 'Mozilla/5.0'}) response = json.loads(urlopen(req).read().decode('utf-8')) return response # depends on [control=['try'], data=[]] except HTTPError as err: raise MtgException(err.read()) # depends on [control=['except'], data=['err']]
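A runnable illustration of just the URL-building step; the endpoint is invented for the example and the Python 3 import path is assumed:

from urllib.parse import urlencode

url = 'https://api.example.com/v1/cards'   # illustrative, not from the source
params = {'name': 'Llanowar Elves', 'page': 2}
request_url = '{}?{}'.format(url, urlencode(params)) if params else url
print(request_url)  # https://api.example.com/v1/cards?name=Llanowar+Elves&page=2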
def get_string(self, distance=6, velocity=8, charge=3): """ Returns the string representation of LammpsData, essentially the string to be written to a file. Args: distance (int): No. of significant figures to output for box settings (bounds and tilt) and atomic coordinates. Default to 6. velocity (int): No. of significant figures to output for velocities. Default to 8. charge (int): No. of significant figures to output for charges. Default to 3. Returns: String representation """ file_template = """Generated by pymatgen.io.lammps.data.LammpsData {stats} {box} {body} """ box = self.box.get_string(distance) body_dict = OrderedDict() body_dict["Masses"] = self.masses types = OrderedDict() types["atom"] = len(self.masses) if self.force_field: all_ff_kws = SECTION_KEYWORDS["ff"] + SECTION_KEYWORDS["class2"] ff_kws = [k for k in all_ff_kws if k in self.force_field] for kw in ff_kws: body_dict[kw] = self.force_field[kw] if kw in SECTION_KEYWORDS["ff"][2:]: types[kw.lower()[:-7]] = len(self.force_field[kw]) body_dict["Atoms"] = self.atoms counts = OrderedDict() counts["atoms"] = len(self.atoms) if self.velocities is not None: body_dict["Velocities"] = self.velocities if self.topology: for kw in SECTION_KEYWORDS["topology"]: if kw in self.topology: body_dict[kw] = self.topology[kw] counts[kw.lower()] = len(self.topology[kw]) all_stats = list(counts.values()) + list(types.values()) stats_template = "{:>%d} {}" % len(str(max(all_stats))) count_lines = [stats_template.format(v, k) for k, v in counts.items()] type_lines = [stats_template.format(v, k + " types") for k, v in types.items()] stats = "\n".join(count_lines + [""] + type_lines) map_coords = lambda q: ("{:.%df}" % distance).format(q) map_velos = lambda q: ("{:.%df}" % velocity).format(q) map_charges = lambda q: ("{:.%df}" % charge).format(q) formatters = {"x": map_coords, "y": map_coords, "z": map_coords, "vx": map_velos, "vy": map_velos, "vz": map_velos, "q": map_charges} section_template = "{kw}\n\n{df}\n" parts = [] for k, v in body_dict.items(): index = True if k != "PairIJ Coeffs" else False df_string = v.to_string(header=False, formatters=formatters, index_names=False, index=index) parts.append(section_template.format(kw=k, df=df_string)) body = "\n".join(parts) return file_template.format(stats=stats, box=box, body=body)
def function[get_string, parameter[self, distance, velocity, charge]]: constant[ Returns the string representation of LammpsData, essentially the string to be written to a file. Args: distance (int): No. of significant figures to output for box settings (bounds and tilt) and atomic coordinates. Default to 6. velocity (int): No. of significant figures to output for velocities. Default to 8. charge (int): No. of significant figures to output for charges. Default to 3. Returns: String representation ] variable[file_template] assign[=] constant[Generated by pymatgen.io.lammps.data.LammpsData {stats} {box} {body} ] variable[box] assign[=] call[name[self].box.get_string, parameter[name[distance]]] variable[body_dict] assign[=] call[name[OrderedDict], parameter[]] call[name[body_dict]][constant[Masses]] assign[=] name[self].masses variable[types] assign[=] call[name[OrderedDict], parameter[]] call[name[types]][constant[atom]] assign[=] call[name[len], parameter[name[self].masses]] if name[self].force_field begin[:] variable[all_ff_kws] assign[=] binary_operation[call[name[SECTION_KEYWORDS]][constant[ff]] + call[name[SECTION_KEYWORDS]][constant[class2]]] variable[ff_kws] assign[=] <ast.ListComp object at 0x7da207f02920> for taget[name[kw]] in starred[name[ff_kws]] begin[:] call[name[body_dict]][name[kw]] assign[=] call[name[self].force_field][name[kw]] if compare[name[kw] in call[call[name[SECTION_KEYWORDS]][constant[ff]]][<ast.Slice object at 0x7da207f02050>]] begin[:] call[name[types]][call[call[name[kw].lower, parameter[]]][<ast.Slice object at 0x7da207f02560>]] assign[=] call[name[len], parameter[call[name[self].force_field][name[kw]]]] call[name[body_dict]][constant[Atoms]] assign[=] name[self].atoms variable[counts] assign[=] call[name[OrderedDict], parameter[]] call[name[counts]][constant[atoms]] assign[=] call[name[len], parameter[name[self].atoms]] if compare[name[self].velocities is_not constant[None]] begin[:] call[name[body_dict]][constant[Velocities]] assign[=] name[self].velocities if name[self].topology begin[:] for taget[name[kw]] in starred[call[name[SECTION_KEYWORDS]][constant[topology]]] begin[:] if compare[name[kw] in name[self].topology] begin[:] call[name[body_dict]][name[kw]] assign[=] call[name[self].topology][name[kw]] call[name[counts]][call[name[kw].lower, parameter[]]] assign[=] call[name[len], parameter[call[name[self].topology][name[kw]]]] variable[all_stats] assign[=] binary_operation[call[name[list], parameter[call[name[counts].values, parameter[]]]] + call[name[list], parameter[call[name[types].values, parameter[]]]]] variable[stats_template] assign[=] binary_operation[constant[{:>%d} {}] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[call[name[str], parameter[call[name[max], parameter[name[all_stats]]]]]]]] variable[count_lines] assign[=] <ast.ListComp object at 0x7da207f03760> variable[type_lines] assign[=] <ast.ListComp object at 0x7da207f03e50> variable[stats] assign[=] call[constant[ ].join, parameter[binary_operation[binary_operation[name[count_lines] + list[[<ast.Constant object at 0x7da207f02d10>]]] + name[type_lines]]]] variable[map_coords] assign[=] <ast.Lambda object at 0x7da207f01c90> variable[map_velos] assign[=] <ast.Lambda object at 0x7da207f02410> variable[map_charges] assign[=] <ast.Lambda object at 0x7da207f02a40> variable[formatters] assign[=] dictionary[[<ast.Constant object at 0x7da207f00d60>, <ast.Constant object at 0x7da207f02950>, <ast.Constant object at 0x7da207f03520>, <ast.Constant object at 0x7da207f027a0>, <ast.Constant 
object at 0x7da207f023b0>, <ast.Constant object at 0x7da207f02740>, <ast.Constant object at 0x7da207f00b50>], [<ast.Name object at 0x7da207f00dc0>, <ast.Name object at 0x7da207f00460>, <ast.Name object at 0x7da207f003a0>, <ast.Name object at 0x7da207f00520>, <ast.Name object at 0x7da207f003d0>, <ast.Name object at 0x7da207f01030>, <ast.Name object at 0x7da207f02470>]] variable[section_template] assign[=] constant[{kw} {df} ] variable[parts] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c991f30>, <ast.Name object at 0x7da20c993100>]]] in starred[call[name[body_dict].items, parameter[]]] begin[:] variable[index] assign[=] <ast.IfExp object at 0x7da20c9915d0> variable[df_string] assign[=] call[name[v].to_string, parameter[]] call[name[parts].append, parameter[call[name[section_template].format, parameter[]]]] variable[body] assign[=] call[constant[ ].join, parameter[name[parts]]] return[call[name[file_template].format, parameter[]]]
keyword[def] identifier[get_string] ( identifier[self] , identifier[distance] = literal[int] , identifier[velocity] = literal[int] , identifier[charge] = literal[int] ): literal[string] identifier[file_template] = literal[string] identifier[box] = identifier[self] . identifier[box] . identifier[get_string] ( identifier[distance] ) identifier[body_dict] = identifier[OrderedDict] () identifier[body_dict] [ literal[string] ]= identifier[self] . identifier[masses] identifier[types] = identifier[OrderedDict] () identifier[types] [ literal[string] ]= identifier[len] ( identifier[self] . identifier[masses] ) keyword[if] identifier[self] . identifier[force_field] : identifier[all_ff_kws] = identifier[SECTION_KEYWORDS] [ literal[string] ]+ identifier[SECTION_KEYWORDS] [ literal[string] ] identifier[ff_kws] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[all_ff_kws] keyword[if] identifier[k] keyword[in] identifier[self] . identifier[force_field] ] keyword[for] identifier[kw] keyword[in] identifier[ff_kws] : identifier[body_dict] [ identifier[kw] ]= identifier[self] . identifier[force_field] [ identifier[kw] ] keyword[if] identifier[kw] keyword[in] identifier[SECTION_KEYWORDS] [ literal[string] ][ literal[int] :]: identifier[types] [ identifier[kw] . identifier[lower] ()[:- literal[int] ]]= identifier[len] ( identifier[self] . identifier[force_field] [ identifier[kw] ]) identifier[body_dict] [ literal[string] ]= identifier[self] . identifier[atoms] identifier[counts] = identifier[OrderedDict] () identifier[counts] [ literal[string] ]= identifier[len] ( identifier[self] . identifier[atoms] ) keyword[if] identifier[self] . identifier[velocities] keyword[is] keyword[not] keyword[None] : identifier[body_dict] [ literal[string] ]= identifier[self] . identifier[velocities] keyword[if] identifier[self] . identifier[topology] : keyword[for] identifier[kw] keyword[in] identifier[SECTION_KEYWORDS] [ literal[string] ]: keyword[if] identifier[kw] keyword[in] identifier[self] . identifier[topology] : identifier[body_dict] [ identifier[kw] ]= identifier[self] . identifier[topology] [ identifier[kw] ] identifier[counts] [ identifier[kw] . identifier[lower] ()]= identifier[len] ( identifier[self] . identifier[topology] [ identifier[kw] ]) identifier[all_stats] = identifier[list] ( identifier[counts] . identifier[values] ())+ identifier[list] ( identifier[types] . identifier[values] ()) identifier[stats_template] = literal[string] % identifier[len] ( identifier[str] ( identifier[max] ( identifier[all_stats] ))) identifier[count_lines] =[ identifier[stats_template] . identifier[format] ( identifier[v] , identifier[k] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[counts] . identifier[items] ()] identifier[type_lines] =[ identifier[stats_template] . identifier[format] ( identifier[v] , identifier[k] + literal[string] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[types] . identifier[items] ()] identifier[stats] = literal[string] . identifier[join] ( identifier[count_lines] +[ literal[string] ]+ identifier[type_lines] ) identifier[map_coords] = keyword[lambda] identifier[q] :( literal[string] % identifier[distance] ). identifier[format] ( identifier[q] ) identifier[map_velos] = keyword[lambda] identifier[q] :( literal[string] % identifier[velocity] ). identifier[format] ( identifier[q] ) identifier[map_charges] = keyword[lambda] identifier[q] :( literal[string] % identifier[charge] ). 
identifier[format] ( identifier[q] ) identifier[formatters] ={ literal[string] : identifier[map_coords] , literal[string] : identifier[map_coords] , literal[string] : identifier[map_coords] , literal[string] : identifier[map_velos] , literal[string] : identifier[map_velos] , literal[string] : identifier[map_velos] , literal[string] : identifier[map_charges] } identifier[section_template] = literal[string] identifier[parts] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[body_dict] . identifier[items] (): identifier[index] = keyword[True] keyword[if] identifier[k] != literal[string] keyword[else] keyword[False] identifier[df_string] = identifier[v] . identifier[to_string] ( identifier[header] = keyword[False] , identifier[formatters] = identifier[formatters] , identifier[index_names] = keyword[False] , identifier[index] = identifier[index] ) identifier[parts] . identifier[append] ( identifier[section_template] . identifier[format] ( identifier[kw] = identifier[k] , identifier[df] = identifier[df_string] )) identifier[body] = literal[string] . identifier[join] ( identifier[parts] ) keyword[return] identifier[file_template] . identifier[format] ( identifier[stats] = identifier[stats] , identifier[box] = identifier[box] , identifier[body] = identifier[body] )
def get_string(self, distance=6, velocity=8, charge=3): """ Returns the string representation of LammpsData, essentially the string to be written to a file. Args: distance (int): No. of significant figures to output for box settings (bounds and tilt) and atomic coordinates. Default to 6. velocity (int): No. of significant figures to output for velocities. Default to 8. charge (int): No. of significant figures to output for charges. Default to 3. Returns: String representation """ file_template = 'Generated by pymatgen.io.lammps.data.LammpsData\n\n{stats}\n\n{box}\n\n{body}\n' box = self.box.get_string(distance) body_dict = OrderedDict() body_dict['Masses'] = self.masses types = OrderedDict() types['atom'] = len(self.masses) if self.force_field: all_ff_kws = SECTION_KEYWORDS['ff'] + SECTION_KEYWORDS['class2'] ff_kws = [k for k in all_ff_kws if k in self.force_field] for kw in ff_kws: body_dict[kw] = self.force_field[kw] if kw in SECTION_KEYWORDS['ff'][2:]: types[kw.lower()[:-7]] = len(self.force_field[kw]) # depends on [control=['if'], data=['kw']] # depends on [control=['for'], data=['kw']] # depends on [control=['if'], data=[]] body_dict['Atoms'] = self.atoms counts = OrderedDict() counts['atoms'] = len(self.atoms) if self.velocities is not None: body_dict['Velocities'] = self.velocities # depends on [control=['if'], data=[]] if self.topology: for kw in SECTION_KEYWORDS['topology']: if kw in self.topology: body_dict[kw] = self.topology[kw] counts[kw.lower()] = len(self.topology[kw]) # depends on [control=['if'], data=['kw']] # depends on [control=['for'], data=['kw']] # depends on [control=['if'], data=[]] all_stats = list(counts.values()) + list(types.values()) stats_template = '{:>%d} {}' % len(str(max(all_stats))) count_lines = [stats_template.format(v, k) for (k, v) in counts.items()] type_lines = [stats_template.format(v, k + ' types') for (k, v) in types.items()] stats = '\n'.join(count_lines + [''] + type_lines) map_coords = lambda q: ('{:.%df}' % distance).format(q) map_velos = lambda q: ('{:.%df}' % velocity).format(q) map_charges = lambda q: ('{:.%df}' % charge).format(q) formatters = {'x': map_coords, 'y': map_coords, 'z': map_coords, 'vx': map_velos, 'vy': map_velos, 'vz': map_velos, 'q': map_charges} section_template = '{kw}\n\n{df}\n' parts = [] for (k, v) in body_dict.items(): index = True if k != 'PairIJ Coeffs' else False df_string = v.to_string(header=False, formatters=formatters, index_names=False, index=index) parts.append(section_template.format(kw=k, df=df_string)) # depends on [control=['for'], data=[]] body = '\n'.join(parts) return file_template.format(stats=stats, box=box, body=body)
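The counts/types header in get_string is right-aligned to the widest number; an isolated, runnable illustration of that assembly (sample values invented):

from collections import OrderedDict

counts = OrderedDict([('atoms', 1024), ('bonds', 2048)])
types = OrderedDict([('atom', 4), ('bond', 3)])
all_stats = list(counts.values()) + list(types.values())
template = '{:>%d} {}' % len(str(max(all_stats)))   # width of the largest count
lines = [template.format(v, k) for k, v in counts.items()]
lines += [''] + [template.format(v, k + ' types') for k, v in types.items()]
print('\n'.join(lines))
# 1024 atoms
# 2048 bonds
#
#    4 atom types
#    3 bond types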
def stacked_node_layout(self, EdgeAttribute=None, network=None, NodeAttribute=None,
                            nodeList=None, x_position=None, y_start_position=None, verbose=None):
        """
        Execute the Stacked Node Layout on a network.

        :param EdgeAttribute (string, optional): The name of the edge column
            containing numeric values that will be used as weights in the
            layout algorithm. Only columns containing numeric values are shown
        :param network (string, optional): Specifies a network by name, or by
            SUID if the prefix SUID: is used. The keyword CURRENT, or a blank
            value, can also be used to specify the current network.
        :param NodeAttribute (string, optional): The name of the node column
            containing numeric values that will be used as weights in the
            layout algorithm. Only columns containing numeric values are shown
        :param nodeList (string, optional): Specifies a list of nodes. The
            keywords all, selected, or unselected can be used to specify nodes
            by their selection state. The pattern COLUMN:VALUE sets this
            parameter to any rows that contain the specified column value; if
            the COLUMN prefix is not used, the NAME column is matched by
            default. A list of COLUMN:VALUE pairs of the format
            COLUMN1:VALUE1,COLUMN2:VALUE2,... can be used to match multiple
            values.
        :param x_position (string, optional): X start position, in numeric value
        :param y_start_position (string, optional): Y start position, in numeric value
        """
        network = check_network(self, network, verbose=verbose)
        PARAMS = set_param(['EdgeAttribute', 'network', 'NodeAttribute', 'nodeList',
                            'x_position', 'y_start_position'],
                           [EdgeAttribute, network, NodeAttribute,
                            nodeList, x_position, y_start_position])
        response = api(url=self.__url + "/stacked-node-layout", PARAMS=PARAMS, method="POST", verbose=verbose)
        return response
def function[stacked_node_layout, parameter[self, EdgeAttribute, network, NodeAttribute, nodeList, x_position, y_start_position, verbose]]: constant[ Execute the Stacked Node Layout on a network. :param EdgeAttribute (string, optional): The name of the edge column contai ning numeric values that will be used as weights in the layout algor ithm. Only columns containing numeric values are shown :param network (string, optional): Specifies a network by name, or by SUID if the prefix SUID: is used. The keyword CURRENT, or a blank value c an also be used to specify the current network. :param NodeAttribute (string, optional): The name of the node column contai ning numeric values that will be used as weights in the layout algor ithm. Only columns containing numeric values are shown :param nodeList (string, optional): Specifies a list of nodes. The keywords all, selected, or unselected can be used to specify nodes by their selection state. The pattern COLUMN:VALUE sets this parameter to any rows that contain the specified column value; if the COLUMN prefix is not used, the NAME column is matched by default. A list of COLUMN :VALUE pairs of the format COLUMN1:VALUE1,COLUMN2:VALUE2,... can be used to match multiple values. :param x_position (string, optional): X start position, in numeric value :param y_start_position (string, optional): Y start position, in numeric va lue ] variable[network] assign[=] call[name[check_network], parameter[name[self], name[network]]] variable[PARAMS] assign[=] call[name[set_param], parameter[list[[<ast.Constant object at 0x7da18fe90e50>, <ast.Constant object at 0x7da18fe90f40>, <ast.Constant object at 0x7da18fe90a60>, <ast.Constant object at 0x7da18fe933a0>, <ast.Constant object at 0x7da18fe907f0>, <ast.Constant object at 0x7da18fe924a0>]], list[[<ast.Name object at 0x7da18fe916c0>, <ast.Name object at 0x7da18fe92110>, <ast.Name object at 0x7da18fe90280>, <ast.Name object at 0x7da18fe91750>, <ast.Name object at 0x7da18fe93be0>, <ast.Name object at 0x7da18fe927d0>]]]] variable[response] assign[=] call[name[api], parameter[]] return[name[response]]
keyword[def] identifier[stacked_node_layout] ( identifier[self] , identifier[EdgeAttribute] = keyword[None] , identifier[network] = keyword[None] , identifier[NodeAttribute] = keyword[None] , identifier[nodeList] = keyword[None] , identifier[x_position] = keyword[None] , identifier[y_start_position] = keyword[None] , identifier[verbose] = keyword[None] ): literal[string] identifier[network] = identifier[check_network] ( identifier[self] , identifier[network] , identifier[verbose] = identifier[verbose] ) identifier[PARAMS] = identifier[set_param] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ],[ identifier[EdgeAttribute] , identifier[network] , identifier[NodeAttribute] , identifier[nodeList] , identifier[x_position] , identifier[y_start_position] ]) identifier[response] = identifier[api] ( identifier[url] = identifier[self] . identifier[__url] + literal[string] , identifier[PARAMS] = identifier[PARAMS] , identifier[method] = literal[string] , identifier[verbose] = identifier[verbose] ) keyword[return] identifier[response]
def stacked_node_layout(self, EdgeAttribute=None, network=None, NodeAttribute=None, nodeList=None, x_position=None, y_start_position=None, verbose=None): """ Execute the Stacked Node Layout on a network. :param EdgeAttribute (string, optional): The name of the edge column contai ning numeric values that will be used as weights in the layout algor ithm. Only columns containing numeric values are shown :param network (string, optional): Specifies a network by name, or by SUID if the prefix SUID: is used. The keyword CURRENT, or a blank value c an also be used to specify the current network. :param NodeAttribute (string, optional): The name of the node column contai ning numeric values that will be used as weights in the layout algor ithm. Only columns containing numeric values are shown :param nodeList (string, optional): Specifies a list of nodes. The keywords all, selected, or unselected can be used to specify nodes by their selection state. The pattern COLUMN:VALUE sets this parameter to any rows that contain the specified column value; if the COLUMN prefix is not used, the NAME column is matched by default. A list of COLUMN :VALUE pairs of the format COLUMN1:VALUE1,COLUMN2:VALUE2,... can be used to match multiple values. :param x_position (string, optional): X start position, in numeric value :param y_start_position (string, optional): Y start position, in numeric va lue """ network = check_network(self, network, verbose=verbose) PARAMS = set_param(['EdgeAttribute', 'network', 'NodeAttribute', 'nodeList', 'x_position', 'y_start_position'], [EdgeAttribute, network, NodeAttribute, nodeList, x_position, y_start_position]) response = api(url=self.__url + '/stacked-node-layout', PARAMS=PARAMS, method='POST', verbose=verbose) return response
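set_param's behavior isn't shown in this section; a guess at its contract (loudly an assumption, not the library's actual implementation): pair parameter names with values and drop the unset ones, producing the body for the cyREST POST:

def set_param(names, values):
    # assumed contract for illustration only
    return {k: v for k, v in zip(names, values) if v is not None}

print(set_param(['network', 'nodeList', 'x_position'], ['current', None, '0']))
# {'network': 'current', 'x_position': '0'}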
def on_left_click(self, event, grid, choices):
        """
        Creates a popup menu when the user clicks on a column,
        if that column is in the list of choices that get a drop-down menu.
        Allows the user to edit the column, but only from the available values.
        """
        row, col = event.GetRow(), event.GetCol()
        if col == 0 and self.grid.name != 'ages':
            default_val = self.grid.GetCellValue(row, col)
            msg = "Choose a new name for {}.\nThe new value will propagate throughout the contribution.".format(default_val)
            dia = wx.TextEntryDialog(self.grid, msg, "Rename {}".format(self.grid.name, default_val), default_val)
            res = dia.ShowModal()
            if res == wx.ID_OK:
                new_val = dia.GetValue()
                # update the contribution with new name
                self.contribution.rename_item(self.grid.name, default_val, new_val)
                # don't propagate changes if we are just assigning a new name
                # and not really renaming
                # (i.e., if a blank row was added then named)
                if default_val == '':
                    self.grid.SetCellValue(row, 0, new_val)
                    return
                # update the current grid with new name
                for row in range(self.grid.GetNumberRows()):
                    cell_value = self.grid.GetCellValue(row, 0)
                    if cell_value == default_val:
                        self.grid.SetCellValue(row, 0, new_val)
                    else:
                        continue
            return
        color = self.grid.GetCellBackgroundColour(event.GetRow(), event.GetCol())
        # allow user to cherry-pick cells for editing.
        # gets selection of meta key for mac, ctrl key for pc
        if event.ControlDown() or event.MetaDown():
            row, col = event.GetRow(), event.GetCol()
            if (row, col) not in self.dispersed_selection:
                self.dispersed_selection.append((row, col))
                self.grid.SetCellBackgroundColour(row, col, 'light blue')
            else:
                self.dispersed_selection.remove((row, col))
                self.grid.SetCellBackgroundColour(row, col, color)  # 'white'
            self.grid.ForceRefresh()
            return
        if event.ShiftDown():  # allow user to highlight multiple consecutive cells in a column
            previous_col = self.grid.GetGridCursorCol()
            previous_row = self.grid.GetGridCursorRow()
            col = event.GetCol()
            row = event.GetRow()
            if col != previous_col:
                return
            else:
                if row > previous_row:
                    row_range = list(range(previous_row, row+1))
                else:
                    row_range = list(range(row, previous_row+1))
            for r in row_range:
                self.grid.SetCellBackgroundColour(r, col, 'light blue')
                self.selection.append((r, col))
            self.grid.ForceRefresh()
            return
        selection = False
        if self.dispersed_selection:
            is_dispersed = True
            selection = self.dispersed_selection
        if self.selection:
            is_dispersed = False
            selection = self.selection
        try:
            col = event.GetCol()
            row = event.GetRow()
        except AttributeError:
            row, col = selection[0][0], selection[0][1]
        self.grid.SetGridCursor(row, col)
        if col in list(choices.keys()):  # column should have a pop-up menu
            menu = wx.Menu()
            two_tiered = choices[col][1]
            choices = choices[col][0]
            if not two_tiered:  # menu is one tiered
                if 'CLEAR cell of all values' not in choices:
                    choices.insert(0, 'CLEAR cell of all values')
                for choice in choices:
                    if not choice:
                        choice = " "  # prevents error if choice is an empty string
                    menuitem = menu.Append(wx.ID_ANY, str(choice))
                    self.window.Bind(wx.EVT_MENU, lambda event: self.on_select_menuitem(event, grid, row, col, selection), menuitem)
                self.show_menu(event, menu)
            else:  # menu is two_tiered
                clear = menu.Append(-1, 'CLEAR cell of all values')
                self.window.Bind(wx.EVT_MENU, lambda event: self.on_select_menuitem(event, grid, row, col, selection), clear)
                for choice in sorted(choices.items()):
                    submenu = wx.Menu()
                    for item in choice[1]:
                        menuitem = submenu.Append(-1, str(item))
                        self.window.Bind(wx.EVT_MENU, lambda event: self.on_select_menuitem(event, grid, row, col, selection), menuitem)
menu.Append(-1, choice[0], submenu) self.show_menu(event, menu) if selection: # re-whiten the cells that were previously highlighted for row, col in selection: self.grid.SetCellBackgroundColour(row, col, self.col_color) self.dispersed_selection = [] self.selection = [] self.grid.ForceRefresh()
def function[on_left_click, parameter[self, event, grid, choices]]: constant[ creates popup menu when user clicks on the column if that column is in the list of choices that get a drop-down menu. allows user to edit the column, but only from available values ] <ast.Tuple object at 0x7da1b022ceb0> assign[=] tuple[[<ast.Call object at 0x7da1b022fa90>, <ast.Call object at 0x7da1b022c880>]] if <ast.BoolOp object at 0x7da1b022ff10> begin[:] variable[default_val] assign[=] call[name[self].grid.GetCellValue, parameter[name[row], name[col]]] variable[msg] assign[=] call[constant[Choose a new name for {}. The new value will propagate throughout the contribution.].format, parameter[name[default_val]]] variable[dia] assign[=] call[name[wx].TextEntryDialog, parameter[name[self].grid, name[msg], call[constant[Rename {}].format, parameter[name[self].grid.name, name[default_val]]], name[default_val]]] variable[res] assign[=] call[name[dia].ShowModal, parameter[]] if compare[name[res] equal[==] name[wx].ID_OK] begin[:] variable[new_val] assign[=] call[name[dia].GetValue, parameter[]] call[name[self].contribution.rename_item, parameter[name[self].grid.name, name[default_val], name[new_val]]] if compare[name[default_val] equal[==] constant[]] begin[:] call[name[self].grid.SetCellValue, parameter[name[row], constant[0], name[new_val]]] return[None] for taget[name[row]] in starred[call[name[range], parameter[call[name[self].grid.GetNumberRows, parameter[]]]]] begin[:] variable[cell_value] assign[=] call[name[self].grid.GetCellValue, parameter[name[row], constant[0]]] if compare[name[cell_value] equal[==] name[default_val]] begin[:] call[name[self].grid.SetCellValue, parameter[name[row], constant[0], name[new_val]]] return[None] variable[color] assign[=] call[name[self].grid.GetCellBackgroundColour, parameter[call[name[event].GetRow, parameter[]], call[name[event].GetCol, parameter[]]]] if <ast.BoolOp object at 0x7da1b02e93c0> begin[:] <ast.Tuple object at 0x7da1b02e9ff0> assign[=] tuple[[<ast.Call object at 0x7da1b02e8250>, <ast.Call object at 0x7da1b02ebac0>]] if compare[tuple[[<ast.Name object at 0x7da1b02ebd00>, <ast.Name object at 0x7da1b02ebca0>]] <ast.NotIn object at 0x7da2590d7190> name[self].dispersed_selection] begin[:] call[name[self].dispersed_selection.append, parameter[tuple[[<ast.Name object at 0x7da1b02e8c70>, <ast.Name object at 0x7da1b02e8490>]]]] call[name[self].grid.SetCellBackgroundColour, parameter[name[row], name[col], constant[light blue]]] call[name[self].grid.ForceRefresh, parameter[]] return[None] if call[name[event].ShiftDown, parameter[]] begin[:] variable[previous_col] assign[=] call[name[self].grid.GetGridCursorCol, parameter[]] variable[previous_row] assign[=] call[name[self].grid.GetGridCursorRow, parameter[]] variable[col] assign[=] call[name[event].GetCol, parameter[]] variable[row] assign[=] call[name[event].GetRow, parameter[]] if compare[name[col] not_equal[!=] name[previous_col]] begin[:] return[None] for taget[name[r]] in starred[name[row_range]] begin[:] call[name[self].grid.SetCellBackgroundColour, parameter[name[r], name[col], constant[light blue]]] call[name[self].selection.append, parameter[tuple[[<ast.Name object at 0x7da1b02eb7f0>, <ast.Name object at 0x7da1b02e9a20>]]]] call[name[self].grid.ForceRefresh, parameter[]] return[None] variable[selection] assign[=] constant[False] if name[self].dispersed_selection begin[:] variable[is_dispersed] assign[=] constant[True] variable[selection] assign[=] name[self].dispersed_selection if name[self].selection begin[:] 
variable[is_dispersed] assign[=] constant[False] variable[selection] assign[=] name[self].selection <ast.Try object at 0x7da1b02ebf70> call[name[self].grid.SetGridCursor, parameter[name[row], name[col]]] if compare[name[col] in call[name[list], parameter[call[name[choices].keys, parameter[]]]]] begin[:] variable[menu] assign[=] call[name[wx].Menu, parameter[]] variable[two_tiered] assign[=] call[call[name[choices]][name[col]]][constant[1]] variable[choices] assign[=] call[call[name[choices]][name[col]]][constant[0]] if <ast.UnaryOp object at 0x7da1b024a2f0> begin[:] if compare[constant[CLEAR cell of all values] <ast.NotIn object at 0x7da2590d7190> name[choices]] begin[:] call[name[choices].insert, parameter[constant[0], constant[CLEAR cell of all values]]] for taget[name[choice]] in starred[name[choices]] begin[:] if <ast.UnaryOp object at 0x7da1b0249bd0> begin[:] variable[choice] assign[=] constant[ ] variable[menuitem] assign[=] call[name[menu].Append, parameter[name[wx].ID_ANY, call[name[str], parameter[name[choice]]]]] call[name[self].window.Bind, parameter[name[wx].EVT_MENU, <ast.Lambda object at 0x7da1b024a1d0>, name[menuitem]]] call[name[self].show_menu, parameter[name[event], name[menu]]] if name[selection] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b05bca30>, <ast.Name object at 0x7da1b05bdf90>]]] in starred[name[selection]] begin[:] call[name[self].grid.SetCellBackgroundColour, parameter[name[row], name[col], name[self].col_color]] name[self].dispersed_selection assign[=] list[[]] name[self].selection assign[=] list[[]] call[name[self].grid.ForceRefresh, parameter[]]
keyword[def] identifier[on_left_click] ( identifier[self] , identifier[event] , identifier[grid] , identifier[choices] ): literal[string] identifier[row] , identifier[col] = identifier[event] . identifier[GetRow] (), identifier[event] . identifier[GetCol] () keyword[if] identifier[col] == literal[int] keyword[and] identifier[self] . identifier[grid] . identifier[name] != literal[string] : identifier[default_val] = identifier[self] . identifier[grid] . identifier[GetCellValue] ( identifier[row] , identifier[col] ) identifier[msg] = literal[string] . identifier[format] ( identifier[default_val] ) identifier[dia] = identifier[wx] . identifier[TextEntryDialog] ( identifier[self] . identifier[grid] , identifier[msg] , literal[string] . identifier[format] ( identifier[self] . identifier[grid] . identifier[name] , identifier[default_val] ), identifier[default_val] ) identifier[res] = identifier[dia] . identifier[ShowModal] () keyword[if] identifier[res] == identifier[wx] . identifier[ID_OK] : identifier[new_val] = identifier[dia] . identifier[GetValue] () identifier[self] . identifier[contribution] . identifier[rename_item] ( identifier[self] . identifier[grid] . identifier[name] , identifier[default_val] , identifier[new_val] ) keyword[if] identifier[default_val] == literal[string] : identifier[self] . identifier[grid] . identifier[SetCellValue] ( identifier[row] , literal[int] , identifier[new_val] ) keyword[return] keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[self] . identifier[grid] . identifier[GetNumberRows] ()): identifier[cell_value] = identifier[self] . identifier[grid] . identifier[GetCellValue] ( identifier[row] , literal[int] ) keyword[if] identifier[cell_value] == identifier[default_val] : identifier[self] . identifier[grid] . identifier[SetCellValue] ( identifier[row] , literal[int] , identifier[new_val] ) keyword[else] : keyword[continue] keyword[return] identifier[color] = identifier[self] . identifier[grid] . identifier[GetCellBackgroundColour] ( identifier[event] . identifier[GetRow] (), identifier[event] . identifier[GetCol] ()) keyword[if] identifier[event] . identifier[ControlDown] () keyword[or] identifier[event] . identifier[MetaDown] (): identifier[row] , identifier[col] = identifier[event] . identifier[GetRow] (), identifier[event] . identifier[GetCol] () keyword[if] ( identifier[row] , identifier[col] ) keyword[not] keyword[in] identifier[self] . identifier[dispersed_selection] : identifier[self] . identifier[dispersed_selection] . identifier[append] (( identifier[row] , identifier[col] )) identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[row] , identifier[col] , literal[string] ) keyword[else] : identifier[self] . identifier[dispersed_selection] . identifier[remove] (( identifier[row] , identifier[col] )) identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[row] , identifier[col] , identifier[color] ) identifier[self] . identifier[grid] . identifier[ForceRefresh] () keyword[return] keyword[if] identifier[event] . identifier[ShiftDown] (): identifier[previous_col] = identifier[self] . identifier[grid] . identifier[GetGridCursorCol] () identifier[previous_row] = identifier[self] . identifier[grid] . identifier[GetGridCursorRow] () identifier[col] = identifier[event] . identifier[GetCol] () identifier[row] = identifier[event] . 
identifier[GetRow] () keyword[if] identifier[col] != identifier[previous_col] : keyword[return] keyword[else] : keyword[if] identifier[row] > identifier[previous_row] : identifier[row_range] = identifier[list] ( identifier[range] ( identifier[previous_row] , identifier[row] + literal[int] )) keyword[else] : identifier[row_range] = identifier[list] ( identifier[range] ( identifier[row] , identifier[previous_row] + literal[int] )) keyword[for] identifier[r] keyword[in] identifier[row_range] : identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[r] , identifier[col] , literal[string] ) identifier[self] . identifier[selection] . identifier[append] (( identifier[r] , identifier[col] )) identifier[self] . identifier[grid] . identifier[ForceRefresh] () keyword[return] identifier[selection] = keyword[False] keyword[if] identifier[self] . identifier[dispersed_selection] : identifier[is_dispersed] = keyword[True] identifier[selection] = identifier[self] . identifier[dispersed_selection] keyword[if] identifier[self] . identifier[selection] : identifier[is_dispersed] = keyword[False] identifier[selection] = identifier[self] . identifier[selection] keyword[try] : identifier[col] = identifier[event] . identifier[GetCol] () identifier[row] = identifier[event] . identifier[GetRow] () keyword[except] identifier[AttributeError] : identifier[row] , identifier[col] = identifier[selection] [ literal[int] ][ literal[int] ], identifier[selection] [ literal[int] ][ literal[int] ] identifier[self] . identifier[grid] . identifier[SetGridCursor] ( identifier[row] , identifier[col] ) keyword[if] identifier[col] keyword[in] identifier[list] ( identifier[choices] . identifier[keys] ()): identifier[menu] = identifier[wx] . identifier[Menu] () identifier[two_tiered] = identifier[choices] [ identifier[col] ][ literal[int] ] identifier[choices] = identifier[choices] [ identifier[col] ][ literal[int] ] keyword[if] keyword[not] identifier[two_tiered] : keyword[if] literal[string] keyword[not] keyword[in] identifier[choices] : identifier[choices] . identifier[insert] ( literal[int] , literal[string] ) keyword[for] identifier[choice] keyword[in] identifier[choices] : keyword[if] keyword[not] identifier[choice] : identifier[choice] = literal[string] identifier[menuitem] = identifier[menu] . identifier[Append] ( identifier[wx] . identifier[ID_ANY] , identifier[str] ( identifier[choice] )) identifier[self] . identifier[window] . identifier[Bind] ( identifier[wx] . identifier[EVT_MENU] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_select_menuitem] ( identifier[event] , identifier[grid] , identifier[row] , identifier[col] , identifier[selection] ), identifier[menuitem] ) identifier[self] . identifier[show_menu] ( identifier[event] , identifier[menu] ) keyword[else] : identifier[clear] = identifier[menu] . identifier[Append] (- literal[int] , literal[string] ) identifier[self] . identifier[window] . identifier[Bind] ( identifier[wx] . identifier[EVT_MENU] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_select_menuitem] ( identifier[event] , identifier[grid] , identifier[row] , identifier[col] , identifier[selection] ), identifier[clear] ) keyword[for] identifier[choice] keyword[in] identifier[sorted] ( identifier[choices] . identifier[items] ()): identifier[submenu] = identifier[wx] . identifier[Menu] () keyword[for] identifier[item] keyword[in] identifier[choice] [ literal[int] ]: identifier[menuitem] = identifier[submenu] . 
identifier[Append] (- literal[int] , identifier[str] ( identifier[item] )) identifier[self] . identifier[window] . identifier[Bind] ( identifier[wx] . identifier[EVT_MENU] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_select_menuitem] ( identifier[event] , identifier[grid] , identifier[row] , identifier[col] , identifier[selection] ), identifier[menuitem] ) identifier[menu] . identifier[Append] (- literal[int] , identifier[choice] [ literal[int] ], identifier[submenu] ) identifier[self] . identifier[show_menu] ( identifier[event] , identifier[menu] ) keyword[if] identifier[selection] : keyword[for] identifier[row] , identifier[col] keyword[in] identifier[selection] : identifier[self] . identifier[grid] . identifier[SetCellBackgroundColour] ( identifier[row] , identifier[col] , identifier[self] . identifier[col_color] ) identifier[self] . identifier[dispersed_selection] =[] identifier[self] . identifier[selection] =[] identifier[self] . identifier[grid] . identifier[ForceRefresh] ()
def on_left_click(self, event, grid, choices): """ creates popup menu when user clicks on the column if that column is in the list of choices that get a drop-down menu. allows user to edit the column, but only from available values """ (row, col) = (event.GetRow(), event.GetCol()) if col == 0 and self.grid.name != 'ages': default_val = self.grid.GetCellValue(row, col) msg = 'Choose a new name for {}.\nThe new value will propagate throughout the contribution.'.format(default_val) dia = wx.TextEntryDialog(self.grid, msg, 'Rename {}'.format(self.grid.name, default_val), default_val) res = dia.ShowModal() if res == wx.ID_OK: new_val = dia.GetValue() # update the contribution with new name self.contribution.rename_item(self.grid.name, default_val, new_val) # don't propagate changes if we are just assigning a new name # and not really renaming # (i.e., if a blank row was added then named) if default_val == '': self.grid.SetCellValue(row, 0, new_val) return # depends on [control=['if'], data=[]] # update the current grid with new name for row in range(self.grid.GetNumberRows()): cell_value = self.grid.GetCellValue(row, 0) if cell_value == default_val: self.grid.SetCellValue(row, 0, new_val) # depends on [control=['if'], data=[]] else: continue # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]] return # depends on [control=['if'], data=[]] color = self.grid.GetCellBackgroundColour(event.GetRow(), event.GetCol()) # allow user to cherry-pick cells for editing. # gets selection of meta key for mac, ctrl key for pc if event.ControlDown() or event.MetaDown(): (row, col) = (event.GetRow(), event.GetCol()) if (row, col) not in self.dispersed_selection: self.dispersed_selection.append((row, col)) self.grid.SetCellBackgroundColour(row, col, 'light blue') # depends on [control=['if'], data=[]] else: self.dispersed_selection.remove((row, col)) self.grid.SetCellBackgroundColour(row, col, color) # 'white' self.grid.ForceRefresh() return # depends on [control=['if'], data=[]] if event.ShiftDown(): # allow user to highlight multiple consecutive cells in a column previous_col = self.grid.GetGridCursorCol() previous_row = self.grid.GetGridCursorRow() col = event.GetCol() row = event.GetRow() if col != previous_col: return # depends on [control=['if'], data=[]] elif row > previous_row: row_range = list(range(previous_row, row + 1)) # depends on [control=['if'], data=['row', 'previous_row']] else: row_range = list(range(row, previous_row + 1)) for r in row_range: self.grid.SetCellBackgroundColour(r, col, 'light blue') self.selection.append((r, col)) # depends on [control=['for'], data=['r']] self.grid.ForceRefresh() return # depends on [control=['if'], data=[]] selection = False if self.dispersed_selection: is_dispersed = True selection = self.dispersed_selection # depends on [control=['if'], data=[]] if self.selection: is_dispersed = False selection = self.selection # depends on [control=['if'], data=[]] try: col = event.GetCol() row = event.GetRow() # depends on [control=['try'], data=[]] except AttributeError: (row, col) = (selection[0][0], selection[0][1]) # depends on [control=['except'], data=[]] self.grid.SetGridCursor(row, col) if col in list(choices.keys()): # column should have a pop-up menu menu = wx.Menu() two_tiered = choices[col][1] choices = choices[col][0] if not two_tiered: # menu is one tiered if 'CLEAR cell of all values' not in choices: choices.insert(0, 'CLEAR cell of all values') # depends on [control=['if'], data=['choices']] for choice in choices: if not 
choice: choice = ' ' # prevents error if choice is an empty string # depends on [control=['if'], data=[]] menuitem = menu.Append(wx.ID_ANY, str(choice)) self.window.Bind(wx.EVT_MENU, lambda event: self.on_select_menuitem(event, grid, row, col, selection), menuitem) # depends on [control=['for'], data=['choice']] self.show_menu(event, menu) # depends on [control=['if'], data=[]] else: # menu is two_tiered clear = menu.Append(-1, 'CLEAR cell of all values') self.window.Bind(wx.EVT_MENU, lambda event: self.on_select_menuitem(event, grid, row, col, selection), clear) for choice in sorted(choices.items()): submenu = wx.Menu() for item in choice[1]: menuitem = submenu.Append(-1, str(item)) self.window.Bind(wx.EVT_MENU, lambda event: self.on_select_menuitem(event, grid, row, col, selection), menuitem) # depends on [control=['for'], data=['item']] menu.Append(-1, choice[0], submenu) # depends on [control=['for'], data=['choice']] self.show_menu(event, menu) # depends on [control=['if'], data=['col']] if selection: # re-whiten the cells that were previously highlighted for (row, col) in selection: self.grid.SetCellBackgroundColour(row, col, self.col_color) # depends on [control=['for'], data=[]] self.dispersed_selection = [] self.selection = [] self.grid.ForceRefresh() # depends on [control=['if'], data=[]]
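The shift-click branch builds an inclusive row range in either direction; the same logic in a compact, direction-agnostic form (a sketch, not a drop-in for the wx handler):

def inclusive_rows(previous_row, row):
    lo, hi = min(previous_row, row), max(previous_row, row)
    return list(range(lo, hi + 1))

assert inclusive_rows(2, 5) == [2, 3, 4, 5]
assert inclusive_rows(5, 2) == [2, 3, 4, 5]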
def complete(self): """ Make the graph a complete graph. @attention: This will modify the current graph. """ for each in self.nodes(): for other in self.nodes(): if (each != other and not self.has_edge((each, other))): self.add_edge((each, other))
def function[complete, parameter[self]]: constant[ Make the graph a complete graph. @attention: This will modify the current graph. ] for taget[name[each]] in starred[call[name[self].nodes, parameter[]]] begin[:] for taget[name[other]] in starred[call[name[self].nodes, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b17f68c0> begin[:] call[name[self].add_edge, parameter[tuple[[<ast.Name object at 0x7da1b17f68f0>, <ast.Name object at 0x7da1b17f6920>]]]]
keyword[def] identifier[complete] ( identifier[self] ): literal[string] keyword[for] identifier[each] keyword[in] identifier[self] . identifier[nodes] (): keyword[for] identifier[other] keyword[in] identifier[self] . identifier[nodes] (): keyword[if] ( identifier[each] != identifier[other] keyword[and] keyword[not] identifier[self] . identifier[has_edge] (( identifier[each] , identifier[other] ))): identifier[self] . identifier[add_edge] (( identifier[each] , identifier[other] ))
def complete(self):
    """
    Make the graph a complete graph.

    @attention: This will modify the current graph.
    """
    for each in self.nodes():
        for other in self.nodes():
            if each != other and (not self.has_edge((each, other))):
                self.add_edge((each, other)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['other']] # depends on [control=['for'], data=['each']]
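Completing a graph this way costs O(n^2) has_edge/add_edge calls over the node set. A standalone sketch of the same double loop, assuming a plain dict-of-sets adjacency map rather than the class's (unshown) storage:

def make_complete(adj):
    # connect every ordered pair of distinct nodes
    for u in adj:
        for v in adj:
            if u != v and v not in adj[u]:
                adj[u].add(v)

adj = {1: set(), 2: set(), 3: set()}
make_complete(adj)
assert adj[1] == {2, 3} and adj[2] == {1, 3} and adj[3] == {1, 2}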
def write_model(self, filename='scores', filepath='', output_format='csv'):
    """
    This method calculates the scores for the data frame received and writes them to a file.
    If the output format is other than 'csv' it will print the scores.

    :param filename: the name to give to the file
    :type filename: string
    :param filepath: the path to save the file
    :type filepath: string
    :param output_format: the format of the file to write ('csv')
    :type output_format: string
    """
    scores_array = np.array([])
    for obs in self.observations:
        c, sd = self.__get_centroids_sd(obs)
        points, ids = self.__get_features_for_observation(observation=obs, last_column_is_id=True)
        b = np.array([])
        for p in points:
            b = np.append(b, [self.get_single_score(p, centroids=c, sd=sd)])
        scores_array = np.vstack([scores_array, b]) if scores_array.size else b
    scores_array = np.concatenate((ids[:, np.newaxis], scores_array.transpose()), axis=1)
    header = 'id,' + ','.join(self.observations)
    try:
        if output_format == 'csv':
            filename = join(filepath, filename) + '.' + output_format
            np.savetxt(filename, scores_array, delimiter=",", fmt='%i', header=header, comments='')
        else:
            print(scores_array)
    except:
        logging.error("Unexpected error on writing output")
def function[write_model, parameter[self, filename, filepath, output_format]]: constant[ This method calculates the scores for the data frame received and writes them to a file. If the output format is other than 'csv' it will print the scores. :param filename: the name to give to the file :type filename: string :param filepath: the path to save the file :type filepath: string :param output_format: the format of the file to write ('csv') :type output_format: string ] variable[scores_array] assign[=] call[name[np].array, parameter[list[[]]]] for taget[name[obs]] in starred[name[self].observations] begin[:] <ast.Tuple object at 0x7da2041d80a0> assign[=] call[name[self].__get_centroids_sd, parameter[name[obs]]] <ast.Tuple object at 0x7da2041d8b80> assign[=] call[name[self].__get_features_for_observation, parameter[]] variable[b] assign[=] call[name[np].array, parameter[list[[]]]] for taget[name[p]] in starred[name[points]] begin[:] variable[b] assign[=] call[name[np].append, parameter[name[b], list[[<ast.Call object at 0x7da2041d9420>]]]] variable[scores_array] assign[=] <ast.IfExp object at 0x7da2041d9c60> variable[scores_array] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Subscript object at 0x7da18f09cd60>, <ast.Call object at 0x7da18f09e470>]]]] variable[header] assign[=] binary_operation[constant[id,] + call[constant[,].join, parameter[name[self].observations]]] <ast.Try object at 0x7da18f09cd00>
keyword[def] identifier[write_model] ( identifier[self] , identifier[filename] = literal[string] , identifier[filepath] = literal[string] , identifier[output_format] = literal[string] ): literal[string] identifier[scores_array] = identifier[np] . identifier[array] ([]) keyword[for] identifier[obs] keyword[in] identifier[self] . identifier[observations] : identifier[c] , identifier[sd] = identifier[self] . identifier[__get_centroids_sd] ( identifier[obs] ) identifier[points] , identifier[ids] = identifier[self] . identifier[__get_features_for_observation] ( identifier[observation] = identifier[obs] , identifier[last_column_is_id] = keyword[True] ) identifier[b] = identifier[np] . identifier[array] ([]) keyword[for] identifier[p] keyword[in] identifier[points] : identifier[b] = identifier[np] . identifier[append] ( identifier[b] ,[ identifier[self] . identifier[get_single_score] ( identifier[p] , identifier[centroids] = identifier[c] , identifier[sd] = identifier[sd] )]) identifier[scores_array] = identifier[np] . identifier[vstack] ([ identifier[scores_array] , identifier[b] ]) keyword[if] identifier[scores_array] . identifier[size] keyword[else] identifier[b] identifier[scores_array] = identifier[np] . identifier[concatenate] (( identifier[ids] [:, identifier[np] . identifier[newaxis] ], identifier[scores_array] . identifier[transpose] ()), identifier[axis] = literal[int] ) identifier[header] = literal[string] + literal[string] . identifier[join] ( identifier[self] . identifier[observations] ) keyword[try] : keyword[if] identifier[output_format] == literal[string] : identifier[filename] = identifier[join] ( identifier[filepath] , identifier[filename] )+ literal[string] + identifier[output_format] identifier[np] . identifier[savetxt] ( identifier[filename] , identifier[scores_array] , identifier[delimiter] = literal[string] , identifier[fmt] = literal[string] , identifier[header] = identifier[header] , identifier[comments] = literal[string] ) keyword[else] : identifier[print] ( identifier[scores_array] ) keyword[except] : identifier[logging] . identifier[error] ( literal[string] )
def write_model(self, filename='scores', filepath='', output_format='csv'):
    """
    This method calculates the scores for the data frame received and writes them to a file.
    If the output format is other than 'csv' it will print the scores.

    :param filename: the name to give to the file
    :type filename: string
    :param filepath: the path to save the file
    :type filepath: string
    :param output_format: the format of the file to write ('csv')
    :type output_format: string
    """
    scores_array = np.array([])
    for obs in self.observations:
        (c, sd) = self.__get_centroids_sd(obs)
        (points, ids) = self.__get_features_for_observation(observation=obs, last_column_is_id=True)
        b = np.array([])
        for p in points:
            b = np.append(b, [self.get_single_score(p, centroids=c, sd=sd)]) # depends on [control=['for'], data=['p']]
        scores_array = np.vstack([scores_array, b]) if scores_array.size else b # depends on [control=['for'], data=['obs']]
    scores_array = np.concatenate((ids[:, np.newaxis], scores_array.transpose()), axis=1)
    header = 'id,' + ','.join(self.observations)
    try:
        if output_format == 'csv':
            filename = join(filepath, filename) + '.' + output_format
            np.savetxt(filename, scores_array, delimiter=',', fmt='%i', header=header, comments='') # depends on [control=['if'], data=['output_format']]
        else:
            print(scores_array) # depends on [control=['try'], data=[]]
    except:
        logging.error('Unexpected error on writing output') # depends on [control=['except'], data=[]]
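The heart of write_model is a numpy stacking pattern: one score row per observation, transposed so each output row is an item, with the id column prepended before savetxt writes the CSV. A self-contained sketch of just that pattern, with made-up ids and observation names:

import numpy as np

ids = np.array([101, 102, 103])
scores_by_obs = [np.array([1, 2, 3]), np.array([4, 5, 6])]  # two observations

scores_array = np.vstack(scores_by_obs)                     # shape (2, 3)
scores_array = np.concatenate((ids[:, np.newaxis],
                               scores_array.transpose()), axis=1)  # shape (3, 3)
header = 'id,' + ','.join(['obs_a', 'obs_b'])
np.savetxt('scores.csv', scores_array, delimiter=',', fmt='%i',
           header=header, comments='')  # comments='' keeps the header unprefixed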
def to_bigquery_fields(self, name_case=DdlParseBase.NAME_CASE.original):
    """
    Generate the BigQuery JSON fields definition

    :param name_case: name case type
        * DdlParse.NAME_CASE.original : return names without conversion
        * DdlParse.NAME_CASE.lower : return names in lower case
        * DdlParse.NAME_CASE.upper : return names in upper case

    :return: BigQuery JSON fields definition
    """
    return self._columns.to_bigquery_fields(name_case)
def function[to_bigquery_fields, parameter[self, name_case]]: constant[ Generate the BigQuery JSON fields definition :param name_case: name case type * DdlParse.NAME_CASE.original : return names without conversion * DdlParse.NAME_CASE.lower : return names in lower case * DdlParse.NAME_CASE.upper : return names in upper case :return: BigQuery JSON fields definition ] return[call[name[self]._columns.to_bigquery_fields, parameter[name[name_case]]]]
keyword[def] identifier[to_bigquery_fields] ( identifier[self] , identifier[name_case] = identifier[DdlParseBase] . identifier[NAME_CASE] . identifier[original] ): literal[string] keyword[return] identifier[self] . identifier[_columns] . identifier[to_bigquery_fields] ( identifier[name_case] )
def to_bigquery_fields(self, name_case=DdlParseBase.NAME_CASE.original):
    """
    Generate the BigQuery JSON fields definition

    :param name_case: name case type
        * DdlParse.NAME_CASE.original : return names without conversion
        * DdlParse.NAME_CASE.lower : return names in lower case
        * DdlParse.NAME_CASE.upper : return names in upper case

    :return: BigQuery JSON fields definition
    """
    return self._columns.to_bigquery_fields(name_case)
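A short usage sketch, assuming this method belongs to the ddlparse package (suggested by the DdlParseBase reference above); the DDL string is illustrative:

from ddlparse import DdlParse

ddl = 'CREATE TABLE sample (id integer NOT NULL, name varchar(20))'
table = DdlParse().parse(ddl)
print(table.to_bigquery_fields())                          # original name case
print(table.to_bigquery_fields(DdlParse.NAME_CASE.lower))  # lower-cased names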
def find_neighbors(neighbors, coords, I, source_files, f, sides):
    """Find the tile neighbors based on filenames

    Parameters
    -----------
    neighbors : dict
        Dictionary that stores the neighbors. Format is
        neighbors["source_file_name"]["side"] = "neighbor_source_file_name"
    coords : list
        List of coordinates determined from the filename.
        See :py:func:`utils.parse_fn`
    I : array
        Sort index. Different sorting schemes will change how quickly
        neighbors are found
    source_files : list
        List of strings of source file names
    f : callable
        Function that determines if two tiles are neighbors based on their
        coordinates. f(c1, c2) returns True if tiles are neighbors
    sides : list
        List of 2 strings that give the "side" where tiles are neighbors.

    Returns
    -------
    neighbors : dict
        Dictionary of neighbors

    Notes
    ------
    For example, if Tile1 is to the left of Tile2, then
        neighbors['Tile1']['right'] = 'Tile2'
        neighbors['Tile2']['left'] = 'Tile1'
    """
    for i, c1 in enumerate(coords):
        me = source_files[I[i]]
        # If the left neighbor has already been found...
        if neighbors[me][sides[0]] != '':
            continue
        # could try coords[i:] (+ fixes) for speed if it becomes a problem
        for j, c2 in enumerate(coords):
            if f(c1, c2):  # then tiles are neighbors
                neigh = source_files[I[j]]
                neighbors[me][sides[0]] = neigh
                neighbors[neigh][sides[1]] = me
                break
    return neighbors
def function[find_neighbors, parameter[neighbors, coords, I, source_files, f, sides]]: constant[Find the tile neighbors based on filenames Parameters ----------- neighbors : dict Dictionary that stores the neighbors. Format is neighbors["source_file_name"]["side"] = "neighbor_source_file_name" coords : list List of coordinates determined from the filename. See :py:func:`utils.parse_fn` I : array Sort index. Different sorting schemes will change how quickly neighbors are found source_files : list List of strings of source file names f : callable Function that determines if two tiles are neighbors based on their coordinates. f(c1, c2) returns True if tiles are neighbors sides : list List of 2 strings that give the "side" where tiles are neighbors. Returns ------- neighbors : dict Dictionary of neighbors Notes ------ For example, if Tile1 is to the left of Tile2, then neighbors['Tile1']['right'] = 'Tile2' neighbors['Tile2']['left'] = 'Tile1' ] for taget[tuple[[<ast.Name object at 0x7da18dc04970>, <ast.Name object at 0x7da18dc05d80>]]] in starred[call[name[enumerate], parameter[name[coords]]]] begin[:] variable[me] assign[=] call[name[source_files]][call[name[I]][name[i]]] if compare[call[call[name[neighbors]][name[me]]][call[name[sides]][constant[0]]] not_equal[!=] constant[]] begin[:] continue for taget[tuple[[<ast.Name object at 0x7da20c990370>, <ast.Name object at 0x7da20c993850>]]] in starred[call[name[enumerate], parameter[name[coords]]]] begin[:] if call[name[f], parameter[name[c1], name[c2]]] begin[:] variable[neigh] assign[=] call[name[source_files]][call[name[I]][name[j]]] call[call[name[neighbors]][name[me]]][call[name[sides]][constant[0]]] assign[=] name[neigh] call[call[name[neighbors]][name[neigh]]][call[name[sides]][constant[1]]] assign[=] name[me] break return[name[neighbors]]
keyword[def] identifier[find_neighbors] ( identifier[neighbors] , identifier[coords] , identifier[I] , identifier[source_files] , identifier[f] , identifier[sides] ): literal[string] keyword[for] identifier[i] , identifier[c1] keyword[in] identifier[enumerate] ( identifier[coords] ): identifier[me] = identifier[source_files] [ identifier[I] [ identifier[i] ]] keyword[if] identifier[neighbors] [ identifier[me] ][ identifier[sides] [ literal[int] ]]!= literal[string] : keyword[continue] keyword[for] identifier[j] , identifier[c2] keyword[in] identifier[enumerate] ( identifier[coords] ): keyword[if] identifier[f] ( identifier[c1] , identifier[c2] ): identifier[neigh] = identifier[source_files] [ identifier[I] [ identifier[j] ]] identifier[neighbors] [ identifier[me] ][ identifier[sides] [ literal[int] ]]= identifier[neigh] identifier[neighbors] [ identifier[neigh] ][ identifier[sides] [ literal[int] ]]= identifier[me] keyword[break] keyword[return] identifier[neighbors]
def find_neighbors(neighbors, coords, I, source_files, f, sides):
    """Find the tile neighbors based on filenames

    Parameters
    -----------
    neighbors : dict
        Dictionary that stores the neighbors. Format is
        neighbors["source_file_name"]["side"] = "neighbor_source_file_name"
    coords : list
        List of coordinates determined from the filename.
        See :py:func:`utils.parse_fn`
    I : array
        Sort index. Different sorting schemes will change how quickly
        neighbors are found
    source_files : list
        List of strings of source file names
    f : callable
        Function that determines if two tiles are neighbors based on their
        coordinates. f(c1, c2) returns True if tiles are neighbors
    sides : list
        List of 2 strings that give the "side" where tiles are neighbors.

    Returns
    -------
    neighbors : dict
        Dictionary of neighbors

    Notes
    ------
    For example, if Tile1 is to the left of Tile2, then
        neighbors['Tile1']['right'] = 'Tile2'
        neighbors['Tile2']['left'] = 'Tile1'
    """
    for (i, c1) in enumerate(coords):
        me = source_files[I[i]]
        # If the left neighbor has already been found...
        if neighbors[me][sides[0]] != '':
            continue # depends on [control=['if'], data=[]]
        # could try coords[i:] (+ fixes) for speed if it becomes a problem
        for (j, c2) in enumerate(coords):
            if f(c1, c2): # then tiles are neighbors
                neigh = source_files[I[j]]
                neighbors[me][sides[0]] = neigh
                neighbors[neigh][sides[1]] = me
                break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
    return neighbors
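A toy invocation with hypothetical tile names: coords are pre-sorted so the sort index I is the identity, f declares c2 the right-hand neighbor of c1 when it sits one unit along x, and sides gives that relation from each tile's point of view.

source_files = ['tile_a.tif', 'tile_b.tif', 'tile_c.tif']
coords = [(0.0, 0.0), (1.0, 0.0), (2.0, 0.0)]  # already sorted by x
I = [0, 1, 2]                                  # identity sort index
neighbors = {fn: {'left': '', 'right': ''} for fn in source_files}

def f(c1, c2):
    # c2 sits one unit to the right of c1, on the same row
    return c2 == (c1[0] + 1.0, c1[1])

find_neighbors(neighbors, coords, I, source_files, f, ['right', 'left'])
assert neighbors['tile_a.tif']['right'] == 'tile_b.tif'
assert neighbors['tile_b.tif']['left'] == 'tile_a.tif'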
def laser_mirrors(rows, cols, mir):
    """Orienting mirrors to allow reachability by laser beam

    :param int rows:
    :param int cols: rows and cols are the dimension of the grid
    :param mir: list of mirror coordinates, except
        mir[0]= laser entrance,
        mir[-1]= laser exit.

    :complexity: :math:`O(2^n)`
    """
    # build structures
    n = len(mir)
    orien = [None] * (n + 2)
    orien[n] = 0  # arbitrary orientations
    orien[n + 1] = 0
    succ = [[None for direc in range(4)] for i in range(n + 2)]
    L = [(mir[i][0], mir[i][1], i) for i in range(n)]
    L.append((0, -1, n))        # enter
    L.append((0, cols, n + 1))  # exit
    last_r, last_i = None, None
    for (r, c, i) in sorted(L):  # sweep by row
        if last_r == r:
            succ[i][LEFT] = last_i
            succ[last_i][RIGHT] = i
        last_r, last_i = r, i
    last_c = None
    for (r, c, i) in sorted(L, key=lambda rci: (rci[1], rci[0])):
        if last_c == c:  # sweep by column
            succ[i][UP] = last_i
            succ[last_i][DOWN] = i
        last_c, last_i = c, i
    if solve(succ, orien, n, RIGHT):  # exploration
        return orien[:n]
    else:
        return None
def function[laser_mirrors, parameter[rows, cols, mir]]: constant[Orienting mirrors to allow reachability by laser beam :param int rows: :param int cols: rows and cols are the dimension of the grid :param mir: list of mirror coordinates, except mir[0]= laser entrance, mir[-1]= laser exit. :complexity: :math:`O(2^n)` ] variable[n] assign[=] call[name[len], parameter[name[mir]]] variable[orien] assign[=] binary_operation[list[[<ast.Constant object at 0x7da20c993ee0>]] * binary_operation[name[n] + constant[2]]] call[name[orien]][name[n]] assign[=] constant[0] call[name[orien]][binary_operation[name[n] + constant[1]]] assign[=] constant[0] variable[succ] assign[=] <ast.ListComp object at 0x7da20c991a50> variable[L] assign[=] <ast.ListComp object at 0x7da20c991f90> call[name[L].append, parameter[tuple[[<ast.Constant object at 0x7da20c991060>, <ast.UnaryOp object at 0x7da20c9910f0>, <ast.Name object at 0x7da20c993b50>]]]] call[name[L].append, parameter[tuple[[<ast.Constant object at 0x7da20c991030>, <ast.Name object at 0x7da20c992050>, <ast.BinOp object at 0x7da20c990910>]]]] <ast.Tuple object at 0x7da20c991330> assign[=] tuple[[<ast.Constant object at 0x7da20c991d20>, <ast.Constant object at 0x7da20c993430>]] for taget[tuple[[<ast.Name object at 0x7da20c993400>, <ast.Name object at 0x7da20c992650>, <ast.Name object at 0x7da20c9916c0>]]] in starred[call[name[sorted], parameter[name[L]]]] begin[:] if compare[name[last_r] equal[==] name[r]] begin[:] call[call[name[succ]][name[i]]][name[LEFT]] assign[=] name[last_i] call[call[name[succ]][name[last_i]]][name[RIGHT]] assign[=] name[i] <ast.Tuple object at 0x7da20c9922c0> assign[=] tuple[[<ast.Name object at 0x7da20c992b90>, <ast.Name object at 0x7da20c991d50>]] variable[last_c] assign[=] constant[None] for taget[tuple[[<ast.Name object at 0x7da20c9928c0>, <ast.Name object at 0x7da20c9900d0>, <ast.Name object at 0x7da20c9904c0>]]] in starred[call[name[sorted], parameter[name[L]]]] begin[:] if compare[name[last_c] equal[==] name[c]] begin[:] call[call[name[succ]][name[i]]][name[UP]] assign[=] name[last_i] call[call[name[succ]][name[last_i]]][name[DOWN]] assign[=] name[i] <ast.Tuple object at 0x7da20c992560> assign[=] tuple[[<ast.Name object at 0x7da20c9911e0>, <ast.Name object at 0x7da20c991fc0>]] if call[name[solve], parameter[name[succ], name[orien], name[n], name[RIGHT]]] begin[:] return[call[name[orien]][<ast.Slice object at 0x7da20c992380>]]
keyword[def] identifier[laser_mirrors] ( identifier[rows] , identifier[cols] , identifier[mir] ): literal[string] identifier[n] = identifier[len] ( identifier[mir] ) identifier[orien] =[ keyword[None] ]*( identifier[n] + literal[int] ) identifier[orien] [ identifier[n] ]= literal[int] identifier[orien] [ identifier[n] + literal[int] ]= literal[int] identifier[succ] =[[ keyword[None] keyword[for] identifier[direc] keyword[in] identifier[range] ( literal[int] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] + literal[int] )] identifier[L] =[( identifier[mir] [ identifier[i] ][ literal[int] ], identifier[mir] [ identifier[i] ][ literal[int] ], identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] )] identifier[L] . identifier[append] (( literal[int] ,- literal[int] , identifier[n] )) identifier[L] . identifier[append] (( literal[int] , identifier[cols] , identifier[n] + literal[int] )) identifier[last_r] , identifier[last_i] = keyword[None] , keyword[None] keyword[for] ( identifier[r] , identifier[c] , identifier[i] ) keyword[in] identifier[sorted] ( identifier[L] ): keyword[if] identifier[last_r] == identifier[r] : identifier[succ] [ identifier[i] ][ identifier[LEFT] ]= identifier[last_i] identifier[succ] [ identifier[last_i] ][ identifier[RIGHT] ]= identifier[i] identifier[last_r] , identifier[last_i] = identifier[r] , identifier[i] identifier[last_c] = keyword[None] keyword[for] ( identifier[r] , identifier[c] , identifier[i] ) keyword[in] identifier[sorted] ( identifier[L] , identifier[key] = keyword[lambda] identifier[rci] :( identifier[rci] [ literal[int] ], identifier[rci] [ literal[int] ])): keyword[if] identifier[last_c] == identifier[c] : identifier[succ] [ identifier[i] ][ identifier[UP] ]= identifier[last_i] identifier[succ] [ identifier[last_i] ][ identifier[DOWN] ]= identifier[i] identifier[last_c] , identifier[last_i] = identifier[c] , identifier[i] keyword[if] identifier[solve] ( identifier[succ] , identifier[orien] , identifier[n] , identifier[RIGHT] ): keyword[return] identifier[orien] [: identifier[n] ] keyword[else] : keyword[return] keyword[None]
def laser_mirrors(rows, cols, mir):
    """Orienting mirrors to allow reachability by laser beam

    :param int rows:
    :param int cols: rows and cols are the dimension of the grid
    :param mir: list of mirror coordinates, except
        mir[0]= laser entrance,
        mir[-1]= laser exit.

    :complexity: :math:`O(2^n)`
    """
    # build structures
    n = len(mir)
    orien = [None] * (n + 2)
    orien[n] = 0  # arbitrary orientations
    orien[n + 1] = 0
    succ = [[None for direc in range(4)] for i in range(n + 2)]
    L = [(mir[i][0], mir[i][1], i) for i in range(n)]
    L.append((0, -1, n))        # enter
    L.append((0, cols, n + 1))  # exit
    (last_r, last_i) = (None, None)
    for (r, c, i) in sorted(L):  # sweep by row
        if last_r == r:
            succ[i][LEFT] = last_i
            succ[last_i][RIGHT] = i # depends on [control=['if'], data=[]]
        (last_r, last_i) = (r, i) # depends on [control=['for'], data=[]]
    last_c = None
    for (r, c, i) in sorted(L, key=lambda rci: (rci[1], rci[0])):
        if last_c == c:  # sweep by column
            succ[i][UP] = last_i
            succ[last_i][DOWN] = i # depends on [control=['if'], data=[]]
        (last_c, last_i) = (c, i) # depends on [control=['for'], data=[]]
    if solve(succ, orien, n, RIGHT):  # exploration
        return orien[:n] # depends on [control=['if'], data=[]]
    else:
        return None
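The direction constants and solve() live outside this snippet; a minimal backtracking sketch of what they could look like is below (an assumption, not the source's actual definitions). Directions are encoded 0-3, reflect[d][o] is the outgoing direction when a beam travelling in direction d hits a mirror in orientation o (0 = '/', 1 = '\'), and cycle detection is omitted for brevity.

UP, LEFT, DOWN, RIGHT = 0, 1, 2, 3
reflect = {RIGHT: (UP, DOWN), LEFT: (DOWN, UP),
           UP: (RIGHT, LEFT), DOWN: (LEFT, RIGHT)}

def solve(succ, orien, i, direc):
    j = succ[i][direc]            # next mirror hit by the beam
    if j is None:
        return False              # beam leaves the grid
    if j == len(orien) - 1:
        return True               # reached the exit node
    if orien[j] is None:          # free mirror: try both orientations
        for o in (0, 1):
            orien[j] = o
            if solve(succ, orien, j, reflect[direc][o]):
                return True
        orien[j] = None           # backtrack
        return False
    return solve(succ, orien, j, reflect[direc][orien[j]])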