code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
conv = {'UNKNOWN': enums.budUnknown, 'NEVER_BEEN_FRIEND': enums.budNeverBeenFriend, 'DELETED_FRIEND': enums.budDeletedFriend, 'PENDING_AUTHORIZATION': enums.budPendingAuthorization, 'FRIEND': enums.budFriend} try: return self._TextTo('bud', conv[Text.upper()]) except KeyError: raise ValueError('Bad text')
def TextToBuddyStatus(self, Text)
Returns buddy status code. :Parameters: Text : unicode Text, one of 'UNKNOWN', 'NEVER_BEEN_FRIEND', 'DELETED_FRIEND', 'PENDING_AUTHORIZATION', 'FRIEND'. :return: Buddy status. :rtype: `enums`.bud*
5.637242
2.574731
2.189449
self.composite_fields[name] = field self._init_composite_field(name, field)
def add_composite_field(self, name, field)
Add a dynamic composite field to the already existing ones and initialize it appropriately.
3.725515
3.277249
1.136781
field = self.composite_fields[name] if hasattr(field, 'get_form'): return self.forms[name] if hasattr(field, 'get_formset'): return self.formsets[name]
def get_composite_field_value(self, name)
Return the form/formset instance for the given field name.
3.1465
2.279527
1.38033
# The base_composite_fields class attribute is the *class-wide* # definition of fields. Because a particular *instance* of the class # might want to alter self.composite_fields, we create # self.composite_fields here by copying base_composite_fields. # Instances should always modify self.composite_fields; they should not # modify base_composite_fields. self.composite_fields = copy.deepcopy(self.base_composite_fields) self.forms = OrderedDict() self.formsets = OrderedDict() for name, field in self.composite_fields.items(): self._init_composite_field(name, field)
def _init_composite_fields(self)
Set up the forms and formsets.
4.73122
4.209971
1.123813
super(SuperFormMixin, self).full_clean() for field_name, composite in self.forms.items(): composite.full_clean() if not composite.is_valid() and composite._errors: self._errors[field_name] = ErrorDict(composite._errors) for field_name, composite in self.formsets.items(): composite.full_clean() if not composite.is_valid() and composite._errors: self._errors[field_name] = ErrorList(composite._errors)
def full_clean(self)
Clean the form, including all nested forms and formsets, and add their errors to the errors dict. Errors of nested forms and formsets are only included if they actually contain errors.
2.518873
2.234983
1.127021
media_list = [] media_list.append(super(SuperFormMixin, self).media) for composite_name in self.composite_fields.keys(): form = self.get_composite_field_value(composite_name) media_list.append(form.media) return reduce(lambda a, b: a + b, media_list)
def media(self)
Incorporate the composite fields' media.
3.918209
3.329098
1.176958
saved_obj = self.save_form(commit=commit) self.save_forms(commit=commit) self.save_formsets(commit=commit) return saved_obj
def save(self, commit=True)
When saving a super model form, the nested forms and formsets will be saved as well. The implementation of ``.save()`` looks like this: .. code:: python saved_obj = self.save_form() self.save_forms() self.save_formsets() return saved_obj That makes it easy to override it in order to change the order in which things are saved. The ``.save()`` method will return only a single model instance even if nested forms are saved as well. That keeps the API similar to what Django's model forms are offering. If ``commit=False``, Django's modelform implementation will attach a ``save_m2m`` method to the form instance, so that you can call it manually later. When you call ``save_m2m``, the ``save_forms`` and ``save_formsets`` methods will be executed as well, so again all nested forms are taken care of transparently.
3.56988
2.38256
1.498338
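The record above documents the save order of django-superform's ``SuperModelForm``. A hedged sketch of overriding that order, assuming a configured Django project; ``myapp`` and ``Post`` are hypothetical names, not part of the sample:

.. code:: python

    # Sketch only: change the order in which nested composites are saved.
    from django_superform.forms import SuperModelForm

    from myapp.models import Post  # hypothetical app and model


    class PostForm(SuperModelForm):
        class Meta:
            model = Post
            fields = ['title']

        def save(self, commit=True):
            # Same steps as the documented default, but formsets are
            # persisted before the nested forms.
            saved_obj = self.save_form(commit=commit)
            self.save_formsets(commit=commit)
            self.save_forms(commit=commit)
            return saved_obj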
return super(SuperModelFormMixin, self).save(commit=commit)
def save_form(self, commit=True)
This calls Django's ``ModelForm.save()``. It only takes care of saving this actual form, and leaves the nested forms and formsets alone. We separate this out of the :meth:`~django_superform.forms.SuperModelForm.save` method to make extensibility easier.
7.004095
5.445631
1.286186
saved_composites = [] for name, composite in self.formsets.items(): field = self.composite_fields[name] if hasattr(field, 'save'): field.save(self, name, composite, commit=commit) saved_composites.append(composite) self._extend_save_m2m('save_formsets_m2m', saved_composites)
def save_formsets(self, commit=True)
Save all formsets. If ``commit=False``, it will modify the form's ``save_m2m()`` so that it also calls the formsets' ``save_m2m()`` methods.
4.20136
4.057244
1.035521
return '{form_prefix}{prefix_name}-{field_name}'.format( form_prefix=form.prefix + '-' if form.prefix else '', prefix_name=self.prefix_name, field_name=name)
def get_prefix(self, form, name)
Return the prefix that is used for the formset.
3.486135
3.251621
1.072122
if hasattr(form, 'initial'): return form.initial.get(name, None) return None
def get_initial(self, form, name)
Get the initial data that got passed into the superform for this composite field. It should return ``None`` if no initial values were given.
3.589216
3.644683
0.984781
kwargs = { 'prefix': self.get_prefix(form, name), 'initial': self.get_initial(form, name), } kwargs.update(self.default_kwargs) return kwargs
def get_kwargs(self, form, name)
Return the keyword arguments that are used to instantiate the formset.
2.458384
2.497315
0.984411
kwargs = self.get_kwargs(form, name) form_class = self.get_form_class(form, name) composite_form = form_class( data=form.data if form.is_bound else None, files=form.files if form.is_bound else None, **kwargs) return composite_form
def get_form(self, form, name)
Get an instance of the form.
2.599598
2.468266
1.053208
kwargs = super(ModelFormField, self).get_kwargs(form, name) instance = self.get_instance(form, name) kwargs.setdefault('instance', instance) kwargs.setdefault('empty_permitted', not self.required) return kwargs
def get_kwargs(self, form, name)
Return the keyword arguments that are used to instantiate the form. The ``instance`` kwarg will be set to the value returned by :meth:`~django_superform.fields.ModelFormField.get_instance`. The ``empty_permitted`` kwarg will be set to the inverse of the ``required`` argument passed into the constructor of this field.
2.74287
2.053083
1.335976
if composite_form.empty_permitted and not composite_form.has_changed(): return False return True
def shall_save(self, form, name, composite_form)
Return ``True`` if the given ``composite_form`` (the nested form of this field) shall be saved. Return ``False`` if the form shall not be saved together with the super-form. By default it will return ``False`` if the form was not changed and the ``empty_permitted`` argument for the form was set to ``True``. That way you can allow empty forms.
5.502239
3.762379
1.462436
if self.shall_save(form, name, composite_form): return composite_form.save(commit=commit) return None
def save(self, form, name, composite_form, commit)
This method is called by :meth:`django_superform.forms.SuperModelForm.save` in order to save the modelform that this field takes care of. It calls the nested form's ``save()`` method, but only if :meth:`~django_superform.fields.ModelFormField.shall_save` returns ``True``.
3.638984
2.64495
1.375823
if self.blank is not None: return self.blank model = form._meta.model field = model._meta.get_field(self.get_field_name(form, name)) return field.blank
def allow_blank(self, form, name)
Determine whether the form may be completely empty. If it is empty, the saved value for the ForeignKey will be ``None``.
3.295366
3.277308
1.00551
kwargs = self.get_kwargs(form, name) formset_class = self.get_formset_class(form, name) formset = formset_class( form.data if form.is_bound else None, form.files if form.is_bound else None, **kwargs) return formset
def get_formset(self, form, name)
Get an instance of the formset.
2.085997
1.983742
1.051546
if self.formset_class is not None: return self.formset_class formset_class = inlineformset_factory( self.get_parent_model(form, name), self.get_model(form, name), **self.formset_factory_kwargs) return formset_class
def get_formset_class(self, form, name)
Either return the formset class that was provided as argument to the __init__ method, or build one based on the ``parent_model`` and ``model`` attributes.
2.501574
2.403146
1.040958
self.config_prefix = config_prefix def key(suffix): return '%s_%s' % (config_prefix, suffix) if key('DATABASE') not in app.config: raise ImproperlyConfiguredError("You should provide a database name " "(the %s setting)." % key('DATABASE')) uri = _get_mongo_uri(app, key) rs = app.config.get(key('REPLICA_SET')) timezone = None if key('TIMEZONE') in app.config: timezone = pytz.timezone(app.config.get(key('TIMEZONE'))) self.session = session.Session.connect(app.config.get(key('DATABASE')), safe=app.config.get(key('SAFE_SESSION'), False), timezone = timezone, host=uri, replicaSet=rs) self.Document._session = self.session
def init_app(self, app, config_prefix='MONGOALCHEMY')
This callback can be used to initialize an application for use with this MongoDB setup. Never use a database in the context of an application that was not initialized that way, or connections will leak.
3.734704
3.81574
0.978763
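A hedged usage sketch for the ``init_app`` record above, assuming Flask and Flask-MongoAlchemy are installed; the database name is made up, and ``MONGOALCHEMY_DATABASE`` is the config key the method requires:

.. code:: python

    # Sketch only: wire the extension to an app the way init_app expects.
    from flask import Flask
    from flask_mongoalchemy import MongoAlchemy

    app = Flask(__name__)
    app.config['MONGOALCHEMY_DATABASE'] = 'testdb'  # required by init_app

    db = MongoAlchemy()
    db.init_app(app)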
return self.query.paginate(self.page - 1, self.per_page, error_out)
def prev(self, error_out=False)
Return a :class:`Pagination` object for the previous page.
5.584899
3.309371
1.687601
try: return self.filter(self.type.mongo_id == mongo_id).first() except exceptions.BadValueException: return None
def get(self, mongo_id)
Returns a :class:`Document` instance from its ``mongo_id`` or ``None`` if not found
6.555761
6.476917
1.012173
document = self.get(mongo_id) if document is None: abort(404) return document
def get_or_404(self, mongo_id)
Like the :meth:`get` method, but aborts with 404 if not found instead of returning ``None``.
2.951999
2.819276
1.047077
if page < 1 and error_out: abort(404) items = self.skip((page - 1) * per_page).limit(per_page).all() if len(items) < 1 and page != 1 and error_out: abort(404) return Pagination(self, page, per_page, self.count(), items)
def paginate(self, page, per_page=20, error_out=True)
Returns ``per_page`` items from page ``page`` By default, it will abort with 404 if no items were found and the page was larger than 1. This behaviour can be disabled by setting ``error_out`` to ``False``. Returns a :class:`Pagination` object.
2.276871
2.371693
0.960019
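The skip/limit arithmetic behind ``paginate`` above can be checked without MongoDB; ``page_bounds`` below is a made-up helper that mirrors only the offset math:

.. code:: python

    # Pure-Python illustration of the 1-based paging offsets used above.
    def page_bounds(page, per_page=20):
        start = (page - 1) * per_page
        return start, start + per_page

    print(page_bounds(1))      # (0, 20)  -> first page skips nothing
    print(page_bounds(3, 10))  # (20, 30) -> skip 20 items, take 10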
self._session.insert(self, safe=safe) self._session.flush()
def save(self, safe=None)
Saves the document itself in the database. The optional ``safe`` argument is a boolean that specifies if the save method should wait for the operation to complete.
8.506783
10.47642
0.811993
self._session.remove(self, safe=safe) self._session.flush()
def remove(self, safe=None)
Removes the document itself from the database. The optional ``safe`` argument is a boolean that specifies if the remove method should wait for the operation to complete.
9.424281
9.406846
1.001853
authors = Author.query.all() content = '<p>Authors:</p>' for author in authors: content += '<p>%s</p>' % author.name return content
def list_authors()
List all authors. e.g.: GET /authors
3.002813
3.539803
0.848299
disable = _CXString() conf.lib.clang_getDiagnosticOption(self, byref(disable)) return _CXString.from_result(disable)
def disable_option(self)
The command-line option that disables this diagnostic.
12.783303
9.637378
1.32643
if options is None: options = conf.lib.clang_defaultDiagnosticDisplayOptions() if options & ~Diagnostic._FormatOptionsMask: raise ValueError('Invalid format options') return conf.lib.clang_formatDiagnostic(self, options)
def format(self, options=None)
Format this diagnostic for display. The options argument takes Diagnostic.Display* flags, which can be combined using bitwise OR. If the options argument is not provided, the default display options will be used.
10.545353
6.225586
1.693873
if self._name_map is None: self._name_map = {} for key, value in self.__class__.__dict__.items(): if isinstance(value, self.__class__): self._name_map[value] = key return self._name_map[self]
def name(self)
Get the enumeration name of this cursor kind.
2.504088
2.236576
1.119608
if not hasattr(self, '_spelling'): self._spelling = conf.lib.clang_getCursorSpelling(self) return self._spelling
def spelling(self)
Return the spelling of the entity pointed at by the cursor.
6.272277
4.743724
1.322226
if not hasattr(self, '_displayname'): self._displayname = conf.lib.clang_getCursorDisplayName(self) return self._displayname
def displayname(self)
Return the display name for the entity referenced by this cursor. The display name contains extra information that helps identify the cursor, such as the parameters of a function or template or the arguments of a class template specialization.
6.831288
5.758442
1.186308
if not hasattr(self, '_mangled_name'): self._mangled_name = conf.lib.clang_Cursor_getMangling(self) return self._mangled_name
def mangled_name(self)
Return the mangled name for the entity referenced by this cursor.
7.539585
5.652021
1.333963
if not hasattr(self, '_linkage'): self._linkage = conf.lib.clang_getCursorLinkage(self) return LinkageKind.from_id(self._linkage)
def linkage(self)
Return the linkage of this cursor.
7.595504
5.365377
1.415651
if not hasattr(self, '_availability'): self._availability = conf.lib.clang_getCursorAvailability(self) return AvailabilityKind.from_id(self._availability)
def availability(self)
Retrieves the availability of the entity pointed at by the cursor.
9.250743
7.107408
1.301564
if not hasattr(self, '_objc_type_encoding'): self._objc_type_encoding = \ conf.lib.clang_getDeclObjCTypeEncoding(self) return self._objc_type_encoding
def objc_type_encoding(self)
Return the Objective-C type encoding as a str.
4.605045
4.103955
1.122099
if args is None: args = [] if unsaved_files is None: unsaved_files = [] if index is None: index = Index.create() args_array = None if len(args) > 0: args_array = (c_char_p * len(args))(*[b(x) for x in args]) unsaved_array = None if len(unsaved_files) > 0: unsaved_array = (_CXUnsavedFile * len(unsaved_files))() for i, (name, contents) in enumerate(unsaved_files): if hasattr(contents, "read"): contents = contents.read() unsaved_array[i].name = b(name) unsaved_array[i].contents = b(contents) unsaved_array[i].length = len(contents) ptr = conf.lib.clang_parseTranslationUnit(index, filename, args_array, len(args), unsaved_array, len(unsaved_files), options) if not ptr: raise TranslationUnitLoadError("Error parsing translation unit.") return cls(ptr, index=index)
def from_source(cls, filename, args=None, unsaved_files=None, options=0, index=None)
Create a TranslationUnit by parsing source. This is capable of processing source code both from files on the filesystem as well as in-memory contents. Command-line arguments that would be passed to clang are specified as a list via args. These can be used to specify include paths, warnings, etc. e.g. ["-Wall", "-I/path/to/include"]. In-memory file content can be provided via unsaved_files. This is an iterable of 2-tuples. The first element is the str filename. The second element defines the content. Content can be provided as str source code or as file objects (anything with a read() method). If a file object is being used, content will be read until EOF and the read cursor will not be reset to its original position. options is a bitwise or of TranslationUnit.PARSE_XXX flags which will control parsing behavior. index is an Index instance to utilize. If not provided, a new Index will be created for this TranslationUnit. To parse source from the filesystem, the filename of the file to parse is specified by the filename argument. Or, filename could be None and the args list would contain the filename(s) to parse. To parse source from an in-memory buffer, set filename to the virtual filename you wish to associate with this source (e.g. "test.c"). The contents of that file are then provided in unsaved_files. If an error occurs, a TranslationUnitLoadError is raised. Please note that a TranslationUnit with parser errors may be returned. It is the caller's responsibility to check tu.diagnostics for errors. Also note that Clang infers the source language from the extension of the input filename. If you pass in source code containing a C++ class declaration with the filename "test.c" parsing will fail.
2.281734
2.11584
1.078406
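A hedged example of parsing an in-memory buffer with ``from_source`` as described above; it assumes the clang Python bindings and a matching libclang are installed and configured:

.. code:: python

    # Sketch: parse in-memory C source via unsaved_files, then list diagnostics.
    from clang import cindex

    source = 'int main(void) { return 0; }\n'
    tu = cindex.TranslationUnit.from_source(
        'sample.c',                          # virtual filename
        args=['-std=c11'],
        unsaved_files=[('sample.c', source)],
    )
    for diag in tu.diagnostics:
        print(diag.severity, diag.spelling)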
cursor = Cursor() cursor._tu = self._tu conf.lib.clang_annotateTokens(self._tu, byref(self), 1, byref(cursor)) return cursor
def cursor(self)
The Cursor this Token corresponds to.
15.561744
10.022048
1.552751
self.index = cindex.Index.create() self.headers = {} for f in self.files: if f in self.processed: continue print('Processing {0}'.format(os.path.basename(f))) tu = self.index.parse(f, self.flags) if len(tu.diagnostics) != 0: fatal = False for d in tu.diagnostics: sys.stderr.write(d.format()) sys.stderr.write("\n") if d.severity == cindex.Diagnostic.Fatal or \ d.severity == cindex.Diagnostic.Error: fatal = True if fatal: sys.stderr.write("\nCould not generate documentation due to parser errors\n") sys.exit(1) if not tu: sys.stderr.write("Could not parse file %s...\n" % (f,)) sys.exit(1) # Extract comments from files and included files that we are # supposed to inspect extractfiles = [f] for inc in tu.get_includes(): filename = str(inc.include) self.headers[filename] = True if filename in self.processed or (not filename in self.files) or filename in extractfiles: continue extractfiles.append(filename) for e in extractfiles: db = comment.CommentsDatabase(e, tu) self.add_categories(db.category_names) self.commentsdbs[e] = db self.visit(tu.cursor.get_children()) for f in self.processing: self.processed[f] = True self.processing = {} # Construct hierarchy of nodes. for node in self.all_nodes: q = node.qid if node.parent is None: par = self.find_parent(node) # Lookup categories for things in the root if (par is None or par == self.root) and (not node.cursor is None): location = node.cursor.extent.start db = self.commentsdbs[location.file.name] if db: par = self.category_to_node[db.lookup_category(location)] if par is None: par = self.root par.append(node) # Resolve comment cm = self.find_node_comment(node) if cm: node.merge_comment(cm) # Keep track of classes to resolve bases and subclasses classes = {} # Map final qid to node for node in self.all_nodes: q = node.qid self.qid_to_node[q] = node if isinstance(node, nodes.Class): classes[q] = node # Resolve bases and subclasses for qid in classes: classes[qid].resolve_bases(classes)
def process(self)
process processes all the files with clang and extracts all relevant nodes from the generated AST
4.226033
3.919324
1.078256
if not citer: return while True: try: item = next(citer) except StopIteration: return # Check the source of item if not item.location.file: self.visit(item.get_children()) continue # Ignore files we already processed if str(item.location.file) in self.processed: continue # Ignore files other than the ones we are scanning for if not str(item.location.file) in self.files: continue # Ignore unexposed things if item.kind == cindex.CursorKind.UNEXPOSED_DECL: self.visit(item.get_children(), parent) continue self.processing[str(item.location.file)] = True if item.kind in self.kindmap: cls = self.kindmap[item.kind] if not cls: # Skip continue # see if we already have a node for this thing node = self.usr_to_node[item.get_usr()] if not node or self.is_unique_anon_struct(node, parent): # Only register new nodes if they are exposed. if self.cursor_is_exposed(item): node = cls(item, None) self.register_node(node, parent) elif isinstance(parent, nodes.Typedef) and isinstance(node, nodes.Struct): # Typedefs are handled a bit specially because what happens # is that clang first exposes an unnamed struct/enum, and # then exposes the typedef, with as a child again the # cursor to the already defined struct/enum. This is a # bit reversed as to how we normally process things. self.register_anon_typedef(node, parent) else: self.cursor_to_node[item] = node node.add_ref(item) if node and node.process_children: self.visit(item.get_children(), node) else: par = self.cursor_to_node[item.semantic_parent] if not par: par = parent if par: ret = par.visit(item, citer) if not ret is None: for node in ret: self.register_node(node, par) ignoretop = [cindex.CursorKind.TYPE_REF, cindex.CursorKind.PARM_DECL] if (not par or ret is None) and not item.kind in ignoretop: log.warning("Unhandled cursor: %s", item.kind)
def visit(self, citer, parent=None)
visit iterates over the provided cursor iterator and creates nodes from the AST cursors.
4.53201
4.513407
1.004122
it = tu.get_tokens(extent=tu.get_extent(filename, (0, int(os.stat(filename).st_size)))) while True: try: self.extract_loop(it) except StopIteration: break
def extract(self, filename, tu)
extract extracts comments from a translation unit for a given file by iterating over all the tokens in the TU, locating the COMMENT tokens and finding out to which cursors the comments semantically belong.
5.536194
5.714386
0.968817
''' Generate namedtuple with default values. :param name: name :param fields: iterable with field names :param defaults: iterable or mapping with field defaults :returns: namedtuple class with given fields and given defaults :rtype: type ''' nt = collections.namedtuple(name, fields) nt.__new__.__defaults__ = (None,) * len(nt._fields) if isinstance(defaults, collections.Mapping): nt.__new__.__defaults__ = tuple(nt(**defaults)) elif defaults: nt.__new__.__defaults__ = tuple(nt(*defaults)) return nt
def defaultsnamedtuple(name, fields, defaults=None)
Generate namedtuple with default values. :param name: name :param fields: iterable with field names :param defaults: iterable or mapping with field defaults :returns: namedtuple class with given fields and given defaults :rtype: type
2.64525
1.656437
1.596952
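A self-contained rerun of ``defaultsnamedtuple`` above, adjusted to ``collections.abc.Mapping`` for current Python, showing how the defaults attach to the generated class:

.. code:: python

    # Sketch mirroring the sample above; behaviour shown in the prints.
    import collections
    from collections.abc import Mapping


    def defaultsnamedtuple(name, fields, defaults=None):
        nt = collections.namedtuple(name, fields)
        nt.__new__.__defaults__ = (None,) * len(nt._fields)
        if isinstance(defaults, Mapping):
            nt.__new__.__defaults__ = tuple(nt(**defaults))
        elif defaults:
            nt.__new__.__defaults__ = tuple(nt(*defaults))
        return nt


    Point = defaultsnamedtuple('Point', ('x', 'y', 'z'), {'z': 0})
    print(Point(1, 2))     # Point(x=1, y=2, z=0)
    print(Point(1, 2, 3))  # Point(x=1, y=2, z=3)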
''' Initialize this Flask extension for given app. ''' self.app = app if not hasattr(app, 'extensions'): app.extensions = {} app.extensions['plugin_manager'] = self self.reload()
def init_app(self, app)
Initialize this Flask extension for given app.
4.870773
3.31704
1.468409
''' Clear plugin manager state and reload plugins. This method will make use of :meth:`clear` and :meth:`load_plugin`, so all internal state will be cleared, and all plugins defined in :data:`self.app.config['plugin_modules']` will be loaded. ''' self.clear() for plugin in self.app.config.get('plugin_modules', ()): self.load_plugin(plugin)
def reload(self)
Clear plugin manager state and reload plugins. This method will make use of :meth:`clear` and :meth:`load_plugin`, so all internal state will be cleared, and all plugins defined in :data:`self.app.config['plugin_modules']` will be loaded.
5.012308
1.616692
3.100348
''' Import plugin by given name, looking at :attr:`namespaces`. :param plugin: plugin module name :type plugin: str :raises PluginNotFoundError: if not found on any namespace ''' names = [ '%s%s%s' % (namespace, '' if namespace[-1] == '_' else '.', plugin) if namespace else plugin for namespace in self.namespaces ] for name in names: if name in sys.modules: return sys.modules[name] for name in names: try: __import__(name) return sys.modules[name] except (ImportError, KeyError): pass raise PluginNotFoundError( 'No plugin module %r found, tried %r' % (plugin, names), plugin, names)
def import_plugin(self, plugin)
Import plugin by given name, looking at :attr:`namespaces`. :param plugin: plugin module name :type plugin: str :raises PluginNotFoundError: if not found on any namespace
3.609223
2.423995
1.488956
''' Import plugin (see :meth:`import_plugin`) and load related data. If available, plugin's module-level :func:`register_plugin` function will be called with current plugin manager instance as first argument. :param plugin: plugin module name :type plugin: str :raises PluginNotFoundError: if not found on any namespace ''' module = super(RegistrablePluginManager, self).load_plugin(plugin) if hasattr(module, 'register_plugin'): module.register_plugin(self) return module
def load_plugin(self, plugin)
Import plugin (see :meth:`import_plugin`) and load related data. If available, plugin's module-level :func:`register_plugin` function will be called with current plugin manager instance as first argument. :param plugin: plugin module name :type plugin: str :raises PluginNotFoundError: if not found on any namespace
5.258822
1.786228
2.944094
''' Register given blueprint on current app. This method is provided for use inside a plugin's module-level :func:`register_plugin` functions. :param blueprint: blueprint object with plugin endpoints :type blueprint: flask.Blueprint ''' if blueprint not in self._blueprint_known: self.app.register_blueprint(blueprint) self._blueprint_known.add(blueprint)
def register_blueprint(self, blueprint)
Register given blueprint on current app. This method is provided for use inside a plugin's module-level :func:`register_plugin` functions. :param blueprint: blueprint object with plugin endpoints :type blueprint: flask.Blueprint
7.599414
1.941645
3.913906
''' Resolve widget callable properties into static ones. :param file: file will be used to resolve callable properties. :type file: browsepy.file.Node :param widget: widget instance optionally with callable properties :type widget: object :returns: a new widget instance of the same type as widget parameter :rtype: object ''' return widget.__class__(*[ value(file) if callable(value) else value for value in widget ])
def _resolve_widget(cls, file, widget)
Resolve widget callable properties into static ones. :param file: file will be used to resolve callable properties. :type file: browsepy.file.Node :param widget: widget instance optionally with callable properties :type widget: object :returns: a new widget instance of the same type as widget parameter :rtype: object
7.543042
2.033887
3.708684
''' Iterate registered widgets, optionally matching given criteria. :param file: optional file object will be passed to widgets' filter functions. :type file: browsepy.file.Node or None :param place: optional template place hint. :type place: str :yields: widget instances :ytype: object ''' for filter, dynamic, cwidget in self._widgets: try: if file and filter and not filter(file): continue except BaseException as e: # Exception is handled as this method execution is deferred, # making it hard to debug for plugin developers. warnings.warn( 'Plugin action filtering failed with error: %s' % e, RuntimeWarning ) continue if place and place != cwidget.place: continue if file and dynamic: cwidget = self._resolve_widget(file, cwidget) yield cwidget
def iter_widgets(self, file=None, place=None)
Iterate registered widgets, optionally matching given criteria. :param file: optional file object will be passed to widgets' filter functions. :type file: browsepy.file.Node or None :param place: optional template place hint. :type place: str :yields: widget instances :ytype: object
7.624583
4.01695
1.898102
''' Create a widget object based on given arguments. If file object is provided, callable arguments will be resolved: its return value will be used after calling them with file as first parameter. All extra `kwargs` parameters will be passed to widget constructor. :param place: place hint where widget should be shown. :type place: str :param type: widget type name as taken from :attr:`widget_types` dict keys. :type type: str :param file: optional file object for widget attribute resolving :type type: browsepy.files.Node or None :returns: widget instance :rtype: object ''' widget_class = self.widget_types.get(type, self.widget_types['base']) kwargs.update(place=place, type=type) try: element = widget_class(**kwargs) except TypeError as e: message = e.args[0] if e.args else '' if ( 'unexpected keyword argument' in message or 'required positional argument' in message ): raise WidgetParameterException( 'type %s; %s; available: %r' % (type, message, widget_class._fields) ) raise e if file and any(map(callable, element)): return self._resolve_widget(file, element) return element
def create_widget(self, place, type, file=None, **kwargs)
Create a widget object based on given arguments. If file object is provided, callable arguments will be resolved: its return value will be used after calling them with file as first parameter. All extra `kwargs` parameters will be passed to widget constructor. :param place: place hint where widget should be shown. :type place: str :param type: widget type name as taken from :attr:`widget_types` dict keys. :type type: str :param file: optional file object for widget attribute resolving :type type: browsepy.files.Node or None :returns: widget instance :rtype: object
5.255858
2.101784
2.500665
''' Create (see :meth:`create_widget`) or use provided widget and register it. This method provides this dual behavior in order to simplify widget creation-registration in a single functional step without sacrificing the reusability of an object-oriented approach. :param place: where widget should be placed. This param conflicts with `widget` argument. :type place: str or None :param type: widget type name as taken from :attr:`widget_types` dict keys. This param conflicts with `widget` argument. :type type: str or None :param widget: optional widget object will be used as is. This param conflicts with both place and type arguments. :type widget: object or None :raises TypeError: if both widget and place or type are provided at the same time (they're mutually exclusive). :returns: created or given widget object :rtype: object ''' if bool(widget) == bool(place or type): raise InvalidArgumentError( 'register_widget takes either place and type or widget' ) widget = widget or self.create_widget(place, type, **kwargs) dynamic = any(map(callable, widget)) self._widgets.append((filter, dynamic, widget)) return widget
def register_widget(self, place=None, type=None, widget=None, filter=None, **kwargs)
Create (see :meth:`create_widget`) or use provided widget and register it. This method provides this dual behavior in order to simplify widget creation-registration in a single functional step without sacrificing the reusability of an object-oriented approach. :param place: where widget should be placed. This param conflicts with `widget` argument. :type place: str or None :param type: widget type name as taken from :attr:`widget_types` dict keys. This param conflicts with `widget` argument. :type type: str or None :param widget: optional widget object will be used as is. This param conflicts with both place and type arguments. :type widget: object or None :raises TypeError: if both widget and place or type are provided at the same time (they're mutually exclusive). :returns: created or given widget object :rtype: object
6.538747
1.692453
3.863474
''' Clear plugin manager state. Registered mimetype functions will be disposed after calling this method. ''' self._mimetype_functions = list(self._default_mimetype_functions) super(MimetypePluginManager, self).clear()
def clear(self)
Clear plugin manager state. Registered mimetype functions will be disposed after calling this method.
10.682817
3.167844
3.372267
''' Get mimetype of given path calling all registered mime functions (and default ones). :param path: filesystem path of file :type path: str :returns: mimetype :rtype: str ''' for fnc in self._mimetype_functions: mime = fnc(path) if mime: return mime return mimetype.by_default(path)
def get_mimetype(self, path)
Get mimetype of given path calling all registered mime functions (and default ones). :param path: filesystem path of file :type path: str :returns: mimetype :rtype: str
5.359886
2.396212
2.236816
''' Given a plugin name, extracts its registered_arguments as an iterable of (args, kwargs) tuples. :param plugin: plugin name :type plugin: str :returns: iterable of (args, kwargs) tuples. :rtype: iterable ''' module = self.import_plugin(plugin) if hasattr(module, 'register_arguments'): manager = ArgumentPluginManager() module.register_arguments(manager) return manager._argparse_argkwargs return ()
def extract_plugin_arguments(self, plugin)
Given a plugin name, extracts its registered_arguments as an iterable of (args, kwargs) tuples. :param plugin: plugin name :type plugin: str :returns: iterable of (args, kwargs) tuples. :rtype: iterable
5.506051
2.670541
2.061773
''' Process given argument list based on registered arguments and given optional base :class:`argparse.ArgumentParser` instance. This method saves processed arguments on itself, and this state won't be lost after :meth:`clean` calls. Processed argument state will be available via :meth:`get_argument` method. :param argv: command-line arguments (without command itself) :type argv: iterable of str :param base: optional base :class:`argparse.ArgumentParser` instance. :type base: argparse.ArgumentParser or None :returns: argparse.Namespace instance with processed arguments as given by :meth:`argparse.ArgumentParser.parse_args`. :rtype: argparse.Namespace ''' plugin_parser = argparse.ArgumentParser(add_help=False) plugin_parser.add_argument('--plugin', action='append', default=[]) parent = base or plugin_parser parser = argparse.ArgumentParser( parents=(parent,), add_help=False, **getattr(parent, 'defaults', {}) ) plugins = [ plugin for plugins in plugin_parser.parse_known_args(argv)[0].plugin for plugin in plugins.split(',') ] for plugin in sorted(set(plugins), key=plugins.index): arguments = self.extract_plugin_arguments(plugin) if arguments: group = parser.add_argument_group('%s arguments' % plugin) for argargs, argkwargs in arguments: group.add_argument(*argargs, **argkwargs) self._argparse_arguments = parser.parse_args(argv) return self._argparse_arguments
def load_arguments(self, argv, base=None)
Process given argument list based on registered arguments and given optional base :class:`argparse.ArgumentParser` instance. This method saves processed arguments on itself, and this state won't be lost after :meth:`clean` calls. Processed argument state will be available via :meth:`get_argument` method. :param argv: command-line arguments (without command itself) :type argv: iterable of str :param base: optional base :class:`argparse.ArgumentParser` instance. :type base: argparse.ArgumentParser or None :returns: argparse.Namespace instance with processed arguments as given by :meth:`argparse.ArgumentParser.parse_args`. :rtype: argparse.Namespace
3.582076
1.867628
1.917981
''' Get sorting-cookie from cookies dictionary. :yields: tuple of path and sorting property :ytype: 2-tuple of strings ''' try: data = cookies.get('browse-sorting', 'e30=').encode('ascii') for path, prop in json.loads(base64.b64decode(data).decode('utf-8')): yield path, prop except (ValueError, TypeError, KeyError) as e: logger.exception(e)
def iter_cookie_browse_sorting(cookies)
Get sorting-cookie from cookies dictionary. :yields: tuple of path and sorting property :ytype: 2-tuple of strings
5.521573
3.006937
1.836278
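The cookie layout assumed by ``iter_cookie_browse_sorting`` above can be reproduced standalone; ``'e30='`` in the sample is simply base64 for ``'{}'``, the empty default. The paths and properties below are made up:

.. code:: python

    # Sketch: build and read a browse-sorting cookie value as described above.
    import base64
    import json

    pairs = [['/music', 'size'], ['/docs', '-text']]
    cookie = base64.b64encode(json.dumps(pairs).encode('utf-8')).decode('ascii')
    print(cookie)

    for path, prop in json.loads(base64.b64decode(cookie).decode('utf-8')):
        print(path, prop)
    # /music size
    # /docs -text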
''' Get sorting-cookie data for path of current request. :returns: sorting property :rtype: string ''' if request: for cpath, cprop in iter_cookie_browse_sorting(request.cookies): if path == cpath: return cprop return default
def get_cookie_browse_sorting(path, default)
Get sorting-cookie data for path of current request. :returns: sorting property :rtype: string
8.543097
3.883739
2.199709
''' Get sorting function for directory listing based on given attribute name, with some caveats: * Directories will be first. * If *name* is given, link widget lowercase text will be used instead. * If *size* is given, bytesize will be used. :param prop: file attribute name :returns: tuple with sorting function and reverse bool :rtype: tuple of a callable and a bool ''' if prop.startswith('-'): prop = prop[1:] reverse = True else: reverse = False if prop == 'text': return ( lambda x: ( x.is_directory == reverse, x.link.text.lower() if x.link and x.link.text else x.name ), reverse ) if prop == 'size': return ( lambda x: ( x.is_directory == reverse, x.stats.st_size ), reverse ) return ( lambda x: ( x.is_directory == reverse, getattr(x, prop, None) ), reverse )
def browse_sortkey_reverse(prop)
Get sorting function for directory listing based on given attribute name, with some caveats: * Directories will be first. * If *name* is given, link widget lowercase text will be used instead. * If *size* is given, bytesize will be used. :param prop: file attribute name :returns: tuple with sorting function and reverse bool :rtype: tuple of a callable and a bool
4.776177
1.796897
2.658014
''' Some templates can be huge; this function returns a streaming response, sending the content in chunks and preventing timeouts. :param template_name: template :param **context: parameters for templates. :yields: HTML strings ''' app.update_template_context(context) template = app.jinja_env.get_template(template_name) stream = template.generate(context) return Response(stream_with_context(stream))
def stream_template(template_name, **context)
Some templates can be huge; this function returns a streaming response, sending the content in chunks and preventing timeouts. :param template_name: template :param **context: parameters for templates. :yields: HTML strings
5.704976
2.001347
2.850568
''' Pre-translated key dictionary constructor. See :type:`dict` for more info. :returns: dictionary with uppercase keys :rtype: dict ''' gk = cls.genkey return dict((gk(k), v) for k, v in dict(*args, **kwargs).items())
def gendict(cls, *args, **kwargs)
Pre-translated key dictionary constructor. See :type:`dict` for more info. :returns: dictionary with uppercase keys :rtype: dict
7.620223
2.813118
2.708818
''' Get the next state jump. The next jump is calculated looking at :attr:`current` state and its possible :attr:`jumps` to find the nearest and bigger option in :attr:`pending` data. If none is found, the returned next state label will be None. :returns: tuple with index, substring and next state label :rtype: tuple ''' try: options = self.jumps[self.current] except KeyError: raise KeyError( 'Current state %r not defined in %s.jumps.' % (self.current, self.__class__) ) offset = len(self.start) index = len(self.pending) if self.streaming: index -= max(map(len, options)) key = (index, 1) result = (index, '', None) for amark, anext in options.items(): asize = len(amark) aindex = self.pending.find(amark, offset, index + asize) if aindex > -1: index = aindex akey = (aindex, -asize) if akey < key: key = akey result = (aindex, amark, anext) return result
def nearest(self)
Get the next state jump. The next jump is calculated looking at :attr:`current` state and its possible :attr:`jumps` to find the nearest and bigger option in :attr:`pending` data. If none is found, the returned next state label will be None. :returns: tuple with index, substring and next state label :rtype: tuple
5.974348
2.804595
2.1302
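A standalone sketch of the "nearest jump" search described above: among the marks reachable from the current state, pick the one appearing earliest in the pending buffer, preferring the longest mark on ties. The buffer, marks and state names are illustrative only:

.. code:: python

    # Illustrative only: the same (index, -len(mark)) ordering trick as nearest().
    pending = 'hello *world* and _more_'
    jumps = {'*': 'emphasis', '_': 'underline'}  # made-up state table

    best_key = (len(pending), 1)
    result = (len(pending), '', None)
    for mark, next_state in jumps.items():
        index = pending.find(mark)
        if index > -1:
            key = (index, -len(mark))
            if key < best_key:
                best_key = key
                result = (index, mark, next_state)

    print(result)  # (6, '*', 'emphasis')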
''' Apply the appropriate transformation function on current state data, which is supposed to end at this point. It is expected transformation logic makes use of :attr:`start`, :attr:`current` and :attr:`streaming` instance attributes to better know which state is being left. :param data: string to transform (includes start) :type data: str :param mark: string producing the new state jump :type mark: str :param next: state that is about to start, None on finish :type next: str or None :returns: transformed data :rtype: str ''' method = getattr(self, 'transform_%s' % self.current, None) return method(data, mark, next) if method else data
def transform(self, data, mark, next)
Apply the appropriate transformation function on current state data, which is supposed to end at this point. It is expected transformation logic makes use of :attr:`start`, :attr:`current` and :attr:`streaming` instance attributes to better know which state is being left. :param data: string to transform (includes start) :type data: str :param mark: string producing the new state jump :type mark: str :param next: state that is about to start, None on finish :type next: str or None :returns: transformed data :rtype: str
9.43377
1.413521
6.673952
''' Optionally add pending data, switch into streaming mode, and yield result chunks. :yields: result chunks :ytype: str ''' self.streaming = True self.pending += data for i in self: yield i
def feed(self, data='')
Optionally add pending data, switch into streaming mode, and yield result chunks. :yields: result chunks :ytype: str
13.268205
3.089566
4.29452
''' Optionally add pending data, turn off streaming mode, and yield result chunks, which implies all pending data will be consumed. :yields: result chunks :ytype: str ''' self.pending += data self.streaming = False for i in self: yield i
def finish(self, data='')
Optionally add pending data, turn off streaming mode, and yield result chunks, which implies all pending data will be consumed. :yields: result chunks :ytype: str
15.824456
2.853279
5.546061
''' Writes data on internal tarfile instance, which writes to current object, using :meth:`write`. As this method is blocking, it is used inside a thread. This method is called automatically, on a thread, on initialization, so there is little need to call it manually. ''' if self.exclude: exclude = self.exclude ap = functools.partial(os.path.join, self.path) self._tarfile.add( self.path, "", filter=lambda info: None if exclude(ap(info.name)) else info ) else: self._tarfile.add(self.path, "") self._tarfile.close() # force stream flush self._finished += 1 if not self._result.is_set(): self._result.set()
def fill(self)
Writes data on internal tarfile instance, which writes to current object, using :meth:`write`. As this method is blocking, it is used inside a thread. This method is called automatically, on a thread, on initialization, so there is little need to call it manually.
6.712301
2.762626
2.429682
''' Write method used by internal tarfile instance to output data. This method blocks tarfile execution once internal buffer is full. As this method is blocking, it is used inside the same thread of :meth:`fill`. :param data: bytes to write to internal buffer :type data: bytes :returns: number of bytes written :rtype: int ''' self._add.wait() self._data += data if len(self._data) > self._want: self._add.clear() self._result.set() return len(data)
def write(self, data)
Write method used by internal tarfile instance to output data. This method blocks tarfile execution once internal buffer is full. As this method is blocking, it is used inside the same thread of :meth:`fill`. :param data: bytes to write to internal buffer :type data: bytes :returns: number of bytes written :rtype: int
7.796598
2.11814
3.680871
''' Read method, gets data from internal buffer while releasing :meth:`write` locks when needed. The lock usage means it must run on a different thread than :meth:`fill`, i.e. the main thread, otherwise it will deadlock. The combination of both write and this method running on different threads means the tarfile is streamed on-the-fly, with data chunks being processed and retrieved on demand. :param want: number of bytes to read, defaults to 0 (all available) :type want: int :returns: tarfile data as bytes :rtype: bytes ''' if self._finished: if self._finished == 1: self._finished += 1 return "" raise EOFError("EOF reached") # Thread communication self._want = want self._add.set() self._result.wait() self._result.clear() if want: data = self._data[:want] self._data = self._data[want:] else: data = self._data self._data = bytes() return data
def read(self, want=0)
Read method, gets data from internal buffer while releasing :meth:`write` locks when needed. The lock usage means it must run on a different thread than :meth:`fill`, i.e. the main thread, otherwise it will deadlock. The combination of both write and this method running on different threads means the tarfile is streamed on-the-fly, with data chunks being processed and retrieved on demand. :param want: number of bytes to read, defaults to 0 (all available) :type want: int :returns: tarfile data as bytes :rtype: bytes
8.814275
2.069657
4.258809
''' Check if given path points to an executable file. :param path: file path :type path: str :return: True if executable, False otherwise :rtype: bool ''' return os.path.isfile(path) and os.access(path, os.X_OK)
def isexec(path)
Check if given path points to an executable file. :param path: file path :type path: str :return: True if executable, False otherwise :rtype: bool
2.306746
1.807152
1.276454
''' Decode given path. :param path: path will be decoded if using bytes :type path: bytes or str :param os_name: operating system name, defaults to os.name :type os_name: str :param fs_encoding: current filesystem encoding, defaults to autodetected :type fs_encoding: str :return: decoded path :rtype: str ''' if not isinstance(path, bytes): return path if not errors: use_strict = PY_LEGACY or os_name == 'nt' errors = 'strict' if use_strict else 'surrogateescape' return path.decode(fs_encoding, errors=errors)
def fsdecode(path, os_name=os.name, fs_encoding=FS_ENCODING, errors=None)
Decode given path. :param path: path will be decoded if using bytes :type path: bytes or str :param os_name: operating system name, defaults to os.name :type os_name: str :param fs_encoding: current filesystem encoding, defaults to autodetected :type fs_encoding: str :return: decoded path :rtype: str
3.195121
1.831847
1.744207
''' Encode given path. :param path: path will be encoded if not using bytes :type path: bytes or str :param os_name: operating system name, defaults to os.name :type os_name: str :param fs_encoding: current filesystem encoding, defaults to autodetected :type fs_encoding: str :return: encoded path :rtype: bytes ''' if isinstance(path, bytes): return path if not errors: use_strict = PY_LEGACY or os_name == 'nt' errors = 'strict' if use_strict else 'surrogateescape' return path.encode(fs_encoding, errors=errors)
def fsencode(path, os_name=os.name, fs_encoding=FS_ENCODING, errors=None)
Encode given path. :param path: path will be encoded if not using bytes :type path: bytes or str :param os_name: operating system name, defaults to os.name :type os_name: str :param fs_encoding: current filesystem encoding, defaults to autodetected :type fs_encoding: str :return: encoded path :rtype: bytes
3.135393
1.828281
1.71494
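The ``surrogateescape`` error handler chosen by ``fsdecode``/``fsencode`` above (outside legacy Python and Windows) keeps undecodable bytes round-trippable; a quick standalone check:

.. code:: python

    # Demonstrates the decode/encode round-trip that 'surrogateescape' preserves.
    raw = b'caf\xe9'  # latin-1 bytes, not valid UTF-8
    text = raw.decode('utf-8', errors='surrogateescape')
    assert text.encode('utf-8', errors='surrogateescape') == raw
    print(repr(text))  # 'caf\udce9'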
''' Get current work directory's absolute path. Like os.getcwd but guaranteed to return a unicode str object. :param fs_encoding: filesystem encoding, defaults to autodetected :type fs_encoding: str :param cwd_fnc: callable used to get the path, defaults to os.getcwd :type cwd_fnc: Callable :return: path :rtype: str ''' path = fsdecode(cwd_fnc(), fs_encoding=fs_encoding) return os.path.abspath(path)
def getcwd(fs_encoding=FS_ENCODING, cwd_fnc=os.getcwd)
Get current work directory's absolute path. Like os.getcwd but guaranteed to return a unicode str object. :param fs_encoding: filesystem encoding, defaults to autodetected :type fs_encoding: str :param cwd_fnc: callable used to get the path, defaults to os.getcwd :type cwd_fnc: Callable :return: path :rtype: str
3.821888
1.596423
2.394033
''' Get whether the app is expected to be run in debug mode by looking at environment variables. :param environ: environment dict-like object :type environ: collections.abc.Mapping :returns: True if DEBUG contains a true-like string, False otherwise :rtype: bool ''' return environ.get('DEBUG', '').lower() in true_values
def getdebug(environ=os.environ, true_values=TRUE_VALUES)
Get whether the app is expected to be run in debug mode by looking at environment variables. :param environ: environment dict-like object :type environ: collections.abc.Mapping :returns: True if DEBUG contains a true-like string, False otherwise :rtype: bool
5.751901
1.769013
3.251475
''' Decorator used to mark functions as deprecated. It will result in a warning being emitted when the function is called. Usage: >>> @deprecated ... def fnc(): ... pass Usage (custom message): >>> @deprecated('This is deprecated') ... def fnc(): ... pass :param func_or_text: message or callable to decorate :type func_or_text: callable :param environ: optional environment mapping :type environ: collections.abc.Mapping :returns: nested decorator or new decorated function (depending on params) :rtype: callable ''' def inner(func): message = ( 'Deprecated function {}.'.format(func.__name__) if callable(func_or_text) else func_or_text ) @functools.wraps(func) def new_func(*args, **kwargs): with warnings.catch_warnings(): if getdebug(environ): warnings.simplefilter('always', DeprecationWarning) warnings.warn(message, category=DeprecationWarning, stacklevel=3) return func(*args, **kwargs) return new_func return inner(func_or_text) if callable(func_or_text) else inner
def deprecated(func_or_text, environ=os.environ)
Decorator used to mark functions as deprecated. It will result in a warning being emitted when the function is called. Usage: >>> @deprecated ... def fnc(): ... pass Usage (custom message): >>> @deprecated('This is deprecated') ... def fnc(): ... pass :param func_or_text: message or callable to decorate :type func_or_text: callable :param environ: optional environment mapping :type environ: collections.abc.Mapping :returns: nested decorator or new decorated function (depending on params) :rtype: callable
2.871596
1.567268
1.83223
''' Decorator which copies __doc__ of given object into decorated one. Usage: >>> def fnc1(): ... """docstring""" ... pass >>> @usedoc(fnc1) ... def fnc2(): ... pass >>> fnc2.__doc__ 'docstring' :param other: anything with a __doc__ attribute :type other: any :returns: decorator function :rtype: callable ''' def inner(fnc): fnc.__doc__ = fnc.__doc__ or getattr(other, '__doc__') return fnc return inner
def usedoc(other)
Decorator which copies __doc__ of given object into decorated one. Usage: >>> def fnc1(): ... """docstring""" ... pass >>> @usedoc(fnc1) ... def fnc2(): ... pass >>> fnc2.__doc__ 'docstring' :param other: anything with a __doc__ attribute :type other: any :returns: decorator function :rtype: callable
4.241215
1.630279
2.601527
''' Get environment PATH elements as a list. This function only cares about splitting across OSes. :param value: path string, as given by os.environ['PATH'] :type value: str :param sep: PATH separator, defaults to os.pathsep :type sep: str :yields: every path :ytype: str ''' for part in value.split(sep): if part[:1] == part[-1:] == '"' or part[:1] == part[-1:] == '\'': part = part[1:-1] yield part
def pathsplit(value, sep=os.pathsep)
Get environment PATH elements as a list. This function only cares about splitting across OSes. :param value: path string, as given by os.environ['PATH'] :type value: str :param sep: PATH separator, defaults to os.pathsep :type sep: str :yields: every path :ytype: str
4.658226
1.698057
2.743269
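A self-contained rerun of ``pathsplit`` above, showing the quote stripping on a made-up PATH value (the ``':'`` separator is forced so the example behaves the same on any OS):

.. code:: python

    # Sketch mirroring pathsplit above.
    import os


    def pathsplit(value, sep=os.pathsep):
        for part in value.split(sep):
            if part[:1] == part[-1:] == '"' or part[:1] == part[-1:] == "'":
                part = part[1:-1]
            yield part


    print(list(pathsplit('/usr/bin:"/opt/my tools/bin"', sep=':')))
    # ['/usr/bin', '/opt/my tools/bin']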
''' Get environment PATH directories as a list. This function cares about splitting, escapes and normalization of paths across OSes. :param value: path string, as given by os.environ['PATH'] :type value: str :param sep: PATH separator, defaults to os.pathsep :type sep: str :param os_sep: OS filesystem path separator, defaults to os.sep :type os_sep: str :yields: every path :ytype: str ''' escapes = [] normpath = ntpath.normpath if os_sep == '\\' else posixpath.normpath if '\\' not in (os_sep, sep): escapes.extend(( ('\\\\', '<ESCAPE-ESCAPE>', '\\'), ('\\"', '<ESCAPE-DQUOTE>', '"'), ('\\\'', '<ESCAPE-SQUOTE>', '\''), ('\\%s' % sep, '<ESCAPE-PATHSEP>', sep), )) for original, escape, unescape in escapes: value = value.replace(original, escape) for part in pathsplit(value, sep=sep): if part[-1:] == os_sep and part != os_sep: part = part[:-1] for original, escape, unescape in escapes: part = part.replace(escape, unescape) yield normpath(fsdecode(part))
def pathparse(value, sep=os.pathsep, os_sep=os.sep)
Get environment PATH directories as a list. This function cares about splitting, escapes and normalization of paths across OSes. :param value: path string, as given by os.environ['PATH'] :type value: str :param sep: PATH separator, defaults to os.pathsep :type sep: str :param os_sep: OS filesystem path separator, defaults to os.sep :type os_sep: str :yields: every path :ytype: str
3.962665
2.269151
1.74632
''' Get all pathconf variables for given path. :param path: absolute fs path :type path: str :returns: dictionary containing pathconf keys and their values (both str) :rtype: dict ''' if pathconf_fnc and pathconf_names: return {key: pathconf_fnc(path, key) for key in pathconf_names} if os_name == 'nt': maxpath = 246 if isdir_fnc(path) else 259 # 260 minus <END> else: maxpath = 255 # conservative sane default return { 'PC_PATH_MAX': maxpath, 'PC_NAME_MAX': maxpath - len(path), }
def pathconf(path, os_name=os.name, isdir_fnc=os.path.isdir, pathconf_fnc=getattr(os, 'pathconf', None), pathconf_names=getattr(os, 'pathconf_names', ()))
Get all pathconf variables for given path. :param path: absolute fs path :type path: str :returns: dictionary containing pathconf keys and their values (both str) :rtype: dict
4.349818
3.106203
1.400365
''' Get command absolute path. :param name: name of executable command :type name: str :param env_path: OS environment executable paths, defaults to autodetected :type env_path: list of str :param is_executable_fnc: callable will be used to detect if path is executable, defaults to `isexec` :type is_executable_fnc: Callable :param path_join_fnc: callable will be used to join path components :type path_join_fnc: Callable :param os_name: os name, defaults to os.name :type os_name: str :return: absolute path :rtype: str or None ''' for path in env_path: for suffix in env_path_ext: exe_file = path_join_fnc(path, name) + suffix if is_executable_fnc(exe_file): return exe_file return None
def which(name, env_path=ENV_PATH, env_path_ext=ENV_PATHEXT, is_executable_fnc=isexec, path_join_fnc=os.path.join, os_name=os.name)
Get command absolute path. :param name: name of executable command :type name: str :param env_path: OS environment executable paths, defaults to autodetected :type env_path: list of str :param is_executable_fnc: callable will be used to detect if path is executable, defaults to `isexec` :type is_executable_fnc: Callable :param path_join_fnc: callable will be used to join path components :type path_join_fnc: Callable :param os_name: os name, defaults to os.name :type os_name: str :return: absolute path :rtype: str or None
2.384711
1.347641
1.769545
''' Escape all special regex characters in pattern. Logic taken from regex module. :param pattern: regex pattern to escape :type pattern: str :returns: escaped pattern :rtype: str ''' escape = '\\{}'.format return ''.join( escape(c) if c in chars or c.isspace() else '\\000' if c == '\x00' else c for c in pattern )
def re_escape(pattern, chars=frozenset("()[]{}?*+|^$\\.-#"))
Escape all special regex characters in pattern. Logic taken from regex module. :param pattern: regex pattern to escape :type pattern: str :returns: escaped pattern :rtype: str
4.668869
2.660342
1.754989
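A standalone check of ``re_escape`` above, reusing the default character set from its signature; the sample strings are made up:

.. code:: python

    # Sketch mirroring re_escape above: escape metacharacters, whitespace and NUL.
    import re

    CHARS = frozenset('()[]{}?*+|^$\\.-#')


    def re_escape(pattern, chars=CHARS):
        escape = '\\{}'.format
        return ''.join(
            escape(c) if c in chars or c.isspace()
            else '\\000' if c == '\x00'
            else c
            for c in pattern
        )


    print(re_escape('a.b (c)*'))                   # a\.b\ \(c\)\*
    assert re.match(re_escape('a.b') + '$', 'a.b')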
''' Register blueprints and actions using given plugin manager. :param manager: plugin manager :type manager: browsepy.manager.PluginManager ''' manager.register_blueprint(player) manager.register_mimetype_function(detect_playable_mimetype) # add style tag manager.register_widget( place='styles', type='stylesheet', endpoint='player.static', filename='css/browse.css' ) # register link actions manager.register_widget( place='entry-link', type='link', endpoint='player.audio', filter=PlayableFile.detect ) manager.register_widget( place='entry-link', icon='playlist', type='link', endpoint='player.playlist', filter=PlayListFile.detect ) # register action buttons manager.register_widget( place='entry-actions', css='play', type='button', endpoint='player.audio', filter=PlayableFile.detect ) manager.register_widget( place='entry-actions', css='play', type='button', endpoint='player.playlist', filter=PlayListFile.detect ) # check argument (see `register_arguments`) before registering if manager.get_argument('player_directory_play'): # register header button manager.register_widget( place='header', type='button', endpoint='player.directory', text='Play directory', filter=PlayableDirectory.detect )
def register_plugin(manager)
Register blueprints and actions using given plugin manager. :param manager: plugin manager :type manager: browsepy.manager.PluginManager
4.336367
3.873165
1.119593
''' Get size and unit. :param size: size in bytes :type size: int :param binary: whether to use binary or standard units, defaults to True :type binary: bool :return: size and unit :rtype: tuple of int and unit as str ''' if binary: fmt_sizes = binary_units fmt_divider = 1024. else: fmt_sizes = standard_units fmt_divider = 1000. for fmt in fmt_sizes[:-1]: if size < 1000: return (size, fmt) size /= fmt_divider return size, fmt_sizes[-1]
def fmt_size(size, binary=True)
Get size and unit. :param size: size in bytes :type size: int :param binary: whether to use binary or standard units, defaults to True :type binary: bool :return: size and unit :rtype: tuple of number and unit as str
3.245342
2.208997
1.469147
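A small sketch, assuming ``fmt_size`` and the module-level ``binary_units`` tuple above are in scope; the 'KiB' label is an assumption about that tuple's second entry.

# Sketch assuming `fmt_size` and its unit tuples are in scope.
size, unit = fmt_size(1536, binary=True)
print('%.1f %s' % (size, unit))   # expected output: 1.5 KiB (unit label assumed)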
if not check_base(path, base, os_sep):
    raise OutsideDirectoryBase("%r is not under %r" % (path, base))
prefix_len = len(base)
if not base.endswith(os_sep):
    prefix_len += len(os_sep)
return path[prefix_len:]
def relativize_path(path, base, os_sep=os.sep)
Make absolute path relative to an absolute base. :param path: absolute path :type path: str :param base: absolute base path :type base: str :param os_sep: path component separator, defaults to current OS separator :type os_sep: str :return: relative path :rtype: str or unicode :raises OutsideDirectoryBase: if path is not below base
2.966889
1.746231
1.699024
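For illustration, a POSIX-flavoured sketch (the example paths are assumptions) showing the relation between path and base, assuming ``relativize_path`` and ``OutsideDirectoryBase`` above are in scope.

# Sketch assuming `relativize_path` and `OutsideDirectoryBase` are in scope.
print(relativize_path('/srv/files/music/song.mp3', '/srv/files', os_sep='/'))
# -> 'music/song.mp3'

try:
    relativize_path('/etc/passwd', '/srv/files', os_sep='/')
except OutsideDirectoryBase as e:
    print('rejected: %s' % e)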
return relativize_path(path, base, os_sep).replace(os_sep, '/')
def abspath_to_urlpath(path, base, os_sep=os.sep)
Make filesystem absolute path relative to the given absolute base path and return it as a URL path (forward-slash separated). :param path: absolute path :param base: absolute base path :param os_sep: path component separator, defaults to current OS separator :return: relative URL path :rtype: str or unicode :raises OutsideDirectoryBase: if resulting path is not below base
6.331386
1.84151
3.438148
prefix = base if base.endswith(os_sep) else base + os_sep
realpath = os.path.abspath(prefix + path.replace('/', os_sep))
if check_path(base, realpath) or check_under_base(realpath, base):
    return realpath
raise OutsideDirectoryBase("%r is not under %r" % (realpath, base))
def urlpath_to_abspath(path, base, os_sep=os.sep)
Make relative URL path absolute on the filesystem using the given absolute base path. :param path: relative path :param base: absolute base path :param os_sep: path component separator, defaults to current OS separator :return: absolute path :rtype: str or unicode :raises OutsideDirectoryBase: if resulting path is not below base
4.784768
2.175261
2.199629
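The two converters above are meant to round-trip; a POSIX-flavoured sketch (the example paths are assumptions), assuming both functions and ``OutsideDirectoryBase`` are in scope.

# Sketch assuming the converters and OutsideDirectoryBase above are in scope.
base = '/srv/files'
url = abspath_to_urlpath('/srv/files/music/song.mp3', base, os_sep='/')
print(url)                                        # -> 'music/song.mp3'
print(urlpath_to_abspath(url, base, os_sep='/'))  # -> '/srv/files/music/song.mp3'

try:
    urlpath_to_abspath('../../etc/passwd', base, os_sep='/')
except OutsideDirectoryBase as e:
    print('rejected: %s' % e)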
for sep in common_path_separators:
    if sep in path:
        _, path = path.rsplit(sep, 1)
return path
def generic_filename(path)
Extract filename of given path OS-independently, taking care of known path separators. :param path: path :return: filename :rtype: str or unicode (depending on given path)
6.754783
2.422504
2.788348
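A sketch assuming ``generic_filename`` above is in scope and that the module-level ``common_path_separators`` contains both '/' and '\\' (an assumption about that constant).

# Sketch assuming `generic_filename` above is in scope.
print(generic_filename('/tmp/uploads/report.pdf'))    # -> 'report.pdf'
print(generic_filename('C:\\Users\\me\\report.pdf'))  # -> 'report.pdf'
print(generic_filename('mixed/style\\report.pdf'))    # -> 'report.pdf'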
for character in restricted_chars:
    path = path.replace(character, '_')
return path
def clean_restricted_chars(path, restricted_chars=restricted_chars)
Get path with restricted characters replaced by underscores. :param path: path :param restricted_chars: characters to replace, defaults to module default :return: path without restricted characters :rtype: str or unicode (depending on given path)
4.21301
2.109175
1.997468
return (
    filename in restricted_names or
    destiny_os == 'nt' and
    filename.split('.', 1)[0].upper() in nt_device_names
    )
def check_forbidden_filename(filename, destiny_os=os.name, restricted_names=restricted_names)
Get if given filename is forbidden for current OS or filesystem. :param filename: filename to check :param destiny_os: destination operating system :param restricted_names: restricted filenames, defaults to module default :return: whether it is forbidden on given OS (or filesystem) or not :rtype: bool
7.240443
2.467282
2.934583
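A sketch assuming ``check_forbidden_filename`` and the module-level name sets are in scope; that ``nt_device_names`` contains 'CON' mirrors the usual Windows device list and is an assumption about that constant.

# Sketch assuming `check_forbidden_filename` above is in scope.
print(check_forbidden_filename('CON.txt', destiny_os='nt'))       # -> True (assumed NT device name)
print(check_forbidden_filename('report.txt', destiny_os='nt'))    # -> False (assuming not restricted)
print(check_forbidden_filename('report.txt', destiny_os='posix')) # -> False (assuming not restricted)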
base = base[:-len(os_sep)] if base.endswith(os_sep) else base
return os.path.normcase(path) == os.path.normcase(base)
def check_path(path, base, os_sep=os.sep)
Check if both given paths are equal. :param path: absolute path :type path: str :param base: absolute base path :type base: str :param os_sep: path separator, defaults to os.sep :type os_sep: str :return: whether the two paths are equal or not :rtype: bool
2.571407
1.661463
1.547676
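A quick sketch of the trailing-separator handling, assuming ``check_path`` above is in scope (POSIX-style example paths are assumptions).

# Sketch assuming `check_path` above is in scope.
print(check_path('/srv/files', '/srv/files/', os_sep='/'))       # -> True (trailing sep stripped)
print(check_path('/srv/files/music', '/srv/files', os_sep='/'))  # -> False (nested, not equal)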
return (
    check_path(path, base, os_sep) or
    check_under_base(path, base, os_sep)
    )
def check_base(path, base, os_sep=os.sep)
Check if given absolute path is under or equal to given base. :param path: absolute path :type path: str :param base: absolute base path :type base: str :param os_sep: path separator, defaults to os.sep :return: whether path is under or equal to given base or not :rtype: bool
3.550072
2.029801
1.748976
prefix = base if base.endswith(os_sep) else base + os_sep
return os.path.normcase(path).startswith(os.path.normcase(prefix))
def check_under_base(path, base, os_sep=os.sep)
Check if given absolute path is under given base. :param path: absolute path :type path: str :param base: absolute base path :type base: str :param os_sep: path separator, defaults to os.sep :return: whether path is under given base or not :rtype: bool
2.485843
1.680724
1.479031
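A sketch contrasting the three checks above, assuming they are in scope (POSIX-style example paths are assumptions).

# Sketch assuming check_path, check_under_base and check_base are in scope.
base = '/srv/files'
print(check_under_base('/srv/files/music/song.mp3', base, os_sep='/'))  # -> True
print(check_under_base('/srv/files', base, os_sep='/'))                 # -> False (equal, not under)
print(check_base('/srv/files', base, os_sep='/'))                       # -> True (equal or under)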
path = generic_filename(path)
path = clean_restricted_chars(
    path,
    restricted_chars=(
        nt_restricted_chars
        if destiny_os == 'nt' else
        restricted_chars
        ))
path = path.strip(' .')  # required by nt, recommended for others

if check_forbidden_filename(path, destiny_os=destiny_os):
    return ''

if isinstance(path, bytes):
    path = path.decode('latin-1', errors=underscore_replace)

# Decode and recover from filesystem encoding in order to strip unwanted
# characters out
kwargs = {
    'os_name': destiny_os,
    'fs_encoding': fs_encoding,
    'errors': underscore_replace,
    }
fs_encoded_path = compat.fsencode(path, **kwargs)
fs_decoded_path = compat.fsdecode(fs_encoded_path, **kwargs)
return fs_decoded_path
def secure_filename(path, destiny_os=os.name, fs_encoding=compat.FS_ENCODING)
Get rid of parent path components and special filenames. If path is invalid or protected, return empty string. :param path: unsafe path, only basename will be used :type path: str :param destiny_os: destination operating system (defaults to os.name) :type destiny_os: str :param fs_encoding: fs path encoding (defaults to detected) :type fs_encoding: str :return: filename or empty string :rtype: str
4.784269
2.961747
1.615354
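A sketch of typical sanitisation outcomes, assuming ``secure_filename`` and the module constants it relies on are in scope; the exact restricted character and device-name sets are assumptions.

# Sketch assuming `secure_filename` above is in scope.
print(secure_filename('../../etc/passwd'))               # -> 'passwd' (parent components dropped)
print(secure_filename('CON.txt', destiny_os='nt'))        # -> '' (assuming 'CON' is an NT device name)
print(secure_filename('my:file?.txt', destiny_os='nt'))   # -> 'my_file_.txt' (assuming ':' and '?' are NT-restricted)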
filename_parts = filename.rsplit(u'.', 2)
name = filename_parts[0]
ext = ''.join(u'.%s' % ext for ext in filename_parts[1:])
if attempt is None:
    choose = random.choice
    extra = u' %s' % ''.join(choose(fs_safe_characters) for i in range(8))
else:
    extra = u' (%d)' % attempt
return u'%s%s%s' % (name, extra, ext)
def alternative_filename(filename, attempt=None)
Generate an alternative version of given filename. If an attempt number is given, it will be used in the alternative name; otherwise a random suffix will be used. :param filename: original filename :param attempt: optional attempt number, defaults to None :return: new filename :rtype: str or unicode
4.284482
2.220392
1.929607
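Deterministic behaviour with an explicit attempt number, plus double-extension handling, assuming ``alternative_filename`` above is in scope.

# Sketch assuming `alternative_filename` above is in scope.
print(alternative_filename('song.mp3', attempt=2))        # -> 'song (2).mp3'
print(alternative_filename('archive.tar.gz', attempt=1))  # -> 'archive (1).tar.gz'
print(alternative_filename('song.mp3'))  # -> e.g. 'song XXXXXXXX.mp3' with 8 random safe characters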
exclude = app and app.config.get('exclude_fnc')
if exclude:
    return (
        item
        for item in compat.scandir(path)
        if not exclude(item.path)
        )
return compat.scandir(path)
def scandir(path, app=None)
Config-aware scandir. Currently, only aware of ``exclude_fnc``. :param path: absolute path :type path: str :param app: flask application :type app: flask.Flask or None :returns: filtered scandir entries :rtype: iterator
4.536726
1.998362
2.270222
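A sketch wiring ``exclude_fnc`` through a Flask app config, assuming the config-aware ``scandir`` wrapper above is in scope; hiding dot-files is just an illustrative policy.

import os
import flask

# Sketch assuming the config-aware `scandir` above is in scope.
app = flask.Flask(__name__)
app.config['exclude_fnc'] = lambda p: os.path.basename(p).startswith('.')

for entry in scandir(os.path.expanduser('~'), app=app):
    print(entry.name)   # hidden entries are filtered out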
exclude = self.app and self.app.config['exclude_fnc']
return exclude and exclude(self.path)
def is_excluded(self)
Get if current node shouldn't be shown, using :attr:`app` config's exclude_fnc. :returns: True if excluded, False otherwise
12.653529
2.853751
4.433998
widgets = []
if self.can_remove:
    widgets.append(
        self.plugin_manager.create_widget(
            'entry-actions',
            'button',
            file=self,
            css='remove',
            endpoint='remove'
            )
        )
return widgets + self.plugin_manager.get_widgets(file=self)
def widgets(self)
List widgets whose filter returns True for this node (or which have no filter). Remove button is prepended if :property:`can_remove` returns True. :returns: list of widgets :rtype: list of namedtuple instances
10.695329
3.403943
3.142041
link = None
for widget in self.widgets:
    if widget.place == 'entry-link':
        link = widget
return link
def link(self)
Get last widget with place "entry-link". :returns: widget on entry-link (ideally a link one) :rtype: namedtuple instance
11.433499
2.102833
5.43719
dirbase = self.app.config["directory_remove"]
return bool(dirbase and check_under_base(self.path, dirbase))
def can_remove(self)
Get if current node can be removed based on app config's directory_remove. :returns: True if current node can be removed, False otherwise. :rtype: bool
7.797467
3.679208
2.119333
if check_path(self.path, self.app.config['directory_base']):
    return None
parent = os.path.dirname(self.path) if self.path else None
return self.directory_class(parent, self.app) if parent else None
def parent(self)
Get parent node if available based on app config's directory_base. :returns: parent object if available :rtype: Node instance or None
6.429599
2.795227
2.300206
ancestors = []
parent = self.parent
while parent:
    ancestors.append(parent)
    parent = parent.parent
return ancestors
def ancestors(self)
Get list of ancestors until app config's directory_base is reached. :returns: list of ancestors starting from nearest. :rtype: list of Node objects
6.736017
1.84556
3.649849
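The parent/ancestors pair above walks up until the configured directory_base is reached; a simplified stand-in sketch (the FakeNode class below is purely hypothetical, not browsepy's Node) shows the traversal pattern.

import os

# Purely illustrative stand-in, not browsepy's Node class.
class FakeNode(object):
    base = '/srv/files'

    def __init__(self, path):
        self.path = path

    @property
    def parent(self):
        # Stop at the base directory, mirroring the parent property above.
        if os.path.normcase(self.path) == os.path.normcase(self.base):
            return None
        return FakeNode(os.path.dirname(self.path))

    @property
    def ancestors(self):
        ancestors = []
        parent = self.parent
        while parent:
            ancestors.append(parent)
            parent = parent.parent
        return ancestors

node = FakeNode('/srv/files/music/album/song.mp3')
print([a.path for a in node.ancestors])
# -> ['/srv/files/music/album', '/srv/files/music', '/srv/files']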
try:
    dt = datetime.datetime.fromtimestamp(self.stats.st_mtime)
    return dt.strftime('%Y.%m.%d %H:%M:%S')
except OSError:
    return None
def modified(self)
Get human-readable last modification date-time. :returns: ISO 8601-like date-time string (without timezone) :rtype: str
6.378938
2.299894
2.773579
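The formatting applied by the property is plain strftime over st_mtime; a standalone sketch of the same conversion using only the standard library.

import datetime
import os

# Standalone sketch of the same mtime formatting used by the property above.
stats = os.stat('.')
dt = datetime.datetime.fromtimestamp(stats.st_mtime)
print(dt.strftime('%Y.%m.%d %H:%M:%S'))   # e.g. '2024.01.31 18:05:12'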