code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
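Each record below pairs a flattened Python function body (code) with its signature, docstring, and two loss values (with and without the docstring); the factor column matches the ratio of the two losses. A minimal sketch, assuming each record can be held as a plain dict keyed by the field names above (the dict layout is an illustration, not a documented API; the values are copied from the first record):

# Minimal sketch (assumed layout): one record of this table as a plain dict.
record = {
    "code": "cmdlist = [cmd] if verbosity > 1: cmdlist.append('v') else: cmdlist.append('l') cmdlist.append(archive) return cmdlist",
    "signature": "def list_lzh (archive, compression, cmd, verbosity, interactive)",
    "docstring": "List an LZH archive.",
    "loss_without_docstring": 3.410997,
    "loss_with_docstring": 3.385232,
    "factor": 1.007611,
}

# The factor column is consistent with the ratio of the two losses.
ratio = record["loss_without_docstring"] / record["loss_with_docstring"]
assert abs(ratio - record["factor"]) < 1e-5
print(f"loss ratio without/with docstring: {ratio:.6f}")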
cmdlist = [cmd] if verbosity > 1: cmdlist.append('v') else: cmdlist.append('l') cmdlist.append(archive) return cmdlist
def list_lzh (archive, compression, cmd, verbosity, interactive)
List an LZH archive.
3.410997
3.385232
1.007611
outfile = util.get_single_outfile(outdir, archive, extension=".wav") return [cmd, archive, outfile, '-d']
def extract_ape (archive, compression, cmd, verbosity, interactive, outdir)
Decompress an APE archive to a WAV file.
17.676718
11.944559
1.479897
return [cmd, archive, '-d', outdir]
def extract_adf (archive, compression, cmd, verbosity, interactive, outdir)
Extract an ADF archive.
23.078583
23.229021
0.993524
cmdlist = [cmd, '-d'] if verbosity > 1: cmdlist.append('-v') outfile = util.get_single_outfile(outdir, archive) cmdlist.extend(["-o", outfile, os.path.abspath(archive)]) return cmdlist
def extract_lrzip (archive, compression, cmd, verbosity, interactive, outdir)
Extract an LRZIP archive.
4.187282
4.201101
0.996711
return stripext(cmd, archive, verbosity)
def list_bzip2 (archive, compression, cmd, verbosity, interactive)
List a BZIP2 archive.
65.971817
98.84491
0.667428
return stripext(cmd, archive, verbosity, extension=".wav")
def list_ape (archive, compression, cmd, verbosity, interactive)
List an APE archive.
96.634254
136.753677
0.70663
if verbosity >= 0: print(util.stripext(archive)+extension) return None
def stripext (cmd, archive, verbosity, extension="")
Print the name without suffix.
10.681758
7.475055
1.428987
opts = 'x' if verbosity > 1: opts += 'v' cmdlist = [cmd, opts, os.path.abspath(archive)] return (cmdlist, {'cwd': outdir})
def extract_ar (archive, compression, cmd, verbosity, interactive, outdir)
Extract an AR archive.
5.527061
5.376676
1.02797
opts = 't' if verbosity > 1: opts += 'v' return [cmd, opts, archive]
def list_ar (archive, compression, cmd, verbosity, interactive)
List an AR archive.
7.022075
6.965822
1.008076
opts = 'rc' if verbosity > 1: opts += 'v' cmdlist = [cmd, opts, archive] cmdlist.extend(filenames) return cmdlist
def create_ar (archive, compression, cmd, verbosity, interactive, filenames)
Create an AR archive.
5.150222
4.740026
1.086539
cmdlist = [cmd, '-d', outdir] if verbosity > 0: cmdlist.append('-v') cmdlist.append(archive) return cmdlist
def extract_cab (archive, compression, cmd, verbosity, interactive, outdir)
Extract a CAB archive.
3.467714
3.441693
1.00756
cmdlist = [cmd, '-l'] if verbosity > 0: cmdlist.append('-v') cmdlist.append(archive) return cmdlist
def list_cab (archive, compression, cmd, verbosity, interactive)
List a CAB archive.
3.377882
3.383836
0.998241
cmdlist = [cmd, '-d', '-k'] if verbosity > 1: cmdlist.append('-v') outfile = util.get_single_outfile(outdir, archive) cmdlist.extend(["-o", outfile, archive]) return cmdlist
def extract_rzip (archive, compression, cmd, verbosity, interactive, outdir)
Extract an RZIP archive.
4.355125
4.314899
1.009323
if program in ('tar', ): return compression in ('gzip', 'bzip2', 'xz', 'lzip', 'compress', 'lzma') + py_lzma elif program in ('star', 'bsdtar', 'py_tarfile'): return compression in ('gzip', 'bzip2') + py_lzma return False
def program_supports_compression (program, compression)
Decide if the given program supports the compression natively. @return: True iff the program supports the given compression format natively, else False.
6.856938
6.925399
0.990115
mime, compression = util.guess_mime(filename) if not (mime or compression): raise util.PatoolError("unknown archive format for file `%s'" % filename) if mime in ArchiveMimetypes: format = ArchiveMimetypes[mime] else: raise util.PatoolError("unknown archive format for file `%s' (mime-type is `%s')" % (filename, mime)) if format == compression: # file cannot be in same format compressed compression = None return format, compression
def get_archive_format (filename)
Detect filename archive format and optional compression.
4.876785
4.569541
1.067237
if format not in ArchiveFormats: raise util.PatoolError("unknown archive format `%s'" % format) if compression is not None and compression not in ArchiveCompressions: raise util.PatoolError("unknown archive compression `%s'" % compression)
def check_archive_format (format, compression)
Make sure format and compression are known.
3.874162
3.431456
1.129014
commands = ArchivePrograms[format] programs = [] if program is not None: # try a specific program first programs.append(program) # first try the universal programs with key None for key in (None, command): if key in commands: programs.extend(commands[key]) if not programs: raise util.PatoolError("%s archive format `%s' is not supported" % (command, format)) # return the first existing program for program in programs: if program.startswith('py_'): # it's a Python module and therefore always supported return program exe = util.find_program(program) if exe: if program == '7z' and format == 'rar' and not util.p7zip_supports_rar(): continue return exe # no programs found raise util.PatoolError("could not find an executable program to %s format %s; candidates are (%s)," % (command, format, ",".join(programs)))
def find_archive_program (format, command, program=None)
Find suitable archive program for given format and mode.
5.506076
5.401168
1.019423
print("Archive programs of", App) print("Archive programs are searched in the following directories:") print(util.system_search_path()) print() for format in ArchiveFormats: print(format, "files:") for command in ArchiveCommands: programs = ArchivePrograms[format] if command not in programs and None not in programs: print(" %8s: - (not supported)" % command) continue try: program = find_archive_program(format, command) print(" %8s: %s" % (command, program), end=' ') if format == 'tar': encs = [x for x in ArchiveCompressions if util.find_program(x)] if encs: print("(supported compressions: %s)" % ", ".join(encs), end=' ') elif format == '7z': if util.p7zip_supports_rar(): print("(rar archives supported)", end=' ') else: print("(rar archives not supported)", end=' ') print() except util.PatoolError: # display information what programs can handle this archive format handlers = programs.get(None, programs.get(command)) print(" %8s: - (no program found; install %s)" % (command, util.strlist_with_or(handlers)))
def list_formats ()
Print information about available archive formats to stdout.
5.283613
5.087169
1.038616
program = os.path.basename(program) if compression: # check if compression is supported if not program_supports_compression(program, compression): if command == 'create': comp_command = command else: comp_command = 'extract' comp_prog = find_archive_program(compression, comp_command) if not comp_prog: msg = "cannot %s archive `%s': compression `%s' not supported" raise util.PatoolError(msg % (command, archive, compression))
def check_program_compression(archive, command, program, compression)
Check if a program supports the given compression.
4.210064
4.278527
0.983999
entries = os.listdir(outdir) if len(entries) == 1: src = os.path.join(outdir, entries[0]) dst = os.path.join(os.path.dirname(outdir), entries[0]) if os.path.exists(dst) or os.path.islink(dst): return (False, "local file exists") shutil.move(src, dst) os.rmdir(outdir) return (True, entries[0]) return (False, "multiple files in root")
def move_outdir_orphan (outdir)
Move a single file or directory inside outdir a level up. Never overwrite files. Return (True, outfile) if successful, (False, reason) if not.
2.41282
2.229447
1.08225
# archive_cmdlist is a command list with optional keyword arguments if isinstance(archive_cmdlist, tuple): cmdlist, runkwargs = archive_cmdlist else: cmdlist, runkwargs = archive_cmdlist, {} return util.run_checked(cmdlist, verbosity=verbosity, **runkwargs)
def run_archive_cmdlist (archive_cmdlist, verbosity=0)
Run archive command.
4.073823
3.953458
1.030445
if not os.path.islink(filename): util.set_mode(filename, stat.S_IRUSR)
def make_file_readable (filename)
Make file user readable if it is not a link.
4.581609
3.232358
1.41742
for root, dirs, files in os.walk(directory, onerror=util.log_error): for filename in files: make_file_readable(os.path.join(root, filename)) for dirname in dirs: make_dir_readable(os.path.join(root, dirname))
def make_user_readable (directory)
Make all files in given directory user readable. Also recurse into subdirectories.
2.265985
2.193091
1.033238
make_user_readable(outdir) # move single directory or file in outdir (success, msg) = move_outdir_orphan(outdir) if success: # msg is a single directory or filename return msg, "`%s'" % msg # outdir remains unchanged # rename it to something more user-friendly (basically the archive # name without extension) outdir2 = util.get_single_outfile("", archive) os.rename(outdir, outdir2) return outdir2, "`%s' (%s)" % (outdir2, msg)
def cleanup_outdir (outdir, archive)
Cleanup outdir after extraction and return target file name and result string.
9.453372
9.103266
1.038459
if format is None: format, compression = get_archive_format(archive) check_archive_format(format, compression) program = find_archive_program(format, 'extract', program=program) check_program_compression(archive, 'extract', program, compression) get_archive_cmdlist = get_archive_cmdlist_func(program, 'extract', format) if outdir is None: outdir = util.tmpdir(dir=".") do_cleanup_outdir = True else: do_cleanup_outdir = False try: cmdlist = get_archive_cmdlist(archive, compression, program, verbosity, interactive, outdir) if cmdlist: # an empty command list means the get_archive_cmdlist() function # already handled the command (eg. when it's a builtin Python # function) run_archive_cmdlist(cmdlist, verbosity=verbosity) if do_cleanup_outdir: target, msg = cleanup_outdir(outdir, archive) else: target, msg = outdir, "`%s'" % outdir if verbosity >= 0: util.log_info("... %s extracted to %s." % (archive, msg)) return target finally: # try to remove an empty temporary output directory if do_cleanup_outdir: try: os.rmdir(outdir) except OSError: pass
def _extract_archive(archive, verbosity=0, interactive=True, outdir=None, program=None, format=None, compression=None)
Extract an archive. @return: output directory if command is 'extract', else None
3.879462
3.907402
0.99285
if format is None: format, compression = get_archive_format(archive) check_archive_format(format, compression) program = find_archive_program(format, 'create', program=program) check_program_compression(archive, 'create', program, compression) get_archive_cmdlist = get_archive_cmdlist_func(program, 'create', format) origarchive = None if os.path.basename(program) == 'arc' and \ ".arc" in archive and not archive.endswith(".arc"): # the arc program mangles the archive name if it contains ".arc" origarchive = archive archive = util.tmpfile(dir=os.path.dirname(archive), suffix=".arc") cmdlist = get_archive_cmdlist(archive, compression, program, verbosity, interactive, filenames) if cmdlist: # an empty command list means the get_archive_cmdlist() function # already handled the command (eg. when it's a builtin Python # function) run_archive_cmdlist(cmdlist, verbosity=verbosity) if origarchive: shutil.move(archive, origarchive)
def _create_archive(archive, filenames, verbosity=0, interactive=True, program=None, format=None, compression=None)
Create an archive.
4.463144
4.494586
0.993004
if format is None: format, compression = get_archive_format(archive) check_archive_format(format, compression) if command not in ('list', 'test'): raise util.PatoolError("invalid archive command `%s'" % command) program = find_archive_program(format, command, program=program) check_program_compression(archive, command, program, compression) get_archive_cmdlist = get_archive_cmdlist_func(program, command, format) # prepare keyword arguments for command list cmdlist = get_archive_cmdlist(archive, compression, program, verbosity, interactive) if cmdlist: # an empty command list means the get_archive_cmdlist() function # already handled the command (eg. when it's a builtin Python # function) run_archive_cmdlist(cmdlist, verbosity=verbosity)
def _handle_archive(archive, command, verbosity=0, interactive=True, program=None, format=None, compression=None)
Test and list archives.
4.847889
4.831723
1.003346
# get python module for given archive program key = util.stripext(os.path.basename(program).lower()) modulename = ".programs." + ProgramModules.get(key, key) # import the module try: module = importlib.import_module(modulename, __name__) except ImportError as msg: raise util.PatoolError(msg) # get archive handler function (eg. patoolib.programs.star.extract_tar) try: return getattr(module, '%s_%s' % (command, format)) except AttributeError as msg: raise util.PatoolError(msg)
def get_archive_cmdlist_func (program, command, format)
Get the Python function that executes the given program.
5.321344
5.54809
0.959131
msg = "Error in %s(%s): %s" % (func.__name__, path, str(exc[1])) util.log_error(msg)
def rmtree_log_error (func, path, exc)
Error function for shutil.rmtree(). Raises a PatoolError.
4.438535
4.454053
0.996516
if util.is_same_file(archive1, archive2): return 0 diff = util.find_program("diff") if not diff: msg = "The diff(1) program is required for showing archive differences, please install it." raise util.PatoolError(msg) tmpdir1 = util.tmpdir() try: path1 = _extract_archive(archive1, outdir=tmpdir1, verbosity=-1) tmpdir2 = util.tmpdir() try: path2 = _extract_archive(archive2, outdir=tmpdir2, verbosity=-1) return util.run_checked([diff, "-urN", path1, path2], verbosity=1, ret_ok=(0, 1)) finally: shutil.rmtree(tmpdir2, onerror=rmtree_log_error) finally: shutil.rmtree(tmpdir1, onerror=rmtree_log_error)
def _diff_archives (archive1, archive2, verbosity=0, interactive=True)
Show differences between two archives. @return 0 if archives are the same, else 1 @raises: PatoolError on errors
3.242328
2.992022
1.083658
grep = util.find_program("grep") if not grep: msg = "The grep(1) program is required for searching archive contents, please install it." raise util.PatoolError(msg) tmpdir = util.tmpdir() try: path = _extract_archive(archive, outdir=tmpdir, verbosity=-1) return util.run_checked([grep, "-r", "-e", pattern, "."], ret_ok=(0, 1), verbosity=1, cwd=path) finally: shutil.rmtree(tmpdir, onerror=rmtree_log_error)
def _search_archive(pattern, archive, verbosity=0, interactive=True)
Search for given pattern in an archive.
5.236221
5.126387
1.021425
format1, compression1 = get_archive_format(archive1) format2, compression2 = get_archive_format(archive2) if format1 == format2 and compression1 == compression2: # same format and compression allows to copy the file util.link_or_copy(archive1, archive2, verbosity=verbosity) return tmpdir = util.tmpdir() try: kwargs = dict(verbosity=verbosity, outdir=tmpdir) same_format = (format1 == format2 and compression1 and compression2) if same_format: # only decompress since the format is the same kwargs['format'] = compression1 path = _extract_archive(archive1, **kwargs) archive = os.path.abspath(archive2) files = tuple(os.listdir(path)) olddir = os.getcwd() os.chdir(path) try: kwargs = dict(verbosity=verbosity, interactive=interactive) if same_format: # only compress since the format is the same kwargs['format'] = compression2 _create_archive(archive, files, **kwargs) finally: os.chdir(olddir) finally: shutil.rmtree(tmpdir, onerror=rmtree_log_error)
def _repack_archive (archive1, archive2, verbosity=0, interactive=True)
Repackage an archive to a different format.
3.072739
2.991745
1.027073
format, compression = get_archive_format(archive) if compression: # only recompress the compression itself (eg. for .tar.xz) format = compression tmpdir = util.tmpdir() tmpdir2 = util.tmpdir() base, ext = os.path.splitext(os.path.basename(archive)) archive2 = util.get_single_outfile(tmpdir2, base, extension=ext) try: # extract kwargs = dict(verbosity=verbosity, format=format, outdir=tmpdir) path = _extract_archive(archive, **kwargs) # compress to new file olddir = os.getcwd() os.chdir(path) try: kwargs = dict(verbosity=verbosity, interactive=interactive, format=format) files = tuple(os.listdir(path)) _create_archive(archive2, files, **kwargs) finally: os.chdir(olddir) # check file sizes and replace if new file is smaller filesize = util.get_filesize(archive) filesize2 = util.get_filesize(archive2) if filesize2 < filesize: # replace file os.remove(archive) shutil.move(archive2, archive) diffsize = filesize - filesize2 return "... recompressed file is now %s smaller." % util.strsize(diffsize) finally: shutil.rmtree(tmpdir, onerror=rmtree_log_error) shutil.rmtree(tmpdir2, onerror=rmtree_log_error) return "... recompressed file is not smaller, leaving archive as is."
def _recompress_archive(archive, verbosity=0, interactive=True)
Try to recompress an archive to smaller size.
3.693299
3.662635
1.008372
util.check_existing_filename(archive) if verbosity >= 0: util.log_info("Extracting %s ..." % archive) return _extract_archive(archive, verbosity=verbosity, interactive=interactive, outdir=outdir, program=program)
def extract_archive(archive, verbosity=0, outdir=None, program=None, interactive=True)
Extract given archive.
3.589395
3.590453
0.999705
# Set default verbosity to 1 since the listing output should be visible. util.check_existing_filename(archive) if verbosity >= 0: util.log_info("Listing %s ..." % archive) return _handle_archive(archive, 'list', verbosity=verbosity, interactive=interactive, program=program)
def list_archive(archive, verbosity=1, program=None, interactive=True)
List given archive.
7.709162
7.471518
1.031807
util.check_new_filename(archive) util.check_archive_filelist(filenames) if verbosity >= 0: util.log_info("Creating %s ..." % archive) res = _create_archive(archive, filenames, verbosity=verbosity, interactive=interactive, program=program) if verbosity >= 0: util.log_info("... %s created." % archive) return res
def create_archive(archive, filenames, verbosity=0, program=None, interactive=True)
Create given archive with given files.
2.999855
2.984551
1.005128
util.check_existing_filename(archive1) util.check_existing_filename(archive2) if verbosity >= 0: util.log_info("Comparing %s with %s ..." % (archive1, archive2)) res = _diff_archives(archive1, archive2, verbosity=verbosity, interactive=interactive) if res == 0 and verbosity >= 0: util.log_info("... no differences found.")
def diff_archives(archive1, archive2, verbosity=0, interactive=True)
Print differences between two archives.
2.762937
2.772002
0.99673
if not pattern: raise util.PatoolError("empty search pattern") util.check_existing_filename(archive) if verbosity >= 0: util.log_info("Searching %r in %s ..." % (pattern, archive)) res = _search_archive(pattern, archive, verbosity=verbosity, interactive=interactive) if res == 1 and verbosity >= 0: util.log_info("... %r not found" % pattern) return res
def search_archive(pattern, archive, verbosity=0, interactive=True)
Search pattern in archive members.
3.704334
3.663782
1.011068
util.check_existing_filename(archive) util.check_new_filename(archive_new) if verbosity >= 0: util.log_info("Repacking %s to %s ..." % (archive, archive_new)) res = _repack_archive(archive, archive_new, verbosity=verbosity, interactive=interactive) if verbosity >= 0: util.log_info("... repacking successful.") return res
def repack_archive (archive, archive_new, verbosity=0, interactive=True)
Repack archive to different file and/or format.
2.794281
2.658222
1.051184
util.check_existing_filename(archive) util.check_writable_filename(archive) if verbosity >= 0: util.log_info("Recompressing %s ..." % (archive,)) res = _recompress_archive(archive, verbosity=verbosity, interactive=interactive) if res and verbosity >= 0: util.log_info(res) return 0
def recompress_archive(archive, verbosity=0, interactive=True)
Recompress an archive to hopefully smaller size.
3.365462
3.350877
1.004352
global mimedb try: mimedb = mimetypes.MimeTypes(strict=False) except Exception as msg: log_error("could not initialize MIME database: %s" % msg) return add_mimedb_data(mimedb)
def init_mimedb()
Initialize the internal MIME database.
4.495564
4.603543
0.976544
mimedb.encodings_map['.bz2'] = 'bzip2' mimedb.encodings_map['.lzma'] = 'lzma' mimedb.encodings_map['.xz'] = 'xz' mimedb.encodings_map['.lz'] = 'lzip' mimedb.suffix_map['.tbz2'] = '.tar.bz2' add_mimetype(mimedb, 'application/x-lzop', '.lzo') add_mimetype(mimedb, 'application/x-adf', '.adf') add_mimetype(mimedb, 'application/x-arj', '.arj') add_mimetype(mimedb, 'application/x-lzma', '.lzma') add_mimetype(mimedb, 'application/x-xz', '.xz') add_mimetype(mimedb, 'application/java-archive', '.jar') add_mimetype(mimedb, 'application/x-rar', '.rar') add_mimetype(mimedb, 'application/x-rar', '.cbr') add_mimetype(mimedb, 'application/x-7z-compressed', '.7z') add_mimetype(mimedb, 'application/x-7z-compressed', '.cb7') add_mimetype(mimedb, 'application/x-cab', '.cab') add_mimetype(mimedb, 'application/x-rpm', '.rpm') add_mimetype(mimedb, 'application/x-debian-package', '.deb') add_mimetype(mimedb, 'application/x-ace', '.ace') add_mimetype(mimedb, 'application/x-ace', '.cba') add_mimetype(mimedb, 'application/x-archive', '.a') add_mimetype(mimedb, 'application/x-alzip', '.alz') add_mimetype(mimedb, 'application/x-arc', '.arc') add_mimetype(mimedb, 'application/x-lrzip', '.lrz') add_mimetype(mimedb, 'application/x-lha', '.lha') add_mimetype(mimedb, 'application/x-lzh', '.lzh') add_mimetype(mimedb, 'application/x-rzip', '.rz') add_mimetype(mimedb, 'application/x-zoo', '.zoo') add_mimetype(mimedb, 'application/x-dms', '.dms') add_mimetype(mimedb, 'application/x-zip-compressed', '.crx') add_mimetype(mimedb, 'application/x-shar', '.shar') add_mimetype(mimedb, 'application/x-tar', '.cbt') add_mimetype(mimedb, 'application/x-vhd', '.vhd') add_mimetype(mimedb, 'audio/x-ape', '.ape') add_mimetype(mimedb, 'audio/x-shn', '.shn') add_mimetype(mimedb, 'audio/flac', '.flac') add_mimetype(mimedb, 'application/x-chm', '.chm') add_mimetype(mimedb, 'application/x-iso9660-image', '.iso') add_mimetype(mimedb, 'application/zip', '.cbz') add_mimetype(mimedb, 'application/zip', '.epub') add_mimetype(mimedb, 'application/zip', '.apk') add_mimetype(mimedb, 'application/zpaq', '.zpaq')
def add_mimedb_data(mimedb)
Add missing encodings and mimetypes to MIME database.
1.659915
1.652514
1.004479
data = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0] return data.decode(encoding)
def backtick (cmd, encoding='utf-8')
Return decoded output from command.
2.898232
2.77879
1.042983
# Note that shell_quote_nt() result is not suitable for copy-paste # (especially on Unix systems), but it looks nicer than shell_quote(). if verbosity >= 0: log_info("running %s" % " ".join(map(shell_quote_nt, cmd))) if kwargs: if verbosity >= 0: log_info(" with %s" % ", ".join("%s=%s" % (k, shell_quote(str(v)))\ for k, v in kwargs.items())) if kwargs.get("shell"): # for shell calls the command must be a string cmd = " ".join(cmd) if verbosity < 1: # hide command output on stdout with open(os.devnull, 'wb') as devnull: kwargs['stdout'] = devnull res = subprocess.call(cmd, **kwargs) else: res = subprocess.call(cmd, **kwargs) return res
def run (cmd, verbosity=0, **kwargs)
Run command without error checking. @return: command return code
3.737951
3.770232
0.991438
retcode = run(cmd, **kwargs) if retcode not in ret_ok: msg = "Command `%s' returned non-zero exit status %d" % (cmd, retcode) raise PatoolError(msg) return retcode
def run_checked (cmd, ret_ok=(0,), **kwargs)
Run command and raise PatoolError on error.
2.842315
2.113879
1.344597
mime, encoding = guess_mime_file(filename) if mime is None: mime, encoding = guess_mime_mimedb(filename) assert mime is not None or encoding is None return mime, encoding
def guess_mime (filename)
Guess the MIME type of given filename using file(1) and if that fails by looking at the filename extension with the Python mimetypes module. The result of this function is cached.
3.802245
4.138139
0.91883
mime, encoding = None, None if mimedb is not None: mime, encoding = mimedb.guess_type(filename, strict=False) if mime not in ArchiveMimetypes and encoding in ArchiveCompressions: # Files like 't.txt.gz' are recognized with encoding as format, and # an unsupported mime-type like 'text/plain'. Fix this. mime = Encoding2Mime[encoding] encoding = None return mime, encoding
def guess_mime_mimedb (filename)
Guess MIME type from given filename. @return: tuple (mime, encoding)
7.292461
7.414321
0.983564
mime, encoding = None, None base, ext = os.path.splitext(filename) if ext.lower() in ('.alz',): # let mimedb recognize these extensions return mime, encoding if os.path.isfile(filename): file_prog = find_program("file") if file_prog: mime, encoding = guess_mime_file_mime(file_prog, filename) if mime is None: mime = guess_mime_file_text(file_prog, filename) encoding = None if mime in Mime2Encoding: # try to look inside compressed archives cmd = [file_prog, "--brief", "--mime", "--uncompress", filename] try: outparts = backtick(cmd).strip().split(";") except OSError: # ignore errors, as file(1) is only a fallback return mime, encoding mime2 = outparts[0].split(" ", 1)[0] # Some file(1) implementations return an empty or unknown mime type # when the uncompressor program is not installed, other # implementation return the original file type. # The following detects both cases. if (mime2 in ('application/x-empty', 'application/octet-stream') or mime2 in Mime2Encoding): # The uncompressor program file(1) uses is not installed # or is not able to uncompress. # Try to get mime information from the file extension. mime2, encoding2 = guess_mime_mimedb(filename) if mime2 in ArchiveMimetypes: mime = mime2 encoding = encoding2 elif mime2 in ArchiveMimetypes: mime = mime2 encoding = get_file_mime_encoding(outparts) # Only return mime and encoding if the given mime can natively support the encoding. if program_supports_compression(ArchiveMimetypes.get(mime), encoding): return mime, encoding else: # If encoding is None, default back to `mime`. return Encoding2Mime.get(encoding, mime), None
def guess_mime_file (filename)
Determine MIME type of filename with file(1): (a) using `file --mime` (b) using `file` and look the result string @return: tuple (mime, encoding)
5.490761
5.442085
1.008944
mime, encoding = None, None cmd = [file_prog, "--brief", "--mime-type", filename] try: mime = backtick(cmd).strip() except OSError: # ignore errors, as file(1) is only a fallback pass if mime not in ArchiveMimetypes: mime, encoding = None, None return mime, encoding
def guess_mime_file_mime (file_prog, filename)
Determine MIME type of filename with file(1) and --mime option. @return: tuple (mime, encoding)
6.659467
6.946528
0.958676
for part in parts: for subpart in part.split(" "): if subpart.startswith("compressed-encoding="): mime = subpart.split("=")[1].strip() return Mime2Encoding.get(mime) return None
def get_file_mime_encoding (parts)
Get encoding value from the split output of file --mime --uncompress.
5.011178
4.027546
1.244226
cmd = [file_prog, "--brief", filename] try: output = backtick(cmd).strip() except OSError: # ignore errors, as file(1) is only a fallback return None # match output against known strings for matcher, mime in FileText2Mime.items(): if output.startswith(matcher) and mime in ArchiveMimetypes: return mime return None
def guess_mime_file_text (file_prog, filename)
Determine MIME type of filename with file(1).
8.871283
7.643609
1.160614
if not os.path.exists(filename): raise PatoolError("file `%s' was not found" % filename) if not os.access(filename, os.R_OK): raise PatoolError("file `%s' is not readable" % filename) if onlyfiles and not os.path.isfile(filename): raise PatoolError("`%s' is not a file" % filename)
def check_existing_filename (filename, onlyfiles=True)
Ensure that given filename is a valid, existing file.
2.182207
2.052023
1.063442
if not filenames: raise PatoolError("cannot create archive with empty filelist") for filename in filenames: check_existing_filename(filename, onlyfiles=False)
def check_archive_filelist (filenames)
Check that file list is not empty and contains only existing files.
8.496387
6.885698
1.233918
try: mode = os.lstat(filename).st_mode except OSError: # ignore return if not (mode & flags): try: os.chmod(filename, flags | mode) except OSError as msg: log_error("could not set mode flags for `%s': %s" % (filename, msg))
def set_mode (filename, flags)
Set mode flags for given filename if not already set.
3.419758
3.09585
1.104627
return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)[1]
def tmpfile (dir=None, prefix="temp", suffix=None)
Return a temporary file.
3.634566
3.196599
1.13701
outfile = os.path.join(directory, stripext(archive)) if os.path.exists(outfile + extension): # prevent overwriting existing files i = 1 newfile = "%s%d" % (outfile, i) while os.path.exists(newfile + extension): newfile = "%s%d" % (outfile, i) i += 1 outfile = newfile return outfile + extension
def get_single_outfile (directory, archive, extension="")
Get output filename if archive is in a single file format like gzip.
2.52747
2.48155
1.018505
value = os.getenv(key) if value is not None: print(key, "=", repr(value), file=out)
def print_env_info(key, out=sys.stderr)
If given environment key is defined, print it out.
3.115072
2.900347
1.074034
print("System info:", file=out) print(configuration.App, file=out) print("Python %(version)s on %(platform)s" % {"version": sys.version, "platform": sys.platform}, file=out) stime = strtime(time.time()) print("Local time:", stime, file=out) print("sys.argv", sys.argv, file=out)
def print_app_info(out=sys.stderr)
Print system and application info (output defaults to stderr).
4.076899
4.003558
1.018319
if os.name == 'nt': # Assume RAR support is compiled into the binary. return True # the subdirectory and codec name codecname = 'p7zip/Codecs/Rar29.so' # search canonical user library dirs for libdir in ('/usr/lib', '/usr/local/lib', '/usr/lib64', '/usr/local/lib64', '/usr/lib/i386-linux-gnu', '/usr/lib/x86_64-linux-gnu'): fname = os.path.join(libdir, codecname) if os.path.exists(fname): return True return False
def p7zip_supports_rar()
Determine if the RAR codec is installed for 7z program.
4.28382
4.082279
1.04937
if os.name == 'nt': # Add some well-known archiver programs to the search path path = os.environ['PATH'] path = append_to_path(path, get_nt_7z_dir()) path = append_to_path(path, get_nt_mac_dir()) path = append_to_path(path, get_nt_winrar_dir()) else: # use default path path = None return which(program, path=path)
def find_program (program)
Look for program in environment PATH variable.
3.8029
3.762541
1.010727
if not os.path.isdir(directory) or directory in path: return path if not path.endswith(os.pathsep): path += os.pathsep return path + directory
def append_to_path (path, directory)
Add a directory to the PATH environment variable, if it is a valid directory.
2.786218
2.784835
1.000497
# Python 3.x renamed the _winreg module to winreg try: import _winreg as winreg except ImportError: import winreg try: key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\7-Zip") try: return winreg.QueryValueEx(key, "Path")[0] finally: winreg.CloseKey(key) except WindowsError: return ""
def get_nt_7z_dir ()
Return 7-Zip directory from registry, or an empty string.
2.253663
2.012751
1.119693
if filename1 == filename2: return True if os.name == 'posix': return os.path.samefile(filename1, filename2) return is_same_filename(filename1, filename2)
def is_same_file (filename1, filename2)
Check if filename1 and filename2 point to the same file object. There can be false negatives, ie. the result is False, but it is the same file anyway. Reason is that network filesystems can create different paths to the same physical file.
2.553263
2.521353
1.012656
return os.path.realpath(filename1) == os.path.realpath(filename2)
def is_same_filename (filename1, filename2)
Check if filename1 and filename2 are the same filename.
2.891154
2.346729
1.231993
if verbosity > 0: log_info("Copying %s -> %s" % (src, dst)) try: os.link(src, dst) except (AttributeError, OSError): try: shutil.copy(src, dst) except OSError as msg: raise PatoolError(msg)
def link_or_copy(src, dst, verbosity=0)
Try to make a hard link from src to dst and if that fails copy the file. Hard links save some disk space and linking should fail fast since no copying is involved.
2.524749
2.456254
1.027886
try: olddir = os.getcwd() except OSError: olddir = None os.chdir(directory) return olddir
def chdir(directory)
Remember and return current directory before calling os.chdir(). If the current directory could not be determined, return None.
2.576847
2.398239
1.074475
outfile = util.get_single_outfile(outdir, archive, extension=".wav") cmdlist = [cmd, '--decode', archive, '--output-name', outfile] return cmdlist
def extract_flac (archive, compression, cmd, verbosity, interactive, outdir)
Decompress a FLAC archive to a WAV file.
9.176444
8.576126
1.069999
cmdlist = [cmd, filenames[0], '--best', '--output-name', archive] return cmdlist
def create_flac (archive, compression, cmd, verbosity, interactive, filenames)
Compress a WAV file to a FLAC archive.
12.813111
13.500435
0.949089
cmdlist = [cmd, 'x'] if not interactive: cmdlist.extend(['-p-', '-y']) cmdlist.extend(['--', os.path.abspath(archive)]) return (cmdlist, {'cwd': outdir})
def extract_rar (archive, compression, cmd, verbosity, interactive, outdir)
Extract a RAR archive.
5.028095
4.780218
1.051855
cmdlist = [util.shell_quote(cmd), '--extract', '--make-directories', '--preserve-modification-time'] if sys.platform.startswith('linux') and not cmd.endswith('bsdcpio'): cmdlist.extend(['--no-absolute-filenames', '--force-local', '--nonmatching', r'"*\.\.*"']) if verbosity > 1: cmdlist.append('-v') cmdlist.extend(['<', util.shell_quote(os.path.abspath(archive))]) return (cmdlist, {'cwd': outdir, 'shell': True})
def extract_cpio (archive, compression, cmd, verbosity, interactive, outdir)
Extract a CPIO archive.
5.638616
5.526847
1.020223
cmdlist = [util.shell_quote(cmd), '--create'] if verbosity > 1: cmdlist.append('-v') if len(filenames) != 0: findcmd = ['find'] findcmd.extend([util.shell_quote(x) for x in filenames]) findcmd.extend(['-print0', '|']) cmdlist[0:0] = findcmd cmdlist.append('-0') cmdlist.extend([">", util.shell_quote(archive)]) return (cmdlist, {'shell': True})
def create_cpio(archive, compression, cmd, verbosity, interactive, filenames)
Create a CPIO archive.
3.580843
3.575953
1.001368
# Since extracted files will be placed in the current directory, # the cwd argument has to be the output directory. cmdlist = [cmd, 'x', os.path.abspath(archive)] return (cmdlist, {'cwd': outdir})
def extract_arc (archive, compression, cmd, verbosity, interactive, outdir)
Extract an ARC archive.
8.116602
7.905382
1.026719
cmdlist = [cmd] if verbosity > 1: cmdlist.append('v') else: cmdlist.append('l') cmdlist.append(archive) return cmdlist
def list_arc (archive, compression, cmd, verbosity, interactive)
List an ARC archive.
3.568218
3.456496
1.032322
targetname = util.get_single_outfile(outdir, archive) try: with gzip.GzipFile(archive) as gzipfile: with open(targetname, 'wb') as targetfile: data = gzipfile.read(READ_SIZE_BYTES) while data: targetfile.write(data) data = gzipfile.read(READ_SIZE_BYTES) except Exception as err: msg = "error extracting %s to %s: %s" % (archive, targetname, err) raise util.PatoolError(msg) return None
def extract_gzip (archive, compression, cmd, verbosity, interactive, outdir)
Extract a GZIP archive with the gzip Python module.
2.917709
2.920153
0.999163
if len(filenames) > 1: raise util.PatoolError('multi-file compression not supported in Python gzip') try: with gzip.GzipFile(archive, 'wb') as gzipfile: filename = filenames[0] with open(filename, 'rb') as srcfile: data = srcfile.read(READ_SIZE_BYTES) while data: gzipfile.write(data) data = srcfile.read(READ_SIZE_BYTES) except Exception as err: msg = "error creating %s: %s" % (archive, err) raise util.PatoolError(msg) return None
def create_gzip (archive, compression, cmd, verbosity, interactive, filenames)
Create a GZIP archive with the gzip Python module.
2.93437
2.827944
1.037634
targetname = util.get_single_outfile(outdir, archive) try: with lzma.LZMAFile(archive, **_get_lzma_options(format)) as lzmafile: with open(targetname, 'wb') as targetfile: data = lzmafile.read(READ_SIZE_BYTES) while data: targetfile.write(data) data = lzmafile.read(READ_SIZE_BYTES) except Exception as err: msg = "error extracting %s to %s: %s" % (archive, targetname, err) raise util.PatoolError(msg) return None
def _extract(archive, compression, cmd, format, verbosity, outdir)
Extract an LZMA or XZ archive with the lzma Python module.
3.139783
2.942366
1.067094
return _extract(archive, compression, cmd, 'alone', verbosity, outdir)
def extract_lzma(archive, compression, cmd, verbosity, interactive, outdir)
Extract an LZMA archive with the lzma Python module.
12.043806
16.457476
0.731814
return _extract(archive, compression, cmd, 'xz', verbosity, outdir)
def extract_xz(archive, compression, cmd, verbosity, interactive, outdir)
Extract an XZ archive with the lzma Python module.
5.432067
7.695666
0.705861
if len(filenames) > 1: raise util.PatoolError('multi-file compression not supported in Python lzma') try: with lzma.LZMAFile(archive, mode='wb', **_get_lzma_options(format, preset=9)) as lzmafile: filename = filenames[0] with open(filename, 'rb') as srcfile: data = srcfile.read(READ_SIZE_BYTES) while data: lzmafile.write(data) data = srcfile.read(READ_SIZE_BYTES) except Exception as err: msg = "error creating %s: %s" % (archive, err) raise util.PatoolError(msg) return None
def _create(archive, compression, cmd, format, verbosity, filenames)
Create an LZMA or XZ archive with the lzma Python module.
3.217279
3.103233
1.036751
return _create(archive, compression, cmd, 'alone', verbosity, filenames)
def create_lzma(archive, compression, cmd, verbosity, interactive, filenames)
Create an LZMA archive with the lzma Python module.
13.468284
15.683348
0.858763
return _create(archive, compression, cmd, 'xz', verbosity, filenames)
def create_xz(archive, compression, cmd, verbosity, interactive, filenames)
Create an XZ archive with the lzma Python module.
5.975244
7.222532
0.827306
def wrapper(view_func): def _checkssl(request, *args, **kwargs): # allow_non_ssl=True lets non-https requests to come # through to this view (and hence not redirect) if hasattr(settings, 'SSL_ENABLED') and settings.SSL_ENABLED \ and not request.is_secure() and not allow_non_ssl: return HttpResponseRedirect( request.build_absolute_uri().replace('http://', 'https://')) return view_func(request, *args, **kwargs) return _checkssl return wrapper
def ssl_required(allow_non_ssl=False)
Views decorated with this will always get redirected to https except when allow_non_ssl is set to true.
3.144449
2.950107
1.065876
if redirect_to is None: redirect_to = settings.LOGIN_REDIRECT_URL @wraps(view) def wrapper(request, *a, **k): if request.user and request.user.is_authenticated(): return HttpResponseRedirect(redirect_to) return view(request, *a, **k) return wrapper
def anonymous_required(view, redirect_to=None)
Only allow if user is NOT authenticated.
1.936638
1.856197
1.043336
return self._get_or_set('{0}{1}'.format(self._GENERIC_VAR_KEY_PREFIX, key), value)
def generic_var(self, key, value=None)
Stores generic variables in the session prepending it with _GENERIC_VAR_KEY_PREFIX.
5.718583
3.463655
1.651026
try: user = User.objects.get(email=username) if user.check_password(password): return user except (User.DoesNotExist, User.MultipleObjectsReturned): logging.warning('Unsuccessful login attempt using username/email: {0}'.format(username)) return None
def authenticate(self, username=None, password=None, **kwargs)
"username" being passed is really email address and being compared to as such.
2.917452
2.680624
1.088348
try: help_text = None css_classes = None token_split = token.split_contents() if len(token_split) == 4: tag_name, form_field, help_text, css_classes = token.split_contents() elif len(token_split) == 3: tag_name, form_field, help_text = token.split_contents() else: tag_name, form_field = token.split_contents() except ValueError: raise template.TemplateSyntaxError( "Unable to parse arguments for {0}".format(repr(token.contents.split()[0]))) return FormFieldNode(form_field, help_text=help_text, css_classes=css_classes)
def render_form_field(parser, token)
Usage is {% render_form_field form.field_name optional_help_text optional_css_classes %} - optional_help_text and optional_css_classes are strings - if optional_help_text is not given, then it is taken from form field object
2.098281
1.953885
1.073902
parser.add_argument('--length', default=self.length, type=int, help=_('SECRET_KEY length default=%d' % self.length)) parser.add_argument('--alphabet', default=self.allowed_chars, type=str, help=_('alphabet to use default=%s' % self.allowed_chars))
def add_arguments(self, parser)
Define optional arguments with default values
4.03659
3.725533
1.083493
if self.declared_fieldsets: fields = flatten_fieldsets(self.declared_fieldsets) else: fields = None if self.exclude is None: exclude = [] else: exclude = list(self.exclude) exclude.extend(kwargs.get("exclude", [])) exclude.extend(self.get_readonly_fields(request, obj)) # if exclude is an empty list we use None, since that's the actual # default exclude = exclude or None defaults = { "form": self.form, "formset": self.formset, "fk_name": self.fk_name, "fields": fields, "exclude": exclude, "formfield_callback": curry(self.formfield_for_dbfield, request=request), "extra": self.extra, "max_num": self.max_num, "can_delete": self.can_delete, } defaults.update(kwargs) return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_formset(self, request, obj=None, **kwargs)
Returns a BaseInlineFormSet class for use in admin add/change views.
2.067654
1.981809
1.043316
if value == "": return None if isinstance(value, dict): value = json.dumps(value, cls=DjangoJSONEncoder) return value
def get_prep_value(self, value)
Convert our JSON object to a string before we save
3.566118
3.170186
1.124892
for possibility in possibilities: if value == possibility: return True raise Exception('A different request value was encountered than expected: {0}'.format(value))
def is_among(value, *possibilities)
Ensure that the method that has been used for the request is one of the expected ones (e.g., GET or POST).
6.720038
5.447973
1.233493
import django.core.mail try: logging.debug('Sending mail to: {0}'.format(', '.join(r for r in recipient_emails))) logging.debug('Message: {0}'.format(message)) email = django.core.mail.EmailMessage(subject, message, from_email, recipient_emails, bcc, cc=cc) if html: email.content_subtype = "html" if files: for file in files: email.attach_file(file) if files_manually: for filename, content, mimetype in files_manually: email.attach(filename, content, mimetype) if reply_to: email.extra_headers = {'Reply-To': reply_to} email.send() except Exception as e: # TODO: Raise error again so that more information is included in the logs? logging.error('Error sending message [{0}] from {1} to {2} {3}'.format( subject, from_email, recipient_emails, e))
def send_mail(subject, message, from_email, recipient_emails, files=None, html=False, reply_to=None, bcc=None, cc=None, files_manually=None)
Sends email with advanced optional parameters To attach non-file content (e.g. content not saved on disk), use files_manually parameter and provide list of 3 element tuples, e.g. [('design.png', img_data, 'image/png'),] which will be passed to email.attach().
2.389552
2.441668
0.978656
timeDiff = datetime.datetime.now() - timestamp days = timeDiff.days hours = timeDiff.seconds / 3600 minutes = timeDiff.seconds % 3600 / 60 seconds = timeDiff.seconds % 3600 % 60 str = "" if days > 0: if days == 1: t_str = "day" else: t_str = "days" str += "{0} {1}".format(days, t_str) return str elif hours > 0: if hours == 1: t_str = "hour" else: t_str = "hours" str += "{0} {1}".format(hours, t_str) return str elif minutes > 0: if minutes == 1: t_str = "min" else: t_str = "mins" str += "{0} {1}".format(minutes, t_str) return str elif seconds > 0: if seconds == 1: t_str = "sec" else: t_str = "secs" str += "{0} {1}".format(seconds, t_str) return str else: return str
def humanize_time_since(timestamp=None)
Returns a fuzzy time since. Will only return the largest time. EX: 20 days, 14 min
1.387473
1.381245
1.004509
if skipset is None: skipset = set() for item in iterable: if item not in skipset: skipset.add(item) yield item
def skip_redundant(iterable, skipset=None)
Redundant items are repeated items or items in the original skipset.
1.916529
1.833051
1.04554
# make tuple of needed metaclasses in specified priority order metas = left_metas + tuple(map(type, bases)) + right_metas needed_metas = remove_redundant(metas) # return existing confict-solving meta, if any if needed_metas in memoized_metaclasses_map: return memoized_metaclasses_map[needed_metas] # nope: compute, memoize and return needed conflict-solving meta elif not needed_metas: # wee, a trivial case, happy us meta = type elif len(needed_metas) == 1: # another trivial case meta = needed_metas[0] # check for recursion, can happen i.e. for Zope ExtensionClasses elif needed_metas == bases: raise TypeError("Incompatible root metatypes", needed_metas) else: # gotta work ... metaname = '_' + ''.join([m.__name__ for m in needed_metas]) meta = classmaker()(metaname, needed_metas, {}) memoized_metaclasses_map[needed_metas] = meta return meta
def get_noconflict_metaclass(bases, left_metas, right_metas)
Not intended to be used outside of this module, unless you know what you are doing.
6.445683
6.404524
1.006427
request.method_params['captcha_key'] = self.get_captcha_key(request) request.method_params['captcha_sid'] = request.api_error.captcha_sid return self.send(request)
def on_api_error_14(self, request)
14. Captcha needed
5.130621
4.271749
1.201059
logger.error('Authorization failed. Access token will be dropped') self.access_token = self.get_access_token() return self.send(request)
def on_api_error_15(self, request)
15. Access denied - due to scope
7.815634
9.490342
0.823536
# Ask access ask_access_response = auth_session.post(self.AUTHORIZE_URL, self.get_auth_params()) url_queries = self.get_response_url_queries(ask_access_response) if 'access_token' not in url_queries: # Grant access grant_access_action = self.get_form_action(ask_access_response) grant_access_response = auth_session.post(grant_access_action) url_queries = self.get_response_url_queries(grant_access_response) return self.process_auth_url_queries(url_queries)
def authorize(self, auth_session)
OAuth2
3.143698
3.156028
0.996093
quote = self.get_quote() if quote: self.reply("Actually, she said things like this: \n%s" % quote)
def talk_back(self, message)
that's what she said: Tells you some things she actually said. :)
9.826841
7.352331
1.336561
import uuid import time import random import hashlib node = uuid.getnode() h = hashlib.md5() h.update(str("%s" % node).encode('utf-8')) key1 = h.hexdigest() time.sleep(random.uniform(0, 0.5)) node = uuid.getnode() h = hashlib.md5() h.update(str("%s" % node).encode('utf-8')) key2 = h.hexdigest() time.sleep(random.uniform(0, 0.5)) node = uuid.getnode() h = hashlib.md5() h.update(str("%s" % node).encode('utf-8')) key3 = h.hexdigest() if key1 == key2 and key2 == key3: return key1 return False
def auto_key()
This method attempts to auto-generate a unique cryptographic key based on the hardware ID. It should *NOT* be used in production, or to replace a proper key, but it can help get will running in local and test environments more easily.
1.880335
1.767921
1.063586
if ( getattr(settings, "GOOGLE_API_KEY", False) and getattr(settings, "GOOGLE_CUSTOM_SEARCH_ENGINE_ID", False) ): self.say( "Sorry, I'm missing my GOOGLE_API_KEY and GOOGLE_CUSTOM_SEARCH_ENGINE_ID." " Can someone give them to me?", color="red" ) # https://developers.google.com/custom-search/json-api/v1/reference/cse/list?hl=en data = { "q": search_query, "key": settings.GOOGLE_API_KEY, "cx": settings.GOOGLE_CUSTOM_SEARCH_ENGINE_ID, "safe": "medium", "num": 8, "searchType": "image", } r = requests.get("https://www.googleapis.com/customsearch/v1", params=data) r.raise_for_status() try: response = r.json() results = [result["link"] for result in response["items"] if "items" in r.json()] except TypeError: results = [] else: # Fall back to a really ugly hack. logging.warn( "Hey, I'm using a pretty ugly hack to get those images, and it might break. " "Please set my GOOGLE_API_KEY and GOOGLE_CUSTOM_SEARCH_ENGINE_ID when you have a chance." ) r = requests.get("https://www.google.com/search?tbm=isch&safe=active&q=%s" % search_query) results = [] content = r.content.decode("utf-8") index = content.find("<img") while index != -1: src_start = content.find('src=', index) src_end = content.find(" ", src_start) match = content[src_start+5: src_end-1] index = content.find("<img", src_end) results.append(match) if results: url = random.choice(results) self.say("%s" % url, message=message) else: self.say("Couldn't find anything!", message=message)
def image_me(self, message, search_query)
image me ___ : Search google images for ___, and post a random one.
2.741021
2.73603
1.001824