response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Walk upwards from start, looking for a directory containing all files and directories given as arguments:: >>> searchupwards('.', ['foo.txt'], ['bar', 'bam']) If not found, return None
def searchupwards(start, files=None, dirs=None):
    """
    Walk upwards from start, looking for a directory containing all files
    and directories given as arguments::

        >>> searchupwards('.', ['foo.txt'], ['bar', 'bam'])

    If not found, return None

    @param start: the path to begin the search from; made absolute first.
    @param files: file names which must all exist in a candidate directory.
    @param dirs: directory names which must all exist in a candidate
        directory.

    @return: the matching ancestor directory (with a trailing separator),
        or C{None} if no ancestor of C{start} matches.
    """
    # Use None defaults rather than mutable default arguments, which would
    # be shared across calls; `or []` also normalizes an explicit None.
    files = files or []
    dirs = dirs or []
    start = os.path.abspath(start)
    parents = start.split(os.sep)
    exists = os.path.exists
    join = os.sep.join
    isdir = os.path.isdir
    while len(parents):
        candidate = join(parents) + os.sep
        allpresent = True
        for f in files:
            if not exists(f"{candidate}{f}"):
                allpresent = False
                break
        if allpresent:
            for d in dirs:
                if not isdir(f"{candidate}{d}"):
                    allpresent = False
                    break
        if allpresent:
            return candidate
        # Pop the last path component to move one level up.
        parents.pop(-1)
    return None
Determine whether the given call raises the given exception.
def raises(exception, f, *args, **kwargs):
    """
    Determine whether the given call raises the given exception.

    @param exception: An exception type (or tuple of types) to check for.
    @param f: The callable to invoke.
    @param args: Positional arguments to pass to C{f}.
    @param kwargs: Keyword arguments to pass to C{f}.

    @return: C{True} if calling C{f} raised C{exception}, C{False} if it
        returned normally.  Any other exception propagates to the caller.
    """
    try:
        f(*args, **kwargs)
    except exception:
        # bool subclasses int, so True/False remain compatible with the
        # historical 1/0 return values.
        return True
    return False
Attempts to switch the uid/euid and gid/egid for the current process. If C{uid} is the same value as L{os.getuid} (or L{os.geteuid}), this function will issue a L{UserWarning} and not raise an exception. @type uid: C{int} or L{None} @param uid: the UID (or EUID) to switch the current process to. This parameter will be ignored if the value is L{None}. @type gid: C{int} or L{None} @param gid: the GID (or EGID) to switch the current process to. This parameter will be ignored if the value is L{None}. @type euid: C{bool} @param euid: if True, set only effective user-id rather than real user-id. (This option has no effect unless the process is running as root, in which case it means not to shed all privileges, retaining the option to regain privileges in cases such as spawning processes. Use with caution.)
def switchUID(uid, gid, euid=False):
    """
    Attempts to switch the uid/euid and gid/egid for the current process.

    If C{uid} is the same value as L{os.getuid} (or L{os.geteuid}), this
    function will issue a L{UserWarning} and not raise an exception.

    @type uid: C{int} or L{None}
    @param uid: the UID (or EUID) to switch the current process to. This
                parameter will be ignored if the value is L{None}.

    @type gid: C{int} or L{None}
    @param gid: the GID (or EGID) to switch the current process to. This
                parameter will be ignored if the value is L{None}.

    @type euid: C{bool}
    @param euid: if True, set only effective user-id rather than real user-id.
        (This option has no effect unless the process is running as root, in
        which case it means not to shed all privileges, retaining the option
        to regain privileges in cases such as spawning processes. Use with
        caution.)
    """
    # Select the "effective" or "real" setter/getter variants up front so
    # the logic below is identical for both modes.
    if euid:
        setuid = os.seteuid
        setgid = os.setegid
        getuid = os.geteuid
    else:
        setuid = os.setuid
        setgid = os.setgid
        getuid = os.getuid
    # Drop the group first: once the UID has been dropped we may no longer
    # have permission to change the GID.
    if gid is not None:
        setgid(gid)
    if uid is not None:
        if uid == getuid():
            # Already running as the requested user: warn rather than call
            # initgroups/setuid redundantly (initgroups may require root).
            uidText = euid and "euid" or "uid"
            actionText = f"tried to drop privileges and set{uidText} {uid}"
            problemText = f"{uidText} is already {getuid()}"
            warnings.warn(
                "{} but {}; should we be root? Continuing.".format(
                    actionText, problemText
                )
            )
        else:
            # Set the target user's supplementary groups before shedding
            # the privileges needed to do so.
            initgroups(uid, gid)
            setuid(uid)
Call C{f} with the given arguments, handling C{EINTR} by retrying. @param f: A function to call. @param a: Positional arguments to pass to C{f}. @param kw: Keyword arguments to pass to C{f}. @return: Whatever C{f} returns. @raise Exception: Whatever C{f} raises, except for C{OSError} with C{errno} set to C{EINTR}.
def untilConcludes(f, *a, **kw):
    """
    Invoke C{f} with the given arguments, transparently retrying whenever
    the call is interrupted by a signal (C{EINTR}).

    @param f: The callable to invoke.
    @param a: Positional arguments to pass to C{f}.
    @param kw: Keyword arguments to pass to C{f}.

    @return: Whatever C{f} returns.

    @raise Exception: Whatever C{f} raises, except an C{OSError} whose
        errno is C{EINTR}, which causes a retry instead.
    """
    while True:
        try:
            return f(*a, **kw)
        except OSError as e:
            if e.args[0] != errno.EINTR:
                raise
            # Interrupted system call; simply try again.
Overwrite C{g}'s name and docstring with values from C{f}. Update C{g}'s instance dictionary with C{f}'s. @return: A function that has C{g}'s behavior and metadata merged from C{f}.
def mergeFunctionMetadata(f, g):
    """
    Overwrite C{g}'s name and docstring with values from C{f}.  Update
    C{g}'s instance dictionary with C{f}'s.

    @param f: The function to copy metadata from.
    @param g: The function to copy metadata onto.

    @return: C{g}, with C{f}'s name, docstring, attribute dictionary and
        module merged in where the target permits it.
    """

    def copyAttribute(name, exceptions):
        # Best-effort copy: some callables reject attribute assignment.
        try:
            setattr(g, name, getattr(f, name))
        except exceptions:
            pass

    copyAttribute("__name__", TypeError)
    copyAttribute("__doc__", (TypeError, AttributeError))
    try:
        g.__dict__.update(f.__dict__)
    except (TypeError, AttributeError):
        pass
    copyAttribute("__module__", TypeError)
    return g
Convert a string like a variable name into a slightly more human-friendly string with spaces and capitalized letters. @type mname: C{str} @param mname: The name to convert to a label. This must be a string which could be used as a Python identifier. Strings which do not take this form will result in unpredictable behavior. @rtype: C{str}
def nameToLabel(mname):
    """
    Convert a string like a variable name into a slightly more human-friendly
    string with spaces and capitalized letters.

    @type mname: C{str}
    @param mname: The name to convert to a label.  This must be a string
    which could be used as a Python identifier.  Strings which do not take
    this form will result in unpredictable behavior.

    @rtype: C{str}
    """
    # Small state machine over the case of each letter: words accumulate in
    # `word` and completed words are pushed onto `labelList`.
    labelList = []
    word = ""
    lastWasUpper = False
    for letter in mname:
        if letter.isupper() == lastWasUpper:
            # Same case as the previous letter: continuing a word.
            word += letter
        else:
            # Case changed: breaking a word OR beginning a word.
            if lastWasUpper:
                # Upper -> lower transition; could be either a normal
                # CamelCase boundary or the end of an acronym.
                if len(word) == 1:
                    # Single capital followed by lowercase: same word,
                    # keep going.
                    word += letter
                else:
                    # Acronym: we're processing the lowercase letter after
                    # the acronym-then-capital, so everything except the
                    # last capital belongs to the previous word.
                    lastWord = word[:-1]
                    firstLetter = word[-1]
                    labelList.append(lastWord)
                    word = firstLetter + letter
            else:
                # Lower -> upper: definitely breaking into a new word.
                labelList.append(word)
                word = letter
        lastWasUpper = letter.isupper()
    if labelList:
        labelList[0] = labelList[0].capitalize()
    else:
        # Single-word input: just capitalize it.
        return mname.capitalize()
    labelList.append(word)
    return " ".join(labelList)
Convert a user identifier, as a string, into an integer UID. @type uidString: C{str} @param uidString: A string giving the base-ten representation of a UID or the name of a user which can be converted to a UID via L{pwd.getpwnam}. @rtype: C{int} @return: The integer UID corresponding to the given string. @raise ValueError: If the user name is supplied and L{pwd} is not available.
def uidFromString(uidString):
    """
    Convert a user identifier, as a string, into an integer UID.

    @type uidString: C{str}
    @param uidString: A string giving the base-ten representation of a UID
        or the name of a user which can be converted to a UID via
        L{pwd.getpwnam}.

    @rtype: C{int}
    @return: The integer UID corresponding to the given string.

    @raise ValueError: If the user name is supplied and L{pwd} is not
        available.
    """
    try:
        result = int(uidString)
    except ValueError:
        # Not numeric; fall back to a user-name lookup if pwd is present.
        if pwd is None:
            raise
        result = pwd.getpwnam(uidString)[2]
    return result
Convert a group identifier, as a string, into an integer GID. @type gidString: C{str} @param gidString: A string giving the base-ten representation of a GID or the name of a group which can be converted to a GID via L{grp.getgrnam}. @rtype: C{int} @return: The integer GID corresponding to the given string. @raise ValueError: If the group name is supplied and L{grp} is not available.
def gidFromString(gidString):
    """
    Convert a group identifier, as a string, into an integer GID.

    @type gidString: C{str}
    @param gidString: A string giving the base-ten representation of a GID
        or the name of a group which can be converted to a GID via
        L{grp.getgrnam}.

    @rtype: C{int}
    @return: The integer GID corresponding to the given string.

    @raise ValueError: If the group name is supplied and L{grp} is not
        available.
    """
    try:
        result = int(gidString)
    except ValueError:
        # Not numeric; fall back to a group-name lookup if grp is present.
        if grp is None:
            raise
        result = grp.getgrnam(gidString)[2]
    return result
Run the given function wrapped with seteuid/setegid calls. This will try to minimize the number of seteuid/setegid calls, comparing current and wanted permissions @param euid: effective UID used to call the function. @type euid: C{int} @type egid: effective GID used to call the function. @param egid: C{int} @param function: the function run with the specific permission. @type function: any callable @param args: arguments passed to C{function} @param kwargs: keyword arguments passed to C{function}
def runAsEffectiveUser(euid, egid, function, *args, **kwargs):
    """
    Run the given function wrapped with seteuid/setegid calls.

    This will try to minimize the number of seteuid/setegid calls, comparing
    current and wanted permissions

    @param euid: effective UID used to call the function.
    @type euid: C{int}

    @type egid: effective GID used to call the function.
    @param egid: C{int}

    @param function: the function run with the specific permission.
    @type function: any callable

    @param args: arguments passed to C{function}
    @param kwargs: keyword arguments passed to C{function}
    """
    uid, gid = os.geteuid(), os.getegid()
    if uid == euid and gid == egid:
        # Already running with the requested identity; nothing to switch.
        return function(*args, **kwargs)
    else:
        # Regain root first when needed, since only root may freely change
        # the effective GID and UID.
        if uid != 0 and (uid != euid or gid != egid):
            os.seteuid(0)
        if gid != egid:
            os.setegid(egid)
        if euid != 0 and (euid != uid or gid != egid):
            os.seteuid(euid)
        try:
            return function(*args, **kwargs)
        finally:
            # Restore the original identity, again going through root
            # when that is required to switch.
            if euid != 0 and (uid != euid or gid != egid):
                os.seteuid(0)
            if gid != egid:
                os.setegid(gid)
            if uid != 0 and (uid != euid or gid != egid):
                os.seteuid(uid)
Run C{f(*args, **kwargs)}, but with some warnings suppressed. Unlike L{twisted.internet.utils.runWithWarningsSuppressed}, it has no special support for L{twisted.internet.defer.Deferred}. @param suppressedWarnings: A list of arguments to pass to L{warnings.filterwarnings}. Must be a sequence of 2-tuples (args, kwargs). @param f: A callable. @param args: Arguments for C{f}. @param kwargs: Keyword arguments for C{f} @return: The result of C{f(*args, **kwargs)}.
def runWithWarningsSuppressed(suppressedWarnings, f, *args, **kwargs):
    """
    Run C{f(*args, **kwargs)}, but with some warnings suppressed.

    Unlike L{twisted.internet.utils.runWithWarningsSuppressed}, it has no
    special support for L{twisted.internet.defer.Deferred}.

    @param suppressedWarnings: A list of arguments to pass to
        L{warnings.filterwarnings}.  Must be a sequence of 2-tuples
        (args, kwargs).
    @param f: A callable.
    @param args: Arguments for C{f}.
    @param kwargs: Keyword arguments for C{f}

    @return: The result of C{f(*args, **kwargs)}.
    """
    # catch_warnings saves and restores the global filter list, so the
    # filters installed here are scoped to this single call.
    with warnings.catch_warnings():
        for filterArgs, filterKwargs in suppressedWarnings:
            warnings.filterwarnings(*filterArgs, **filterKwargs)
        return f(*args, **kwargs)
Internal method for quoting a single command-line argument. @param s: an unquoted string that you want to quote so that something that does cmd.exe-style unquoting will interpret it as a single argument, even if it contains spaces. @type s: C{str} @return: a quoted string. @rtype: C{str}
def cmdLineQuote(s):
    """
    Internal method for quoting a single command-line argument.

    @param s: an unquoted string that you want to quote so that something
        that does cmd.exe-style unquoting will interpret it as a single
        argument, even if it contains spaces.
    @type s: C{str}

    @return: a quoted string.
    @rtype: C{str}
    """
    # Surrounding quotes are needed only when the argument contains
    # whitespace or a quote character, or is empty (so it survives as an
    # argument at all).
    needsQuoting = (" " in s) or ("\t" in s) or ('"' in s) or s == ""
    quote = '"' if needsQuoting else ""
    escaped = _cmdLineQuoteRe2.sub(r"\1\1", _cmdLineQuoteRe.sub(r'\1\1\\"', s))
    return quote + escaped + quote
Quote an iterable of command-line arguments for passing to CreateProcess or a similar API. This allows the list passed to C{reactor.spawnProcess} to match the child process's C{sys.argv} properly. @param arguments: an iterable of C{str}, each unquoted. @return: a single string, with the given sequence quoted as necessary.
def quoteArguments(arguments):
    """
    Quote an iterable of command-line arguments for passing to CreateProcess
    or a similar API.  This allows the list passed to C{reactor.spawnProcess}
    to match the child process's C{sys.argv} properly.

    @param arguments: an iterable of C{str}, each unquoted.

    @return: a single string, with the given sequence quoted as necessary.
    """
    # Quote each argument individually, then join with single spaces.
    return " ".join(cmdLineQuote(argument) for argument in arguments)
Predict the number of chunks that will be extracted from the entire zipfile, given chunksize blocks.
def countZipFileChunks(filename, chunksize):
    """
    Predict the number of chunks that will be extracted from the entire
    zipfile, given chunksize blocks.

    @param filename: The path of the zip archive to examine.
    @param chunksize: The extraction block size, in bytes.

    @return: The total chunk count across every member of the archive.
    """
    zf = ChunkingZipFile(filename)
    # Sum the per-member chunk counts over the whole archive.
    return sum(countFileChunks(info, chunksize) for info in zf.infolist())
Count the number of chunks that will result from the given C{ZipInfo}. @param zipinfo: a C{zipfile.ZipInfo} instance describing an entry in a zip archive to be counted. @return: the number of chunks present in the zip file. (Even an empty file counts as one chunk.) @rtype: L{int}
def countFileChunks(zipinfo, chunksize):
    """
    Count the number of chunks that will result from the given C{ZipInfo}.

    @param zipinfo: a C{zipfile.ZipInfo} instance describing an entry in a
        zip archive to be counted.
    @param chunksize: The extraction block size, in bytes.

    @return: the number of chunks present in the zip file.  (Even an empty
        file counts as one chunk.)
    @rtype: L{int}
    """
    # Round up to whole chunks, but report at least one chunk even for an
    # empty member.
    fullChunks, remainder = divmod(zipinfo.file_size, chunksize)
    if remainder:
        fullChunks += 1
    return max(fullChunks, 1)
Return a generator for the zipfile. This implementation will yield after every chunksize uncompressed bytes, or at the end of a file, whichever comes first. The value it yields is the number of chunks left to unzip.
def unzipIterChunky(filename, directory=".", overwrite=0, chunksize=4096):
    """
    Return a generator for the zipfile.  This implementation will yield after
    every chunksize uncompressed bytes, or at the end of a file, whichever
    comes first.

    The value it yields is the number of chunks left to unzip.

    @param filename: The path of the zip archive to extract.
    @param directory: The destination directory; created if missing.
    @param overwrite: If true, existing files are overwritten; otherwise
        existing files are skipped (their chunks are still counted down).
    @param chunksize: Number of uncompressed bytes per chunk/yield.
    """
    czf = ChunkingZipFile(filename, "r")
    if not os.path.exists(directory):
        os.makedirs(directory)
    # Total chunk budget; decremented as work proceeds so consumers can
    # display progress.
    remaining = countZipFileChunks(filename, chunksize)
    names = czf.namelist()
    infos = czf.infolist()

    for entry, info in zip(names, infos):
        isdir = info.external_attr & DIR_BIT
        f = os.path.join(directory, entry)
        if isdir:
            # overwrite flag only applies to files
            if not os.path.exists(f):
                os.makedirs(f)
            remaining -= 1
            yield remaining
        else:
            # create the directory the file will be in first,
            # since we can't guarantee it exists
            fdir = os.path.split(f)[0]
            if not os.path.exists(fdir):
                os.makedirs(fdir)
            if overwrite or not os.path.exists(f):
                fp = czf.readfile(entry)
                if info.file_size == 0:
                    # An empty member still counts as one chunk.
                    remaining -= 1
                    yield remaining
                with open(f, "wb") as outfile:
                    while fp.tell() < info.file_size:
                        hunk = fp.read(chunksize)
                        outfile.write(hunk)
                        remaining -= 1
                        yield remaining
            else:
                # Skipped file: consume its whole chunk budget at once.
                remaining -= countFileChunks(info, chunksize)
                yield remaining
Get a data directory for the caller function, or C{moduleName} if given. @param moduleName: The module name if you don't wish to have the caller's module. @returns: A directory for putting data in.
def getDataDirectory(moduleName: str = "") -> str:
    """
    Get a data directory for the caller function, or C{moduleName} if given.

    @param moduleName: The module name if you don't wish to have the caller's
        module.

    @returns: A directory for putting data in.
    """
    if not moduleName:
        # Inspect one frame up the stack to discover the calling module and
        # use its name.  NOTE(review): `currentframe` here takes a depth
        # argument, so it is a local helper rather than
        # inspect.currentframe -- confirm at the import site.
        caller = currentframe(1)
        module = inspect.getmodule(caller)
        assert module is not None
        moduleName = module.__name__

    # appdirs chooses a platform-appropriate per-user data directory.
    return cast(str, appdirs.user_data_dir(moduleName))
Create an inotify instance and return the associated file descriptor.
def init() -> int:
    """
    Create an inotify instance and return the associated file descriptor.

    @raise INotifyError: If the underlying C{inotify_init} call fails.
    """
    result = cast(int, libc.inotify_init())
    if result >= 0:
        return result
    # A negative return value signals failure from the C library.
    raise INotifyError("INotify initialization error.")
Add a watch for the given path to the inotify file descriptor, and return the watch descriptor. @param fd: The file descriptor returned by C{libc.inotify_init}. @param path: The path to watch via inotify. @param mask: Bitmask specifying the events that inotify should monitor.
def add(fd: int, path: FilePath[Any], mask: int) -> int:
    """
    Add a watch for the given path to the inotify file descriptor, and
    return the watch descriptor.

    @param fd: The file descriptor returned by C{libc.inotify_init}.
    @param path: The path to watch via inotify.
    @param mask: Bitmask specifying the events that inotify should monitor.

    @raise INotifyError: If the watch could not be added.
    """
    # inotify_add_watch wants a byte-string path.
    result = cast(int, libc.inotify_add_watch(fd, path.asBytesMode().path, mask))
    if result < 0:
        raise INotifyError(f"Failed to add watch on '{path!r}' - ({result!r})")
    return result
Remove the given watch descriptor from the inotify file descriptor.
def remove(fd: int, wd: int) -> None: """ Remove the given watch descriptor from the inotify file descriptor. """ # When inotify_rm_watch returns -1 there's an error: # The errno for this call can be either one of the following: # EBADF: fd is not a valid file descriptor. # EINVAL: The watch descriptor wd is not valid; or fd is # not an inotify file descriptor. # # if we can't access the errno here we cannot even raise # an exception and we need to ignore the problem, one of # the most common cases is when you remove a directory from # the filesystem and that directory is observed. When inotify # tries to call inotify_rm_watch with a non existing directory # either of the 2 errors might come up because the files inside # it might have events generated way before they were handled. # Unfortunately only ctypes in Python 2.6 supports accessing errno: # http://bugs.python.org/issue1798 and in order to solve # the problem for previous versions we need to introduce # code that is quite complex: # http://stackoverflow.com/questions/661017/access-to-errno-from-python # # See #4310 for future resolution of this issue. libc.inotify_rm_watch(fd, wd)
Initialize the module, checking if the expected APIs exist and setting the argtypes and restype for C{inotify_init}, C{inotify_add_watch}, and C{inotify_rm_watch}.
def initializeModule(libc: ctypes.CDLL) -> None:
    """
    Initialize the module, checking if the expected APIs exist and setting
    the argtypes and restype for C{inotify_init}, C{inotify_add_watch}, and
    C{inotify_rm_watch}.

    @param libc: The C library handle to configure.

    @raise ImportError: If any of the inotify entry points is missing.
    """
    requiredFunctions = ("inotify_add_watch", "inotify_init", "inotify_rm_watch")
    for name in requiredFunctions:
        # getattr with a default avoids the AttributeError ctypes raises
        # for missing symbols.
        if getattr(libc, name, None) is None:
            raise ImportError("libc6 2.4 or higher needed")

    libc.inotify_init.argtypes = []
    libc.inotify_init.restype = ctypes.c_int

    libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
    libc.inotify_rm_watch.restype = ctypes.c_int

    libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32]
    libc.inotify_add_watch.restype = ctypes.c_int
Execute a vector of arguments. This is a wrapper around L{subprocess.check_output}, so it takes the same arguments as L{subprocess.Popen} with one difference: all arguments after the vector must be keyword arguments. @param args: arguments passed to L{subprocess.check_output} @param kwargs: keyword arguments passed to L{subprocess.check_output} @return: command output @rtype: L{bytes}
def runCommand(args, **kwargs):
    """
    Execute a vector of arguments.

    This is a wrapper around L{subprocess.check_output}, so it takes
    the same arguments as L{subprocess.Popen} with one difference: all
    arguments after the vector must be keyword arguments.

    @param args: arguments passed to L{subprocess.check_output}
    @param kwargs: keyword arguments passed to L{subprocess.check_output}

    @return: command output
    @rtype: L{bytes}
    """
    # Fold the child's stderr into its stdout so callers see everything;
    # this deliberately overrides any stderr keyword the caller passed.
    kwargs = dict(kwargs, stderr=STDOUT)
    return check_output(args, **kwargs)
Detect the VCS used in the specified directory and return a L{GitCommand} if the directory is a Git repository. If the directory is not git, it raises a L{NotWorkingDirectory} exception. @type directory: L{FilePath} @param directory: The directory to detect the VCS used from. @rtype: L{GitCommand} @raise NotWorkingDirectory: if no supported VCS can be found from the specified directory.
def getRepositoryCommand(directory):
    """
    Detect the VCS used in the specified directory and return a L{GitCommand}
    if the directory is a Git repository.  If the directory is not git, it
    raises a L{NotWorkingDirectory} exception.

    @type directory: L{FilePath}
    @param directory: The directory to detect the VCS used from.

    @rtype: L{GitCommand}

    @raise NotWorkingDirectory: if no supported VCS can be found from the
        specified directory.
    """
    try:
        GitCommand.ensureIsWorkingDirectory(directory)
    except (NotWorkingDirectory, OSError):
        # It's not Git, but that's okay, eat the error
        command = None
    else:
        command = GitCommand
    if command is not None:
        return command
    raise NotWorkingDirectory(f"No supported VCS can be found in {directory.path}")
Find all Twisted-style projects beneath a base directory. @param baseDirectory: A L{twisted.python.filepath.FilePath} to look inside. @return: A list of L{Project}.
def findTwistedProjects(baseDirectory):
    """
    Find all Twisted-style projects beneath a base directory.

    @param baseDirectory: A L{twisted.python.filepath.FilePath} to look inside.

    @return: A list of L{Project}.
    """
    # A project is identified by its "newsfragments" directory; the project
    # root is that directory's parent.
    return [
        Project(filePath.parent())
        for filePath in baseDirectory.walk()
        if filePath.basename() == "newsfragments"
    ]
Replace, in the file named `filename', every occurrence of each key of the mapping `oldToNew' with its corresponding value, rewriting the file in place.
def replaceInFile(filename, oldToNew):
    """
    Replace text in the file named C{filename} according to the substitution
    map C{oldToNew}.

    The previous docstring referred to C{oldstr}/C{newstr} parameters which
    do not exist; the function actually takes a mapping.

    @param filename: The path of the file to rewrite in place.
    @param oldToNew: A mapping of C{{old: new}} strings; every occurrence of
        each key in the file is replaced with the corresponding value.
    """
    # Work against a ".bak" copy and rename the finished ".new" file into
    # place, so a crash mid-rewrite never leaves a half-written original.
    os.rename(filename, filename + ".bak")
    with open(filename + ".bak") as f:
        d = f.read()
    for k, v in oldToNew.items():
        d = d.replace(k, v)
    with open(filename + ".new", "w") as f:
        f.write(d)
    os.rename(filename + ".new", filename)
    os.unlink(filename + ".bak")
Return a list of strings that represent C{destination} as a path relative to C{origin}. It is assumed that both paths represent directories, not files. That is to say, the delta of L{twisted.python.filepath.FilePath} /foo/bar to L{twisted.python.filepath.FilePath} /foo/baz will be C{../baz}, not C{baz}. @type origin: L{twisted.python.filepath.FilePath} @param origin: The origin of the relative path. @type destination: L{twisted.python.filepath.FilePath} @param destination: The destination of the relative path.
def filePathDelta(origin, destination):
    """
    Return a list of strings that represent C{destination} as a path relative
    to C{origin}.

    It is assumed that both paths represent directories, not files.  That is
    to say, the delta of L{twisted.python.filepath.FilePath} /foo/bar to
    L{twisted.python.filepath.FilePath} /foo/baz will be C{../baz}, not
    C{baz}.

    @type origin: L{twisted.python.filepath.FilePath}
    @param origin: The origin of the relative path.

    @type destination: L{twisted.python.filepath.FilePath}
    @param destination: The destination of the relative path.
    """
    originParts = origin.path.split(os.sep)
    destinationParts = destination.path.split(os.sep)
    # Count the shared leading components of the two paths.
    shared = 0
    for originElement, destinationElement in zip(originParts, destinationParts):
        if originElement != destinationElement:
            break
        shared += 1
    # Climb out of the unshared tail of origin, then descend into the
    # unshared tail of destination.
    upward = [".."] * (len(originParts) - shared)
    return upward + destinationParts[shared:]
Perform shell completion. A completion function (shell script) is generated for the requested shell and written to C{shellCompFile}, typically C{stdout}. The result is then eval'd by the shell to produce the desired completions. @type config: L{twisted.python.usage.Options} @param config: The L{twisted.python.usage.Options} instance to generate completions for. @type cmdName: C{str} @param cmdName: The name of the command we're generating completions for. In the case of zsh, this is used to print an appropriate "#compdef $CMD" line at the top of the output. This is not necessary for the functionality of the system, but it helps in debugging, since the output we produce is properly formed and may be saved in a file and used as a stand-alone completion function. @type words: C{list} of C{str} @param words: The raw command-line words passed to use by the shell stub function. argv[0] has already been stripped off. @type shellCompFile: C{file} @param shellCompFile: The file to write completion data to.
def shellComplete(config, cmdName, words, shellCompFile): """ Perform shell completion. A completion function (shell script) is generated for the requested shell and written to C{shellCompFile}, typically C{stdout}. The result is then eval'd by the shell to produce the desired completions. @type config: L{twisted.python.usage.Options} @param config: The L{twisted.python.usage.Options} instance to generate completions for. @type cmdName: C{str} @param cmdName: The name of the command we're generating completions for. In the case of zsh, this is used to print an appropriate "#compdef $CMD" line at the top of the output. This is not necessary for the functionality of the system, but it helps in debugging, since the output we produce is properly formed and may be saved in a file and used as a stand-alone completion function. @type words: C{list} of C{str} @param words: The raw command-line words passed to use by the shell stub function. argv[0] has already been stripped off. @type shellCompFile: C{file} @param shellCompFile: The file to write completion data to. """ # If given a file with unicode semantics, such as sys.stdout on Python 3, # we must get at the the underlying buffer which has bytes semantics. if shellCompFile and ioType(shellCompFile) == str: shellCompFile = shellCompFile.buffer # shellName is provided for forward-compatibility. It is not used, # since we currently only support zsh. shellName, position = words[-1].split(":") position = int(position) # zsh gives the completion position ($CURRENT) as a 1-based index, # and argv[0] has already been stripped off, so we subtract 2 to # get the real 0-based index. position -= 2 cWord = words[position] # since the user may hit TAB at any time, we may have been called with an # incomplete command-line that would generate getopt errors if parsed # verbatim. However, we must do *some* parsing in order to determine if # there is a specific subcommand that we need to provide completion for. 
# So, to make the command-line more sane we work backwards from the # current completion position and strip off all words until we find one # that "looks" like a subcommand. It may in fact be the argument to a # normal command-line option, but that won't matter for our purposes. while position >= 1: if words[position - 1].startswith("-"): position -= 1 else: break words = words[:position] subCommands = getattr(config, "subCommands", None) if subCommands: # OK, this command supports sub-commands, so lets see if we have been # given one. # If the command-line arguments are not valid then we won't be able to # sanely detect the sub-command, so just generate completions as if no # sub-command was found. args = None try: opts, args = getopt.getopt(words, config.shortOpt, config.longOpt) except getopt.error: pass if args: # yes, we have a subcommand. Try to find it. for cmd, short, parser, doc in config.subCommands: if args[0] == cmd or args[0] == short: subOptions = parser() subOptions.parent = config gen: ZshBuilder = ZshSubcommandBuilder( subOptions, config, cmdName, shellCompFile ) gen.write() return # sub-command not given, or did not match any knowns sub-command names genSubs = True if cWord.startswith("-"): # optimization: if the current word being completed starts # with a hyphen then it can't be a sub-command, so skip # the expensive generation of the sub-command list genSubs = False gen = ZshBuilder(config, cmdName, shellCompFile) gen.write(genSubs=genSubs) else: gen = ZshBuilder(config, cmdName, shellCompFile) gen.write()
Generate an appropriate description from docstring of the given object
def descrFromDoc(obj):
    """
    Generate an appropriate description from docstring of the given object

    @param obj: Any object with a C{__doc__} attribute.

    @return: The docstring collapsed onto a single space-separated line, or
        C{None} if there is no usable docstring.
    """
    doc = obj.__doc__
    if doc is None or doc.isspace():
        return None

    # Strip indentation and blank lines, then join everything with spaces.
    cleaned = (
        line.strip() for line in doc.split("\n") if line and not line.isspace()
    )
    return " ".join(cleaned)
Shell escape the given string Implementation borrowed from now-deprecated commands.mkarg() in the stdlib
def escape(x): """ Shell escape the given string Implementation borrowed from now-deprecated commands.mkarg() in the stdlib """ if "'" not in x: return "'" + x + "'" s = '"' for c in x: if c in '\\$"`': s = s + "\\" s = s + c s = s + '"' return s
Serialize a sequence of characters with attribute information The resulting string can be interpreted by compatible software so that the contained characters are displayed and, for those attributes which are supported by the software, the attributes expressed. The exact result of the serialization depends on the behavior of the method specified by I{attributeRenderer}. For example, if your terminal is VT102 compatible, you might run this for a colorful variation on the "hello world" theme:: from twisted.conch.insults.text import flatten, attributes as A from twisted.conch.insults.helper import CharacterAttribute print(flatten( A.normal[A.bold[A.fg.red['He'], A.fg.green['ll'], A.fg.magenta['o'], ' ', A.fg.yellow['Wo'], A.fg.blue['rl'], A.fg.cyan['d!']]], CharacterAttribute())) @param output: Object returned by accessing attributes of the module-level attributes object. @param attrs: A formatting state instance used to determine how to serialize C{output}. @type attributeRenderer: C{str} @param attributeRenderer: Name of the method on I{attrs} that should be called to render the attributes during serialization. Defaults to C{'toVT102'}. @return: A string expressing the text and display attributes specified by L{output}.
def flatten(output, attrs, attributeRenderer="toVT102"):
    """
    Serialize a sequence of characters with attribute information

    The resulting string can be interpreted by compatible software so that
    the contained characters are displayed and, for those attributes which
    are supported by the software, the attributes expressed.  The exact
    result of the serialization depends on the behavior of the method
    specified by I{attributeRenderer}.

    For example, if your terminal is VT102 compatible, you might run
    this for a colorful variation on the "hello world" theme::

        from twisted.conch.insults.text import flatten, attributes as A
        from twisted.conch.insults.helper import CharacterAttribute
        print(flatten(
            A.normal[A.bold[A.fg.red['He'], A.fg.green['ll'], A.fg.magenta['o'], ' ',
                            A.fg.yellow['Wo'], A.fg.blue['rl'], A.fg.cyan['d!']]],
            CharacterAttribute()))

    @param output: Object returned by accessing attributes of the
        module-level attributes object.

    @param attrs: A formatting state instance used to determine how to
        serialize C{output}.

    @type attributeRenderer: C{str}
    @param attributeRenderer: Name of the method on I{attrs} that should be
        called to render the attributes during serialization.  Defaults to
        C{'toVT102'}.

    @return: A string expressing the text and display attributes specified
        by L{output}.
    """
    # The output object drives serialization by pushing string fragments
    # into our accumulator; we simply concatenate them at the end.
    fragments: List[str] = []
    output.serialize(fragments.append, attrs, attributeRenderer)
    return "".join(fragments)
Receive a file descriptor from a L{sendmsg} message on the given C{AF_UNIX} socket. @param socketfd: An C{AF_UNIX} socket, attached to another process waiting to send sockets via the ancillary data mechanism in L{send1msg}. @type socketfd: C{int} @return: a 2-tuple of (new file descriptor, description). @rtype: 2-tuple of (C{int}, C{bytes})
def recvfd(socketfd: int) -> Tuple[int, bytes]:
    """
    Receive a file descriptor from a L{sendmsg} message on the given
    C{AF_UNIX} socket.

    @param socketfd: An C{AF_UNIX} socket, attached to another process
        waiting to send sockets via the ancillary data mechanism in
        L{send1msg}.
    @type socketfd: C{int}

    @return: a 2-tuple of (new file descriptor, description).
    @rtype: 2-tuple of (C{int}, C{bytes})
    """
    # Wrap the raw fd in a socket object so recvmsg can use it.
    ourSocket = socket.fromfd(socketfd, socket.AF_UNIX, socket.SOCK_STREAM)
    data, ancillary, flags = recvmsg(ourSocket)

    # Exactly one control message is expected; destructuring enforces that.
    [(cmsgLevel, cmsgType, packedFD)] = ancillary

    # cmsgLevel and cmsgType really need to be SOL_SOCKET / SCM_RIGHTS, but
    # since those are the *only* standard values, there's not much point in
    # checking.

    # The payload is a single native C int holding the new descriptor.
    [unpackedFD] = unpack("i", packedFD)
    return (unpackedFD, data)
Build strings that are legal values for the systemd I{FileDescriptorName} field.
def systemdDescriptorNames() -> SearchStrategy[str]:
    """
    Build strings that are legal values for the systemd
    I{FileDescriptorName} field.
    """
    # systemd.socket(5) says:
    #
    # > Names may contain any ASCII character, but must exclude control
    # > characters and ":", and must be at most 255 characters in length.
    excludedCategory: Literal["Cc"] = "Cc"
    legalCharacters = characters(
        # Restrict to the ASCII range.
        min_codepoint=0,
        max_codepoint=127,
        # Exclude control characters.
        blacklist_categories=(excludedCategory,),
        # Exclude the systemd field separator.
        blacklist_characters=(":",),
    )
    # The docs don't say there is a min size so I'm guessing...
    return text(min_size=1, max_size=255, alphabet=legalCharacters)
Do nothing. This is used to test the deprecation decorators.
def dummyCallable():
    """
    Do nothing.

    This exists solely for testing the deprecation decorators.
    """
Do nothing. This is used to test the replacement parameter to L{deprecated}.
def dummyReplacementMethod():
    """
    Do nothing.

    This exists solely for testing the replacement parameter to
    L{deprecated}.
    """
Function with a deprecated keyword parameter.
def functionWithDeprecatedParameter(a, b, c=1, foo=2, bar=3):
    """
    A function with a deprecated keyword parameter.

    Arguments are accepted and ignored; this exists only so tests can
    exercise parameter-deprecation warnings.
    """
By convention, UIDs less than 1000 are reserved for the system. A system which allocated every single one of those UIDs would likely have practical problems with allocating new ones, so let's assume that we'll be able to find one. (If we don't, this will wrap around to negative values and I{eventually} find something.) @return: a user ID which does not exist on the local system. Or, on systems without a L{pwd} module, return C{SYSTEM_UID_MAX}.
def findInvalidUID():
    """
    By convention, UIDs less than 1000 are reserved for the system. A system
    which allocated every single one of those UIDs would likely have practical
    problems with allocating new ones, so let's assume that we'll be able to
    find one.  (If we don't, this will wrap around to negative values and
    I{eventually} find something.)

    @return: a user ID which does not exist on the local system.  Or, on
        systems without a L{pwd} module, return C{SYSTEM_UID_MAX}.
    """
    candidate = SYSTEM_UID_MAX
    if pwd is None:
        # No password database available; just use the maximum system UID.
        return candidate
    while True:
        try:
            pwd.getpwuid(candidate)
        except KeyError:
            # This UID is unallocated; use it.
            return candidate
        candidate -= 1
Set some config in the repo that Git requires to make commits. This isn't needed in real usage, just for tests. @param path: The path to the Git repository. @type path: L{FilePath}
def _gitConfig(path):
    """
    Set some config in the repo that Git requires to make commits. This isn't
    needed in real usage, just for tests.

    @param path: The path to the Git repository.
    @type path: L{FilePath}
    """
    configFile = path.child(".git").child("config").path
    settings = [
        ("user.name", '"someone"'),
        ("user.email", '"[email protected]"'),
    ]
    for key, value in settings:
        runCommand(["git", "config", "--file", configFile, key, value])
Run a git init, and set some config that git requires. This isn't needed in real usage. @param path: The path to where the Git repo will be created. @type path: L{FilePath}
def _gitInit(path):
    """
    Run a git init, and set some config that git requires. This isn't needed
    in real usage.

    @param path: The path to where the Git repo will be created.
    @type path: L{FilePath}
    """
    # Create the repository, then apply the user configuration that
    # committing requires.
    runCommand(["git", "init", path.path])
    _gitConfig(path)
A convenience for generating _version.py data. @param args: Arguments to pass to L{Version}. @param kwargs: Keyword arguments to pass to L{Version}.
def genVersion(*args, **kwargs):
    """
    A convenience for generating _version.py data.

    @param args: Arguments to pass to L{Version}.
    @param kwargs: Keyword arguments to pass to L{Version}.
    """
    version = Version(*args, **kwargs)
    return f"from incremental import Version\n__version__={version!r}"
Create a pipe, and return the two FDs wrapped in L{_FDHolders}.
def _makePipe():
    """
    Create a pipe, and return the two FDs wrapped in L{_FDHolders}.
    """
    readFD, writeFD = pipe()
    return (_FDHolder(readFD), _FDHolder(writeFD))
Start a script that is a peer of this test as a subprocess. @param script: the module name of the script in this directory (no package prefix, no '.py') @type script: C{str} @rtype: L{StartStopProcessProtocol}
def _spawn(script, outputFD):
    """
    Start a script that is a peer of this test as a subprocess.

    @param script: the module name of the script in this directory (no
        package prefix, no '.py')
    @type script: C{str}

    @param outputFD: the child file descriptor to which the script's extra
        output channel (fd 17) is connected.

    @rtype: L{StartStopProcessProtocol}
    """
    interpreter = FilePath(sys.executable).asTextMode().path
    scriptPath = FilePath(__file__).sibling(script + ".py").asTextMode().path
    # Propagate our import path so the child can import the same modules.
    environment = dict(os.environ)
    environment["PYTHONPATH"] = FilePath(pathsep.join(sys.path)).asTextMode().path
    processProtocol = StartStopProcessProtocol()
    reactor.spawnProcess(
        processProtocol,
        interpreter,
        [interpreter, scriptPath, b"17"],
        env=environment,
        childFDs={0: "w", 1: "r", 2: "r", 17: outputFD},
    )
    return processProtocol
Generate completion functions for given twisted command - no errors should be raised @type cmdName: C{str} @param cmdName: The name of the command-line utility e.g. 'twistd' @type optionsFQPN: C{str} @param optionsFQPN: The Fully Qualified Python Name of the C{Options} class to be tested.
def test_genZshFunction(self, cmdName, optionsFQPN):
    """
    Generate completion functions for given twisted command - no errors
    should be raised

    @type cmdName: C{str}
    @param cmdName: The name of the command-line utility e.g. 'twistd'

    @type optionsFQPN: C{str}
    @param optionsFQPN: The Fully Qualified Python Name of the C{Options}
        class to be tested.
    """
    outputFile = BytesIO()
    self.patch(usage.Options, "_shellCompFile", outputFile)

    # some scripts won't import or instantiate because of missing
    # dependencies (pyOpenSSL, etc) so we have to skip them.
    try:
        o = reflect.namedAny(optionsFQPN)()
    except Exception as e:
        raise unittest.SkipTest(
            "Couldn't import or instantiate " "Options class: %s" % (e,)
        )

    try:
        o.parseOptions(["", "--_shell-completion", "zsh:2"])
    except ImportError as e:
        # this can happen for commands which don't have all
        # the necessary dependencies installed. skip test.
        # NOTE: SkipTest takes a single message; the original passed the
        # format string and the args tuple as two separate arguments, so
        # the message was never interpolated.
        raise unittest.SkipTest(f"ImportError calling parseOptions(): {e}")
    except SystemExit:
        pass  # expected
    else:
        self.fail("SystemExit not raised")
    outputFile.seek(0)
    # test that we got some output
    self.assertEqual(1, len(outputFile.read(1)))
    outputFile.seek(0)
    outputFile.truncate()

    # now, if it has sub commands, we have to test those too
    if hasattr(o, "subCommands"):
        for cmd, short, parser, doc in o.subCommands:
            try:
                o.parseOptions([cmd, "", "--_shell-completion", "zsh:3"])
            except ImportError as e:
                # this can happen for commands which don't have all
                # the necessary dependencies installed. skip test.
                raise unittest.SkipTest(
                    f"ImportError calling parseOptions() on subcommand: {e}"
                )
            except SystemExit:
                pass  # expected
            else:
                self.fail("SystemExit not raised")

            outputFile.seek(0)
            # test that we got some output
            self.assertEqual(1, len(outputFile.read(1)))
            outputFile.seek(0)
            outputFile.truncate()

    # flushed because we don't want DeprecationWarnings to be printed when
    # running these test cases.
    self.flushWarnings()
@param count: The number of file descriptors to indicate as inherited. @param pid: The pid of the inheriting process to indicate. @return: A copy of the current process environment with the I{systemd} file descriptor inheritance-related environment variables added to it.
def buildEnvironment(count: int, pid: object) -> Dict[str, str]:
    """
    @param count: The number of file descriptors to indicate as inherited.

    @param pid: The pid of the inheriting process to indicate.

    @return: A copy of the current process environment with the I{systemd}
        file descriptor inheritance-related environment variables added to
        it.
    """
    environment = dict(os.environ)
    socketNames = ":".join(f"{index}.socket" for index in range(count))
    environment.update(
        LISTEN_FDS=str(count),
        LISTEN_FDNAMES=socketNames,
        LISTEN_PID=str(pid),
    )
    return environment
Call L{mktime_real}, and if it raises L{OverflowError}, catch it and raise SkipTest instead. @param t9: A time as a 9-item tuple. @type t9: L{tuple} @return: A timestamp. @rtype: L{float}
def mktime(t9: tuple[int, int, int, int, int, int, int, int, int]) -> float:
    """
    Call L{mktime_real}, and if it raises L{OverflowError}, catch it and
    raise SkipTest instead.

    @param t9: A time as a 9-item tuple.
    @type t9: L{tuple}

    @return: A timestamp.
    @rtype: L{float}
    """
    try:
        return mktime_real(t9)
    except OverflowError:
        # This platform's C mktime cannot represent the requested time;
        # skip the test rather than failing it.
        raise SkipTest(f"Platform cannot construct time zone for {t9!r}")
Set time zone. @param name: a time zone name @type name: L{str}
def setTZ(name: str | None) -> None:
    """
    Set time zone.

    @param name: a time zone name, or L{None} to unset C{TZ}.
    @type name: L{str}
    """
    if tzset is None:
        # This platform doesn't support tzset(); nothing we can do.
        return

    if name is None:
        # Remove TZ if present; absence is fine.
        environ.pop("TZ", None)
    else:
        environ["TZ"] = name
    tzset()
Add cleanup hooks to a test case to reset timezone to original value. @param testCase: the test case to add the cleanup to. @type testCase: L{unittest.TestCase}
def addTZCleanup(testCase: TestCase) -> None:
    """
    Add cleanup hooks to a test case to reset timezone to original value.

    @param testCase: the test case to add the cleanup to.
    @type testCase: L{unittest.TestCase}
    """
    originalTZ = environ.get("TZ", None)

    def restoreTZ() -> None:
        # Reinstate whatever TZ was before the test ran (possibly unset).
        setTZ(originalTZ)

    testCase.addCleanup(restoreTZ)
Create a zipfile on zfname, containing the contents of dirname.
def zipit(dirname: str | bytes, zfname: str | bytes) -> None:
    """
    Create a zipfile on C{zfname}, containing the contents of C{dirname}.

    @param dirname: the directory whose contents to archive; may be L{str}
        or L{bytes}.
    @param zfname: the path at which to create the zip file; may be L{str}
        or L{bytes}.
    """
    coercedDirname = _coerceToFilesystemEncoding("", dirname)
    coercedZfname = _coerceToFilesystemEncoding("", zfname)
    with zipfile.ZipFile(coercedZfname, "w") as zf:
        for (
            root,
            ignored,
            files,
        ) in os.walk(coercedDirname):
            for fname in files:
                fspath = os.path.join(root, fname)
                # Strip the directory prefix to get the archive-relative
                # path.  Use the *coerced* name here: slicing by the length
                # of the original (possibly bytes) argument miscounts when
                # the decoded text length differs from the byte length.
                arcpath = fspath[len(coercedDirname) + 1 :]
                zf.write(fspath, arcpath)
Accepts a single line in Emacs local variable declaration format and returns a dict of all the variables {name: value}. Raises ValueError if 'line' is in the wrong format. See http://www.gnu.org/software/emacs/manual/html_node/File-Variables.html
def _parseLocalVariables(line): """ Accepts a single line in Emacs local variable declaration format and returns a dict of all the variables {name: value}. Raises ValueError if 'line' is in the wrong format. See http://www.gnu.org/software/emacs/manual/html_node/File-Variables.html """ paren = "-*-" start = line.find(paren) + len(paren) end = line.rfind(paren) if start == -1 or end == -1: raise ValueError(f"{line!r} not a valid local variable declaration") items = line[start:end].split(";") localVars = {} for item in items: if len(item.strip()) == 0: continue split = item.split(":") if len(split) != 2: raise ValueError(f"{line!r} contains invalid declaration {item!r}") localVars[split[0].strip()] = split[1].strip() return localVars
Accepts a filename and attempts to load the Emacs variable declarations from that file, simulating what Emacs does. See http://www.gnu.org/software/emacs/manual/html_node/File-Variables.html
def loadLocalVariables(filename):
    """
    Accepts a filename and attempts to load the Emacs variable declarations
    from that file, simulating what Emacs does.

    See http://www.gnu.org/software/emacs/manual/html_node/File-Variables.html
    """
    # Emacs only examines the first two lines of a file for declarations.
    with open(filename) as f:
        candidates = [f.readline(), f.readline()]
    for candidate in candidates:
        try:
            return _parseLocalVariables(candidate)
        except ValueError:
            # Not a declaration line; try the next one.
            pass
    return {}
Returns true if 'filename' looks like a file containing unit tests. False otherwise. Doesn't care whether filename exists.
def isTestFile(filename):
    """
    Returns true if 'filename' looks like a file containing unit tests.
    False otherwise.  Doesn't care whether filename exists.
    """
    stem, extension = os.path.splitext(os.path.basename(filename))
    return stem.startswith("test_") and extension == ".py"
Try to find the source line of the given test thing. @param testThing: the test item to attempt to inspect @type testThing: an L{TestCase}, test method, or module, though only the former two have a chance to succeed @rtype: int @return: the starting source line, or -1 if one couldn't be found
def _maybeFindSourceLine(testThing): """ Try to find the source line of the given test thing. @param testThing: the test item to attempt to inspect @type testThing: an L{TestCase}, test method, or module, though only the former two have a chance to succeed @rtype: int @return: the starting source line, or -1 if one couldn't be found """ # an instance of L{TestCase} -- locate the test it will run method = getattr(testThing, "_testMethodName", None) if method is not None: testThing = getattr(testThing, method) # If it's a function, we can get the line number even if the source file no # longer exists code = getattr(testThing, "__code__", None) if code is not None: return code.co_firstlineno try: return inspect.getsourcelines(testThing)[1] except (OSError, TypeError): # either testThing is a module, which raised a TypeError, or the file # couldn't be read return -1
Check that the given order is a known test running order. Does nothing else, since looking up the appropriate callable to sort the tests should be done when it actually will be used, as the default argument will not be coerced by this function. @param order: one of the known orders in C{_runOrders} @return: the order unmodified
def _checkKnownRunOrder(order):
    """
    Check that the given order is a known test running order.

    Does nothing else, since looking up the appropriate callable to sort the
    tests should be done when it actually will be used, as the default
    argument will not be coerced by this function.

    @param order: one of the known orders in C{_runOrders}
    @return: the order unmodified
    """
    if order not in _runOrders:
        knownOrders = ", ".join(repr(name) for name in _runOrders)
        raise usage.UsageError(
            f"--order must be one of: {knownOrders}. "
            "See --help-orders for details"
        )
    return order
Wrap an instance of C{pdb.Pdb} with readline support and load any .rcs.
def _wrappedPdb():
    """
    Wrap an instance of C{pdb.Pdb} with readline support and load any .rcs.
    """
    debugger = pdb.Pdb()
    try:
        namedModule("readline")
    except ImportError:
        # readline provides line editing for the debugger; it is optional.
        print("readline module not available")

    # Mirror pdb's own startup: load any debugger rc files found.
    for rcName in (".pdbrc", "pdbrc"):
        if not os.path.exists(rcName):
            continue
        try:
            rcFile = open(rcName)
        except OSError:
            pass
        else:
            with rcFile:
                debugger.rcLines.extend(rcFile.readlines())
    return debugger
Return a trial runner class set up with the parameters extracted from C{config}. @return: A trial runner instance.
def _makeRunner(config: Options) -> runner._Runner:
    """
    Return a trial runner class set up with the parameters extracted from
    C{config}.

    @param config: the parsed trial command-line options.

    @return: A trial runner instance.
    """
    # Default to the single-process runner; the branches below may swap in a
    # different class (distributed) or add mode-specific keyword arguments.
    cls: Type[runner._Runner] = runner.TrialRunner
    args = {
        "reporterFactory": config["reporter"],
        "tracebackFormat": config["tbformat"],
        "realTimeErrors": config["rterrors"],
        "uncleanWarnings": config["unclean-warnings"],
        "logfile": config["logfile"],
        "workingDirectory": config["temp-directory"],
        "exitFirst": config["exitfirst"],
    }
    if config["dry-run"]:
        # Dry run: collect and report tests without executing them.
        args["mode"] = runner.TrialRunner.DRY_RUN
    elif config["jobs"]:
        # Distributed run: dispatch tests across worker processes.
        cls = DistTrialRunner
        args["maxWorkers"] = config["jobs"]
        args["workerArguments"] = config._getWorkerArguments()
    else:
        if config["debug"]:
            args["mode"] = runner.TrialRunner.DEBUG
            debugger = config["debugger"]

            if debugger != "pdb":
                # A custom debugger was requested by fully-qualified name.
                try:
                    args["debugger"] = reflect.namedAny(debugger)
                except reflect.ModuleNotFound:
                    raise _DebuggerNotFound(
                        f"{debugger!r} debugger could not be found."
                    )
            else:
                args["debugger"] = _wrappedPdb()

        # profile/force-gc only apply to the in-process runner.
        args["profile"] = config["profile"]
        args["forceGarbageCollection"] = config["force-gc"]

    return cls(**args)
Synchronously run a Python script, with the same Python interpreter that ran the process calling this function, using L{Popen}, using the given command-line arguments, with standard input and standard error both redirected to L{os.devnull}, and return its output as a string. @param script: The path to the script. @type script: L{FilePath} @param args: The command-line arguments to follow the script in its invocation (the desired C{sys.argv[1:]}). @type args: L{tuple} of L{str} @return: the output passed to the process's C{stdout}, without any messages from C{stderr}. @rtype: L{bytes}
def outputFromPythonScript(script, *args):
    """
    Synchronously run a Python script, with the same Python interpreter that
    ran the process calling this function, using L{Popen}, using the given
    command-line arguments, with standard input and standard error both
    redirected to L{os.devnull}, and return its output as a string.

    @param script: The path to the script.
    @type script: L{FilePath}

    @param args: The command-line arguments to follow the script in its
        invocation (the desired C{sys.argv[1:]}).
    @type args: L{tuple} of L{str}

    @return: the output passed to the process's C{stdout}, without any
        messages from C{stderr}.
    @rtype: L{bytes}
    """
    with open(devnull, "rb") as nullInput, open(devnull, "wb") as nullError:
        childProcess = Popen(
            [executable, script.path, *args],
            stdout=PIPE,
            stderr=nullError,
            stdin=nullInput,
        )
        stdout, _ = childProcess.communicate()
    return stdout
Convert an integer represented as a base 128 string into an L{int}. @param st: The integer encoded in a byte string. @type st: L{bytes} @return: The integer value extracted from the byte string. @rtype: L{int}
def b1282int(st):
    """
    Convert an integer represented as a base 128 string into an L{int}.

    @param st: The integer encoded in a byte string.
    @type st: L{bytes}

    @return: The integer value extracted from the byte string.
    @rtype: L{int}
    """
    # Little-endian base 128: the first byte is the least significant digit.
    total = 0
    for place, char in enumerate(iterbytes(st)):
        total += ord(char) << (7 * place)
    return total
Set the limit on the prefix length for all Banana connections established after this call. The prefix length limit determines how many bytes of prefix a banana decoder will allow before rejecting a potential object as too large. @type limit: L{int} @param limit: The number of bytes of prefix for banana to allow when decoding.
def setPrefixLimit(limit):
    """
    Set the limit on the prefix length for all Banana connections established
    after this call.

    The prefix length limit determines how many bytes of prefix a banana
    decoder will allow before rejecting a potential object as too large.

    @type limit: L{int}
    @param limit: The number of bytes of prefix for banana to allow when
        decoding.
    """
    # Module-level setting consulted by Banana instances created afterwards.
    global _PREFIX_LIMIT
    _PREFIX_LIMIT = limit
Encode a list s-expression.
def encode(lst):
    """
    Encode a list s-expression.
    """
    # Point the shared encoder at a fresh buffer and capture its output.
    sink = BytesIO()
    _i.transport = sink
    _i.sendEncoded(lst)
    return sink.getvalue()
Decode a banana-encoded string.
def decode(st):
    """
    Decode a banana-encoded string.
    """
    received = []
    _i.expressionReceived = received.append
    try:
        _i.dataReceived(st)
    finally:
        # Always reset the shared decoder so a failed decode doesn't poison
        # the next call.
        _i.buffer = b""
        del _i.expressionReceived
    return received[0]
Given an object, if that object is a type, return a new, blank instance of that type which has not had C{__init__} called on it. If the object is not a type, return L{None}. @param cls: The type (or class) to create an instance of. @type cls: L{type} or something else that cannot be instantiated. @return: a new blank instance or L{None} if C{cls} is not a class or type.
def _createBlank(cls): """ Given an object, if that object is a type, return a new, blank instance of that type which has not had C{__init__} called on it. If the object is not a type, return L{None}. @param cls: The type (or class) to create an instance of. @type cls: L{type} or something else that cannot be instantiated. @return: a new blank instance or L{None} if C{cls} is not a class or type. """ if isinstance(cls, type): return cls.__new__(cls)
Make a new instance of a class without calling its __init__ method. @param state: A C{dict} used to update C{inst.__dict__} either directly or via C{__setstate__}, if available. @return: A new instance of C{cls}.
def _newInstance(cls, state):
    """
    Make a new instance of a class without calling its __init__ method.

    @param state: A C{dict} used to update C{inst.__dict__} either directly
        or via C{__setstate__}, if available.

    @return: A new instance of C{cls}.
    """
    instance = _createBlank(cls)

    def applyStateDirectly(newState):
        # Fallback for classes without a __setstate__ hook.
        if isinstance(newState, dict):
            instance.__dict__ = newState or {}

    applyState = getattr(instance, "__setstate__", applyStateDirectly)
    applyState(state)
    return instance
Set which local class will represent a remote type. If you have written a Copyable class that you expect your client to be receiving, write a local "copy" class to represent it, then call:: jellier.setUnjellyableForClass('module.package.Class', MyCopier). Call this at the module level immediately after its class definition. MyCopier should be a subclass of RemoteCopy. The classname may be a special tag returned by 'Copyable.getTypeToCopyFor' rather than an actual classname. This call is also for cached classes, since there will be no overlap. The rules are the same.
def setUnjellyableForClass(classname, unjellyable):
    """
    Set which local class will represent a remote type.

    If you have written a Copyable class that you expect your client to be
    receiving, write a local "copy" class to represent it, then call::

        jellier.setUnjellyableForClass('module.package.Class', MyCopier).

    Call this at the module level immediately after its class definition.
    MyCopier should be a subclass of RemoteCopy.

    The classname may be a special tag returned by
    'Copyable.getTypeToCopyFor' rather than an actual classname.

    This call is also for cached classes, since there will be no overlap.
    The rules are the same.
    """
    key = _maybeClass(classname)
    unjellyableRegistry[key] = unjellyable
    globalSecurity.allowTypes(key)
Set the factory to construct a remote instance of a type:: jellier.setUnjellyableFactoryForClass('module.package.Class', MyFactory) Call this at the module level immediately after its class definition. C{copyFactory} should return an instance or subclass of L{RemoteCopy<pb.RemoteCopy>}. Similar to L{setUnjellyableForClass} except it uses a factory instead of creating an instance.
def setUnjellyableFactoryForClass(classname, copyFactory):
    """
    Set the factory to construct a remote instance of a type::

        jellier.setUnjellyableFactoryForClass('module.package.Class', MyFactory)

    Call this at the module level immediately after its class definition.
    C{copyFactory} should return an instance or subclass of
    L{RemoteCopy<pb.RemoteCopy>}.

    Similar to L{setUnjellyableForClass} except it uses a factory instead
    of creating an instance.
    """
    key = _maybeClass(classname)
    unjellyableFactoryRegistry[key] = copyFactory
    globalSecurity.allowTypes(key)
Set all classes in a module derived from C{baseClass} as copiers for a corresponding remote class. When you have a hierarchy of Copyable (or Cacheable) classes on one side, and a mirror structure of Copied (or RemoteCache) classes on the other, use this to setUnjellyableForClass all your Copieds for the Copyables. Each copyTag (the "classname" argument to getTypeToCopyFor, and what the Copyable's getTypeToCopyFor returns) is formed from adding a prefix to the Copied's class name. The prefix defaults to module.__name__. If you wish the copy tag to consist of solely the classname, pass the empty string ''. @param module: a module object from which to pull the Copied classes. (passing sys.modules[__name__] might be useful) @param baseClass: the base class from which all your Copied classes derive. @param prefix: the string prefixed to classnames to form the unjellyableRegistry.
def setUnjellyableForClassTree(module, baseClass, prefix=None):
    """
    Set all classes in a module derived from C{baseClass} as copiers for a
    corresponding remote class.

    When you have a hierarchy of Copyable (or Cacheable) classes on one
    side, and a mirror structure of Copied (or RemoteCache) classes on the
    other, use this to setUnjellyableForClass all your Copieds for the
    Copyables.

    Each copyTag (the "classname" argument to getTypeToCopyFor, and what the
    Copyable's getTypeToCopyFor returns) is formed from adding a prefix to
    the Copied's class name.  The prefix defaults to module.__name__.  If you
    wish the copy tag to consist of solely the classname, pass the empty
    string ''.

    @param module: a module object from which to pull the Copied classes.
        (passing sys.modules[__name__] might be useful)

    @param baseClass: the base class from which all your Copied classes
        derive.

    @param prefix: the string prefixed to classnames to form the
        unjellyableRegistry.
    """
    if prefix is None:
        prefix = module.__name__

    if prefix:
        prefix = "%s." % prefix

    for name in dir(module):
        candidate = getattr(module, name)
        try:
            isCopied = issubclass(candidate, baseClass)
        except TypeError:
            # Not a class at all; ignore it.
            continue
        if isCopied:
            setUnjellyableForClass(f"{prefix}{name}", candidate)
Utility method to default to 'normal' state rules in serialization.
def getInstanceState(inst, jellier):
    """
    Utility method to default to 'normal' state rules in serialization.
    """
    # Prefer the instance's pickling hook; otherwise use its attribute dict.
    if hasattr(inst, "__getstate__"):
        state = inst.__getstate__()
    else:
        state = inst.__dict__
    sexp = jellier.prepare(inst)
    sexp.extend([qual(inst.__class__).encode("utf-8"), jellier.jelly(state)])
    return jellier.preserve(inst, sexp)
Utility method to default to 'normal' state rules in unserialization.
def setInstanceState(inst, unjellier, jellyList):
    """
    Utility method to default to 'normal' state rules in unserialization.
    """
    # jellyList[0] is the class tag; jellyList[1] holds the jellied state.
    restored = unjellier.unjelly(jellyList[1])
    if hasattr(inst, "__setstate__"):
        inst.__setstate__(restored)
    else:
        inst.__dict__ = restored
    return inst
Serialize to s-expression. Returns a list which is the serialized representation of an object. An optional 'taster' argument takes a SecurityOptions and will mark any insecure objects as unpersistable rather than serializing them.
def jelly(object, taster=DummySecurityOptions(), persistentStore=None, invoker=None):
    """
    Serialize to s-expression.

    Returns a list which is the serialized representation of an object.  An
    optional 'taster' argument takes a SecurityOptions and will mark any
    insecure objects as unpersistable rather than serializing them.
    """
    jellier = _Jellier(taster, persistentStore, invoker)
    return jellier.jelly(object)
Unserialize from s-expression. Takes a list that was the result from a call to jelly() and unserializes an arbitrary object from it. The optional 'taster' argument, an instance of SecurityOptions, will cause an InsecureJelly exception to be raised if a disallowed type, module, or class attempted to unserialize.
def unjelly(sexp, taster=DummySecurityOptions(), persistentLoad=None, invoker=None):
    """
    Unserialize from s-expression.

    Takes a list that was the result from a call to jelly() and unserializes
    an arbitrary object from it.  The optional 'taster' argument, an instance
    of SecurityOptions, will cause an InsecureJelly exception to be raised if
    a disallowed type, module, or class attempted to unserialize.
    """
    unjellier = _Unjellier(taster, persistentLoad, invoker)
    return unjellier.unjellyFull(sexp)
Respond to a challenge. This is useful for challenge/response authentication. @param challenge: A challenge. @param password: A password. @return: The password hashed twice.
def respond(challenge, password):
    """
    Respond to a challenge.

    This is useful for challenge/response authentication.

    @param challenge: A challenge.
    @param password: A password.

    @return: The password hashed twice.
    """
    # Hash the password, then hash that digest together with the challenge.
    hashedPassword = md5(password).digest()
    return md5(hashedPassword + challenge).digest()
@return: Some random data.
def challenge():
    """
    @return: Some random data, suitable as a challenge.
    """
    # Between 15 and 24 random uppercase ASCII letters, hashed down to a
    # fixed-width digest.
    length = random.randrange(15, 25)
    noise = bytes(random.randint(65, 90) for _ in range(length))
    return md5(noise).digest()
Wrap a deferred returned from a pb method in another deferred that expects a RemotePublished as a result. This will allow you to wait until the result is really available. Idiomatic usage would look like:: publish.whenReady(serverObject.getMeAPublishable()).addCallback(lookAtThePublishable)
def whenReady(d):
    """
    Wrap a deferred returned from a pb method in another deferred that
    expects a RemotePublished as a result.  This will allow you to wait until
    the result is really available.

    Idiomatic usage would look like::

        publish.whenReady(serverObject.getMeAPublishable()).addCallback(lookAtThePublishable)
    """
    readyDeferred = defer.Deferred()
    d.addCallbacks(_pubReady, readyDeferred.errback, callbackArgs=(readyDeferred,))
    return readyDeferred
(internal)
def _pubReady(result, d2): "(internal)" result.callWhenActivated(d2.callback)
A utility method that will call a remote method which expects a PageCollector as the first argument.
def getAllPages(referenceable, methodName, *args, **kw):
    """
    A utility method that will call a remote method which expects a
    PageCollector as the first argument.
    """
    collected = defer.Deferred()
    collector = CallbackPageCollector(collected.callback)
    referenceable.callRemote(methodName, collector, *args, **kw)
    return collected
Dictate a Banana dialect to use. @param protocol: A L{banana.Banana} instance which has not yet had a dialect negotiated. @param dialect: A L{bytes} instance naming a Banana dialect to select.
def selectDialect(protocol, dialect):
    """
    Dictate a Banana dialect to use.

    @param protocol: A L{banana.Banana} instance which has not yet had a
        dialect negotiated.

    @param dialect: A L{bytes} instance naming a Banana dialect to select.
    """
    # We can't do this the normal way by delivering bytes because other
    # setup stuff gets in the way (for example, clients and servers have
    # incompatible negotiations for this step).  So use the private API to
    # make this happen.
    protocol._selectDialect(dialect)
Banana encode an object using L{banana.Banana.sendEncoded}. @param bananaFactory: A no-argument callable which will return a new, unconnected protocol instance to use to do the encoding (this should most likely be a L{banana.Banana} instance). @param obj: The object to encode. @type obj: Any type supported by Banana. @return: A L{bytes} instance giving the encoded form of C{obj}.
def encode(bananaFactory, obj):
    """
    Banana encode an object using L{banana.Banana.sendEncoded}.

    @param bananaFactory: A no-argument callable which will return a new,
        unconnected protocol instance to use to do the encoding (this should
        most likely be a L{banana.Banana} instance).

    @param obj: The object to encode.
    @type obj: Any type supported by Banana.

    @return: A L{bytes} instance giving the encoded form of C{obj}.
    """
    protocol = bananaFactory()
    transport = StringTransport()
    protocol.makeConnection(transport)
    # Discard any negotiation bytes written during connection setup so only
    # the encoding of obj is returned.
    transport.clear()
    protocol.sendEncoded(obj)
    return transport.value()
A dummy function to test function serialization.
def afunc(self):
    """
    A dummy function used to test function serialization.
    """
Verify that the given object round-trips through jelly & banana and comes out equivalent to the input.
def jellyRoundTrip(testCase, toSerialize):
    """
    Verify that the given object round-trips through jelly & banana and
    comes out equivalent to the input.
    """
    # jelly -> banana-encode -> banana-decode -> unjelly should be identity.
    wireBytes = banana.encode(jelly.jelly(toSerialize))
    roundTripped = jelly.unjelly(banana.decode(wireBytes))
    testCase.assertEqual(toSerialize, roundTripped)
Create a server and a client and connect the two with an L{IOPump}. @param test: the test case where the client and server will be used. @type test: L{twisted.trial.unittest.TestCase} @param clientFactory: The factory that creates the client object. @type clientFactory: L{twisted.spread.pb.PBClientFactory} @param serverFactory: The factory that creates the server object. @type serverFactory: L{twisted.spread.pb.PBServerFactory} @return: a 3-tuple of (client, server, pump) @rtype: (L{twisted.spread.pb.Broker}, L{twisted.spread.pb.Broker}, L{IOPump})
def connectServerAndClient(test, clientFactory, serverFactory):
    """
    Create a server and a client and connect the two with an
    L{IOPump}.

    @param test: the test case where the client and server will be
        used.
    @type test: L{twisted.trial.unittest.TestCase}

    @param clientFactory: The factory that creates the client object.
    @type clientFactory: L{twisted.spread.pb.PBClientFactory}

    @param serverFactory: The factory that creates the server object.
    @type serverFactory: L{twisted.spread.pb.PBServerFactory}

    @return: a 3-tuple of (client, server, pump)
    @rtype: (L{twisted.spread.pb.Broker}, L{twisted.spread.pb.Broker},
        L{IOPump})
    """
    # Both brokers are built against the same fake address; the transports
    # below are in-memory, so the address is never actually connected to.
    addr = ("127.0.0.1",)
    clientBroker = clientFactory.buildProtocol(addr)
    serverBroker = serverFactory.buildProtocol(addr)

    # In-memory buffers stand in for real sockets; the IOPump shuttles
    # bytes between them when pumped.
    clientTransport = StringIO()
    serverTransport = StringIO()
    clientBroker.makeConnection(protocol.FileWrapper(clientTransport))
    serverBroker.makeConnection(protocol.FileWrapper(serverTransport))
    pump = IOPump(clientBroker, serverBroker, clientTransport, serverTransport)

    def maybeDisconnect(broker):
        # Only deliver connectionLost once; a broker that already
        # disconnected during the test must not be notified again.
        if not broker.disconnected:
            broker.connectionLost(failure.Failure(main.CONNECTION_DONE))

    def disconnectClientFactory():
        # There's no connector, just a FileWrapper mediated by the
        # IOPump.  Fortunately PBClientFactory.clientConnectionLost
        # doesn't do anything with the connector so we can get away
        # with passing None here.
        clientFactory.clientConnectionLost(
            connector=None, reason=failure.Failure(main.CONNECTION_DONE)
        )

    # Cleanups run in reverse order after the test; they tear down both
    # brokers and then notify the client factory.
    test.addCleanup(maybeDisconnect, clientBroker)
    test.addCleanup(maybeDisconnect, serverBroker)
    test.addCleanup(disconnectClientFactory)
    # Establish the connection
    pump.pump()
    return clientBroker, serverBroker, pump
Connect a client and server L{Broker} together with an L{IOPump} @param realm: realm to use, defaulting to a L{DummyRealm} @returns: a 3-tuple (client, server, pump).
def connectedServerAndClient(test, realm=None):
    """
    Connect a client and server L{Broker} together with an L{IOPump}

    @param realm: realm to use, defaulting to a L{DummyRealm}

    @returns: a 3-tuple (client, server, pump).
    """
    realm = realm or DummyRealm()
    checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(guest=b"guest")
    serverPortal = portal.Portal(realm, [checker])
    return connectServerAndClient(
        test, pb.PBClientFactory(), pb.PBServerFactory(serverPortal)
    )
Factory of L{SimpleFactoryCopy}, getting a created instance given the C{id} found in C{state}.
def createFactoryCopy(state):
    """
    Factory of L{SimpleFactoryCopy}, getting a created instance given the
    C{id} found in C{state}.
    """
    stateId = state.get("id", None)
    if stateId is None:
        raise RuntimeError(f"factory copy state has no 'id' member {repr(state)}")
    registry = SimpleFactoryCopy.allIDs
    if stateId not in registry:
        raise RuntimeError(f"factory class has no ID: {SimpleFactoryCopy.allIDs}")
    inst = registry[stateId]
    if not inst:
        raise RuntimeError("factory method found no object with id")
    return inst
Create and return a new in-memory transport hooked up to the given protocol. @param clientProtocol: The client protocol to use. @type clientProtocol: L{IProtocol} provider @return: The transport. @rtype: L{FakeTransport}
def makeFakeClient(clientProtocol):
    """
    Create and return a new in-memory transport hooked up to the given
    protocol, configured as the initiating (client) side.

    @param clientProtocol: The client protocol to use.
    @type clientProtocol: L{IProtocol} provider

    @return: The transport.
    @rtype: L{FakeTransport}
    """
    transport = FakeTransport(clientProtocol, isServer=False)
    return transport
Create and return a new in-memory transport hooked up to the given protocol. @param serverProtocol: The server protocol to use. @type serverProtocol: L{IProtocol} provider @return: The transport. @rtype: L{FakeTransport}
def makeFakeServer(serverProtocol):
    """
    Create and return a new in-memory transport hooked up to the given
    protocol, configured as the accepting (server) side.

    @param serverProtocol: The server protocol to use.
    @type serverProtocol: L{IProtocol} provider

    @return: The transport.
    @rtype: L{FakeTransport}
    """
    transport = FakeTransport(serverProtocol, isServer=True)
    return transport
Create a new L{IOPump} connecting two protocols. @param serverProtocol: The protocol to use on the accepting side of the connection. @type serverProtocol: L{IProtocol} provider @param serverTransport: The transport to associate with C{serverProtocol}. @type serverTransport: L{FakeTransport} @param clientProtocol: The protocol to use on the initiating side of the connection. @type clientProtocol: L{IProtocol} provider @param clientTransport: The transport to associate with C{clientProtocol}. @type clientTransport: L{FakeTransport} @param debug: A flag indicating whether to log information about what the L{IOPump} is doing. @type debug: L{bool} @param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once before returning to put the protocols into their post-handshake or post-server-greeting state? @type greet: L{bool} @param clock: An optional L{Clock}. Pumping the resulting L{IOPump} will also increase clock time by a small increment. @return: An L{IOPump} which connects C{serverProtocol} and C{clientProtocol} and delivers bytes between them when it is pumped. @rtype: L{IOPump}
def connect(
    serverProtocol,
    serverTransport,
    clientProtocol,
    clientTransport,
    debug=False,
    greet=True,
    clock=None,
):
    """
    Create a new L{IOPump} connecting two protocols.

    @param serverProtocol: The protocol to use on the accepting side of the
        connection.
    @type serverProtocol: L{IProtocol} provider

    @param serverTransport: The transport to associate with C{serverProtocol}.
    @type serverTransport: L{FakeTransport}

    @param clientProtocol: The protocol to use on the initiating side of the
        connection.
    @type clientProtocol: L{IProtocol} provider

    @param clientTransport: The transport to associate with C{clientProtocol}.
    @type clientTransport: L{FakeTransport}

    @param debug: A flag indicating whether to log information about what the
        L{IOPump} is doing.
    @type debug: L{bool}

    @param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once
        before returning to put the protocols into their post-handshake or
        post-server-greeting state?
    @type greet: L{bool}

    @param clock: An optional L{Clock}.  Pumping the resulting L{IOPump} will
        also increase clock time by a small increment.

    @return: An L{IOPump} which connects C{serverProtocol} and
        C{clientProtocol} and delivers bytes between them when it is pumped.
    @rtype: L{IOPump}
    """
    # Attach the server side first so its greeting (if any) is queued before
    # the client starts talking.
    serverProtocol.makeConnection(serverTransport)
    clientProtocol.makeConnection(clientTransport)
    ioPump = IOPump(
        clientProtocol,
        serverProtocol,
        clientTransport,
        serverTransport,
        debug,
        clock=clock,
    )
    if not greet:
        return ioPump
    # Kick off server greeting, etc
    ioPump.flush()
    return ioPump
Connect a given server and client class to each other. @param ServerClass: a callable that produces the server-side protocol. @type ServerClass: 0-argument callable returning L{IProtocol} provider. @param ClientClass: like C{ServerClass} but for the other side of the connection. @type ClientClass: 0-argument callable returning L{IProtocol} provider. @param clientTransportFactory: a callable that produces the transport which will be attached to the protocol returned from C{ClientClass}. @type clientTransportFactory: callable taking (L{IProtocol}) and returning L{FakeTransport} @param serverTransportFactory: a callable that produces the transport which will be attached to the protocol returned from C{ServerClass}. @type serverTransportFactory: callable taking (L{IProtocol}) and returning L{FakeTransport} @param debug: Should this dump an escaped version of all traffic on this connection to stdout for inspection? @type debug: L{bool} @param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once before returning to put the protocols into their post-handshake or post-server-greeting state? @type greet: L{bool} @param clock: An optional L{Clock}. Pumping the resulting L{IOPump} will also increase clock time by a small increment. @return: the client protocol, the server protocol, and an L{IOPump} which, when its C{pump} and C{flush} methods are called, will move data between the created client and server protocol instances. @rtype: 3-L{tuple} of L{IProtocol}, L{IProtocol}, L{IOPump}
def connectedServerAndClient(
    ServerClass,
    ClientClass,
    clientTransportFactory=makeFakeClient,
    serverTransportFactory=makeFakeServer,
    debug=False,
    greet=True,
    clock=None,
):
    """
    Connect a given server and client class to each other.

    @param ServerClass: a callable that produces the server-side protocol.
    @type ServerClass: 0-argument callable returning L{IProtocol} provider.

    @param ClientClass: like C{ServerClass} but for the other side of the
        connection.
    @type ClientClass: 0-argument callable returning L{IProtocol} provider.

    @param clientTransportFactory: a callable that produces the transport
        which will be attached to the protocol returned from C{ClientClass}.
    @type clientTransportFactory: callable taking (L{IProtocol}) and returning
        L{FakeTransport}

    @param serverTransportFactory: a callable that produces the transport
        which will be attached to the protocol returned from C{ServerClass}.
    @type serverTransportFactory: callable taking (L{IProtocol}) and returning
        L{FakeTransport}

    @param debug: Should this dump an escaped version of all traffic on this
        connection to stdout for inspection?
    @type debug: L{bool}

    @param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once
        before returning to put the protocols into their post-handshake or
        post-server-greeting state?
    @type greet: L{bool}

    @param clock: An optional L{Clock}.  Pumping the resulting L{IOPump} will
        also increase clock time by a small increment.

    @return: the client protocol, the server protocol, and an L{IOPump} which,
        when its C{pump} and C{flush} methods are called, will move data
        between the created client and server protocol instances.
    @rtype: 3-L{tuple} of L{IProtocol}, L{IProtocol}, L{IOPump}
    """
    clientProto = ClientClass()
    serverProto = ServerClass()
    clientTransport = clientTransportFactory(clientProto)
    serverTransport = serverTransportFactory(serverProto)
    pump = connect(
        serverProto, serverTransport, clientProto, clientTransport,
        debug, greet, clock=clock,
    )
    return clientProto, serverProto, pump
Should the client and server described by the arguments be connected to each other, i.e. do their port numbers match? @param clientInfo: the args for connectTCP @type clientInfo: L{tuple} @param serverInfo: the args for listenTCP @type serverInfo: L{tuple} @return: If they do match, return factories for the client and server that should connect; otherwise return L{None}, indicating they shouldn't be connected. @rtype: L{None} or 2-L{tuple} of (L{ClientFactory}, L{IProtocolFactory})
def _factoriesShouldConnect(clientInfo, serverInfo): """ Should the client and server described by the arguments be connected to each other, i.e. do their port numbers match? @param clientInfo: the args for connectTCP @type clientInfo: L{tuple} @param serverInfo: the args for listenTCP @type serverInfo: L{tuple} @return: If they do match, return factories for the client and server that should connect; otherwise return L{None}, indicating they shouldn't be connected. @rtype: L{None} or 2-L{tuple} of (L{ClientFactory}, L{IProtocolFactory}) """ ( clientHost, clientPort, clientFactory, clientTimeout, clientBindAddress, ) = clientInfo (serverPort, serverFactory, serverBacklog, serverInterface) = serverInfo if serverPort == clientPort: return clientFactory, serverFactory else: return None
Create an endpoint that can be fired on demand. @param debug: A flag; whether to dump output from the established connection to stdout. @type debug: L{bool} @return: A client endpoint, and an object that will cause one of the L{Deferred}s returned by that client endpoint. @rtype: 2-L{tuple} of (L{IStreamClientEndpoint}, L{ConnectionCompleter})
def connectableEndpoint(debug=False):
    """
    Create an endpoint that can be fired on demand.

    @param debug: A flag; whether to dump output from the established
        connection to stdout.
    @type debug: L{bool}

    @return: A client endpoint, and an object that will cause one of the
        L{Deferred}s returned by that client endpoint.
    @rtype: 2-L{tuple} of (L{IStreamClientEndpoint}, L{ConnectionCompleter})
    """
    memoryReactor = MemoryReactorClock()
    # Put a listener on the port the client endpoint targets so that the
    # ConnectionCompleter can later complete the connection.
    listeningEndpoint = TCP4ServerEndpoint(memoryReactor, 4321)
    listeningEndpoint.listen(Factory.forProtocol(Protocol))
    connectingEndpoint = TCP4ClientEndpoint(memoryReactor, "0.0.0.0", 4321)
    return connectingEndpoint, ConnectionCompleter(memoryReactor)
This function mocks the generated pid aspect of the win32.CreateProcess function. - the true win32process.CreateProcess is called - return values are harvested in a tuple. - all return values from createProcess are passed back to the calling function except for the pid, the returned pid is hardcoded to 42
def CreateProcess(
    appName,
    cmdline,
    procSecurity,
    threadSecurity,
    inheritHandles,
    newEnvironment,
    env,
    workingDir,
    startupInfo,
):
    """
    Wrap win32process.CreateProcess so that the reported pid is
    deterministic.

    The real win32process.CreateProcess is invoked with all arguments
    unchanged; every element of its return tuple is passed through except
    the pid, which is replaced with the hardcoded value 42.
    """
    hProcess, hThread, realPid, dwTid = win32process.CreateProcess(
        appName,
        cmdline,
        procSecurity,
        threadSecurity,
        inheritHandles,
        newEnvironment,
        env,
        workingDir,
        startupInfo,
    )
    # Discard the real pid; tests rely on the fixed value below.
    return (hProcess, hThread, 42, dwTid)
Take two Protocol instances and connect them.
def returnConnected(server, client):
    """
    Connect two Protocol instances through in-memory file wrappers and
    return the L{IOPump} that moves bytes between them.
    """
    clientBuffer = BytesIO()
    serverBuffer = BytesIO()
    client.makeConnection(FileWrapper(clientBuffer))
    server.makeConnection(FileWrapper(serverBuffer))
    pump = IOPump(client, server, clientBuffer, serverBuffer)
    # Two flushes let the initial challenge-response authentication
    # exchange complete before the pump is handed back.
    pump.flush()
    pump.flush()
    return pump
Make a test case for every db connector which can connect. @param base: Base class for test case. Additional base classes will be a DBConnector subclass and unittest.TestCase @param suffix: A suffix used to create test case names. Prefixes are defined in the DBConnector subclasses.
def makeSQLTests(base, suffix, globals):
    """
    Make a test case for every db connector which can connect.

    @param base: Base class for test case. Additional base classes will
        be a DBConnector subclass and unittest.TestCase
    @param suffix: A suffix used to create test case names. Prefixes are
        defined in the DBConnector subclasses.
    @param globals: The namespace (typically a module's C{globals()}) into
        which the generated test case classes are inserted.
    """
    connectors = [
        PySQLite2Connector,
        SQLite3Connector,
        PyPgSQLConnector,
        PsycopgConnector,
        MySQLConnector,
        FirebirdConnector,
    ]
    tests = {}
    for connclass in connectors:
        name = connclass.TEST_PREFIX + suffix

        class testcase(connclass, base, unittest.TestCase):
            __module__ = connclass.__module__

        testcase.__name__ = name
        if hasattr(connclass, "__qualname__"):
            # Replace the final component of the connector's dotted
            # qualified name with the generated test case name.  Splitting
            # on "." is essential here; the bare split() (on whitespace)
            # previously used yielded a single element and silently
            # discarded the entire qualified-name prefix.
            testcase.__qualname__ = ".".join(
                connclass.__qualname__.split(".")[0:-1] + [name]
            )
        tests[name] = testcase
    globals.update(tests)
Returns a 3-tuple: (client, server, pump)
def connectedServerAndClient(
    ServerClass=SimpleSymmetricProtocol, ClientClass=SimpleSymmetricProtocol, *a, **kw
):
    """
    Build a connected client/server pair via L{iosim.connectedServerAndClient},
    defaulting both sides to L{SimpleSymmetricProtocol}.

    @return: a 3-tuple: (client, server, pump)
    """
    result = iosim.connectedServerAndClient(ServerClass, ClientClass, *a, **kw)
    return result
Make a L{Failure} of a divide-by-zero error. @param args: Any C{*args} are passed to Failure's constructor. @param kwargs: Any C{**kwargs} are passed to Failure's constructor.
def getDivisionFailure(*args: object, **kwargs: object) -> Failure:
    """
    Make a L{Failure} of a divide-by-zero error.

    @param args: Any C{*args} are passed to Failure's constructor.
    @param kwargs: Any C{**kwargs} are passed to Failure's constructor.
    """
    try:
        1 / 0
    except ZeroDivisionError:
        # Constructed inside the except block so the Failure captures the
        # active exception.
        return Failure(*args, **kwargs)
A fake L{Deferred} canceller which callbacks the L{Deferred} with C{str} "Callback Result" when cancelling it. @param deferred: The cancelled L{Deferred}.
def fakeCallbackCanceller(deferred: Deferred[str]) -> None:
    """
    A fake L{Deferred} canceller which, rather than erroring out the
    L{Deferred}, fires it with the C{str} "Callback Result".

    @param deferred: The cancelled L{Deferred}.
    """
    result = "Callback Result"
    deferred.callback(result)
Create a list of Deferreds and a corresponding list of integers tracking how many times each Deferred has been cancelled. Without additional steps the Deferreds will never fire.
def _setupRaceState(numDeferreds: int) -> tuple[list[int], list[Deferred[object]]]:
    """
    Create a list of Deferreds and a corresponding list of integers
    tracking how many times each Deferred has been cancelled.  Without
    additional steps the Deferreds will never fire.
    """
    cancelledState = [0] * numDeferreds

    def makeDeferred(index: int) -> Deferred[object]:
        # A closure over ``index`` (bound per call, not via a mutable loop
        # variable) bumps the matching cancellation counter.
        def cancel(d: Deferred[object]) -> None:
            cancelledState[index] += 1

        return Deferred(canceller=cancel)

    ds: list[Deferred[object]] = [makeDeferred(n) for n in range(numDeferreds)]
    return cancelledState, ds
Private function to be used to pass as an alternate onTimeoutCancel value to timeoutDeferred
def _overrideFunc(v: object, t: float) -> str: """ Private function to be used to pass as an alternate onTimeoutCancel value to timeoutDeferred """ return "OVERRIDDEN"
Tickle an asyncio event loop to call all of the things scheduled with call_soon, inasmuch as this can be done via the public API. @param loop: The asyncio event loop to flush the previously-called C{call_soon} entries from.
def callAllSoonCalls(loop: AbstractEventLoop) -> None:
    """
    Tickle an asyncio event loop to call all of the things scheduled with
    call_soon, inasmuch as this can be done via the public API.

    @param loop: The asyncio event loop to flush the previously-called
        C{call_soon} entries from.
    """
    # Queue a stop *after* everything already scheduled, then spin the
    # loop: all pending call_soon callbacks run, and the loop halts.
    stopLoop = loop.stop
    loop.call_soon(stopLoop)
    loop.run_forever()
Calls L{deferredGenerator} while suppressing the deprecation warning. @param f: Function to call @return: Return value of function.
def deprecatedDeferredGenerator(f):
    """
    Calls L{deferredGenerator} while suppressing the deprecation warning.

    @param f: Function to call
    @return: Return value of function.
    """
    suppressions = [
        SUPPRESS(
            message="twisted.internet.defer.deferredGenerator was deprecated"
        )
    ]
    return runWithWarningsSuppressed(suppressions, deferredGenerator, f)
Make a C{Failure} of a divide-by-zero error.
def getDivisionFailure(*, captureVars: bool = False) -> failure.Failure:
    """
    Make a C{Failure} of a divide-by-zero error.

    @param captureVars: Passed through to the L{failure.Failure}
        constructor.
    """
    try:
        1 / 0
    except ZeroDivisionError:
        # Built inside the handler so the Failure wraps the live exception.
        return failure.Failure(captureVars=captureVars)
Construct a passive mode message with the correct encoding @param protocol: the FTP protocol from which to base the encoding @param host: the hostname @param port: the port @return: the passive mode message
def passivemode_msg(protocol, host="127.0.0.1", port=12345):
    """
    Construct a passive mode message with the correct encoding

    @param protocol: the FTP protocol from which to base the encoding
    @param host: the hostname
    @param port: the port
    @return: the passive mode message
    """
    hostPort = ftp.encodeHostPort(host, port)
    message = f"227 Entering Passive Mode ({hostPort})."
    return message.encode(protocol._encoding)
Externally implement C{__reduce__} for L{CopyRegistered}. @param cr: The L{CopyRegistered} instance. @return: a 2-tuple of callable and argument list, in this case L{CopyRegisteredLoaded} and no arguments.
def reduceCopyRegistered(cr: object) -> tuple[type[CopyRegisteredLoaded], tuple[()]]:
    """
    Externally implement C{__reduce__} for L{CopyRegistered}.

    @param cr: The L{CopyRegistered} instance; unused, since reconstruction
        takes no state.

    @return: a 2-tuple of callable and argument list, in this case
        L{CopyRegisteredLoaded} and no arguments.
    """
    return (CopyRegisteredLoaded, ())
Create a plugindummy package.
def _createPluginDummy(
    entrypath: FilePath[str], pluginContent: bytes, real: bool, pluginModule: str
) -> FilePath[str]:
    """
    Create a plugindummy package under C{entrypath} containing a plugins
    sub-directory with the given plugin module, and return the plugins
    directory.  When C{real} is false the C{__init__.py} files are omitted,
    leaving namespace-style directories.
    """
    entrypath.createDirectory()
    package = entrypath.child("plugindummy")
    package.createDirectory()
    if real:
        package.child("__init__.py").setContent(b"")
    pluginsDir = package.child("plugins")
    pluginsDir.createDirectory()
    if real:
        pluginsDir.child("__init__.py").setContent(pluginInitFile)
    pluginsDir.child(pluginModule + ".py").setContent(pluginContent)
    return pluginsDir
Create a certificate for given C{organization} and C{organizationalUnit}. @return: a tuple of (key, request, certificate) objects.
def generateCertificateObjects(organization, organizationalUnit):
    """
    Create a certificate for given C{organization} and C{organizationalUnit}.

    @param organization: The C{O} (organization) component of the subject.
    @param organizationalUnit: The C{OU} (organizational unit) component of
        the subject.

    @return: a tuple of (key, request, certificate) objects.
    """
    # 2048-bit RSA key used for both the request and the certificate.
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)
    req = crypto.X509Req()
    subject = req.get_subject()
    subject.O = organization
    subject.OU = organizationalUnit
    req.set_pubkey(pkey)
    # NOTE(review): "md5" signatures are rejected by some modern OpenSSL
    # builds / security policies — confirm this still runs on supported
    # platforms.
    req.sign(pkey, "md5")

    # Here comes the actual certificate
    cert = crypto.X509()
    cert.set_serial_number(1)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(60)  # Testing certificates need not be long lived
    # Self-signed: issuer and subject are the same, signed with its own key.
    cert.set_issuer(req.get_subject())
    cert.set_subject(req.get_subject())
    cert.set_pubkey(req.get_pubkey())
    cert.sign(pkey, "md5")

    return pkey, req, cert