repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
sequencelengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
sequencelengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
itamarst/crochet
crochet/_eventloop.py
EventualResult._connect_deferred
def _connect_deferred(self, deferred):
    """
    Hook up the Deferred that this will be the result of.

    Should only be run in Twisted thread, and only called once.
    """
    self._deferred = deferred

    # Because we use __del__, we need to make sure there are no cycles
    # involving this object, which is why we use a weakref:
    def put(result, eventual=weakref.ref(self)):
        # Resolve the weakref at fire time; the EventualResult may have
        # been garbage-collected before the Deferred fires.
        eventual = eventual()
        if eventual:
            eventual._set_result(result)
        else:
            # Nobody is waiting on the result any more; log it so
            # failures are not silently dropped.
            err(result, "Unhandled error in EventualResult")

    deferred.addBoth(put)
python
def _connect_deferred(self, deferred): """ Hook up the Deferred that that this will be the result of. Should only be run in Twisted thread, and only called once. """ self._deferred = deferred # Because we use __del__, we need to make sure there are no cycles # involving this object, which is why we use a weakref: def put(result, eventual=weakref.ref(self)): eventual = eventual() if eventual: eventual._set_result(result) else: err(result, "Unhandled error in EventualResult") deferred.addBoth(put)
[ "def", "_connect_deferred", "(", "self", ",", "deferred", ")", ":", "self", ".", "_deferred", "=", "deferred", "# Because we use __del__, we need to make sure there are no cycles", "# involving this object, which is why we use a weakref:", "def", "put", "(", "result", ",", "eventual", "=", "weakref", ".", "ref", "(", "self", ")", ")", ":", "eventual", "=", "eventual", "(", ")", "if", "eventual", ":", "eventual", ".", "_set_result", "(", "result", ")", "else", ":", "err", "(", "result", ",", "\"Unhandled error in EventualResult\"", ")", "deferred", ".", "addBoth", "(", "put", ")" ]
Hook up the Deferred that this will be the result of. Should only be run in Twisted thread, and only called once.
[ "Hook", "up", "the", "Deferred", "that", "that", "this", "will", "be", "the", "result", "of", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L127-L144
itamarst/crochet
crochet/_eventloop.py
EventualResult._set_result
def _set_result(self, result):
    """
    Set the result of the EventualResult, if not already set.

    This can only happen in the reactor thread, either as a result of
    Deferred firing, or as a result of ResultRegistry.stop(). So, no
    need for thread-safety.

    :param result: The value (or Failure) the underlying Deferred
        fired with.
    """
    # Use is_set() rather than the deprecated camelCase isSet() alias,
    # consistent with _result() which already calls is_set().
    if self._result_set.is_set():
        # First result wins; later calls (e.g. from
        # ResultRegistry.stop()) are ignored.
        return
    self._value = result
    self._result_set.set()
python
def _set_result(self, result): """ Set the result of the EventualResult, if not already set. This can only happen in the reactor thread, either as a result of Deferred firing, or as a result of ResultRegistry.stop(). So, no need for thread-safety. """ if self._result_set.isSet(): return self._value = result self._result_set.set()
[ "def", "_set_result", "(", "self", ",", "result", ")", ":", "if", "self", ".", "_result_set", ".", "isSet", "(", ")", ":", "return", "self", ".", "_value", "=", "result", "self", ".", "_result_set", ".", "set", "(", ")" ]
Set the result of the EventualResult, if not already set. This can only happen in the reactor thread, either as a result of Deferred firing, or as a result of ResultRegistry.stop(). So, no need for thread-safety.
[ "Set", "the", "result", "of", "the", "EventualResult", "if", "not", "already", "set", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L146-L157
itamarst/crochet
crochet/_eventloop.py
EventualResult._result
def _result(self, timeout=None):
    """
    Return the result, if available.

    It may take an unknown amount of time to return the result, so a
    timeout option is provided. If the given number of seconds pass with
    no result, a TimeoutError will be thrown.

    If a previous call timed out, additional calls to this function will
    still wait for a result and return it if available. If a result was
    returned on one call, additional calls will return/raise the same
    result.

    :param timeout: Number of seconds to wait, or None (deprecated) to
        wait effectively forever.
    :raises TimeoutError: If no result arrives within the timeout.
    """
    if timeout is None:
        warnings.warn(
            "Unlimited timeouts are deprecated.",
            DeprecationWarning,
            stacklevel=3)
        # Queue.get(None) won't get interrupted by Ctrl-C, so use a
        # finite-but-huge timeout instead of blocking forever:
        timeout = 2**31
    # Event.wait() returns whether the event was set (Python 2.7+), so
    # the old separate is_set() re-check (a Python 2.6 workaround) is
    # unnecessary.
    if not self._result_set.wait(timeout):
        raise TimeoutError()
    self._result_retrieved = True
    return self._value
python
def _result(self, timeout=None): """ Return the result, if available. It may take an unknown amount of time to return the result, so a timeout option is provided. If the given number of seconds pass with no result, a TimeoutError will be thrown. If a previous call timed out, additional calls to this function will still wait for a result and return it if available. If a result was returned on one call, additional calls will return/raise the same result. """ if timeout is None: warnings.warn( "Unlimited timeouts are deprecated.", DeprecationWarning, stacklevel=3) # Queue.get(None) won't get interrupted by Ctrl-C... timeout = 2**31 self._result_set.wait(timeout) # In Python 2.6 we can't rely on the return result of wait(), so we # have to check manually: if not self._result_set.is_set(): raise TimeoutError() self._result_retrieved = True return self._value
[ "def", "_result", "(", "self", ",", "timeout", "=", "None", ")", ":", "if", "timeout", "is", "None", ":", "warnings", ".", "warn", "(", "\"Unlimited timeouts are deprecated.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "3", ")", "# Queue.get(None) won't get interrupted by Ctrl-C...", "timeout", "=", "2", "**", "31", "self", ".", "_result_set", ".", "wait", "(", "timeout", ")", "# In Python 2.6 we can't rely on the return result of wait(), so we", "# have to check manually:", "if", "not", "self", ".", "_result_set", ".", "is_set", "(", ")", ":", "raise", "TimeoutError", "(", ")", "self", ".", "_result_retrieved", "=", "True", "return", "self", ".", "_value" ]
Return the result, if available. It may take an unknown amount of time to return the result, so a timeout option is provided. If the given number of seconds pass with no result, a TimeoutError will be thrown. If a previous call timed out, additional calls to this function will still wait for a result and return it if available. If a result was returned on one call, additional calls will return/raise the same result.
[ "Return", "the", "result", "if", "available", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L177-L203
itamarst/crochet
crochet/_eventloop.py
EventualResult.wait
def wait(self, timeout=None):
    """
    Return the result, or throw the exception if result is a failure.

    It may take an unknown amount of time to return the result, so a
    timeout option is provided. If the given number of seconds pass with
    no result, a TimeoutError will be thrown.

    If a previous call timed out, additional calls to this function will
    still wait for a result and return it if available. If a result was
    returned or raised on one call, additional calls will return/raise
    the same result.
    """
    # Blocking in the reactor thread would deadlock: the Deferred can
    # only fire from that same thread.
    if threadable.isInIOThread():
        raise RuntimeError(
            "EventualResult.wait() must not be run in the reactor thread.")

    # NOTE(review): the imp module is deprecated (removed in Python
    # 3.12); this guard may need porting to importlib equivalents —
    # confirm against the supported Python versions.
    if imp.lock_held():
        try:
            # If release succeeds, the import lock was held by THIS
            # thread, i.e. we are being called during module import.
            imp.release_lock()
        except RuntimeError:
            # The lock is held by some other thread. We should be safe
            # to continue.
            pass
        else:
            # If EventualResult.wait() is run during module import, if the
            # Twisted code that is being run also imports something the
            # result will be a deadlock. Even if that is not an issue it
            # would prevent importing in other threads until the call
            # returns.
            raise RuntimeError(
                "EventualResult.wait() must not be run at module "
                "import time.")

    result = self._result(timeout)
    # Failure results re-raise the original exception in the caller's
    # thread; plain values are returned as-is.
    if isinstance(result, Failure):
        result.raiseException()
    return result
python
def wait(self, timeout=None): """ Return the result, or throw the exception if result is a failure. It may take an unknown amount of time to return the result, so a timeout option is provided. If the given number of seconds pass with no result, a TimeoutError will be thrown. If a previous call timed out, additional calls to this function will still wait for a result and return it if available. If a result was returned or raised on one call, additional calls will return/raise the same result. """ if threadable.isInIOThread(): raise RuntimeError( "EventualResult.wait() must not be run in the reactor thread.") if imp.lock_held(): try: imp.release_lock() except RuntimeError: # The lock is held by some other thread. We should be safe # to continue. pass else: # If EventualResult.wait() is run during module import, if the # Twisted code that is being run also imports something the # result will be a deadlock. Even if that is not an issue it # would prevent importing in other threads until the call # returns. raise RuntimeError( "EventualResult.wait() must not be run at module " "import time.") result = self._result(timeout) if isinstance(result, Failure): result.raiseException() return result
[ "def", "wait", "(", "self", ",", "timeout", "=", "None", ")", ":", "if", "threadable", ".", "isInIOThread", "(", ")", ":", "raise", "RuntimeError", "(", "\"EventualResult.wait() must not be run in the reactor thread.\"", ")", "if", "imp", ".", "lock_held", "(", ")", ":", "try", ":", "imp", ".", "release_lock", "(", ")", "except", "RuntimeError", ":", "# The lock is held by some other thread. We should be safe", "# to continue.", "pass", "else", ":", "# If EventualResult.wait() is run during module import, if the", "# Twisted code that is being run also imports something the", "# result will be a deadlock. Even if that is not an issue it", "# would prevent importing in other threads until the call", "# returns.", "raise", "RuntimeError", "(", "\"EventualResult.wait() must not be run at module \"", "\"import time.\"", ")", "result", "=", "self", ".", "_result", "(", "timeout", ")", "if", "isinstance", "(", "result", ",", "Failure", ")", ":", "result", ".", "raiseException", "(", ")", "return", "result" ]
Return the result, or throw the exception if result is a failure. It may take an unknown amount of time to return the result, so a timeout option is provided. If the given number of seconds pass with no result, a TimeoutError will be thrown. If a previous call timed out, additional calls to this function will still wait for a result and return it if available. If a result was returned or raised on one call, additional calls will return/raise the same result.
[ "Return", "the", "result", "or", "throw", "the", "exception", "if", "result", "is", "a", "failure", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L205-L242
itamarst/crochet
crochet/_eventloop.py
EventualResult.original_failure
def original_failure(self):
    """
    Return the underlying Failure object, if the result is an error.

    If no result is yet available, or the result was not an error, None
    is returned.

    This method is useful if you want to get the original traceback for
    an error result.
    """
    try:
        outcome = self._result(0.0)
    except TimeoutError:
        # No result available yet; nothing to report.
        return None
    # Only error results are exposed; successful values yield None.
    return outcome if isinstance(outcome, Failure) else None
python
def original_failure(self): """ Return the underlying Failure object, if the result is an error. If no result is yet available, or the result was not an error, None is returned. This method is useful if you want to get the original traceback for an error result. """ try: result = self._result(0.0) except TimeoutError: return None if isinstance(result, Failure): return result else: return None
[ "def", "original_failure", "(", "self", ")", ":", "try", ":", "result", "=", "self", ".", "_result", "(", "0.0", ")", "except", "TimeoutError", ":", "return", "None", "if", "isinstance", "(", "result", ",", "Failure", ")", ":", "return", "result", "else", ":", "return", "None" ]
Return the underlying Failure object, if the result is an error. If no result is yet available, or the result was not an error, None is returned. This method is useful if you want to get the original traceback for an error result.
[ "Return", "the", "underlying", "Failure", "object", "if", "the", "result", "is", "an", "error", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L253-L270
itamarst/crochet
crochet/_eventloop.py
EventLoop._startReapingProcesses
def _startReapingProcesses(self):
    """
    Start a LoopingCall that calls reapAllProcesses.

    The reaper runs every 0.1 seconds, driven by this event loop's
    reactor.
    """
    reaper = LoopingCall(self._reapAllProcesses)
    # Drive the LoopingCall from our own reactor rather than the
    # global default one.
    reaper.clock = self._reactor
    reaper.start(0.1, False)
python
def _startReapingProcesses(self): """ Start a LoopingCall that calls reapAllProcesses. """ lc = LoopingCall(self._reapAllProcesses) lc.clock = self._reactor lc.start(0.1, False)
[ "def", "_startReapingProcesses", "(", "self", ")", ":", "lc", "=", "LoopingCall", "(", "self", ".", "_reapAllProcesses", ")", "lc", ".", "clock", "=", "self", ".", "_reactor", "lc", ".", "start", "(", "0.1", ",", "False", ")" ]
Start a LoopingCall that calls reapAllProcesses.
[ "Start", "a", "LoopingCall", "that", "calls", "reapAllProcesses", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L357-L363
itamarst/crochet
crochet/_eventloop.py
EventLoop._common_setup
def _common_setup(self):
    """
    The minimal amount of setup done by both setup() and no_setup().
    """
    self._started = True
    self._reactor = self._reactorFactory()
    registry = ResultRegistry()
    self._registry = registry
    # We want to unblock EventualResult regardless of how the reactor is
    # run, so we always register this:
    self._reactor.addSystemEventTrigger(
        "before", "shutdown", registry.stop)
python
def _common_setup(self): """ The minimal amount of setup done by both setup() and no_setup(). """ self._started = True self._reactor = self._reactorFactory() self._registry = ResultRegistry() # We want to unblock EventualResult regardless of how the reactor is # run, so we always register this: self._reactor.addSystemEventTrigger( "before", "shutdown", self._registry.stop)
[ "def", "_common_setup", "(", "self", ")", ":", "self", ".", "_started", "=", "True", "self", ".", "_reactor", "=", "self", ".", "_reactorFactory", "(", ")", "self", ".", "_registry", "=", "ResultRegistry", "(", ")", "# We want to unblock EventualResult regardless of how the reactor is", "# run, so we always register this:", "self", ".", "_reactor", ".", "addSystemEventTrigger", "(", "\"before\"", ",", "\"shutdown\"", ",", "self", ".", "_registry", ".", "stop", ")" ]
The minimal amount of setup done by both setup() and no_setup().
[ "The", "minimal", "amount", "of", "setup", "done", "by", "both", "setup", "()", "and", "no_setup", "()", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L365-L375
itamarst/crochet
crochet/_eventloop.py
EventLoop.setup
def setup(self):
    """
    Initialize the crochet library.

    This starts the reactor in a thread, and connects Twisted's logs to
    Python's standard library logging module.

    This must be called at least once before the library can be used, and
    can be called multiple times.
    """
    # Idempotent: subsequent calls are no-ops.
    if self._started:
        return
    self._common_setup()
    if platform.type == "posix":
        # Reaping of child processes has to happen in the reactor
        # thread, hence callFromThread.
        self._reactor.callFromThread(self._startReapingProcesses)
    if self._startLoggingWithObserver:
        observer = ThreadLogObserver(PythonLoggingObserver().emit)

        def start():
            # Twisted is going to override warnings.showwarning; let's
            # make sure that has no effect:
            from twisted.python import log
            original = log.showwarning
            log.showwarning = warnings.showwarning
            self._startLoggingWithObserver(observer, False)
            log.showwarning = original

        self._reactor.callFromThread(start)

        # We only want to stop the logging thread once the reactor has
        # shut down:
        self._reactor.addSystemEventTrigger(
            "after", "shutdown", observer.stop)
    # Run the reactor in a dedicated daemon-style thread; signal
    # handlers stay with the main thread.
    t = threading.Thread(
        target=lambda: self._reactor.run(installSignalHandlers=False),
        name="CrochetReactor")
    t.start()
    # At interpreter exit, stop the reactor and flush any stored errors.
    self._atexit_register(self._reactor.callFromThread,
                          self._reactor.stop)
    self._atexit_register(_store.log_errors)
    if self._watchdog_thread is not None:
        self._watchdog_thread.start()
python
def setup(self): """ Initialize the crochet library. This starts the reactor in a thread, and connect's Twisted's logs to Python's standard library logging module. This must be called at least once before the library can be used, and can be called multiple times. """ if self._started: return self._common_setup() if platform.type == "posix": self._reactor.callFromThread(self._startReapingProcesses) if self._startLoggingWithObserver: observer = ThreadLogObserver(PythonLoggingObserver().emit) def start(): # Twisted is going to override warnings.showwarning; let's # make sure that has no effect: from twisted.python import log original = log.showwarning log.showwarning = warnings.showwarning self._startLoggingWithObserver(observer, False) log.showwarning = original self._reactor.callFromThread(start) # We only want to stop the logging thread once the reactor has # shut down: self._reactor.addSystemEventTrigger( "after", "shutdown", observer.stop) t = threading.Thread( target=lambda: self._reactor.run(installSignalHandlers=False), name="CrochetReactor") t.start() self._atexit_register(self._reactor.callFromThread, self._reactor.stop) self._atexit_register(_store.log_errors) if self._watchdog_thread is not None: self._watchdog_thread.start()
[ "def", "setup", "(", "self", ")", ":", "if", "self", ".", "_started", ":", "return", "self", ".", "_common_setup", "(", ")", "if", "platform", ".", "type", "==", "\"posix\"", ":", "self", ".", "_reactor", ".", "callFromThread", "(", "self", ".", "_startReapingProcesses", ")", "if", "self", ".", "_startLoggingWithObserver", ":", "observer", "=", "ThreadLogObserver", "(", "PythonLoggingObserver", "(", ")", ".", "emit", ")", "def", "start", "(", ")", ":", "# Twisted is going to override warnings.showwarning; let's", "# make sure that has no effect:", "from", "twisted", ".", "python", "import", "log", "original", "=", "log", ".", "showwarning", "log", ".", "showwarning", "=", "warnings", ".", "showwarning", "self", ".", "_startLoggingWithObserver", "(", "observer", ",", "False", ")", "log", ".", "showwarning", "=", "original", "self", ".", "_reactor", ".", "callFromThread", "(", "start", ")", "# We only want to stop the logging thread once the reactor has", "# shut down:", "self", ".", "_reactor", ".", "addSystemEventTrigger", "(", "\"after\"", ",", "\"shutdown\"", ",", "observer", ".", "stop", ")", "t", "=", "threading", ".", "Thread", "(", "target", "=", "lambda", ":", "self", ".", "_reactor", ".", "run", "(", "installSignalHandlers", "=", "False", ")", ",", "name", "=", "\"CrochetReactor\"", ")", "t", ".", "start", "(", ")", "self", ".", "_atexit_register", "(", "self", ".", "_reactor", ".", "callFromThread", ",", "self", ".", "_reactor", ".", "stop", ")", "self", ".", "_atexit_register", "(", "_store", ".", "log_errors", ")", "if", "self", ".", "_watchdog_thread", "is", "not", "None", ":", "self", ".", "_watchdog_thread", ".", "start", "(", ")" ]
Initialize the crochet library. This starts the reactor in a thread, and connects Twisted's logs to Python's standard library logging module. This must be called at least once before the library can be used, and can be called multiple times.
[ "Initialize", "the", "crochet", "library", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L378-L418
itamarst/crochet
crochet/_eventloop.py
EventLoop._run_in_reactor
def _run_in_reactor(self, function, _, args, kwargs):
    """
    Implementation: A decorator that ensures the wrapped function runs in
    the reactor thread.

    When the wrapped function is called, an EventualResult is returned.
    """
    eventual = EventualResult(None, self._reactor)
    self._registry.register(eventual)

    def fire_in_reactor(result, args, kwargs):
        # Runs in the reactor thread: start the call and hook its
        # Deferred up to the EventualResult.
        deferred = maybeDeferred(function, *args, **kwargs)
        result._connect_deferred(deferred)

    self._reactor.callFromThread(fire_in_reactor, eventual, args, kwargs)
    return eventual
python
def _run_in_reactor(self, function, _, args, kwargs): """ Implementation: A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, an EventualResult is returned. """ def runs_in_reactor(result, args, kwargs): d = maybeDeferred(function, *args, **kwargs) result._connect_deferred(d) result = EventualResult(None, self._reactor) self._registry.register(result) self._reactor.callFromThread(runs_in_reactor, result, args, kwargs) return result
[ "def", "_run_in_reactor", "(", "self", ",", "function", ",", "_", ",", "args", ",", "kwargs", ")", ":", "def", "runs_in_reactor", "(", "result", ",", "args", ",", "kwargs", ")", ":", "d", "=", "maybeDeferred", "(", "function", ",", "*", "args", ",", "*", "*", "kwargs", ")", "result", ".", "_connect_deferred", "(", "d", ")", "result", "=", "EventualResult", "(", "None", ",", "self", ".", "_reactor", ")", "self", ".", "_registry", ".", "register", "(", "result", ")", "self", ".", "_reactor", ".", "callFromThread", "(", "runs_in_reactor", ",", "result", ",", "args", ",", "kwargs", ")", "return", "result" ]
Implementation: A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, an EventualResult is returned.
[ "Implementation", ":", "A", "decorator", "that", "ensures", "the", "wrapped", "function", "runs", "in", "the", "reactor", "thread", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L440-L455
itamarst/crochet
crochet/_eventloop.py
EventLoop.run_in_reactor
def run_in_reactor(self, function):
    """
    A decorator that ensures the wrapped function runs in the reactor
    thread.

    When the wrapped function is called, an EventualResult is returned.
    """
    wrapped = self._run_in_reactor(function)
    # Backwards compatibility; use __wrapped__ instead.
    try:
        wrapped.wrapped_function = function
    except AttributeError:
        # Some callables don't allow attribute assignment; skip the
        # legacy attribute in that case.
        pass
    return wrapped
python
def run_in_reactor(self, function): """ A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, an EventualResult is returned. """ result = self._run_in_reactor(function) # Backwards compatibility; use __wrapped__ instead. try: result.wrapped_function = function except AttributeError: pass return result
[ "def", "run_in_reactor", "(", "self", ",", "function", ")", ":", "result", "=", "self", ".", "_run_in_reactor", "(", "function", ")", "# Backwards compatibility; use __wrapped__ instead.", "try", ":", "result", ".", "wrapped_function", "=", "function", "except", "AttributeError", ":", "pass", "return", "result" ]
A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, an EventualResult is returned.
[ "A", "decorator", "that", "ensures", "the", "wrapped", "function", "runs", "in", "the", "reactor", "thread", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L457-L470
itamarst/crochet
crochet/_eventloop.py
EventLoop.wait_for_reactor
def wait_for_reactor(self, function):
    """
    DEPRECATED, use wait_for(timeout) instead.

    A decorator that ensures the wrapped function runs in the reactor
    thread.

    When the wrapped function is called, its result is returned or its
    exception raised. Deferreds are handled transparently.
    """
    warnings.warn(
        "@wait_for_reactor is deprecated, use @wait_for instead",
        DeprecationWarning,
        stacklevel=2)
    # This will timeout, in theory. In practice the process will be dead
    # long before that.
    effectively_forever = 2**31
    return self.wait_for(effectively_forever)(function)
python
def wait_for_reactor(self, function): """ DEPRECATED, use wait_for(timeout) instead. A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently. """ warnings.warn( "@wait_for_reactor is deprecated, use @wait_for instead", DeprecationWarning, stacklevel=2) # This will timeout, in theory. In practice the process will be dead # long before that. return self.wait_for(2**31)(function)
[ "def", "wait_for_reactor", "(", "self", ",", "function", ")", ":", "warnings", ".", "warn", "(", "\"@wait_for_reactor is deprecated, use @wait_for instead\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "# This will timeout, in theory. In practice the process will be dead", "# long before that.", "return", "self", ".", "wait_for", "(", "2", "**", "31", ")", "(", "function", ")" ]
DEPRECATED, use wait_for(timeout) instead. A decorator that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently.
[ "DEPRECATED", "use", "wait_for", "(", "timeout", ")", "instead", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L472-L488
itamarst/crochet
crochet/_eventloop.py
EventLoop.wait_for
def wait_for(self, timeout):
    """
    A decorator factory that ensures the wrapped function runs in the
    reactor thread.

    When the wrapped function is called, its result is returned or its
    exception raised. Deferreds are handled transparently. Calls will
    timeout after the given number of seconds (a float), raising a
    crochet.TimeoutError, and cancelling the Deferred being waited on.

    :param timeout: Seconds to wait before raising TimeoutError.
    :return: A decorator applying the above behavior.
    """
    def decorator(function):
        @wrapt.decorator
        def wrapper(function, _, args, kwargs):
            # Schedule the call in the reactor thread, then block this
            # (non-reactor) thread until it finishes or times out.
            @self.run_in_reactor
            def run():
                return function(*args, **kwargs)

            eventual_result = run()
            try:
                return eventual_result.wait(timeout)
            except TimeoutError:
                # Give up on the underlying Deferred too, then let the
                # timeout propagate to the caller.
                eventual_result.cancel()
                raise

        result = wrapper(function)
        # Expose underlying function for testing purposes; this attribute
        # is deprecated, use __wrapped__ instead:
        try:
            result.wrapped_function = function
        except AttributeError:
            pass
        return result

    return decorator
python
def wait_for(self, timeout): """ A decorator factory that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently. Calls will timeout after the given number of seconds (a float), raising a crochet.TimeoutError, and cancelling the Deferred being waited on. """ def decorator(function): @wrapt.decorator def wrapper(function, _, args, kwargs): @self.run_in_reactor def run(): return function(*args, **kwargs) eventual_result = run() try: return eventual_result.wait(timeout) except TimeoutError: eventual_result.cancel() raise result = wrapper(function) # Expose underling function for testing purposes; this attribute is # deprecated, use __wrapped__ instead: try: result.wrapped_function = function except AttributeError: pass return result return decorator
[ "def", "wait_for", "(", "self", ",", "timeout", ")", ":", "def", "decorator", "(", "function", ")", ":", "@", "wrapt", ".", "decorator", "def", "wrapper", "(", "function", ",", "_", ",", "args", ",", "kwargs", ")", ":", "@", "self", ".", "run_in_reactor", "def", "run", "(", ")", ":", "return", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "eventual_result", "=", "run", "(", ")", "try", ":", "return", "eventual_result", ".", "wait", "(", "timeout", ")", "except", "TimeoutError", ":", "eventual_result", ".", "cancel", "(", ")", "raise", "result", "=", "wrapper", "(", "function", ")", "# Expose underling function for testing purposes; this attribute is", "# deprecated, use __wrapped__ instead:", "try", ":", "result", ".", "wrapped_function", "=", "function", "except", "AttributeError", ":", "pass", "return", "result", "return", "decorator" ]
A decorator factory that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently. Calls will timeout after the given number of seconds (a float), raising a crochet.TimeoutError, and cancelling the Deferred being waited on.
[ "A", "decorator", "factory", "that", "ensures", "the", "wrapped", "function", "runs", "in", "the", "reactor", "thread", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L490-L524
itamarst/crochet
crochet/_eventloop.py
EventLoop.in_reactor
def in_reactor(self, function):
    """
    DEPRECATED, use run_in_reactor.

    A decorator that ensures the wrapped function runs in the reactor
    thread. The wrapped function will get the reactor passed in as a
    first argument, in addition to any arguments it is called with.

    When the wrapped function is called, an EventualResult is returned.
    """
    warnings.warn(
        "@in_reactor is deprecated, use @run_in_reactor",
        DeprecationWarning,
        stacklevel=2)

    @wraps(function)
    def with_reactor(*args, **kwargs):
        # Prepend the reactor to whatever the caller passed.
        return function(self._reactor, *args, **kwargs)

    return self.run_in_reactor(with_reactor)
python
def in_reactor(self, function): """ DEPRECATED, use run_in_reactor. A decorator that ensures the wrapped function runs in the reactor thread. The wrapped function will get the reactor passed in as a first argument, in addition to any arguments it is called with. When the wrapped function is called, an EventualResult is returned. """ warnings.warn( "@in_reactor is deprecated, use @run_in_reactor", DeprecationWarning, stacklevel=2) @self.run_in_reactor @wraps(function) def add_reactor(*args, **kwargs): return function(self._reactor, *args, **kwargs) return add_reactor
[ "def", "in_reactor", "(", "self", ",", "function", ")", ":", "warnings", ".", "warn", "(", "\"@in_reactor is deprecated, use @run_in_reactor\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "@", "self", ".", "run_in_reactor", "@", "wraps", "(", "function", ")", "def", "add_reactor", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "function", "(", "self", ".", "_reactor", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "add_reactor" ]
DEPRECATED, use run_in_reactor. A decorator that ensures the wrapped function runs in the reactor thread. The wrapped function will get the reactor passed in as a first argument, in addition to any arguments it is called with. When the wrapped function is called, an EventualResult is returned.
[ "DEPRECATED", "use", "run_in_reactor", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_eventloop.py#L526-L548
itamarst/crochet
examples/mxquery.py
_mx
def _mx(domain):
    """
    Return Deferred that fires with a list of (priority, MX domain)
    tuples for a given domain.
    """
    def extract(answers):
        # First element of the lookup result holds the answer records.
        records = answers[0]
        pairs = [
            (int(rec.payload.preference), str(rec.payload.name))
            for rec in records
        ]
        pairs.sort()
        return pairs

    deferred = lookupMailExchange(domain)
    deferred.addCallback(extract)
    return deferred
python
def _mx(domain): """ Return Deferred that fires with a list of (priority, MX domain) tuples for a given domain. """ def got_records(result): return sorted( [(int(record.payload.preference), str(record.payload.name)) for record in result[0]]) d = lookupMailExchange(domain) d.addCallback(got_records) return d
[ "def", "_mx", "(", "domain", ")", ":", "def", "got_records", "(", "result", ")", ":", "return", "sorted", "(", "[", "(", "int", "(", "record", ".", "payload", ".", "preference", ")", ",", "str", "(", "record", ".", "payload", ".", "name", ")", ")", "for", "record", "in", "result", "[", "0", "]", "]", ")", "d", "=", "lookupMailExchange", "(", "domain", ")", "d", ".", "addCallback", "(", "got_records", ")", "return", "d" ]
Return Deferred that fires with a list of (priority, MX domain) tuples for a given domain.
[ "Return", "Deferred", "that", "fires", "with", "a", "list", "of", "(", "priority", "MX", "domain", ")", "tuples", "for", "a", "given", "domain", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/examples/mxquery.py#L14-L25
itamarst/crochet
crochet/_resultstore.py
ResultStore.store
def store(self, deferred_result): """ Store a EventualResult. Return an integer, a unique identifier that can be used to retrieve the object. """ self._counter += 1 self._stored[self._counter] = deferred_result return self._counter
python
def store(self, deferred_result): """ Store a EventualResult. Return an integer, a unique identifier that can be used to retrieve the object. """ self._counter += 1 self._stored[self._counter] = deferred_result return self._counter
[ "def", "store", "(", "self", ",", "deferred_result", ")", ":", "self", ".", "_counter", "+=", "1", "self", ".", "_stored", "[", "self", ".", "_counter", "]", "=", "deferred_result", "return", "self", ".", "_counter" ]
Store a EventualResult. Return an integer, a unique identifier that can be used to retrieve the object.
[ "Store", "a", "EventualResult", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_resultstore.py#L30-L39
itamarst/crochet
crochet/_resultstore.py
ResultStore.log_errors
def log_errors(self): """ Log errors for all stored EventualResults that have error results. """ for result in self._stored.values(): failure = result.original_failure() if failure is not None: log.err(failure, "Unhandled error in stashed EventualResult:")
python
def log_errors(self): """ Log errors for all stored EventualResults that have error results. """ for result in self._stored.values(): failure = result.original_failure() if failure is not None: log.err(failure, "Unhandled error in stashed EventualResult:")
[ "def", "log_errors", "(", "self", ")", ":", "for", "result", "in", "self", ".", "_stored", ".", "values", "(", ")", ":", "failure", "=", "result", ".", "original_failure", "(", ")", "if", "failure", "is", "not", "None", ":", "log", ".", "err", "(", "failure", ",", "\"Unhandled error in stashed EventualResult:\"", ")" ]
Log errors for all stored EventualResults that have error results.
[ "Log", "errors", "for", "all", "stored", "EventualResults", "that", "have", "error", "results", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_resultstore.py#L49-L56
itamarst/crochet
examples/ssh.py
start_ssh_server
def start_ssh_server(port, username, password, namespace): """ Start an SSH server on the given port, exposing a Python prompt with the given namespace. """ # This is a lot of boilerplate, see http://tm.tl/6429 for a ticket to # provide a utility function that simplifies this. from twisted.internet import reactor from twisted.conch.insults import insults from twisted.conch import manhole, manhole_ssh from twisted.cred.checkers import ( InMemoryUsernamePasswordDatabaseDontUse as MemoryDB) from twisted.cred.portal import Portal sshRealm = manhole_ssh.TerminalRealm() def chainedProtocolFactory(): return insults.ServerProtocol(manhole.Manhole, namespace) sshRealm.chainedProtocolFactory = chainedProtocolFactory sshPortal = Portal(sshRealm, [MemoryDB(**{username: password})]) reactor.listenTCP(port, manhole_ssh.ConchFactory(sshPortal), interface="127.0.0.1")
python
def start_ssh_server(port, username, password, namespace): """ Start an SSH server on the given port, exposing a Python prompt with the given namespace. """ # This is a lot of boilerplate, see http://tm.tl/6429 for a ticket to # provide a utility function that simplifies this. from twisted.internet import reactor from twisted.conch.insults import insults from twisted.conch import manhole, manhole_ssh from twisted.cred.checkers import ( InMemoryUsernamePasswordDatabaseDontUse as MemoryDB) from twisted.cred.portal import Portal sshRealm = manhole_ssh.TerminalRealm() def chainedProtocolFactory(): return insults.ServerProtocol(manhole.Manhole, namespace) sshRealm.chainedProtocolFactory = chainedProtocolFactory sshPortal = Portal(sshRealm, [MemoryDB(**{username: password})]) reactor.listenTCP(port, manhole_ssh.ConchFactory(sshPortal), interface="127.0.0.1")
[ "def", "start_ssh_server", "(", "port", ",", "username", ",", "password", ",", "namespace", ")", ":", "# This is a lot of boilerplate, see http://tm.tl/6429 for a ticket to", "# provide a utility function that simplifies this.", "from", "twisted", ".", "internet", "import", "reactor", "from", "twisted", ".", "conch", ".", "insults", "import", "insults", "from", "twisted", ".", "conch", "import", "manhole", ",", "manhole_ssh", "from", "twisted", ".", "cred", ".", "checkers", "import", "(", "InMemoryUsernamePasswordDatabaseDontUse", "as", "MemoryDB", ")", "from", "twisted", ".", "cred", ".", "portal", "import", "Portal", "sshRealm", "=", "manhole_ssh", ".", "TerminalRealm", "(", ")", "def", "chainedProtocolFactory", "(", ")", ":", "return", "insults", ".", "ServerProtocol", "(", "manhole", ".", "Manhole", ",", "namespace", ")", "sshRealm", ".", "chainedProtocolFactory", "=", "chainedProtocolFactory", "sshPortal", "=", "Portal", "(", "sshRealm", ",", "[", "MemoryDB", "(", "*", "*", "{", "username", ":", "password", "}", ")", "]", ")", "reactor", ".", "listenTCP", "(", "port", ",", "manhole_ssh", ".", "ConchFactory", "(", "sshPortal", ")", ",", "interface", "=", "\"127.0.0.1\"", ")" ]
Start an SSH server on the given port, exposing a Python prompt with the given namespace.
[ "Start", "an", "SSH", "server", "on", "the", "given", "port", "exposing", "a", "Python", "prompt", "with", "the", "given", "namespace", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/examples/ssh.py#L40-L61
itamarst/crochet
crochet/_util.py
_synced
def _synced(method, self, args, kwargs): """Underlying synchronized wrapper.""" with self._lock: return method(*args, **kwargs)
python
def _synced(method, self, args, kwargs): """Underlying synchronized wrapper.""" with self._lock: return method(*args, **kwargs)
[ "def", "_synced", "(", "method", ",", "self", ",", "args", ",", "kwargs", ")", ":", "with", "self", ".", "_lock", ":", "return", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Underlying synchronized wrapper.
[ "Underlying", "synchronized", "wrapper", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_util.py#L9-L12
itamarst/crochet
crochet/_shutdown.py
FunctionRegistry.register
def register(self, f, *args, **kwargs): """ Register a function and arguments to be called later. """ self._functions.append(lambda: f(*args, **kwargs))
python
def register(self, f, *args, **kwargs): """ Register a function and arguments to be called later. """ self._functions.append(lambda: f(*args, **kwargs))
[ "def", "register", "(", "self", ",", "f", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_functions", ".", "append", "(", "lambda", ":", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
Register a function and arguments to be called later.
[ "Register", "a", "function", "and", "arguments", "to", "be", "called", "later", "." ]
train
https://github.com/itamarst/crochet/blob/ecfc22cefa90f3dfbafa71883c1470e7294f2b6d/crochet/_shutdown.py#L40-L44
tkf/python-epc
epc/core.py
EPCDispatcher.register_function
def register_function(self, function, name=None): """ Register function to be called from EPC client. :type function: callable :arg function: Function to publish. :type name: str :arg name: Name by which function is published. This method returns the given `function` as-is, so that you can use it as a decorator. """ if name is None: name = function.__name__ self.funcs[name] = function return function
python
def register_function(self, function, name=None): """ Register function to be called from EPC client. :type function: callable :arg function: Function to publish. :type name: str :arg name: Name by which function is published. This method returns the given `function` as-is, so that you can use it as a decorator. """ if name is None: name = function.__name__ self.funcs[name] = function return function
[ "def", "register_function", "(", "self", ",", "function", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "name", "=", "function", ".", "__name__", "self", ".", "funcs", "[", "name", "]", "=", "function", "return", "function" ]
Register function to be called from EPC client. :type function: callable :arg function: Function to publish. :type name: str :arg name: Name by which function is published. This method returns the given `function` as-is, so that you can use it as a decorator.
[ "Register", "function", "to", "be", "called", "from", "EPC", "client", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/core.py#L57-L73
tkf/python-epc
epc/core.py
EPCDispatcher.get_method
def get_method(self, name): """ Get registered method callend `name`. """ try: return self.funcs[name] except KeyError: try: return self.instance._get_method(name) except AttributeError: return SimpleXMLRPCServer.resolve_dotted_attribute( self.instance, name, self.allow_dotted_names)
python
def get_method(self, name): """ Get registered method callend `name`. """ try: return self.funcs[name] except KeyError: try: return self.instance._get_method(name) except AttributeError: return SimpleXMLRPCServer.resolve_dotted_attribute( self.instance, name, self.allow_dotted_names)
[ "def", "get_method", "(", "self", ",", "name", ")", ":", "try", ":", "return", "self", ".", "funcs", "[", "name", "]", "except", "KeyError", ":", "try", ":", "return", "self", ".", "instance", ".", "_get_method", "(", "name", ")", "except", "AttributeError", ":", "return", "SimpleXMLRPCServer", ".", "resolve_dotted_attribute", "(", "self", ".", "instance", ",", "name", ",", "self", ".", "allow_dotted_names", ")" ]
Get registered method callend `name`.
[ "Get", "registered", "method", "callend", "name", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/core.py#L75-L86
tkf/python-epc
epc/core.py
EPCCore.set_debugger
def set_debugger(self, debugger): """ Set debugger to run when an error occurs in published method. You can also set debugger by passing `debugger` argument to the class constructor. :type debugger: {'pdb', 'ipdb', None} :arg debugger: type of debugger. """ if debugger == 'pdb': import pdb self.debugger = pdb elif debugger == 'ipdb': import ipdb self.debugger = ipdb else: self.debugger = debugger
python
def set_debugger(self, debugger): """ Set debugger to run when an error occurs in published method. You can also set debugger by passing `debugger` argument to the class constructor. :type debugger: {'pdb', 'ipdb', None} :arg debugger: type of debugger. """ if debugger == 'pdb': import pdb self.debugger = pdb elif debugger == 'ipdb': import ipdb self.debugger = ipdb else: self.debugger = debugger
[ "def", "set_debugger", "(", "self", ",", "debugger", ")", ":", "if", "debugger", "==", "'pdb'", ":", "import", "pdb", "self", ".", "debugger", "=", "pdb", "elif", "debugger", "==", "'ipdb'", ":", "import", "ipdb", "self", ".", "debugger", "=", "ipdb", "else", ":", "self", ".", "debugger", "=", "debugger" ]
Set debugger to run when an error occurs in published method. You can also set debugger by passing `debugger` argument to the class constructor. :type debugger: {'pdb', 'ipdb', None} :arg debugger: type of debugger.
[ "Set", "debugger", "to", "run", "when", "an", "error", "occurs", "in", "published", "method", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/core.py#L102-L120
tkf/python-epc
epc/client.py
EPCClient.connect
def connect(self, socket_or_address): """ Connect to server and start serving registered functions. :type socket_or_address: tuple or socket object :arg socket_or_address: A ``(host, port)`` pair to be passed to `socket.create_connection`, or a socket object. """ if isinstance(socket_or_address, tuple): import socket self.socket = socket.create_connection(socket_or_address) else: self.socket = socket_or_address # This is what BaseServer.finish_request does: address = None # it is not used, so leave it empty self.handler = EPCClientHandler(self.socket, address, self) self.call = self.handler.call self.call_sync = self.handler.call_sync self.methods = self.handler.methods self.methods_sync = self.handler.methods_sync self.handler_thread = newthread(self, target=self.handler.start) self.handler_thread.daemon = self.thread_daemon self.handler_thread.start() self.handler.wait_until_ready()
python
def connect(self, socket_or_address): """ Connect to server and start serving registered functions. :type socket_or_address: tuple or socket object :arg socket_or_address: A ``(host, port)`` pair to be passed to `socket.create_connection`, or a socket object. """ if isinstance(socket_or_address, tuple): import socket self.socket = socket.create_connection(socket_or_address) else: self.socket = socket_or_address # This is what BaseServer.finish_request does: address = None # it is not used, so leave it empty self.handler = EPCClientHandler(self.socket, address, self) self.call = self.handler.call self.call_sync = self.handler.call_sync self.methods = self.handler.methods self.methods_sync = self.handler.methods_sync self.handler_thread = newthread(self, target=self.handler.start) self.handler_thread.daemon = self.thread_daemon self.handler_thread.start() self.handler.wait_until_ready()
[ "def", "connect", "(", "self", ",", "socket_or_address", ")", ":", "if", "isinstance", "(", "socket_or_address", ",", "tuple", ")", ":", "import", "socket", "self", ".", "socket", "=", "socket", ".", "create_connection", "(", "socket_or_address", ")", "else", ":", "self", ".", "socket", "=", "socket_or_address", "# This is what BaseServer.finish_request does:", "address", "=", "None", "# it is not used, so leave it empty", "self", ".", "handler", "=", "EPCClientHandler", "(", "self", ".", "socket", ",", "address", ",", "self", ")", "self", ".", "call", "=", "self", ".", "handler", ".", "call", "self", ".", "call_sync", "=", "self", ".", "handler", ".", "call_sync", "self", ".", "methods", "=", "self", ".", "handler", ".", "methods", "self", ".", "methods_sync", "=", "self", ".", "handler", ".", "methods_sync", "self", ".", "handler_thread", "=", "newthread", "(", "self", ",", "target", "=", "self", ".", "handler", ".", "start", ")", "self", ".", "handler_thread", ".", "daemon", "=", "self", ".", "thread_daemon", "self", ".", "handler_thread", ".", "start", "(", ")", "self", ".", "handler", ".", "wait_until_ready", "(", ")" ]
Connect to server and start serving registered functions. :type socket_or_address: tuple or socket object :arg socket_or_address: A ``(host, port)`` pair to be passed to `socket.create_connection`, or a socket object.
[ "Connect", "to", "server", "and", "start", "serving", "registered", "functions", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/client.py#L98-L126
tkf/python-epc
epc/server.py
main
def main(args=None): """ Quick CLI to serve Python functions in a module. Example usage:: python -m epc.server --allow-dotted-names os Note that only the functions which gets and returns simple built-in types (str, int, float, list, tuple, dict) works. """ import argparse from textwrap import dedent parser = argparse.ArgumentParser( formatter_class=type('EPCHelpFormatter', (argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter), {}), description=dedent(main.__doc__)) parser.add_argument( 'module', help='Serve python functions in this module.') parser.add_argument( '--address', default='localhost', help='server address') parser.add_argument( '--port', default=0, type=int, help='server port. 0 means to pick up random port.') parser.add_argument( '--allow-dotted-names', default=False, action='store_true') parser.add_argument( '--pdb', dest='debugger', const='pdb', action='store_const', help='start pdb when error occurs.') parser.add_argument( '--ipdb', dest='debugger', const='ipdb', action='store_const', help='start ipdb when error occurs.') parser.add_argument( '--log-traceback', action='store_true', default=False) ns = parser.parse_args(args) server = EPCServer((ns.address, ns.port), debugger=ns.debugger, log_traceback=ns.log_traceback) server.register_instance( __import__(ns.module), allow_dotted_names=ns.allow_dotted_names) server.print_port() server.serve_forever()
python
def main(args=None): """ Quick CLI to serve Python functions in a module. Example usage:: python -m epc.server --allow-dotted-names os Note that only the functions which gets and returns simple built-in types (str, int, float, list, tuple, dict) works. """ import argparse from textwrap import dedent parser = argparse.ArgumentParser( formatter_class=type('EPCHelpFormatter', (argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter), {}), description=dedent(main.__doc__)) parser.add_argument( 'module', help='Serve python functions in this module.') parser.add_argument( '--address', default='localhost', help='server address') parser.add_argument( '--port', default=0, type=int, help='server port. 0 means to pick up random port.') parser.add_argument( '--allow-dotted-names', default=False, action='store_true') parser.add_argument( '--pdb', dest='debugger', const='pdb', action='store_const', help='start pdb when error occurs.') parser.add_argument( '--ipdb', dest='debugger', const='ipdb', action='store_const', help='start ipdb when error occurs.') parser.add_argument( '--log-traceback', action='store_true', default=False) ns = parser.parse_args(args) server = EPCServer((ns.address, ns.port), debugger=ns.debugger, log_traceback=ns.log_traceback) server.register_instance( __import__(ns.module), allow_dotted_names=ns.allow_dotted_names) server.print_port() server.serve_forever()
[ "def", "main", "(", "args", "=", "None", ")", ":", "import", "argparse", "from", "textwrap", "import", "dedent", "parser", "=", "argparse", ".", "ArgumentParser", "(", "formatter_class", "=", "type", "(", "'EPCHelpFormatter'", ",", "(", "argparse", ".", "ArgumentDefaultsHelpFormatter", ",", "argparse", ".", "RawDescriptionHelpFormatter", ")", ",", "{", "}", ")", ",", "description", "=", "dedent", "(", "main", ".", "__doc__", ")", ")", "parser", ".", "add_argument", "(", "'module'", ",", "help", "=", "'Serve python functions in this module.'", ")", "parser", ".", "add_argument", "(", "'--address'", ",", "default", "=", "'localhost'", ",", "help", "=", "'server address'", ")", "parser", ".", "add_argument", "(", "'--port'", ",", "default", "=", "0", ",", "type", "=", "int", ",", "help", "=", "'server port. 0 means to pick up random port.'", ")", "parser", ".", "add_argument", "(", "'--allow-dotted-names'", ",", "default", "=", "False", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--pdb'", ",", "dest", "=", "'debugger'", ",", "const", "=", "'pdb'", ",", "action", "=", "'store_const'", ",", "help", "=", "'start pdb when error occurs.'", ")", "parser", ".", "add_argument", "(", "'--ipdb'", ",", "dest", "=", "'debugger'", ",", "const", "=", "'ipdb'", ",", "action", "=", "'store_const'", ",", "help", "=", "'start ipdb when error occurs.'", ")", "parser", ".", "add_argument", "(", "'--log-traceback'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ")", "ns", "=", "parser", ".", "parse_args", "(", "args", ")", "server", "=", "EPCServer", "(", "(", "ns", ".", "address", ",", "ns", ".", "port", ")", ",", "debugger", "=", "ns", ".", "debugger", ",", "log_traceback", "=", "ns", ".", "log_traceback", ")", "server", ".", "register_instance", "(", "__import__", "(", "ns", ".", "module", ")", ",", "allow_dotted_names", "=", "ns", ".", "allow_dotted_names", ")", "server", ".", "print_port", "(", ")", "server", ".", 
"serve_forever", "(", ")" ]
Quick CLI to serve Python functions in a module. Example usage:: python -m epc.server --allow-dotted-names os Note that only the functions which gets and returns simple built-in types (str, int, float, list, tuple, dict) works.
[ "Quick", "CLI", "to", "serve", "Python", "functions", "in", "a", "module", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/server.py#L174-L221
tkf/python-epc
epc/server.py
EPCServer.print_port
def print_port(self, stream=sys.stdout): """ Print port this EPC server runs on. As Emacs client reads port number from STDOUT, you need to call this just before calling :meth:`serve_forever`. :type stream: text stream :arg stream: A stream object to write port on. Default is :data:`sys.stdout`. """ stream.write(str(self.server_address[1])) stream.write("\n") stream.flush()
python
def print_port(self, stream=sys.stdout): """ Print port this EPC server runs on. As Emacs client reads port number from STDOUT, you need to call this just before calling :meth:`serve_forever`. :type stream: text stream :arg stream: A stream object to write port on. Default is :data:`sys.stdout`. """ stream.write(str(self.server_address[1])) stream.write("\n") stream.flush()
[ "def", "print_port", "(", "self", ",", "stream", "=", "sys", ".", "stdout", ")", ":", "stream", ".", "write", "(", "str", "(", "self", ".", "server_address", "[", "1", "]", ")", ")", "stream", ".", "write", "(", "\"\\n\"", ")", "stream", ".", "flush", "(", ")" ]
Print port this EPC server runs on. As Emacs client reads port number from STDOUT, you need to call this just before calling :meth:`serve_forever`. :type stream: text stream :arg stream: A stream object to write port on. Default is :data:`sys.stdout`.
[ "Print", "port", "this", "EPC", "server", "runs", "on", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/server.py#L138-L152
tkf/python-epc
epc/handler.py
EPCHandler.call
def call(self, name, *args, **kwds): """ Call method connected to this handler. :type name: str :arg name: Method name to call. :type args: list :arg args: Arguments for remote method to call. :type callback: callable :arg callback: A function to be called with returned value of the remote method. :type errback: callable :arg errback: A function to be called with an error occurred in the remote method. It is either an instance of :class:`ReturnError` or :class:`EPCError`. """ self.callmanager.call(self, name, *args, **kwds)
python
def call(self, name, *args, **kwds): """ Call method connected to this handler. :type name: str :arg name: Method name to call. :type args: list :arg args: Arguments for remote method to call. :type callback: callable :arg callback: A function to be called with returned value of the remote method. :type errback: callable :arg errback: A function to be called with an error occurred in the remote method. It is either an instance of :class:`ReturnError` or :class:`EPCError`. """ self.callmanager.call(self, name, *args, **kwds)
[ "def", "call", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "self", ".", "callmanager", ".", "call", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwds", ")" ]
Call method connected to this handler. :type name: str :arg name: Method name to call. :type args: list :arg args: Arguments for remote method to call. :type callback: callable :arg callback: A function to be called with returned value of the remote method. :type errback: callable :arg errback: A function to be called with an error occurred in the remote method. It is either an instance of :class:`ReturnError` or :class:`EPCError`.
[ "Call", "method", "connected", "to", "this", "handler", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/handler.py#L362-L379
tkf/python-epc
epc/handler.py
EPCHandler.methods
def methods(self, *args, **kwds): """ Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too. """ self.callmanager.methods(self, *args, **kwds)
python
def methods(self, *args, **kwds): """ Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too. """ self.callmanager.methods(self, *args, **kwds)
[ "def", "methods", "(", "self", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "self", ".", "callmanager", ".", "methods", "(", "self", ",", "*", "args", ",", "*", "*", "kwds", ")" ]
Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too.
[ "Request", "info", "of", "callable", "remote", "methods", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/handler.py#L381-L389
tkf/python-epc
epc/handler.py
EPCHandler.call_sync
def call_sync(self, name, args, timeout=None): """ Blocking version of :meth:`call`. :type name: str :arg name: Remote function name to call. :type args: list :arg args: Arguments passed to the remote function. :type timeout: int or None :arg timeout: Timeout in second. None means no timeout. If the called remote function raise an exception, this method raise an exception. If you give `timeout`, this method may raise an `Empty` exception. """ return self._blocking_request(self.call, timeout, name, args)
python
def call_sync(self, name, args, timeout=None): """ Blocking version of :meth:`call`. :type name: str :arg name: Remote function name to call. :type args: list :arg args: Arguments passed to the remote function. :type timeout: int or None :arg timeout: Timeout in second. None means no timeout. If the called remote function raise an exception, this method raise an exception. If you give `timeout`, this method may raise an `Empty` exception. """ return self._blocking_request(self.call, timeout, name, args)
[ "def", "call_sync", "(", "self", ",", "name", ",", "args", ",", "timeout", "=", "None", ")", ":", "return", "self", ".", "_blocking_request", "(", "self", ".", "call", ",", "timeout", ",", "name", ",", "args", ")" ]
Blocking version of :meth:`call`. :type name: str :arg name: Remote function name to call. :type args: list :arg args: Arguments passed to the remote function. :type timeout: int or None :arg timeout: Timeout in second. None means no timeout. If the called remote function raise an exception, this method raise an exception. If you give `timeout`, this method may raise an `Empty` exception.
[ "Blocking", "version", "of", ":", "meth", ":", "call", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/handler.py#L397-L413
tkf/python-epc
epc/utils.py
func_call_as_str
def func_call_as_str(name, *args, **kwds): """ Return arguments and keyword arguments as formatted string >>> func_call_as_str('f', 1, 2, a=1) 'f(1, 2, a=1)' """ return '{0}({1})'.format( name, ', '.join(itertools.chain( map('{0!r}'.format, args), map('{0[0]!s}={0[1]!r}'.format, sorted(kwds.items())))))
python
def func_call_as_str(name, *args, **kwds): """ Return arguments and keyword arguments as formatted string >>> func_call_as_str('f', 1, 2, a=1) 'f(1, 2, a=1)' """ return '{0}({1})'.format( name, ', '.join(itertools.chain( map('{0!r}'.format, args), map('{0[0]!s}={0[1]!r}'.format, sorted(kwds.items())))))
[ "def", "func_call_as_str", "(", "name", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "return", "'{0}({1})'", ".", "format", "(", "name", ",", "', '", ".", "join", "(", "itertools", ".", "chain", "(", "map", "(", "'{0!r}'", ".", "format", ",", "args", ")", ",", "map", "(", "'{0[0]!s}={0[1]!r}'", ".", "format", ",", "sorted", "(", "kwds", ".", "items", "(", ")", ")", ")", ")", ")", ")" ]
Return arguments and keyword arguments as formatted string >>> func_call_as_str('f', 1, 2, a=1) 'f(1, 2, a=1)'
[ "Return", "arguments", "and", "keyword", "arguments", "as", "formatted", "string" ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/utils.py#L26-L38
tkf/python-epc
epc/utils.py
newthread
def newthread(template="EPCThread-{0}", **kwds): """ Instantiate :class:`threading.Thread` with an appropriate name. """ if not isinstance(template, str): template = '{0}.{1}-{{0}}'.format(template.__module__, template.__class__.__name__) return threading.Thread( name=newname(template), **kwds)
python
def newthread(template="EPCThread-{0}", **kwds): """ Instantiate :class:`threading.Thread` with an appropriate name. """ if not isinstance(template, str): template = '{0}.{1}-{{0}}'.format(template.__module__, template.__class__.__name__) return threading.Thread( name=newname(template), **kwds)
[ "def", "newthread", "(", "template", "=", "\"EPCThread-{0}\"", ",", "*", "*", "kwds", ")", ":", "if", "not", "isinstance", "(", "template", ",", "str", ")", ":", "template", "=", "'{0}.{1}-{{0}}'", ".", "format", "(", "template", ".", "__module__", ",", "template", ".", "__class__", ".", "__name__", ")", "return", "threading", ".", "Thread", "(", "name", "=", "newname", "(", "template", ")", ",", "*", "*", "kwds", ")" ]
Instantiate :class:`threading.Thread` with an appropriate name.
[ "Instantiate", ":", "class", ":", "threading", ".", "Thread", "with", "an", "appropriate", "name", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/utils.py#L81-L89
tkf/python-epc
epc/utils.py
callwith
def callwith(context_manager): """ A decorator to wrap execution of function with a context manager. """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwds): with context_manager: return func(*args, **kwds) return wrapper return decorator
python
def callwith(context_manager): """ A decorator to wrap execution of function with a context manager. """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwds): with context_manager: return func(*args, **kwds) return wrapper return decorator
[ "def", "callwith", "(", "context_manager", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "with", "context_manager", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwds", ")", "return", "wrapper", "return", "decorator" ]
A decorator to wrap execution of function with a context manager.
[ "A", "decorator", "to", "wrap", "execution", "of", "function", "with", "a", "context", "manager", "." ]
train
https://github.com/tkf/python-epc/blob/f3673ae5c35f20a0f71546ab34c28e3dde3595c1/epc/utils.py#L121-L131
dgketchum/satellite_image
sat_image/image.py
LandsatImage._scene_centroid
def _scene_centroid(self): """ Compute image center coordinates :return: Tuple of image center in lat, lon """ ul_lat = self.corner_ul_lat_product ll_lat = self.corner_ll_lat_product ul_lon = self.corner_ul_lon_product ur_lon = self.corner_ur_lon_product lat = (ul_lat + ll_lat) / 2. lon = (ul_lon + ur_lon) / 2. return lat, lon
python
def _scene_centroid(self): """ Compute image center coordinates :return: Tuple of image center in lat, lon """ ul_lat = self.corner_ul_lat_product ll_lat = self.corner_ll_lat_product ul_lon = self.corner_ul_lon_product ur_lon = self.corner_ur_lon_product lat = (ul_lat + ll_lat) / 2. lon = (ul_lon + ur_lon) / 2. return lat, lon
[ "def", "_scene_centroid", "(", "self", ")", ":", "ul_lat", "=", "self", ".", "corner_ul_lat_product", "ll_lat", "=", "self", ".", "corner_ll_lat_product", "ul_lon", "=", "self", ".", "corner_ul_lon_product", "ur_lon", "=", "self", ".", "corner_ur_lon_product", "lat", "=", "(", "ul_lat", "+", "ll_lat", ")", "/", "2.", "lon", "=", "(", "ul_lon", "+", "ur_lon", ")", "/", "2.", "return", "lat", ",", "lon" ]
Compute image center coordinates :return: Tuple of image center in lat, lon
[ "Compute", "image", "center", "coordinates", ":", "return", ":", "Tuple", "of", "image", "center", "in", "lat", "lon" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L126-L137
dgketchum/satellite_image
sat_image/image.py
LandsatImage.earth_sun_d
def earth_sun_d(dtime): """ Earth-sun distance in AU :param dtime time, e.g. datetime.datetime(2007, 5, 1) :type datetime object :return float(distance from sun to earth in astronomical units) """ doy = int(dtime.strftime('%j')) rad_term = 0.9856 * (doy - 4) * pi / 180 distance_au = 1 - 0.01672 * cos(rad_term) return distance_au
python
def earth_sun_d(dtime): """ Earth-sun distance in AU :param dtime time, e.g. datetime.datetime(2007, 5, 1) :type datetime object :return float(distance from sun to earth in astronomical units) """ doy = int(dtime.strftime('%j')) rad_term = 0.9856 * (doy - 4) * pi / 180 distance_au = 1 - 0.01672 * cos(rad_term) return distance_au
[ "def", "earth_sun_d", "(", "dtime", ")", ":", "doy", "=", "int", "(", "dtime", ".", "strftime", "(", "'%j'", ")", ")", "rad_term", "=", "0.9856", "*", "(", "doy", "-", "4", ")", "*", "pi", "/", "180", "distance_au", "=", "1", "-", "0.01672", "*", "cos", "(", "rad_term", ")", "return", "distance_au" ]
Earth-sun distance in AU :param dtime time, e.g. datetime.datetime(2007, 5, 1) :type datetime object :return float(distance from sun to earth in astronomical units)
[ "Earth", "-", "sun", "distance", "in", "AU", ":", "param", "dtime", "time", "e", ".", "g", ".", "datetime", ".", "datetime", "(", "2007", "5", "1", ")", ":", "type", "datetime", "object", ":", "return", "float", "(", "distance", "from", "sun", "to", "earth", "in", "astronomical", "units", ")" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L140-L150
dgketchum/satellite_image
sat_image/image.py
Landsat5.reflectance
def reflectance(self, band): """ :param band: An optical band, i.e. 1-5, 7 :return: At satellite reflectance, [-] """ if band == 6: raise ValueError('LT5 reflectance must be other than band 6') rad = self.radiance(band) esun = self.ex_atm_irrad[band - 1] toa_reflect = (pi * rad * self.earth_sun_dist ** 2) / (esun * cos(self.solar_zenith_rad)) return toa_reflect
python
def reflectance(self, band): """ :param band: An optical band, i.e. 1-5, 7 :return: At satellite reflectance, [-] """ if band == 6: raise ValueError('LT5 reflectance must be other than band 6') rad = self.radiance(band) esun = self.ex_atm_irrad[band - 1] toa_reflect = (pi * rad * self.earth_sun_dist ** 2) / (esun * cos(self.solar_zenith_rad)) return toa_reflect
[ "def", "reflectance", "(", "self", ",", "band", ")", ":", "if", "band", "==", "6", ":", "raise", "ValueError", "(", "'LT5 reflectance must be other than band 6'", ")", "rad", "=", "self", ".", "radiance", "(", "band", ")", "esun", "=", "self", ".", "ex_atm_irrad", "[", "band", "-", "1", "]", "toa_reflect", "=", "(", "pi", "*", "rad", "*", "self", ".", "earth_sun_dist", "**", "2", ")", "/", "(", "esun", "*", "cos", "(", "self", ".", "solar_zenith_rad", ")", ")", "return", "toa_reflect" ]
:param band: An optical band, i.e. 1-5, 7 :return: At satellite reflectance, [-]
[ ":", "param", "band", ":", "An", "optical", "band", "i", ".", "e", ".", "1", "-", "5", "7", ":", "return", ":", "At", "satellite", "reflectance", "[", "-", "]" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L279-L291
dgketchum/satellite_image
sat_image/image.py
Landsat5.albedo
def albedo(self, model='smith'): """Finds broad-band surface reflectance (albedo) Smith (2010), “The heat budget of the earth’s surface deduced from space” LT5 toa reflectance bands 1, 3, 4, 5, 7 # normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014 Should have option for Liang, 2000; Tasumi (2008), "At-Surface Reflectance and Albedo from Satellite for Operational Calculation of Land Surface Energy Balance" :return albedo array of floats """ if model == 'smith': blue, red, nir, swir1, swir2 = (self.reflectance(1), self.reflectance(3), self.reflectance(4), self.reflectance(5), self.reflectance(7)) alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014 elif model == 'tasumi': pass # add tasumi algorithm TODO return alb
python
def albedo(self, model='smith'): """Finds broad-band surface reflectance (albedo) Smith (2010), “The heat budget of the earth’s surface deduced from space” LT5 toa reflectance bands 1, 3, 4, 5, 7 # normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014 Should have option for Liang, 2000; Tasumi (2008), "At-Surface Reflectance and Albedo from Satellite for Operational Calculation of Land Surface Energy Balance" :return albedo array of floats """ if model == 'smith': blue, red, nir, swir1, swir2 = (self.reflectance(1), self.reflectance(3), self.reflectance(4), self.reflectance(5), self.reflectance(7)) alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014 elif model == 'tasumi': pass # add tasumi algorithm TODO return alb
[ "def", "albedo", "(", "self", ",", "model", "=", "'smith'", ")", ":", "if", "model", "==", "'smith'", ":", "blue", ",", "red", ",", "nir", ",", "swir1", ",", "swir2", "=", "(", "self", ".", "reflectance", "(", "1", ")", ",", "self", ".", "reflectance", "(", "3", ")", ",", "self", ".", "reflectance", "(", "4", ")", ",", "self", ".", "reflectance", "(", "5", ")", ",", "self", ".", "reflectance", "(", "7", ")", ")", "alb", "=", "(", "0.356", "*", "blue", "+", "0.130", "*", "red", "+", "0.373", "*", "nir", "+", "0.085", "*", "swir1", "+", "0.072", "*", "swir2", "-", "0.0018", ")", "/", "1.014", "elif", "model", "==", "'tasumi'", ":", "pass", "# add tasumi algorithm TODO", "return", "alb" ]
Finds broad-band surface reflectance (albedo) Smith (2010), “The heat budget of the earth’s surface deduced from space” LT5 toa reflectance bands 1, 3, 4, 5, 7 # normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014 Should have option for Liang, 2000; Tasumi (2008), "At-Surface Reflectance and Albedo from Satellite for Operational Calculation of Land Surface Energy Balance" :return albedo array of floats
[ "Finds", "broad", "-", "band", "surface", "reflectance", "(", "albedo", ")", "Smith", "(", "2010", ")", "“The", "heat", "budget", "of", "the", "earth’s", "surface", "deduced", "from", "space”", "LT5", "toa", "reflectance", "bands", "1", "3", "4", "5", "7", "#", "normalized", "i", ".", "e", ".", "0", ".", "356", "+", "0", ".", "130", "+", "0", ".", "373", "+", "0", ".", "085", "+", "0", ".", "07", "=", "1", ".", "014", "Should", "have", "option", "for", "Liang", "2000", ";", "Tasumi", "(", "2008", ")", "At", "-", "Surface", "Reflectance", "and", "Albedo", "from", "Satellite", "for", "Operational", "Calculation", "of", "Land", "Surface", "Energy", "Balance", ":", "return", "albedo", "array", "of", "floats" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L293-L315
dgketchum/satellite_image
sat_image/image.py
Landsat5.saturation_mask
def saturation_mask(self, band, value=255): """ Mask saturated pixels, 1 (True) is saturated. :param band: Image band with dn values, type: array :param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int :return: boolean array """ dn = self._get_band('b{}'.format(band)) mask = self.mask() mask = where((dn == value) & (mask > 0), True, False) return mask
python
def saturation_mask(self, band, value=255): """ Mask saturated pixels, 1 (True) is saturated. :param band: Image band with dn values, type: array :param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int :return: boolean array """ dn = self._get_band('b{}'.format(band)) mask = self.mask() mask = where((dn == value) & (mask > 0), True, False) return mask
[ "def", "saturation_mask", "(", "self", ",", "band", ",", "value", "=", "255", ")", ":", "dn", "=", "self", ".", "_get_band", "(", "'b{}'", ".", "format", "(", "band", ")", ")", "mask", "=", "self", ".", "mask", "(", ")", "mask", "=", "where", "(", "(", "dn", "==", "value", ")", "&", "(", "mask", ">", "0", ")", ",", "True", ",", "False", ")", "return", "mask" ]
Mask saturated pixels, 1 (True) is saturated. :param band: Image band with dn values, type: array :param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int :return: boolean array
[ "Mask", "saturated", "pixels", "1", "(", "True", ")", "is", "saturated", ".", ":", "param", "band", ":", "Image", "band", "with", "dn", "values", "type", ":", "array", ":", "param", "value", ":", "Maximum", "(", "saturated", ")", "value", "i", ".", "e", ".", "255", "for", "8", "-", "bit", "data", "type", ":", "int", ":", "return", ":", "boolean", "array" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L317-L327
dgketchum/satellite_image
sat_image/image.py
Landsat5.ndvi
def ndvi(self): """ Normalized difference vegetation index. :return: NDVI """ red, nir = self.reflectance(3), self.reflectance(4) ndvi = self._divide_zero((nir - red), (nir + red), nan) return ndvi
python
def ndvi(self): """ Normalized difference vegetation index. :return: NDVI """ red, nir = self.reflectance(3), self.reflectance(4) ndvi = self._divide_zero((nir - red), (nir + red), nan) return ndvi
[ "def", "ndvi", "(", "self", ")", ":", "red", ",", "nir", "=", "self", ".", "reflectance", "(", "3", ")", ",", "self", ".", "reflectance", "(", "4", ")", "ndvi", "=", "self", ".", "_divide_zero", "(", "(", "nir", "-", "red", ")", ",", "(", "nir", "+", "red", ")", ",", "nan", ")", "return", "ndvi" ]
Normalized difference vegetation index. :return: NDVI
[ "Normalized", "difference", "vegetation", "index", ".", ":", "return", ":", "NDVI" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L329-L336
dgketchum/satellite_image
sat_image/image.py
Landsat5.lai
def lai(self): """ Leaf area index (LAI), or the surface area of leaves to surface area ground. Trezza and Allen, 2014 :param ndvi: normalized difference vegetation index [-] :return: LAI [-] """ ndvi = self.ndvi() lai = 7.0 * (ndvi ** 3) lai = where(lai > 6., 6., lai) return lai
python
def lai(self): """ Leaf area index (LAI), or the surface area of leaves to surface area ground. Trezza and Allen, 2014 :param ndvi: normalized difference vegetation index [-] :return: LAI [-] """ ndvi = self.ndvi() lai = 7.0 * (ndvi ** 3) lai = where(lai > 6., 6., lai) return lai
[ "def", "lai", "(", "self", ")", ":", "ndvi", "=", "self", ".", "ndvi", "(", ")", "lai", "=", "7.0", "*", "(", "ndvi", "**", "3", ")", "lai", "=", "where", "(", "lai", ">", "6.", ",", "6.", ",", "lai", ")", "return", "lai" ]
Leaf area index (LAI), or the surface area of leaves to surface area ground. Trezza and Allen, 2014 :param ndvi: normalized difference vegetation index [-] :return: LAI [-]
[ "Leaf", "area", "index", "(", "LAI", ")", "or", "the", "surface", "area", "of", "leaves", "to", "surface", "area", "ground", ".", "Trezza", "and", "Allen", "2014", ":", "param", "ndvi", ":", "normalized", "difference", "vegetation", "index", "[", "-", "]", ":", "return", ":", "LAI", "[", "-", "]" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L338-L348
dgketchum/satellite_image
sat_image/image.py
Landsat5.land_surface_temp
def land_surface_temp(self): """ Mean values from Allen (2007) :return: """ rp = 0.91 tau = 0.866 rsky = 1.32 epsilon = self.emissivity(approach='tasumi') radiance = self.radiance(6) rc = ((radiance - rp) / tau) - ((1 - epsilon) * rsky) lst = self.k2 / (log((epsilon * self.k1 / rc) + 1)) return lst
python
def land_surface_temp(self): """ Mean values from Allen (2007) :return: """ rp = 0.91 tau = 0.866 rsky = 1.32 epsilon = self.emissivity(approach='tasumi') radiance = self.radiance(6) rc = ((radiance - rp) / tau) - ((1 - epsilon) * rsky) lst = self.k2 / (log((epsilon * self.k1 / rc) + 1)) return lst
[ "def", "land_surface_temp", "(", "self", ")", ":", "rp", "=", "0.91", "tau", "=", "0.866", "rsky", "=", "1.32", "epsilon", "=", "self", ".", "emissivity", "(", "approach", "=", "'tasumi'", ")", "radiance", "=", "self", ".", "radiance", "(", "6", ")", "rc", "=", "(", "(", "radiance", "-", "rp", ")", "/", "tau", ")", "-", "(", "(", "1", "-", "epsilon", ")", "*", "rsky", ")", "lst", "=", "self", ".", "k2", "/", "(", "log", "(", "(", "epsilon", "*", "self", ".", "k1", "/", "rc", ")", "+", "1", ")", ")", "return", "lst" ]
Mean values from Allen (2007) :return:
[ "Mean", "values", "from", "Allen", "(", "2007", ")", ":", "return", ":" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L375-L387
dgketchum/satellite_image
sat_image/image.py
Landsat8.brightness_temp
def brightness_temp(self, band, temp_scale='K'): """Calculate brightness temperature of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php T = K2 / log((K1 / L) + 1) and L = ML * Q + AL where: T = At-satellite brightness temperature (degrees kelvin) L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) K1 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) K2 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) Returns -------- ndarray: float32 ndarray with shape == input shape """ if band in self.oli_bands: raise ValueError('Landsat 8 brightness should be TIRS band (i.e. 10 or 11)') k1 = getattr(self, 'k1_constant_band_{}'.format(band)) k2 = getattr(self, 'k2_constant_band_{}'.format(band)) rad = self.radiance(band) brightness = k2 / log((k1 / rad) + 1) if temp_scale == 'K': return brightness elif temp_scale == 'F': return brightness * (9 / 5.0) - 459.67 elif temp_scale == 'C': return brightness - 273.15 else: raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
python
def brightness_temp(self, band, temp_scale='K'): """Calculate brightness temperature of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php T = K2 / log((K1 / L) + 1) and L = ML * Q + AL where: T = At-satellite brightness temperature (degrees kelvin) L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) K1 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) K2 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) Returns -------- ndarray: float32 ndarray with shape == input shape """ if band in self.oli_bands: raise ValueError('Landsat 8 brightness should be TIRS band (i.e. 10 or 11)') k1 = getattr(self, 'k1_constant_band_{}'.format(band)) k2 = getattr(self, 'k2_constant_band_{}'.format(band)) rad = self.radiance(band) brightness = k2 / log((k1 / rad) + 1) if temp_scale == 'K': return brightness elif temp_scale == 'F': return brightness * (9 / 5.0) - 459.67 elif temp_scale == 'C': return brightness - 273.15 else: raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
[ "def", "brightness_temp", "(", "self", ",", "band", ",", "temp_scale", "=", "'K'", ")", ":", "if", "band", "in", "self", ".", "oli_bands", ":", "raise", "ValueError", "(", "'Landsat 8 brightness should be TIRS band (i.e. 10 or 11)'", ")", "k1", "=", "getattr", "(", "self", ",", "'k1_constant_band_{}'", ".", "format", "(", "band", ")", ")", "k2", "=", "getattr", "(", "self", ",", "'k2_constant_band_{}'", ".", "format", "(", "band", ")", ")", "rad", "=", "self", ".", "radiance", "(", "band", ")", "brightness", "=", "k2", "/", "log", "(", "(", "k1", "/", "rad", ")", "+", "1", ")", "if", "temp_scale", "==", "'K'", ":", "return", "brightness", "elif", "temp_scale", "==", "'F'", ":", "return", "brightness", "*", "(", "9", "/", "5.0", ")", "-", "459.67", "elif", "temp_scale", "==", "'C'", ":", "return", "brightness", "-", "273.15", "else", ":", "raise", "ValueError", "(", "'{} is not a valid temperature scale'", ".", "format", "(", "temp_scale", ")", ")" ]
Calculate brightness temperature of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php T = K2 / log((K1 / L) + 1) and L = ML * Q + AL where: T = At-satellite brightness temperature (degrees kelvin) L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) K1 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) K2 = Band-specific thermal conversion constant from the metadata (K1_CONSTANT_BAND_x, where x is the thermal band number) Returns -------- ndarray: float32 ndarray with shape == input shape
[ "Calculate", "brightness", "temperature", "of", "Landsat", "8", "as", "outlined", "here", ":", "http", ":", "//", "landsat", ".", "usgs", ".", "gov", "/", "Landsat8_Using_Product", ".", "php" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L560-L608
dgketchum/satellite_image
sat_image/image.py
Landsat8.reflectance
def reflectance(self, band): """Calculate top of atmosphere reflectance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php R_raw = MR * Q + AR R = R_raw / cos(Z) = R_raw / sin(E) Z = 90 - E (in degrees) where: R_raw = TOA planetary reflectance, without correction for solar angle. R = TOA reflectance with a correction for the sun angle. MR = Band-specific multiplicative rescaling factor from the metadata (REFLECTANCE_MULT_BAND_x, where x is the band number) AR = Band-specific additive rescaling factor from the metadata (REFLECTANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) E = Local sun elevation angle. The scene center sun elevation angle in degrees is provided in the metadata (SUN_ELEVATION). Z = Local solar zenith angle (same angle as E, but measured from the zenith instead of from the horizon). Returns -------- ndarray: float32 ndarray with shape == input shape """ if band not in self.oli_bands: raise ValueError('Landsat 8 reflectance should OLI band (i.e. bands 1-8)') elev = getattr(self, 'sun_elevation') dn = self._get_band('b{}'.format(band)) mr = getattr(self, 'reflectance_mult_band_{}'.format(band)) ar = getattr(self, 'reflectance_add_band_{}'.format(band)) if elev < 0.0: raise ValueError("Sun elevation must be non-negative " "(sun must be above horizon for entire scene)") rf = ((mr * dn.astype(float32)) + ar) / sin(deg2rad(elev)) return rf
python
def reflectance(self, band): """Calculate top of atmosphere reflectance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php R_raw = MR * Q + AR R = R_raw / cos(Z) = R_raw / sin(E) Z = 90 - E (in degrees) where: R_raw = TOA planetary reflectance, without correction for solar angle. R = TOA reflectance with a correction for the sun angle. MR = Band-specific multiplicative rescaling factor from the metadata (REFLECTANCE_MULT_BAND_x, where x is the band number) AR = Band-specific additive rescaling factor from the metadata (REFLECTANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) E = Local sun elevation angle. The scene center sun elevation angle in degrees is provided in the metadata (SUN_ELEVATION). Z = Local solar zenith angle (same angle as E, but measured from the zenith instead of from the horizon). Returns -------- ndarray: float32 ndarray with shape == input shape """ if band not in self.oli_bands: raise ValueError('Landsat 8 reflectance should OLI band (i.e. bands 1-8)') elev = getattr(self, 'sun_elevation') dn = self._get_band('b{}'.format(band)) mr = getattr(self, 'reflectance_mult_band_{}'.format(band)) ar = getattr(self, 'reflectance_add_band_{}'.format(band)) if elev < 0.0: raise ValueError("Sun elevation must be non-negative " "(sun must be above horizon for entire scene)") rf = ((mr * dn.astype(float32)) + ar) / sin(deg2rad(elev)) return rf
[ "def", "reflectance", "(", "self", ",", "band", ")", ":", "if", "band", "not", "in", "self", ".", "oli_bands", ":", "raise", "ValueError", "(", "'Landsat 8 reflectance should OLI band (i.e. bands 1-8)'", ")", "elev", "=", "getattr", "(", "self", ",", "'sun_elevation'", ")", "dn", "=", "self", ".", "_get_band", "(", "'b{}'", ".", "format", "(", "band", ")", ")", "mr", "=", "getattr", "(", "self", ",", "'reflectance_mult_band_{}'", ".", "format", "(", "band", ")", ")", "ar", "=", "getattr", "(", "self", ",", "'reflectance_add_band_{}'", ".", "format", "(", "band", ")", ")", "if", "elev", "<", "0.0", ":", "raise", "ValueError", "(", "\"Sun elevation must be non-negative \"", "\"(sun must be above horizon for entire scene)\"", ")", "rf", "=", "(", "(", "mr", "*", "dn", ".", "astype", "(", "float32", ")", ")", "+", "ar", ")", "/", "sin", "(", "deg2rad", "(", "elev", ")", ")", "return", "rf" ]
Calculate top of atmosphere reflectance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php R_raw = MR * Q + AR R = R_raw / cos(Z) = R_raw / sin(E) Z = 90 - E (in degrees) where: R_raw = TOA planetary reflectance, without correction for solar angle. R = TOA reflectance with a correction for the sun angle. MR = Band-specific multiplicative rescaling factor from the metadata (REFLECTANCE_MULT_BAND_x, where x is the band number) AR = Band-specific additive rescaling factor from the metadata (REFLECTANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) E = Local sun elevation angle. The scene center sun elevation angle in degrees is provided in the metadata (SUN_ELEVATION). Z = Local solar zenith angle (same angle as E, but measured from the zenith instead of from the horizon). Returns -------- ndarray: float32 ndarray with shape == input shape
[ "Calculate", "top", "of", "atmosphere", "reflectance", "of", "Landsat", "8", "as", "outlined", "here", ":", "http", ":", "//", "landsat", ".", "usgs", ".", "gov", "/", "Landsat8_Using_Product", ".", "php", "R_raw", "=", "MR", "*", "Q", "+", "AR", "R", "=", "R_raw", "/", "cos", "(", "Z", ")", "=", "R_raw", "/", "sin", "(", "E", ")", "Z", "=", "90", "-", "E", "(", "in", "degrees", ")", "where", ":", "R_raw", "=", "TOA", "planetary", "reflectance", "without", "correction", "for", "solar", "angle", ".", "R", "=", "TOA", "reflectance", "with", "a", "correction", "for", "the", "sun", "angle", ".", "MR", "=", "Band", "-", "specific", "multiplicative", "rescaling", "factor", "from", "the", "metadata", "(", "REFLECTANCE_MULT_BAND_x", "where", "x", "is", "the", "band", "number", ")", "AR", "=", "Band", "-", "specific", "additive", "rescaling", "factor", "from", "the", "metadata", "(", "REFLECTANCE_ADD_BAND_x", "where", "x", "is", "the", "band", "number", ")", "Q", "=", "Quantized", "and", "calibrated", "standard", "product", "pixel", "values", "(", "DN", ")", "E", "=", "Local", "sun", "elevation", "angle", ".", "The", "scene", "center", "sun", "elevation", "angle", "in", "degrees", "is", "provided", "in", "the", "metadata", "(", "SUN_ELEVATION", ")", ".", "Z", "=", "Local", "solar", "zenith", "angle", "(", "same", "angle", "as", "E", "but", "measured", "from", "the", "zenith", "instead", "of", "from", "the", "horizon", ")", ".", "Returns", "--------", "ndarray", ":", "float32", "ndarray", "with", "shape", "==", "input", "shape" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L610-L655
dgketchum/satellite_image
sat_image/image.py
Landsat8.radiance
def radiance(self, band): """Calculate top of atmosphere radiance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php L = ML * Q + AL where: L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) Returns -------- ndarray: float32 ndarray with shape == input shape """ ml = getattr(self, 'radiance_mult_band_{}'.format(band)) al = getattr(self, 'radiance_add_band_{}'.format(band)) dn = self._get_band('b{}'.format(band)) rad = ml * dn.astype(float32) + al return rad
python
def radiance(self, band): """Calculate top of atmosphere radiance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php L = ML * Q + AL where: L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) Returns -------- ndarray: float32 ndarray with shape == input shape """ ml = getattr(self, 'radiance_mult_band_{}'.format(band)) al = getattr(self, 'radiance_add_band_{}'.format(band)) dn = self._get_band('b{}'.format(band)) rad = ml * dn.astype(float32) + al return rad
[ "def", "radiance", "(", "self", ",", "band", ")", ":", "ml", "=", "getattr", "(", "self", ",", "'radiance_mult_band_{}'", ".", "format", "(", "band", ")", ")", "al", "=", "getattr", "(", "self", ",", "'radiance_add_band_{}'", ".", "format", "(", "band", ")", ")", "dn", "=", "self", ".", "_get_band", "(", "'b{}'", ".", "format", "(", "band", ")", ")", "rad", "=", "ml", "*", "dn", ".", "astype", "(", "float32", ")", "+", "al", "return", "rad" ]
Calculate top of atmosphere radiance of Landsat 8 as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php L = ML * Q + AL where: L = TOA spectral radiance (Watts / (m2 * srad * mm)) ML = Band-specific multiplicative rescaling factor from the metadata (RADIANCE_MULT_BAND_x, where x is the band number) AL = Band-specific additive rescaling factor from the metadata (RADIANCE_ADD_BAND_x, where x is the band number) Q = Quantized and calibrated standard product pixel values (DN) (ndarray img) Returns -------- ndarray: float32 ndarray with shape == input shape
[ "Calculate", "top", "of", "atmosphere", "radiance", "of", "Landsat", "8", "as", "outlined", "here", ":", "http", ":", "//", "landsat", ".", "usgs", ".", "gov", "/", "Landsat8_Using_Product", ".", "php", "L", "=", "ML", "*", "Q", "+", "AL", "where", ":", "L", "=", "TOA", "spectral", "radiance", "(", "Watts", "/", "(", "m2", "*", "srad", "*", "mm", "))", "ML", "=", "Band", "-", "specific", "multiplicative", "rescaling", "factor", "from", "the", "metadata", "(", "RADIANCE_MULT_BAND_x", "where", "x", "is", "the", "band", "number", ")", "AL", "=", "Band", "-", "specific", "additive", "rescaling", "factor", "from", "the", "metadata", "(", "RADIANCE_ADD_BAND_x", "where", "x", "is", "the", "band", "number", ")", "Q", "=", "Quantized", "and", "calibrated", "standard", "product", "pixel", "values", "(", "DN", ")", "(", "ndarray", "img", ")", "Returns", "--------", "ndarray", ":", "float32", "ndarray", "with", "shape", "==", "input", "shape" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L657-L682
dgketchum/satellite_image
sat_image/image.py
Landsat8.ndsi
def ndsi(self): """ Normalized difference snow index. :return: NDSI """ green, swir1 = self.reflectance(3), self.reflectance(6) ndsi = self._divide_zero((green - swir1), (green + swir1), nan) return ndsi
python
def ndsi(self): """ Normalized difference snow index. :return: NDSI """ green, swir1 = self.reflectance(3), self.reflectance(6) ndsi = self._divide_zero((green - swir1), (green + swir1), nan) return ndsi
[ "def", "ndsi", "(", "self", ")", ":", "green", ",", "swir1", "=", "self", ".", "reflectance", "(", "3", ")", ",", "self", ".", "reflectance", "(", "6", ")", "ndsi", "=", "self", ".", "_divide_zero", "(", "(", "green", "-", "swir1", ")", ",", "(", "green", "+", "swir1", ")", ",", "nan", ")", "return", "ndsi" ]
Normalized difference snow index. :return: NDSI
[ "Normalized", "difference", "snow", "index", ".", ":", "return", ":", "NDSI" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/image.py#L761-L768
dgketchum/satellite_image
sat_image/fmask.py
Fmask.whiteness_index
def whiteness_index(self): """Index of "Whiteness" based on visible bands. Parameters ---------- Output ------ ndarray: whiteness index """ mean_vis = (self.blue + self.green + self.red) / 3 blue_absdiff = np.absolute(self._divide_zero(self.blue - mean_vis, mean_vis)) green_absdiff = np.absolute(self._divide_zero(self.green - mean_vis, mean_vis)) red_absdiff = np.absolute(self._divide_zero(self.red - mean_vis, mean_vis)) return blue_absdiff + green_absdiff + red_absdiff
python
def whiteness_index(self): """Index of "Whiteness" based on visible bands. Parameters ---------- Output ------ ndarray: whiteness index """ mean_vis = (self.blue + self.green + self.red) / 3 blue_absdiff = np.absolute(self._divide_zero(self.blue - mean_vis, mean_vis)) green_absdiff = np.absolute(self._divide_zero(self.green - mean_vis, mean_vis)) red_absdiff = np.absolute(self._divide_zero(self.red - mean_vis, mean_vis)) return blue_absdiff + green_absdiff + red_absdiff
[ "def", "whiteness_index", "(", "self", ")", ":", "mean_vis", "=", "(", "self", ".", "blue", "+", "self", ".", "green", "+", "self", ".", "red", ")", "/", "3", "blue_absdiff", "=", "np", ".", "absolute", "(", "self", ".", "_divide_zero", "(", "self", ".", "blue", "-", "mean_vis", ",", "mean_vis", ")", ")", "green_absdiff", "=", "np", ".", "absolute", "(", "self", ".", "_divide_zero", "(", "self", ".", "green", "-", "mean_vis", ",", "mean_vis", ")", ")", "red_absdiff", "=", "np", ".", "absolute", "(", "self", ".", "_divide_zero", "(", "self", ".", "red", "-", "mean_vis", ",", "mean_vis", ")", ")", "return", "blue_absdiff", "+", "green_absdiff", "+", "red_absdiff" ]
Index of "Whiteness" based on visible bands. Parameters ---------- Output ------ ndarray: whiteness index
[ "Index", "of", "Whiteness", "based", "on", "visible", "bands", ".", "Parameters", "----------" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L119-L135
dgketchum/satellite_image
sat_image/fmask.py
Fmask.potential_cloud_pixels
def potential_cloud_pixels(self): """Determine potential cloud pixels (PCPs) Combine basic spectral testsr to get a premliminary cloud mask First pass, section 3.1.1 in Zhu and Woodcock 2012 Equation 6 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray Output ------ ndarray: potential cloud mask, boolean """ eq1 = self.basic_test() eq2 = self.whiteness_test() eq3 = self.hot_test() eq4 = self.nirswir_test() if self.sat == 'LC8': cir = self.cirrus_test() return (eq1 & eq2 & eq3 & eq4) | cir else: return eq1 & eq2 & eq3 & eq4
python
def potential_cloud_pixels(self): """Determine potential cloud pixels (PCPs) Combine basic spectral testsr to get a premliminary cloud mask First pass, section 3.1.1 in Zhu and Woodcock 2012 Equation 6 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray Output ------ ndarray: potential cloud mask, boolean """ eq1 = self.basic_test() eq2 = self.whiteness_test() eq3 = self.hot_test() eq4 = self.nirswir_test() if self.sat == 'LC8': cir = self.cirrus_test() return (eq1 & eq2 & eq3 & eq4) | cir else: return eq1 & eq2 & eq3 & eq4
[ "def", "potential_cloud_pixels", "(", "self", ")", ":", "eq1", "=", "self", ".", "basic_test", "(", ")", "eq2", "=", "self", ".", "whiteness_test", "(", ")", "eq3", "=", "self", ".", "hot_test", "(", ")", "eq4", "=", "self", ".", "nirswir_test", "(", ")", "if", "self", ".", "sat", "==", "'LC8'", ":", "cir", "=", "self", ".", "cirrus_test", "(", ")", "return", "(", "eq1", "&", "eq2", "&", "eq3", "&", "eq4", ")", "|", "cir", "else", ":", "return", "eq1", "&", "eq2", "&", "eq3", "&", "eq4" ]
Determine potential cloud pixels (PCPs) Combine basic spectral testsr to get a premliminary cloud mask First pass, section 3.1.1 in Zhu and Woodcock 2012 Equation 6 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray Output ------ ndarray: potential cloud mask, boolean
[ "Determine", "potential", "cloud", "pixels", "(", "PCPs", ")", "Combine", "basic", "spectral", "testsr", "to", "get", "a", "premliminary", "cloud", "mask", "First", "pass", "section", "3", ".", "1", ".", "1", "in", "Zhu", "and", "Woodcock", "2012", "Equation", "6", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "ndvi", ":", "ndarray", "ndsi", ":", "ndarray", "blue", ":", "ndarray", "green", ":", "ndarray", "red", ":", "ndarray", "nir", ":", "ndarray", "swir1", ":", "ndarray", "swir2", ":", "ndarray", "cirrus", ":", "ndarray", "tirs1", ":", "ndarray", "Output", "------", "ndarray", ":", "potential", "cloud", "mask", "boolean" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L224-L254
dgketchum/satellite_image
sat_image/fmask.py
Fmask.temp_water
def temp_water(self): """Use water to mask tirs and find 82.5 pctile Equation 7 and 8 (Zhu and Woodcock, 2012) Parameters ---------- is_water: ndarray, boolean water mask, water is True, land is False swir2: ndarray tirs1: ndarray Output ------ float: 82.5th percentile temperature over water """ # eq7 th_swir2 = 0.03 water = self.water_test() clear_sky_water = water & (self.swir2 < th_swir2) # eq8 clear_water_temp = self.tirs1.copy() clear_water_temp[~clear_sky_water] = np.nan clear_water_temp[~self.mask] = np.nan pctl_clwt = np.nanpercentile(clear_water_temp, 82.5) return pctl_clwt
python
def temp_water(self): """Use water to mask tirs and find 82.5 pctile Equation 7 and 8 (Zhu and Woodcock, 2012) Parameters ---------- is_water: ndarray, boolean water mask, water is True, land is False swir2: ndarray tirs1: ndarray Output ------ float: 82.5th percentile temperature over water """ # eq7 th_swir2 = 0.03 water = self.water_test() clear_sky_water = water & (self.swir2 < th_swir2) # eq8 clear_water_temp = self.tirs1.copy() clear_water_temp[~clear_sky_water] = np.nan clear_water_temp[~self.mask] = np.nan pctl_clwt = np.nanpercentile(clear_water_temp, 82.5) return pctl_clwt
[ "def", "temp_water", "(", "self", ")", ":", "# eq7", "th_swir2", "=", "0.03", "water", "=", "self", ".", "water_test", "(", ")", "clear_sky_water", "=", "water", "&", "(", "self", ".", "swir2", "<", "th_swir2", ")", "# eq8", "clear_water_temp", "=", "self", ".", "tirs1", ".", "copy", "(", ")", "clear_water_temp", "[", "~", "clear_sky_water", "]", "=", "np", ".", "nan", "clear_water_temp", "[", "~", "self", ".", "mask", "]", "=", "np", ".", "nan", "pctl_clwt", "=", "np", ".", "nanpercentile", "(", "clear_water_temp", ",", "82.5", ")", "return", "pctl_clwt" ]
Use water to mask tirs and find 82.5 pctile Equation 7 and 8 (Zhu and Woodcock, 2012) Parameters ---------- is_water: ndarray, boolean water mask, water is True, land is False swir2: ndarray tirs1: ndarray Output ------ float: 82.5th percentile temperature over water
[ "Use", "water", "to", "mask", "tirs", "and", "find", "82", ".", "5", "pctile", "Equation", "7", "and", "8", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "is_water", ":", "ndarray", "boolean", "water", "mask", "water", "is", "True", "land", "is", "False", "swir2", ":", "ndarray", "tirs1", ":", "ndarray", "Output", "------", "float", ":", "82", ".", "5th", "percentile", "temperature", "over", "water" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L256-L280
dgketchum/satellite_image
sat_image/fmask.py
Fmask.water_temp_prob
def water_temp_prob(self): """Temperature probability for water Equation 9 (Zhu and Woodcock, 2012) Parameters ---------- water_temp: float 82.5th percentile temperature over water swir2: ndarray tirs1: ndarray Output ------ ndarray: probability of cloud over water based on temperature """ temp_const = 4.0 # degrees C water_temp = self.temp_water() return (water_temp - self.tirs1) / temp_const
python
def water_temp_prob(self): """Temperature probability for water Equation 9 (Zhu and Woodcock, 2012) Parameters ---------- water_temp: float 82.5th percentile temperature over water swir2: ndarray tirs1: ndarray Output ------ ndarray: probability of cloud over water based on temperature """ temp_const = 4.0 # degrees C water_temp = self.temp_water() return (water_temp - self.tirs1) / temp_const
[ "def", "water_temp_prob", "(", "self", ")", ":", "temp_const", "=", "4.0", "# degrees C", "water_temp", "=", "self", ".", "temp_water", "(", ")", "return", "(", "water_temp", "-", "self", ".", "tirs1", ")", "/", "temp_const" ]
Temperature probability for water Equation 9 (Zhu and Woodcock, 2012) Parameters ---------- water_temp: float 82.5th percentile temperature over water swir2: ndarray tirs1: ndarray Output ------ ndarray: probability of cloud over water based on temperature
[ "Temperature", "probability", "for", "water", "Equation", "9", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "water_temp", ":", "float", "82", ".", "5th", "percentile", "temperature", "over", "water", "swir2", ":", "ndarray", "tirs1", ":", "ndarray", "Output", "------", "ndarray", ":", "probability", "of", "cloud", "over", "water", "based", "on", "temperature" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L282-L298
dgketchum/satellite_image
sat_image/fmask.py
Fmask.brightness_prob
def brightness_prob(self, clip=True): """The brightest water may have Band 5 reflectance as high as LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF.11 Equation 10 (Zhu and Woodcock, 2012) Parameters ---------- nir: ndarray clip: boolean Output ------ ndarray: brightness probability, constrained LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF..1 """ thresh = 0.11 bp = np.minimum(thresh, self.nir) / thresh if clip: bp[bp > 1] = 1 bp[bp < 0] = 0 return bp
python
def brightness_prob(self, clip=True): """The brightest water may have Band 5 reflectance as high as LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF.11 Equation 10 (Zhu and Woodcock, 2012) Parameters ---------- nir: ndarray clip: boolean Output ------ ndarray: brightness probability, constrained LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF..1 """ thresh = 0.11 bp = np.minimum(thresh, self.nir) / thresh if clip: bp[bp > 1] = 1 bp[bp < 0] = 0 return bp
[ "def", "brightness_prob", "(", "self", ",", "clip", "=", "True", ")", ":", "thresh", "=", "0.11", "bp", "=", "np", ".", "minimum", "(", "thresh", ",", "self", ".", "nir", ")", "/", "thresh", "if", "clip", ":", "bp", "[", "bp", ">", "1", "]", "=", "1", "bp", "[", "bp", "<", "0", "]", "=", "0", "return", "bp" ]
The brightest water may have Band 5 reflectance as high as LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF.11 Equation 10 (Zhu and Woodcock, 2012) Parameters ---------- nir: ndarray clip: boolean Output ------ ndarray: brightness probability, constrained LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF..1
[ "The", "brightest", "water", "may", "have", "Band", "5", "reflectance", "as", "high", "as", "LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1", ".", "TIF", ".", "11", "Equation", "10", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "nir", ":", "ndarray", "clip", ":", "boolean", "Output", "------", "ndarray", ":", "brightness", "probability", "constrained", "LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1", ".", "TIF", "..", "1" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L300-L318
dgketchum/satellite_image
sat_image/fmask.py
Fmask.temp_land
def temp_land(self, pcps, water): """Derive high/low percentiles of land temperature Equations 12 an 13 (Zhu and Woodcock, 2012) Parameters ---------- pcps: ndarray potential cloud pixels, boolean water: ndarray water mask, boolean tirs1: ndarray Output ------ tuple: 17.5 and 82.5 percentile temperature over clearsky land """ # eq 12 clearsky_land = ~(pcps | water) # use clearsky_land to mask tirs1 clear_land_temp = self.tirs1.copy() clear_land_temp[~clearsky_land] = np.nan clear_land_temp[~self.mask] = np.nan # take 17.5 and 82.5 percentile, eq 13 low, high = np.nanpercentile(clear_land_temp, (17.5, 82.5)) return low, high
python
def temp_land(self, pcps, water): """Derive high/low percentiles of land temperature Equations 12 an 13 (Zhu and Woodcock, 2012) Parameters ---------- pcps: ndarray potential cloud pixels, boolean water: ndarray water mask, boolean tirs1: ndarray Output ------ tuple: 17.5 and 82.5 percentile temperature over clearsky land """ # eq 12 clearsky_land = ~(pcps | water) # use clearsky_land to mask tirs1 clear_land_temp = self.tirs1.copy() clear_land_temp[~clearsky_land] = np.nan clear_land_temp[~self.mask] = np.nan # take 17.5 and 82.5 percentile, eq 13 low, high = np.nanpercentile(clear_land_temp, (17.5, 82.5)) return low, high
[ "def", "temp_land", "(", "self", ",", "pcps", ",", "water", ")", ":", "# eq 12", "clearsky_land", "=", "~", "(", "pcps", "|", "water", ")", "# use clearsky_land to mask tirs1", "clear_land_temp", "=", "self", ".", "tirs1", ".", "copy", "(", ")", "clear_land_temp", "[", "~", "clearsky_land", "]", "=", "np", ".", "nan", "clear_land_temp", "[", "~", "self", ".", "mask", "]", "=", "np", ".", "nan", "# take 17.5 and 82.5 percentile, eq 13", "low", ",", "high", "=", "np", ".", "nanpercentile", "(", "clear_land_temp", ",", "(", "17.5", ",", "82.5", ")", ")", "return", "low", ",", "high" ]
Derive high/low percentiles of land temperature Equations 12 an 13 (Zhu and Woodcock, 2012) Parameters ---------- pcps: ndarray potential cloud pixels, boolean water: ndarray water mask, boolean tirs1: ndarray Output ------ tuple: 17.5 and 82.5 percentile temperature over clearsky land
[ "Derive", "high", "/", "low", "percentiles", "of", "land", "temperature", "Equations", "12", "an", "13", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "pcps", ":", "ndarray", "potential", "cloud", "pixels", "boolean", "water", ":", "ndarray", "water", "mask", "boolean", "tirs1", ":", "ndarray", "Output", "------", "tuple", ":", "17", ".", "5", "and", "82", ".", "5", "percentile", "temperature", "over", "clearsky", "land" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L320-L345
dgketchum/satellite_image
sat_image/fmask.py
Fmask.land_temp_prob
def land_temp_prob(self, tlow, thigh): """Temperature-based probability of cloud over land Equation 14 (Zhu and Woodcock, 2012) Parameters ---------- tirs1: ndarray tlow: float Low (17.5 percentile) temperature of land thigh: float High (82.5 percentile) temperature of land Output ------ ndarray : probability of cloud over land based on temperature """ temp_diff = 4 # degrees return (thigh + temp_diff - self.tirs1) / (thigh + 4 - (tlow - 4))
python
def land_temp_prob(self, tlow, thigh): """Temperature-based probability of cloud over land Equation 14 (Zhu and Woodcock, 2012) Parameters ---------- tirs1: ndarray tlow: float Low (17.5 percentile) temperature of land thigh: float High (82.5 percentile) temperature of land Output ------ ndarray : probability of cloud over land based on temperature """ temp_diff = 4 # degrees return (thigh + temp_diff - self.tirs1) / (thigh + 4 - (tlow - 4))
[ "def", "land_temp_prob", "(", "self", ",", "tlow", ",", "thigh", ")", ":", "temp_diff", "=", "4", "# degrees", "return", "(", "thigh", "+", "temp_diff", "-", "self", ".", "tirs1", ")", "/", "(", "thigh", "+", "4", "-", "(", "tlow", "-", "4", ")", ")" ]
Temperature-based probability of cloud over land Equation 14 (Zhu and Woodcock, 2012) Parameters ---------- tirs1: ndarray tlow: float Low (17.5 percentile) temperature of land thigh: float High (82.5 percentile) temperature of land Output ------ ndarray : probability of cloud over land based on temperature
[ "Temperature", "-", "based", "probability", "of", "cloud", "over", "land", "Equation", "14", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "tirs1", ":", "ndarray", "tlow", ":", "float", "Low", "(", "17", ".", "5", "percentile", ")", "temperature", "of", "land", "thigh", ":", "float", "High", "(", "82", ".", "5", "percentile", ")", "temperature", "of", "land", "Output", "------", "ndarray", ":", "probability", "of", "cloud", "over", "land", "based", "on", "temperature" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L347-L363
dgketchum/satellite_image
sat_image/fmask.py
Fmask.variability_prob
def variability_prob(self, whiteness): """Use the probability of the spectral variability to identify clouds over land. Equation 15 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray whiteness: ndarray Output ------ ndarray : probability of cloud over land based on variability """ if self.sat in ['LT5', 'LE7']: # check for green and red saturation # if red is saturated and less than nir, ndvi = LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF mod_ndvi = np.where(self.red_saturated & (self.nir > self.red), 0, self.ndvi) # if green is saturated and less than swir1, ndsi = LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF mod_ndsi = np.where(self.green_saturated & (self.swir1 > self.green), 0, self.ndsi) ndi_max = np.fmax(np.absolute(mod_ndvi), np.absolute(mod_ndsi)) else: ndi_max = np.fmax(np.absolute(self.ndvi), np.absolute(self.ndsi)) f_max = 1.0 - np.fmax(ndi_max, whiteness) return f_max
python
def variability_prob(self, whiteness): """Use the probability of the spectral variability to identify clouds over land. Equation 15 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray whiteness: ndarray Output ------ ndarray : probability of cloud over land based on variability """ if self.sat in ['LT5', 'LE7']: # check for green and red saturation # if red is saturated and less than nir, ndvi = LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF mod_ndvi = np.where(self.red_saturated & (self.nir > self.red), 0, self.ndvi) # if green is saturated and less than swir1, ndsi = LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF mod_ndsi = np.where(self.green_saturated & (self.swir1 > self.green), 0, self.ndsi) ndi_max = np.fmax(np.absolute(mod_ndvi), np.absolute(mod_ndsi)) else: ndi_max = np.fmax(np.absolute(self.ndvi), np.absolute(self.ndsi)) f_max = 1.0 - np.fmax(ndi_max, whiteness) return f_max
[ "def", "variability_prob", "(", "self", ",", "whiteness", ")", ":", "if", "self", ".", "sat", "in", "[", "'LT5'", ",", "'LE7'", "]", ":", "# check for green and red saturation", "# if red is saturated and less than nir, ndvi = LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF", "mod_ndvi", "=", "np", ".", "where", "(", "self", ".", "red_saturated", "&", "(", "self", ".", "nir", ">", "self", ".", "red", ")", ",", "0", ",", "self", ".", "ndvi", ")", "# if green is saturated and less than swir1, ndsi = LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF", "mod_ndsi", "=", "np", ".", "where", "(", "self", ".", "green_saturated", "&", "(", "self", ".", "swir1", ">", "self", ".", "green", ")", ",", "0", ",", "self", ".", "ndsi", ")", "ndi_max", "=", "np", ".", "fmax", "(", "np", ".", "absolute", "(", "mod_ndvi", ")", ",", "np", ".", "absolute", "(", "mod_ndsi", ")", ")", "else", ":", "ndi_max", "=", "np", ".", "fmax", "(", "np", ".", "absolute", "(", "self", ".", "ndvi", ")", ",", "np", ".", "absolute", "(", "self", ".", "ndsi", ")", ")", "f_max", "=", "1.0", "-", "np", ".", "fmax", "(", "ndi_max", ",", "whiteness", ")", "return", "f_max" ]
Use the probability of the spectral variability to identify clouds over land. Equation 15 (Zhu and Woodcock, 2012) Parameters ---------- ndvi: ndarray ndsi: ndarray whiteness: ndarray Output ------ ndarray : probability of cloud over land based on variability
[ "Use", "the", "probability", "of", "the", "spectral", "variability", "to", "identify", "clouds", "over", "land", ".", "Equation", "15", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "ndvi", ":", "ndarray", "ndsi", ":", "ndarray", "whiteness", ":", "ndarray", "Output", "------", "ndarray", ":", "probability", "of", "cloud", "over", "land", "based", "on", "variability" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L365-L395
dgketchum/satellite_image
sat_image/fmask.py
Fmask.land_threshold
def land_threshold(self, land_cloud_prob, pcps, water): """Dynamic threshold for determining cloud cutoff Equation 17 (Zhu and Woodcock, 2012) Parameters ---------- land_cloud_prob: ndarray probability of cloud over land pcps: ndarray potential cloud pixels water: ndarray water mask Output ------ float: land cloud threshold """ # eq 12 clearsky_land = ~(pcps | water) # 82.5th percentile of lCloud_Prob(masked by clearsky_land) + LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF.2 cloud_prob = land_cloud_prob.copy() cloud_prob[~clearsky_land] = np.nan cloud_prob[~self.mask] = np.nan # eq 17 th_const = 0.2 return np.nanpercentile(cloud_prob, 82.5) + th_const
python
def land_threshold(self, land_cloud_prob, pcps, water): """Dynamic threshold for determining cloud cutoff Equation 17 (Zhu and Woodcock, 2012) Parameters ---------- land_cloud_prob: ndarray probability of cloud over land pcps: ndarray potential cloud pixels water: ndarray water mask Output ------ float: land cloud threshold """ # eq 12 clearsky_land = ~(pcps | water) # 82.5th percentile of lCloud_Prob(masked by clearsky_land) + LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF.2 cloud_prob = land_cloud_prob.copy() cloud_prob[~clearsky_land] = np.nan cloud_prob[~self.mask] = np.nan # eq 17 th_const = 0.2 return np.nanpercentile(cloud_prob, 82.5) + th_const
[ "def", "land_threshold", "(", "self", ",", "land_cloud_prob", ",", "pcps", ",", "water", ")", ":", "# eq 12", "clearsky_land", "=", "~", "(", "pcps", "|", "water", ")", "# 82.5th percentile of lCloud_Prob(masked by clearsky_land) + LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF.2", "cloud_prob", "=", "land_cloud_prob", ".", "copy", "(", ")", "cloud_prob", "[", "~", "clearsky_land", "]", "=", "np", ".", "nan", "cloud_prob", "[", "~", "self", ".", "mask", "]", "=", "np", ".", "nan", "# eq 17", "th_const", "=", "0.2", "return", "np", ".", "nanpercentile", "(", "cloud_prob", ",", "82.5", ")", "+", "th_const" ]
Dynamic threshold for determining cloud cutoff Equation 17 (Zhu and Woodcock, 2012) Parameters ---------- land_cloud_prob: ndarray probability of cloud over land pcps: ndarray potential cloud pixels water: ndarray water mask Output ------ float: land cloud threshold
[ "Dynamic", "threshold", "for", "determining", "cloud", "cutoff", "Equation", "17", "(", "Zhu", "and", "Woodcock", "2012", ")", "Parameters", "----------", "land_cloud_prob", ":", "ndarray", "probability", "of", "cloud", "over", "land", "pcps", ":", "ndarray", "potential", "cloud", "pixels", "water", ":", "ndarray", "water", "mask", "Output", "------", "float", ":", "land", "cloud", "threshold" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L397-L423
dgketchum/satellite_image
sat_image/fmask.py
Fmask.potential_cloud_layer
def potential_cloud_layer(self, pcp, water, tlow, land_cloud_prob, land_threshold, water_cloud_prob, water_threshold=0.5): """Final step of determining potential cloud layer Equation 18 (Zhu and Woodcock, 2012) Saturation (green or red) test is not in the algorithm Parameters ---------- pcps: ndarray potential cloud pixels water: ndarray water mask tirs1: ndarray tlow: float low percentile of land temperature land_cloud_prob: ndarray probability of cloud over land land_threshold: float cutoff for cloud over land water_cloud_prob: ndarray probability of cloud over water water_threshold: float cutoff for cloud over water Output ------ ndarray: potential cloud layer, boolean """ # Using pcp and water as mask todo # change water threshold to dynamic, line 132 in Zhu, 2015 todo part1 = (pcp & water & (water_cloud_prob > water_threshold)) part2 = (pcp & ~water & (land_cloud_prob > land_threshold)) temptest = self.tirs1 < (tlow - 35) # 35degrees C colder if self.sat in ['LT5', 'LE7']: saturation = self.blue_saturated | self.green_saturated | self.red_saturated return part1 | part2 | temptest | saturation else: return part1 | part2 | temptest
python
def potential_cloud_layer(self, pcp, water, tlow, land_cloud_prob, land_threshold, water_cloud_prob, water_threshold=0.5): """Final step of determining potential cloud layer Equation 18 (Zhu and Woodcock, 2012) Saturation (green or red) test is not in the algorithm Parameters ---------- pcps: ndarray potential cloud pixels water: ndarray water mask tirs1: ndarray tlow: float low percentile of land temperature land_cloud_prob: ndarray probability of cloud over land land_threshold: float cutoff for cloud over land water_cloud_prob: ndarray probability of cloud over water water_threshold: float cutoff for cloud over water Output ------ ndarray: potential cloud layer, boolean """ # Using pcp and water as mask todo # change water threshold to dynamic, line 132 in Zhu, 2015 todo part1 = (pcp & water & (water_cloud_prob > water_threshold)) part2 = (pcp & ~water & (land_cloud_prob > land_threshold)) temptest = self.tirs1 < (tlow - 35) # 35degrees C colder if self.sat in ['LT5', 'LE7']: saturation = self.blue_saturated | self.green_saturated | self.red_saturated return part1 | part2 | temptest | saturation else: return part1 | part2 | temptest
[ "def", "potential_cloud_layer", "(", "self", ",", "pcp", ",", "water", ",", "tlow", ",", "land_cloud_prob", ",", "land_threshold", ",", "water_cloud_prob", ",", "water_threshold", "=", "0.5", ")", ":", "# Using pcp and water as mask todo", "# change water threshold to dynamic, line 132 in Zhu, 2015 todo", "part1", "=", "(", "pcp", "&", "water", "&", "(", "water_cloud_prob", ">", "water_threshold", ")", ")", "part2", "=", "(", "pcp", "&", "~", "water", "&", "(", "land_cloud_prob", ">", "land_threshold", ")", ")", "temptest", "=", "self", ".", "tirs1", "<", "(", "tlow", "-", "35", ")", "# 35degrees C colder", "if", "self", ".", "sat", "in", "[", "'LT5'", ",", "'LE7'", "]", ":", "saturation", "=", "self", ".", "blue_saturated", "|", "self", ".", "green_saturated", "|", "self", ".", "red_saturated", "return", "part1", "|", "part2", "|", "temptest", "|", "saturation", "else", ":", "return", "part1", "|", "part2", "|", "temptest" ]
Final step of determining potential cloud layer Equation 18 (Zhu and Woodcock, 2012) Saturation (green or red) test is not in the algorithm Parameters ---------- pcps: ndarray potential cloud pixels water: ndarray water mask tirs1: ndarray tlow: float low percentile of land temperature land_cloud_prob: ndarray probability of cloud over land land_threshold: float cutoff for cloud over land water_cloud_prob: ndarray probability of cloud over water water_threshold: float cutoff for cloud over water Output ------ ndarray: potential cloud layer, boolean
[ "Final", "step", "of", "determining", "potential", "cloud", "layer", "Equation", "18", "(", "Zhu", "and", "Woodcock", "2012", ")", "Saturation", "(", "green", "or", "red", ")", "test", "is", "not", "in", "the", "algorithm", "Parameters", "----------", "pcps", ":", "ndarray", "potential", "cloud", "pixels", "water", ":", "ndarray", "water", "mask", "tirs1", ":", "ndarray", "tlow", ":", "float", "low", "percentile", "of", "land", "temperature", "land_cloud_prob", ":", "ndarray", "probability", "of", "cloud", "over", "land", "land_threshold", ":", "float", "cutoff", "for", "cloud", "over", "land", "water_cloud_prob", ":", "ndarray", "probability", "of", "cloud", "over", "water", "water_threshold", ":", "float", "cutoff", "for", "cloud", "over", "water", "Output", "------", "ndarray", ":", "potential", "cloud", "layer", "boolean" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L425-L466
dgketchum/satellite_image
sat_image/fmask.py
Fmask.potential_snow_layer
def potential_snow_layer(self): """Spectral test to determine potential snow Uses the 9.85C (283K) threshold defined in Zhu, Woodcock 2015 Parameters ---------- ndsi: ndarray green: ndarray nir: ndarray tirs1: ndarray Output ------ ndarray: boolean, True is potential snow """ return (self.ndsi > 0.15) & (self.tirs1 < 9.85) & (self.nir > 0.11) & (self.green > 0.1)
python
def potential_snow_layer(self): """Spectral test to determine potential snow Uses the 9.85C (283K) threshold defined in Zhu, Woodcock 2015 Parameters ---------- ndsi: ndarray green: ndarray nir: ndarray tirs1: ndarray Output ------ ndarray: boolean, True is potential snow """ return (self.ndsi > 0.15) & (self.tirs1 < 9.85) & (self.nir > 0.11) & (self.green > 0.1)
[ "def", "potential_snow_layer", "(", "self", ")", ":", "return", "(", "self", ".", "ndsi", ">", "0.15", ")", "&", "(", "self", ".", "tirs1", "<", "9.85", ")", "&", "(", "self", ".", "nir", ">", "0.11", ")", "&", "(", "self", ".", "green", ">", "0.1", ")" ]
Spectral test to determine potential snow Uses the 9.85C (283K) threshold defined in Zhu, Woodcock 2015 Parameters ---------- ndsi: ndarray green: ndarray nir: ndarray tirs1: ndarray Output ------ ndarray: boolean, True is potential snow
[ "Spectral", "test", "to", "determine", "potential", "snow", "Uses", "the", "9", ".", "85C", "(", "283K", ")", "threshold", "defined", "in", "Zhu", "Woodcock", "2015", "Parameters", "----------", "ndsi", ":", "ndarray", "green", ":", "ndarray", "nir", ":", "ndarray", "tirs1", ":", "ndarray", "Output", "------", "ndarray", ":", "boolean", "True", "is", "potential", "snow" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L484-L498
dgketchum/satellite_image
sat_image/fmask.py
Fmask.cloud_mask
def cloud_mask(self, min_filter=(3, 3), max_filter=(10, 10), combined=False, cloud_and_shadow=False): """Calculate the potential cloud layer from source data *This is the high level function which ties together all the equations for generating potential clouds* Parameters ---------- blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray min_filter: 2-element tuple, default=(3,3) Defines the window for the minimum_filter, for removing outliers max_filter: 2-element tuple, default=(21, 21) Defines the window for the maximum_filter, for "buffering" the edges combined: make a boolean array masking all (cloud, shadow, water) Output ------ ndarray, boolean: potential cloud layer; True = cloud ndarray, boolean potential cloud shadow layer; True = cloud shadow :param cloud_and_shadow: """ # logger.info("Running initial testsr") whiteness = self.whiteness_index() water = self.water_test() # First pass, potential clouds pcps = self.potential_cloud_pixels() if self.sat == 'LC8': cirrus_prob = self.cirrus / 0.04 else: cirrus_prob = 0.0 # Clouds over water wtp = self.water_temp_prob() bp = self.brightness_prob() water_cloud_prob = (wtp * bp) + cirrus_prob wthreshold = 0.5 # Clouds over land tlow, thigh = self.temp_land(pcps, water) ltp = self.land_temp_prob(tlow, thigh) vp = self.variability_prob(whiteness) land_cloud_prob = (ltp * vp) + cirrus_prob lthreshold = self.land_threshold(land_cloud_prob, pcps, water) # logger.info("Calculate potential clouds") pcloud = self.potential_cloud_layer( pcps, water, tlow, land_cloud_prob, lthreshold, water_cloud_prob, wthreshold) # Ignoring snow for now as it exhibits many false positives and negatives # when used as a binary mask # psnow = potential_snow_layer(ndsi, green, nir, tirs1) # pcloud = pcloud & ~psnow # logger.info("Calculate potential cloud shadows") pshadow = self.potential_cloud_shadow_layer(water) # The remainder of the algorithm differs significantly from Fmask # 
In an attempt to make a more visually appealling cloud mask # with fewer inclusions and more broad shapes if min_filter: # Remove outliers # logger.info("Remove outliers with minimum filter") from scipy.ndimage.filters import minimum_filter from scipy.ndimage.morphology import distance_transform_edt # remove cloud outliers by nibbling the edges pcloud = minimum_filter(pcloud, size=min_filter) # crude, just look x pixels away for potential cloud pixels dist = distance_transform_edt(~pcloud) pixel_radius = 100.0 pshadow = (dist < pixel_radius) & pshadow # remove cloud shadow outliers pshadow = minimum_filter(pshadow, size=min_filter) if max_filter: # grow around the edges # logger.info("Buffer edges with maximum filter") from scipy.ndimage.filters import maximum_filter pcloud = maximum_filter(pcloud, size=max_filter) pshadow = maximum_filter(pshadow, size=max_filter) # mystery, save pcloud here, shows no nan in qgis, save later, shows nan # outfile = '/data01/images/sandbox/pcloud.tif' # georeference = self.sat_image.rasterio_geometry # array = pcloud # array = array.reshape(1, array.shape[LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF], array.shape[1]) # array = np.array(array, dtype=georeference['dtype']) # with rasterio.open(outfile, 'w', **georeference) as dst: # dst.write(array) # mystery test if combined: return pcloud | pshadow | water if cloud_and_shadow: return pcloud | pshadow return pcloud, pshadow, water
python
def cloud_mask(self, min_filter=(3, 3), max_filter=(10, 10), combined=False, cloud_and_shadow=False): """Calculate the potential cloud layer from source data *This is the high level function which ties together all the equations for generating potential clouds* Parameters ---------- blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray min_filter: 2-element tuple, default=(3,3) Defines the window for the minimum_filter, for removing outliers max_filter: 2-element tuple, default=(21, 21) Defines the window for the maximum_filter, for "buffering" the edges combined: make a boolean array masking all (cloud, shadow, water) Output ------ ndarray, boolean: potential cloud layer; True = cloud ndarray, boolean potential cloud shadow layer; True = cloud shadow :param cloud_and_shadow: """ # logger.info("Running initial testsr") whiteness = self.whiteness_index() water = self.water_test() # First pass, potential clouds pcps = self.potential_cloud_pixels() if self.sat == 'LC8': cirrus_prob = self.cirrus / 0.04 else: cirrus_prob = 0.0 # Clouds over water wtp = self.water_temp_prob() bp = self.brightness_prob() water_cloud_prob = (wtp * bp) + cirrus_prob wthreshold = 0.5 # Clouds over land tlow, thigh = self.temp_land(pcps, water) ltp = self.land_temp_prob(tlow, thigh) vp = self.variability_prob(whiteness) land_cloud_prob = (ltp * vp) + cirrus_prob lthreshold = self.land_threshold(land_cloud_prob, pcps, water) # logger.info("Calculate potential clouds") pcloud = self.potential_cloud_layer( pcps, water, tlow, land_cloud_prob, lthreshold, water_cloud_prob, wthreshold) # Ignoring snow for now as it exhibits many false positives and negatives # when used as a binary mask # psnow = potential_snow_layer(ndsi, green, nir, tirs1) # pcloud = pcloud & ~psnow # logger.info("Calculate potential cloud shadows") pshadow = self.potential_cloud_shadow_layer(water) # The remainder of the algorithm differs significantly from Fmask # 
In an attempt to make a more visually appealling cloud mask # with fewer inclusions and more broad shapes if min_filter: # Remove outliers # logger.info("Remove outliers with minimum filter") from scipy.ndimage.filters import minimum_filter from scipy.ndimage.morphology import distance_transform_edt # remove cloud outliers by nibbling the edges pcloud = minimum_filter(pcloud, size=min_filter) # crude, just look x pixels away for potential cloud pixels dist = distance_transform_edt(~pcloud) pixel_radius = 100.0 pshadow = (dist < pixel_radius) & pshadow # remove cloud shadow outliers pshadow = minimum_filter(pshadow, size=min_filter) if max_filter: # grow around the edges # logger.info("Buffer edges with maximum filter") from scipy.ndimage.filters import maximum_filter pcloud = maximum_filter(pcloud, size=max_filter) pshadow = maximum_filter(pshadow, size=max_filter) # mystery, save pcloud here, shows no nan in qgis, save later, shows nan # outfile = '/data01/images/sandbox/pcloud.tif' # georeference = self.sat_image.rasterio_geometry # array = pcloud # array = array.reshape(1, array.shape[LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF], array.shape[1]) # array = np.array(array, dtype=georeference['dtype']) # with rasterio.open(outfile, 'w', **georeference) as dst: # dst.write(array) # mystery test if combined: return pcloud | pshadow | water if cloud_and_shadow: return pcloud | pshadow return pcloud, pshadow, water
[ "def", "cloud_mask", "(", "self", ",", "min_filter", "=", "(", "3", ",", "3", ")", ",", "max_filter", "=", "(", "10", ",", "10", ")", ",", "combined", "=", "False", ",", "cloud_and_shadow", "=", "False", ")", ":", "# logger.info(\"Running initial testsr\")", "whiteness", "=", "self", ".", "whiteness_index", "(", ")", "water", "=", "self", ".", "water_test", "(", ")", "# First pass, potential clouds", "pcps", "=", "self", ".", "potential_cloud_pixels", "(", ")", "if", "self", ".", "sat", "==", "'LC8'", ":", "cirrus_prob", "=", "self", ".", "cirrus", "/", "0.04", "else", ":", "cirrus_prob", "=", "0.0", "# Clouds over water", "wtp", "=", "self", ".", "water_temp_prob", "(", ")", "bp", "=", "self", ".", "brightness_prob", "(", ")", "water_cloud_prob", "=", "(", "wtp", "*", "bp", ")", "+", "cirrus_prob", "wthreshold", "=", "0.5", "# Clouds over land", "tlow", ",", "thigh", "=", "self", ".", "temp_land", "(", "pcps", ",", "water", ")", "ltp", "=", "self", ".", "land_temp_prob", "(", "tlow", ",", "thigh", ")", "vp", "=", "self", ".", "variability_prob", "(", "whiteness", ")", "land_cloud_prob", "=", "(", "ltp", "*", "vp", ")", "+", "cirrus_prob", "lthreshold", "=", "self", ".", "land_threshold", "(", "land_cloud_prob", ",", "pcps", ",", "water", ")", "# logger.info(\"Calculate potential clouds\")", "pcloud", "=", "self", ".", "potential_cloud_layer", "(", "pcps", ",", "water", ",", "tlow", ",", "land_cloud_prob", ",", "lthreshold", ",", "water_cloud_prob", ",", "wthreshold", ")", "# Ignoring snow for now as it exhibits many false positives and negatives", "# when used as a binary mask", "# psnow = potential_snow_layer(ndsi, green, nir, tirs1)", "# pcloud = pcloud & ~psnow", "# logger.info(\"Calculate potential cloud shadows\")", "pshadow", "=", "self", ".", "potential_cloud_shadow_layer", "(", "water", ")", "# The remainder of the algorithm differs significantly from Fmask", "# In an attempt to make a more visually appealling cloud mask", "# with fewer inclusions and 
more broad shapes", "if", "min_filter", ":", "# Remove outliers", "# logger.info(\"Remove outliers with minimum filter\")", "from", "scipy", ".", "ndimage", ".", "filters", "import", "minimum_filter", "from", "scipy", ".", "ndimage", ".", "morphology", "import", "distance_transform_edt", "# remove cloud outliers by nibbling the edges", "pcloud", "=", "minimum_filter", "(", "pcloud", ",", "size", "=", "min_filter", ")", "# crude, just look x pixels away for potential cloud pixels", "dist", "=", "distance_transform_edt", "(", "~", "pcloud", ")", "pixel_radius", "=", "100.0", "pshadow", "=", "(", "dist", "<", "pixel_radius", ")", "&", "pshadow", "# remove cloud shadow outliers", "pshadow", "=", "minimum_filter", "(", "pshadow", ",", "size", "=", "min_filter", ")", "if", "max_filter", ":", "# grow around the edges", "# logger.info(\"Buffer edges with maximum filter\")", "from", "scipy", ".", "ndimage", ".", "filters", "import", "maximum_filter", "pcloud", "=", "maximum_filter", "(", "pcloud", ",", "size", "=", "max_filter", ")", "pshadow", "=", "maximum_filter", "(", "pshadow", ",", "size", "=", "max_filter", ")", "# mystery, save pcloud here, shows no nan in qgis, save later, shows nan", "# outfile = '/data01/images/sandbox/pcloud.tif'", "# georeference = self.sat_image.rasterio_geometry", "# array = pcloud", "# array = array.reshape(1, array.shape[LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF], array.shape[1])", "# array = np.array(array, dtype=georeference['dtype'])", "# with rasterio.open(outfile, 'w', **georeference) as dst:", "# dst.write(array)", "# mystery test", "if", "combined", ":", "return", "pcloud", "|", "pshadow", "|", "water", "if", "cloud_and_shadow", ":", "return", "pcloud", "|", "pshadow", "return", "pcloud", ",", "pshadow", ",", "water" ]
Calculate the potential cloud layer from source data *This is the high level function which ties together all the equations for generating potential clouds* Parameters ---------- blue: ndarray green: ndarray red: ndarray nir: ndarray swir1: ndarray swir2: ndarray cirrus: ndarray tirs1: ndarray min_filter: 2-element tuple, default=(3,3) Defines the window for the minimum_filter, for removing outliers max_filter: 2-element tuple, default=(21, 21) Defines the window for the maximum_filter, for "buffering" the edges combined: make a boolean array masking all (cloud, shadow, water) Output ------ ndarray, boolean: potential cloud layer; True = cloud ndarray, boolean potential cloud shadow layer; True = cloud shadow :param cloud_and_shadow:
[ "Calculate", "the", "potential", "cloud", "layer", "from", "source", "data", "*", "This", "is", "the", "high", "level", "function", "which", "ties", "together", "all", "the", "equations", "for", "generating", "potential", "clouds", "*", "Parameters", "----------", "blue", ":", "ndarray", "green", ":", "ndarray", "red", ":", "ndarray", "nir", ":", "ndarray", "swir1", ":", "ndarray", "swir2", ":", "ndarray", "cirrus", ":", "ndarray", "tirs1", ":", "ndarray", "min_filter", ":", "2", "-", "element", "tuple", "default", "=", "(", "3", "3", ")", "Defines", "the", "window", "for", "the", "minimum_filter", "for", "removing", "outliers", "max_filter", ":", "2", "-", "element", "tuple", "default", "=", "(", "21", "21", ")", "Defines", "the", "window", "for", "the", "maximum_filter", "for", "buffering", "the", "edges", "combined", ":", "make", "a", "boolean", "array", "masking", "all", "(", "cloud", "shadow", "water", ")", "Output", "------", "ndarray", "boolean", ":", "potential", "cloud", "layer", ";", "True", "=", "cloud", "ndarray", "boolean", "potential", "cloud", "shadow", "layer", ";", "True", "=", "cloud", "shadow", ":", "param", "cloud_and_shadow", ":" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L500-L612
dgketchum/satellite_image
sat_image/fmask.py
Fmask.gdal_nodata_mask
def gdal_nodata_mask(pcl, pcsl, tirs_arr): """ Given a boolean potential cloud layer, a potential cloud shadow layer and a thermal band Calculate the GDAL-style uint8 mask """ tirs_mask = np.isnan(tirs_arr) | (tirs_arr == 0) return ((~(pcl | pcsl | tirs_mask)) * 255).astype('uint8')
python
def gdal_nodata_mask(pcl, pcsl, tirs_arr): """ Given a boolean potential cloud layer, a potential cloud shadow layer and a thermal band Calculate the GDAL-style uint8 mask """ tirs_mask = np.isnan(tirs_arr) | (tirs_arr == 0) return ((~(pcl | pcsl | tirs_mask)) * 255).astype('uint8')
[ "def", "gdal_nodata_mask", "(", "pcl", ",", "pcsl", ",", "tirs_arr", ")", ":", "tirs_mask", "=", "np", ".", "isnan", "(", "tirs_arr", ")", "|", "(", "tirs_arr", "==", "0", ")", "return", "(", "(", "~", "(", "pcl", "|", "pcsl", "|", "tirs_mask", ")", ")", "*", "255", ")", ".", "astype", "(", "'uint8'", ")" ]
Given a boolean potential cloud layer, a potential cloud shadow layer and a thermal band Calculate the GDAL-style uint8 mask
[ "Given", "a", "boolean", "potential", "cloud", "layer", "a", "potential", "cloud", "shadow", "layer", "and", "a", "thermal", "band", "Calculate", "the", "GDAL", "-", "style", "uint8", "mask" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/fmask.py#L634-L641
dgketchum/satellite_image
sat_image/mtl.py
parsemeta
def parsemeta(metadataloc): """Parses the metadata from a Landsat image bundle. Arguments: metadataloc: a filename or a directory. Returns metadata dictionary """ # filename or directory? if several fit, use first one and warn if os.path.isdir(metadataloc): metalist = glob.glob(os.path.join(metadataloc, METAPATTERN)) if not metalist: raise MTLParseError( "No files matching metadata file pattern in directory %s." % metadataloc) elif len(metalist) > 0: metadatafn = metalist[0] filehandle = open(metadatafn, 'r') if len(metalist) > 1: logging.warning( "More than one file in directory match metadata " + "file pattern. Using %s." % metadatafn) elif os.path.isfile(metadataloc): metadatafn = metadataloc filehandle = open(metadatafn, 'r') logging.info("Using file %s." % metadatafn) elif 'L1_METADATA_FILE' in metadataloc: filehandle = StringIO(metadataloc) else: raise MTLParseError( "File location %s is unavailable " % metadataloc + "or doesn't contain a suitable metadata file.") # Reading file line by line and inserting data into metadata dictionary status = 0 metadata = {} grouppath = [] dictpath = [metadata] for line in filehandle: if status == 4: # we reached the end in the previous iteration, # but are still reading lines logging.warning( "Metadata file %s appears to " % metadatafn + "have extra lines after the end of the metadata. " + "This is probably, but not necessarily, harmless.") status = _checkstatus(status, line) grouppath, dictpath = _transstat(status, grouppath, dictpath, line) return metadata
python
def parsemeta(metadataloc): """Parses the metadata from a Landsat image bundle. Arguments: metadataloc: a filename or a directory. Returns metadata dictionary """ # filename or directory? if several fit, use first one and warn if os.path.isdir(metadataloc): metalist = glob.glob(os.path.join(metadataloc, METAPATTERN)) if not metalist: raise MTLParseError( "No files matching metadata file pattern in directory %s." % metadataloc) elif len(metalist) > 0: metadatafn = metalist[0] filehandle = open(metadatafn, 'r') if len(metalist) > 1: logging.warning( "More than one file in directory match metadata " + "file pattern. Using %s." % metadatafn) elif os.path.isfile(metadataloc): metadatafn = metadataloc filehandle = open(metadatafn, 'r') logging.info("Using file %s." % metadatafn) elif 'L1_METADATA_FILE' in metadataloc: filehandle = StringIO(metadataloc) else: raise MTLParseError( "File location %s is unavailable " % metadataloc + "or doesn't contain a suitable metadata file.") # Reading file line by line and inserting data into metadata dictionary status = 0 metadata = {} grouppath = [] dictpath = [metadata] for line in filehandle: if status == 4: # we reached the end in the previous iteration, # but are still reading lines logging.warning( "Metadata file %s appears to " % metadatafn + "have extra lines after the end of the metadata. " + "This is probably, but not necessarily, harmless.") status = _checkstatus(status, line) grouppath, dictpath = _transstat(status, grouppath, dictpath, line) return metadata
[ "def", "parsemeta", "(", "metadataloc", ")", ":", "# filename or directory? if several fit, use first one and warn", "if", "os", ".", "path", ".", "isdir", "(", "metadataloc", ")", ":", "metalist", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "metadataloc", ",", "METAPATTERN", ")", ")", "if", "not", "metalist", ":", "raise", "MTLParseError", "(", "\"No files matching metadata file pattern in directory %s.\"", "%", "metadataloc", ")", "elif", "len", "(", "metalist", ")", ">", "0", ":", "metadatafn", "=", "metalist", "[", "0", "]", "filehandle", "=", "open", "(", "metadatafn", ",", "'r'", ")", "if", "len", "(", "metalist", ")", ">", "1", ":", "logging", ".", "warning", "(", "\"More than one file in directory match metadata \"", "+", "\"file pattern. Using %s.\"", "%", "metadatafn", ")", "elif", "os", ".", "path", ".", "isfile", "(", "metadataloc", ")", ":", "metadatafn", "=", "metadataloc", "filehandle", "=", "open", "(", "metadatafn", ",", "'r'", ")", "logging", ".", "info", "(", "\"Using file %s.\"", "%", "metadatafn", ")", "elif", "'L1_METADATA_FILE'", "in", "metadataloc", ":", "filehandle", "=", "StringIO", "(", "metadataloc", ")", "else", ":", "raise", "MTLParseError", "(", "\"File location %s is unavailable \"", "%", "metadataloc", "+", "\"or doesn't contain a suitable metadata file.\"", ")", "# Reading file line by line and inserting data into metadata dictionary", "status", "=", "0", "metadata", "=", "{", "}", "grouppath", "=", "[", "]", "dictpath", "=", "[", "metadata", "]", "for", "line", "in", "filehandle", ":", "if", "status", "==", "4", ":", "# we reached the end in the previous iteration,", "# but are still reading lines", "logging", ".", "warning", "(", "\"Metadata file %s appears to \"", "%", "metadatafn", "+", "\"have extra lines after the end of the metadata. 
\"", "+", "\"This is probably, but not necessarily, harmless.\"", ")", "status", "=", "_checkstatus", "(", "status", ",", "line", ")", "grouppath", ",", "dictpath", "=", "_transstat", "(", "status", ",", "grouppath", ",", "dictpath", ",", "line", ")", "return", "metadata" ]
Parses the metadata from a Landsat image bundle. Arguments: metadataloc: a filename or a directory. Returns metadata dictionary
[ "Parses", "the", "metadata", "from", "a", "Landsat", "image", "bundle", ".", "Arguments", ":", "metadataloc", ":", "a", "filename", "or", "a", "directory", ".", "Returns", "metadata", "dictionary" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/mtl.py#L60-L112
dgketchum/satellite_image
sat_image/mtl.py
_checkstatus
def _checkstatus(status, line): """Returns state/status after reading the next line. The status codes are:: LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF - BEGIN parsing; 1 - ENTER METADATA GROUP, 2 - READ METADATA LINE, 3 - END METDADATA GROUP, 4 - END PARSING Permitted Transitions:: LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF --> 1, LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF --> 4 1 --> 1, 1 --> 2, 1 --> 3 2 --> 2, 2 --> 3 3 --> 1, 1 --> 3, 3 --> 4 """ newstatus = 0 if status == 0: # begin --> enter metadata group OR end if _islinetype(line, GRPSTART): newstatus = 1 elif _isfinal(line): newstatus = 4 elif status == 1: # enter metadata group --> enter metadata group # OR add metadata item OR leave metadata group if _islinetype(line, GRPSTART): newstatus = 1 elif _islinetype(line, GRPEND): newstatus = 3 elif _isassignment(line): # test AFTER start and end, as both are also assignments newstatus = 2 elif status == 2: if _islinetype(line, GRPEND): newstatus = 3 elif _isassignment(line): # test AFTER start and end, as both are also assignments newstatus = 2 elif status == 3: if _islinetype(line, GRPSTART): newstatus = 1 elif _islinetype(line, GRPEND): newstatus = 3 elif _isfinal(line): newstatus = 4 if newstatus != 0: return newstatus elif status != 4: raise MTLParseError( "Cannot parse the following line after status " + "'%s':\n%s" % (STATUSCODE[status], line))
python
def _checkstatus(status, line): """Returns state/status after reading the next line. The status codes are:: LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF - BEGIN parsing; 1 - ENTER METADATA GROUP, 2 - READ METADATA LINE, 3 - END METDADATA GROUP, 4 - END PARSING Permitted Transitions:: LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF --> 1, LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF --> 4 1 --> 1, 1 --> 2, 1 --> 3 2 --> 2, 2 --> 3 3 --> 1, 1 --> 3, 3 --> 4 """ newstatus = 0 if status == 0: # begin --> enter metadata group OR end if _islinetype(line, GRPSTART): newstatus = 1 elif _isfinal(line): newstatus = 4 elif status == 1: # enter metadata group --> enter metadata group # OR add metadata item OR leave metadata group if _islinetype(line, GRPSTART): newstatus = 1 elif _islinetype(line, GRPEND): newstatus = 3 elif _isassignment(line): # test AFTER start and end, as both are also assignments newstatus = 2 elif status == 2: if _islinetype(line, GRPEND): newstatus = 3 elif _isassignment(line): # test AFTER start and end, as both are also assignments newstatus = 2 elif status == 3: if _islinetype(line, GRPSTART): newstatus = 1 elif _islinetype(line, GRPEND): newstatus = 3 elif _isfinal(line): newstatus = 4 if newstatus != 0: return newstatus elif status != 4: raise MTLParseError( "Cannot parse the following line after status " + "'%s':\n%s" % (STATUSCODE[status], line))
[ "def", "_checkstatus", "(", "status", ",", "line", ")", ":", "newstatus", "=", "0", "if", "status", "==", "0", ":", "# begin --> enter metadata group OR end", "if", "_islinetype", "(", "line", ",", "GRPSTART", ")", ":", "newstatus", "=", "1", "elif", "_isfinal", "(", "line", ")", ":", "newstatus", "=", "4", "elif", "status", "==", "1", ":", "# enter metadata group --> enter metadata group", "# OR add metadata item OR leave metadata group", "if", "_islinetype", "(", "line", ",", "GRPSTART", ")", ":", "newstatus", "=", "1", "elif", "_islinetype", "(", "line", ",", "GRPEND", ")", ":", "newstatus", "=", "3", "elif", "_isassignment", "(", "line", ")", ":", "# test AFTER start and end, as both are also assignments", "newstatus", "=", "2", "elif", "status", "==", "2", ":", "if", "_islinetype", "(", "line", ",", "GRPEND", ")", ":", "newstatus", "=", "3", "elif", "_isassignment", "(", "line", ")", ":", "# test AFTER start and end, as both are also assignments", "newstatus", "=", "2", "elif", "status", "==", "3", ":", "if", "_islinetype", "(", "line", ",", "GRPSTART", ")", ":", "newstatus", "=", "1", "elif", "_islinetype", "(", "line", ",", "GRPEND", ")", ":", "newstatus", "=", "3", "elif", "_isfinal", "(", "line", ")", ":", "newstatus", "=", "4", "if", "newstatus", "!=", "0", ":", "return", "newstatus", "elif", "status", "!=", "4", ":", "raise", "MTLParseError", "(", "\"Cannot parse the following line after status \"", "+", "\"'%s':\\n%s\"", "%", "(", "STATUSCODE", "[", "status", "]", ",", "line", ")", ")" ]
Returns state/status after reading the next line. The status codes are:: LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF - BEGIN parsing; 1 - ENTER METADATA GROUP, 2 - READ METADATA LINE, 3 - END METDADATA GROUP, 4 - END PARSING Permitted Transitions:: LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF --> 1, LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1.TIF --> 4 1 --> 1, 1 --> 2, 1 --> 3 2 --> 2, 2 --> 3 3 --> 1, 1 --> 3, 3 --> 4
[ "Returns", "state", "/", "status", "after", "reading", "the", "next", "line", ".", "The", "status", "codes", "are", "::", "LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1", ".", "TIF", "-", "BEGIN", "parsing", ";", "1", "-", "ENTER", "METADATA", "GROUP", "2", "-", "READ", "METADATA", "LINE", "3", "-", "END", "METDADATA", "GROUP", "4", "-", "END", "PARSING", "Permitted", "Transitions", "::", "LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1", ".", "TIF", "--", ">", "1", "LE07_clip_L1TP_039027_20150529_20160902_01_T1_B1", ".", "TIF", "--", ">", "4", "1", "--", ">", "1", "1", "--", ">", "2", "1", "--", ">", "3", "2", "--", ">", "2", "2", "--", ">", "3", "3", "--", ">", "1", "1", "--", ">", "3", "3", "--", ">", "4" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/mtl.py#L148-L194
dgketchum/satellite_image
sat_image/mtl.py
_transstat
def _transstat(status, grouppath, dictpath, line): """Executes processing steps when reading a line""" if status == 0: raise MTLParseError( "Status should not be '%s' after reading line:\n%s" % (STATUSCODE[status], line)) elif status == 1: currentdict = dictpath[-1] currentgroup = _getgroupname(line) grouppath.append(currentgroup) currentdict[currentgroup] = {} dictpath.append(currentdict[currentgroup]) elif status == 2: currentdict = dictpath[-1] newkey, newval = _getmetadataitem(line) # USGS has started quoting the scene center time. If this # happens strip quotes before post processing. if newkey == 'SCENE_CENTER_TIME' and newval.startswith('"') \ and newval.endswith('"'): # logging.warning('Strip quotes off SCENE_CENTER_TIME.') newval = newval[1:-1] currentdict[newkey] = _postprocess(newval) elif status == 3: oldgroup = _getendgroupname(line) if oldgroup != grouppath[-1]: raise MTLParseError( "Reached line '%s' while reading group '%s'." % (line.strip(), grouppath[-1])) del grouppath[-1] del dictpath[-1] try: currentgroup = grouppath[-1] except IndexError: currentgroup = None elif status == 4: if grouppath: raise MTLParseError( "Reached end before end of group '%s'" % grouppath[-1]) return grouppath, dictpath
python
def _transstat(status, grouppath, dictpath, line): """Executes processing steps when reading a line""" if status == 0: raise MTLParseError( "Status should not be '%s' after reading line:\n%s" % (STATUSCODE[status], line)) elif status == 1: currentdict = dictpath[-1] currentgroup = _getgroupname(line) grouppath.append(currentgroup) currentdict[currentgroup] = {} dictpath.append(currentdict[currentgroup]) elif status == 2: currentdict = dictpath[-1] newkey, newval = _getmetadataitem(line) # USGS has started quoting the scene center time. If this # happens strip quotes before post processing. if newkey == 'SCENE_CENTER_TIME' and newval.startswith('"') \ and newval.endswith('"'): # logging.warning('Strip quotes off SCENE_CENTER_TIME.') newval = newval[1:-1] currentdict[newkey] = _postprocess(newval) elif status == 3: oldgroup = _getendgroupname(line) if oldgroup != grouppath[-1]: raise MTLParseError( "Reached line '%s' while reading group '%s'." % (line.strip(), grouppath[-1])) del grouppath[-1] del dictpath[-1] try: currentgroup = grouppath[-1] except IndexError: currentgroup = None elif status == 4: if grouppath: raise MTLParseError( "Reached end before end of group '%s'" % grouppath[-1]) return grouppath, dictpath
[ "def", "_transstat", "(", "status", ",", "grouppath", ",", "dictpath", ",", "line", ")", ":", "if", "status", "==", "0", ":", "raise", "MTLParseError", "(", "\"Status should not be '%s' after reading line:\\n%s\"", "%", "(", "STATUSCODE", "[", "status", "]", ",", "line", ")", ")", "elif", "status", "==", "1", ":", "currentdict", "=", "dictpath", "[", "-", "1", "]", "currentgroup", "=", "_getgroupname", "(", "line", ")", "grouppath", ".", "append", "(", "currentgroup", ")", "currentdict", "[", "currentgroup", "]", "=", "{", "}", "dictpath", ".", "append", "(", "currentdict", "[", "currentgroup", "]", ")", "elif", "status", "==", "2", ":", "currentdict", "=", "dictpath", "[", "-", "1", "]", "newkey", ",", "newval", "=", "_getmetadataitem", "(", "line", ")", "# USGS has started quoting the scene center time. If this", "# happens strip quotes before post processing.", "if", "newkey", "==", "'SCENE_CENTER_TIME'", "and", "newval", ".", "startswith", "(", "'\"'", ")", "and", "newval", ".", "endswith", "(", "'\"'", ")", ":", "# logging.warning('Strip quotes off SCENE_CENTER_TIME.')", "newval", "=", "newval", "[", "1", ":", "-", "1", "]", "currentdict", "[", "newkey", "]", "=", "_postprocess", "(", "newval", ")", "elif", "status", "==", "3", ":", "oldgroup", "=", "_getendgroupname", "(", "line", ")", "if", "oldgroup", "!=", "grouppath", "[", "-", "1", "]", ":", "raise", "MTLParseError", "(", "\"Reached line '%s' while reading group '%s'.\"", "%", "(", "line", ".", "strip", "(", ")", ",", "grouppath", "[", "-", "1", "]", ")", ")", "del", "grouppath", "[", "-", "1", "]", "del", "dictpath", "[", "-", "1", "]", "try", ":", "currentgroup", "=", "grouppath", "[", "-", "1", "]", "except", "IndexError", ":", "currentgroup", "=", "None", "elif", "status", "==", "4", ":", "if", "grouppath", ":", "raise", "MTLParseError", "(", "\"Reached end before end of group '%s'\"", "%", "grouppath", "[", "-", "1", "]", ")", "return", "grouppath", ",", "dictpath" ]
Executes processing steps when reading a line
[ "Executes", "processing", "steps", "when", "reading", "a", "line" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/mtl.py#L198-L238
dgketchum/satellite_image
sat_image/mtl.py
_postprocess
def _postprocess(valuestr): """ Takes value as str, returns str, int, float, date, datetime, or time """ # USGS has started quoting time sometimes. Grr, strip quotes in this case intpattern = re.compile(r'^\-?\d+$') floatpattern = re.compile(r'^\-?\d+\.\d+(E[+-]?\d\d+)?$') datedtpattern = '%Y-%m-%d' datedttimepattern = '%Y-%m-%dT%H:%M:%SZ' timedtpattern = '%H:%M:%S.%f' timepattern = re.compile(r'^\d{2}:\d{2}:\d{2}(\.\d{6})?') if valuestr.startswith('"') and valuestr.endswith('"'): # it's a string return valuestr[1:-1] elif re.match(intpattern, valuestr): # it's an integer return int(valuestr) elif re.match(floatpattern, valuestr): # floating point number return float(valuestr) # now let's try the datetime objects; throws exception if it doesn't match try: return datetime.datetime.strptime(valuestr, datedtpattern).date() except ValueError: pass try: return datetime.datetime.strptime(valuestr, datedttimepattern) except ValueError: pass # time parsing is complicated: Python's datetime module only accepts # fractions of a second only up to 6 digits mat = re.match(timepattern, valuestr) if mat: test = mat.group(0) try: return datetime.datetime.strptime(test, timedtpattern).time() except ValueError: pass # If we get here, we still haven't returned anything. logging.info( "The value %s couldn't be parsed as " % valuestr + "int, float, date, time, datetime. Returning it as string.") return valuestr
python
def _postprocess(valuestr): """ Takes value as str, returns str, int, float, date, datetime, or time """ # USGS has started quoting time sometimes. Grr, strip quotes in this case intpattern = re.compile(r'^\-?\d+$') floatpattern = re.compile(r'^\-?\d+\.\d+(E[+-]?\d\d+)?$') datedtpattern = '%Y-%m-%d' datedttimepattern = '%Y-%m-%dT%H:%M:%SZ' timedtpattern = '%H:%M:%S.%f' timepattern = re.compile(r'^\d{2}:\d{2}:\d{2}(\.\d{6})?') if valuestr.startswith('"') and valuestr.endswith('"'): # it's a string return valuestr[1:-1] elif re.match(intpattern, valuestr): # it's an integer return int(valuestr) elif re.match(floatpattern, valuestr): # floating point number return float(valuestr) # now let's try the datetime objects; throws exception if it doesn't match try: return datetime.datetime.strptime(valuestr, datedtpattern).date() except ValueError: pass try: return datetime.datetime.strptime(valuestr, datedttimepattern) except ValueError: pass # time parsing is complicated: Python's datetime module only accepts # fractions of a second only up to 6 digits mat = re.match(timepattern, valuestr) if mat: test = mat.group(0) try: return datetime.datetime.strptime(test, timedtpattern).time() except ValueError: pass # If we get here, we still haven't returned anything. logging.info( "The value %s couldn't be parsed as " % valuestr + "int, float, date, time, datetime. Returning it as string.") return valuestr
[ "def", "_postprocess", "(", "valuestr", ")", ":", "# USGS has started quoting time sometimes. Grr, strip quotes in this case", "intpattern", "=", "re", ".", "compile", "(", "r'^\\-?\\d+$'", ")", "floatpattern", "=", "re", ".", "compile", "(", "r'^\\-?\\d+\\.\\d+(E[+-]?\\d\\d+)?$'", ")", "datedtpattern", "=", "'%Y-%m-%d'", "datedttimepattern", "=", "'%Y-%m-%dT%H:%M:%SZ'", "timedtpattern", "=", "'%H:%M:%S.%f'", "timepattern", "=", "re", ".", "compile", "(", "r'^\\d{2}:\\d{2}:\\d{2}(\\.\\d{6})?'", ")", "if", "valuestr", ".", "startswith", "(", "'\"'", ")", "and", "valuestr", ".", "endswith", "(", "'\"'", ")", ":", "# it's a string", "return", "valuestr", "[", "1", ":", "-", "1", "]", "elif", "re", ".", "match", "(", "intpattern", ",", "valuestr", ")", ":", "# it's an integer", "return", "int", "(", "valuestr", ")", "elif", "re", ".", "match", "(", "floatpattern", ",", "valuestr", ")", ":", "# floating point number", "return", "float", "(", "valuestr", ")", "# now let's try the datetime objects; throws exception if it doesn't match", "try", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "valuestr", ",", "datedtpattern", ")", ".", "date", "(", ")", "except", "ValueError", ":", "pass", "try", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "valuestr", ",", "datedttimepattern", ")", "except", "ValueError", ":", "pass", "# time parsing is complicated: Python's datetime module only accepts", "# fractions of a second only up to 6 digits", "mat", "=", "re", ".", "match", "(", "timepattern", ",", "valuestr", ")", "if", "mat", ":", "test", "=", "mat", ".", "group", "(", "0", ")", "try", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "test", ",", "timedtpattern", ")", ".", "time", "(", ")", "except", "ValueError", ":", "pass", "# If we get here, we still haven't returned anything.", "logging", ".", "info", "(", "\"The value %s couldn't be parsed as \"", "%", "valuestr", "+", "\"int, float, date, time, datetime. 
Returning it as string.\"", ")", "return", "valuestr" ]
Takes value as str, returns str, int, float, date, datetime, or time
[ "Takes", "value", "as", "str", "returns", "str", "int", "float", "date", "datetime", "or", "time" ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/mtl.py#L242-L285
dgketchum/satellite_image
sat_image/warped_vrt.py
warp_vrt
def warp_vrt(directory, delete_extra=False, use_band_map=False, overwrite=False, remove_bqa=True, return_profile=False): """ Read in image geometry, resample subsequent images to same grid. The purpose of this function is to snap many Landsat images to one geometry. Use Landsat578 to download and unzip them, then run them through this to get identical geometries for analysis. Files :param use_band_map: :param delete_extra: :param directory: A directory containing sub-directories of Landsat images. :return: None """ if 'resample_meta.txt' in os.listdir(directory) and not overwrite: print('{} has already had component images warped'.format(directory)) return None mapping = {'LC8': Landsat8, 'LE7': Landsat7, 'LT5': Landsat5} vrt_options = {} list_dir = [x[0] for x in os.walk(directory) if os.path.basename(x[0])[:3] in mapping.keys()] extras = [os.path.join(directory, x) for x in os.listdir(directory) if x.endswith('.tif')] first = True for d in list_dir: sat = LandsatImage(d).satellite paths = extras root = os.path.join(directory, d) if os.path.isdir(root): for x in os.listdir(root): if remove_bqa and x.endswith('BQA.TIF'): try: os.remove(x) except FileNotFoundError: pass elif use_band_map: bands = BandMap().selected for y in bands[sat]: if x.endswith('B{}.TIF'.format(y)): paths.append(os.path.join(directory, d, x)) else: if x.endswith('.TIF') or x.endswith('.tif'): paths.append(os.path.join(directory, d, x)) if x.endswith('MTL.txt'): mtl = os.path.join(directory, d, x) if first: landsat = mapping[sat](os.path.join(directory, d)) dst = landsat.rasterio_geometry vrt_options = {'resampling': Resampling.nearest, 'dst_crs': dst['crs'], 'dst_transform': dst['transform'], 'dst_height': dst['height'], 'dst_width': dst['width']} message = """ This directory has been resampled to same grid. Master grid is {}. 
{} """.format(d, datetime.now()) with open(os.path.join(directory, 'resample_meta.txt'), 'w') as f: f.write(message) first = False for tif_path in paths: print('warping {}'.format(os.path.basename(tif_path))) with rasopen(tif_path, 'r') as src: with WarpedVRT(src, **vrt_options) as vrt: data = vrt.read() dst_dir, name = os.path.split(tif_path) outfile = os.path.join(dst_dir, name) meta = vrt.meta.copy() meta['driver'] = 'GTiff' with rasopen(outfile, 'w', **meta) as dst: dst.write(data) if delete_extra: for x in os.listdir(os.path.join(directory, d)): x_file = os.path.join(directory, d, x) if x_file not in paths: if x[-7:] not in ['ask.tif', 'MTL.txt']: print('removing {}'.format(x_file)) os.remove(x_file) if return_profile: return dst
python
def warp_vrt(directory, delete_extra=False, use_band_map=False, overwrite=False, remove_bqa=True, return_profile=False): """ Read in image geometry, resample subsequent images to same grid. The purpose of this function is to snap many Landsat images to one geometry. Use Landsat578 to download and unzip them, then run them through this to get identical geometries for analysis. Files :param use_band_map: :param delete_extra: :param directory: A directory containing sub-directories of Landsat images. :return: None """ if 'resample_meta.txt' in os.listdir(directory) and not overwrite: print('{} has already had component images warped'.format(directory)) return None mapping = {'LC8': Landsat8, 'LE7': Landsat7, 'LT5': Landsat5} vrt_options = {} list_dir = [x[0] for x in os.walk(directory) if os.path.basename(x[0])[:3] in mapping.keys()] extras = [os.path.join(directory, x) for x in os.listdir(directory) if x.endswith('.tif')] first = True for d in list_dir: sat = LandsatImage(d).satellite paths = extras root = os.path.join(directory, d) if os.path.isdir(root): for x in os.listdir(root): if remove_bqa and x.endswith('BQA.TIF'): try: os.remove(x) except FileNotFoundError: pass elif use_band_map: bands = BandMap().selected for y in bands[sat]: if x.endswith('B{}.TIF'.format(y)): paths.append(os.path.join(directory, d, x)) else: if x.endswith('.TIF') or x.endswith('.tif'): paths.append(os.path.join(directory, d, x)) if x.endswith('MTL.txt'): mtl = os.path.join(directory, d, x) if first: landsat = mapping[sat](os.path.join(directory, d)) dst = landsat.rasterio_geometry vrt_options = {'resampling': Resampling.nearest, 'dst_crs': dst['crs'], 'dst_transform': dst['transform'], 'dst_height': dst['height'], 'dst_width': dst['width']} message = """ This directory has been resampled to same grid. Master grid is {}. 
{} """.format(d, datetime.now()) with open(os.path.join(directory, 'resample_meta.txt'), 'w') as f: f.write(message) first = False for tif_path in paths: print('warping {}'.format(os.path.basename(tif_path))) with rasopen(tif_path, 'r') as src: with WarpedVRT(src, **vrt_options) as vrt: data = vrt.read() dst_dir, name = os.path.split(tif_path) outfile = os.path.join(dst_dir, name) meta = vrt.meta.copy() meta['driver'] = 'GTiff' with rasopen(outfile, 'w', **meta) as dst: dst.write(data) if delete_extra: for x in os.listdir(os.path.join(directory, d)): x_file = os.path.join(directory, d, x) if x_file not in paths: if x[-7:] not in ['ask.tif', 'MTL.txt']: print('removing {}'.format(x_file)) os.remove(x_file) if return_profile: return dst
[ "def", "warp_vrt", "(", "directory", ",", "delete_extra", "=", "False", ",", "use_band_map", "=", "False", ",", "overwrite", "=", "False", ",", "remove_bqa", "=", "True", ",", "return_profile", "=", "False", ")", ":", "if", "'resample_meta.txt'", "in", "os", ".", "listdir", "(", "directory", ")", "and", "not", "overwrite", ":", "print", "(", "'{} has already had component images warped'", ".", "format", "(", "directory", ")", ")", "return", "None", "mapping", "=", "{", "'LC8'", ":", "Landsat8", ",", "'LE7'", ":", "Landsat7", ",", "'LT5'", ":", "Landsat5", "}", "vrt_options", "=", "{", "}", "list_dir", "=", "[", "x", "[", "0", "]", "for", "x", "in", "os", ".", "walk", "(", "directory", ")", "if", "os", ".", "path", ".", "basename", "(", "x", "[", "0", "]", ")", "[", ":", "3", "]", "in", "mapping", ".", "keys", "(", ")", "]", "extras", "=", "[", "os", ".", "path", ".", "join", "(", "directory", ",", "x", ")", "for", "x", "in", "os", ".", "listdir", "(", "directory", ")", "if", "x", ".", "endswith", "(", "'.tif'", ")", "]", "first", "=", "True", "for", "d", "in", "list_dir", ":", "sat", "=", "LandsatImage", "(", "d", ")", ".", "satellite", "paths", "=", "extras", "root", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "d", ")", "if", "os", ".", "path", ".", "isdir", "(", "root", ")", ":", "for", "x", "in", "os", ".", "listdir", "(", "root", ")", ":", "if", "remove_bqa", "and", "x", ".", "endswith", "(", "'BQA.TIF'", ")", ":", "try", ":", "os", ".", "remove", "(", "x", ")", "except", "FileNotFoundError", ":", "pass", "elif", "use_band_map", ":", "bands", "=", "BandMap", "(", ")", ".", "selected", "for", "y", "in", "bands", "[", "sat", "]", ":", "if", "x", ".", "endswith", "(", "'B{}.TIF'", ".", "format", "(", "y", ")", ")", ":", "paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "d", ",", "x", ")", ")", "else", ":", "if", "x", ".", "endswith", "(", "'.TIF'", ")", "or", "x", ".", "endswith", "(", "'.tif'", ")", ":", 
"paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "d", ",", "x", ")", ")", "if", "x", ".", "endswith", "(", "'MTL.txt'", ")", ":", "mtl", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "d", ",", "x", ")", "if", "first", ":", "landsat", "=", "mapping", "[", "sat", "]", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "d", ")", ")", "dst", "=", "landsat", ".", "rasterio_geometry", "vrt_options", "=", "{", "'resampling'", ":", "Resampling", ".", "nearest", ",", "'dst_crs'", ":", "dst", "[", "'crs'", "]", ",", "'dst_transform'", ":", "dst", "[", "'transform'", "]", ",", "'dst_height'", ":", "dst", "[", "'height'", "]", ",", "'dst_width'", ":", "dst", "[", "'width'", "]", "}", "message", "=", "\"\"\"\n This directory has been resampled to same grid.\n Master grid is {}.\n {}\n \"\"\"", ".", "format", "(", "d", ",", "datetime", ".", "now", "(", ")", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "'resample_meta.txt'", ")", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "message", ")", "first", "=", "False", "for", "tif_path", "in", "paths", ":", "print", "(", "'warping {}'", ".", "format", "(", "os", ".", "path", ".", "basename", "(", "tif_path", ")", ")", ")", "with", "rasopen", "(", "tif_path", ",", "'r'", ")", "as", "src", ":", "with", "WarpedVRT", "(", "src", ",", "*", "*", "vrt_options", ")", "as", "vrt", ":", "data", "=", "vrt", ".", "read", "(", ")", "dst_dir", ",", "name", "=", "os", ".", "path", ".", "split", "(", "tif_path", ")", "outfile", "=", "os", ".", "path", ".", "join", "(", "dst_dir", ",", "name", ")", "meta", "=", "vrt", ".", "meta", ".", "copy", "(", ")", "meta", "[", "'driver'", "]", "=", "'GTiff'", "with", "rasopen", "(", "outfile", ",", "'w'", ",", "*", "*", "meta", ")", "as", "dst", ":", "dst", ".", "write", "(", "data", ")", "if", "delete_extra", ":", "for", "x", "in", "os", ".", "listdir", "(", "os", ".", "path", ".", "join", "(", "directory", 
",", "d", ")", ")", ":", "x_file", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "d", ",", "x", ")", "if", "x_file", "not", "in", "paths", ":", "if", "x", "[", "-", "7", ":", "]", "not", "in", "[", "'ask.tif'", ",", "'MTL.txt'", "]", ":", "print", "(", "'removing {}'", ".", "format", "(", "x_file", ")", ")", "os", ".", "remove", "(", "x_file", ")", "if", "return_profile", ":", "return", "dst" ]
Read in image geometry, resample subsequent images to same grid. The purpose of this function is to snap many Landsat images to one geometry. Use Landsat578 to download and unzip them, then run them through this to get identical geometries for analysis. Files :param use_band_map: :param delete_extra: :param directory: A directory containing sub-directories of Landsat images. :return: None
[ "Read", "in", "image", "geometry", "resample", "subsequent", "images", "to", "same", "grid", "." ]
train
https://github.com/dgketchum/satellite_image/blob/0207fbb7b2bbf14f4307db65489bb4d4c5b92f52/sat_image/warped_vrt.py#L30-L121
samstav/tox-pyenv
tox_pyenv.py
tox_get_python_executable
def tox_get_python_executable(envconfig): """Return a python executable for the given python base name. The first plugin/hook which returns an executable path will determine it. ``envconfig`` is the testenv configuration which contains per-testenv configuration, notably the ``.envname`` and ``.basepython`` setting. """ try: # pylint: disable=no-member pyenv = (getattr(py.path.local.sysfind('pyenv'), 'strpath', 'pyenv') or 'pyenv') cmd = [pyenv, 'which', envconfig.basepython] pipe = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True ) out, err = pipe.communicate() except OSError: err = '\'pyenv\': command not found' LOG.warning( "pyenv doesn't seem to be installed, you probably " "don't want this plugin installed either." ) else: if pipe.poll() == 0: return out.strip() else: if not envconfig.tox_pyenv_fallback: raise PyenvWhichFailed(err) LOG.debug("`%s` failed thru tox-pyenv plugin, falling back. " "STDERR: \"%s\" | To disable this behavior, set " "tox_pyenv_fallback=False in your tox.ini or use " " --tox-pyenv-no-fallback on the command line.", ' '.join([str(x) for x in cmd]), err)
python
def tox_get_python_executable(envconfig): """Return a python executable for the given python base name. The first plugin/hook which returns an executable path will determine it. ``envconfig`` is the testenv configuration which contains per-testenv configuration, notably the ``.envname`` and ``.basepython`` setting. """ try: # pylint: disable=no-member pyenv = (getattr(py.path.local.sysfind('pyenv'), 'strpath', 'pyenv') or 'pyenv') cmd = [pyenv, 'which', envconfig.basepython] pipe = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True ) out, err = pipe.communicate() except OSError: err = '\'pyenv\': command not found' LOG.warning( "pyenv doesn't seem to be installed, you probably " "don't want this plugin installed either." ) else: if pipe.poll() == 0: return out.strip() else: if not envconfig.tox_pyenv_fallback: raise PyenvWhichFailed(err) LOG.debug("`%s` failed thru tox-pyenv plugin, falling back. " "STDERR: \"%s\" | To disable this behavior, set " "tox_pyenv_fallback=False in your tox.ini or use " " --tox-pyenv-no-fallback on the command line.", ' '.join([str(x) for x in cmd]), err)
[ "def", "tox_get_python_executable", "(", "envconfig", ")", ":", "try", ":", "# pylint: disable=no-member", "pyenv", "=", "(", "getattr", "(", "py", ".", "path", ".", "local", ".", "sysfind", "(", "'pyenv'", ")", ",", "'strpath'", ",", "'pyenv'", ")", "or", "'pyenv'", ")", "cmd", "=", "[", "pyenv", ",", "'which'", ",", "envconfig", ".", "basepython", "]", "pipe", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ")", "out", ",", "err", "=", "pipe", ".", "communicate", "(", ")", "except", "OSError", ":", "err", "=", "'\\'pyenv\\': command not found'", "LOG", ".", "warning", "(", "\"pyenv doesn't seem to be installed, you probably \"", "\"don't want this plugin installed either.\"", ")", "else", ":", "if", "pipe", ".", "poll", "(", ")", "==", "0", ":", "return", "out", ".", "strip", "(", ")", "else", ":", "if", "not", "envconfig", ".", "tox_pyenv_fallback", ":", "raise", "PyenvWhichFailed", "(", "err", ")", "LOG", ".", "debug", "(", "\"`%s` failed thru tox-pyenv plugin, falling back. \"", "\"STDERR: \\\"%s\\\" | To disable this behavior, set \"", "\"tox_pyenv_fallback=False in your tox.ini or use \"", "\" --tox-pyenv-no-fallback on the command line.\"", ",", "' '", ".", "join", "(", "[", "str", "(", "x", ")", "for", "x", "in", "cmd", "]", ")", ",", "err", ")" ]
Return a python executable for the given python base name. The first plugin/hook which returns an executable path will determine it. ``envconfig`` is the testenv configuration which contains per-testenv configuration, notably the ``.envname`` and ``.basepython`` setting.
[ "Return", "a", "python", "executable", "for", "the", "given", "python", "base", "name", "." ]
train
https://github.com/samstav/tox-pyenv/blob/7391610c2c4f1c95abde2e8638763b1b24e604cd/tox_pyenv.py#L69-L106
samstav/tox-pyenv
tox_pyenv.py
_setup_no_fallback
def _setup_no_fallback(parser): """Add the option, --tox-pyenv-no-fallback. If this option is set, do not allow fallback to tox's built-in strategy for looking up python executables if the call to `pyenv which` by this plugin fails. This will allow the error to raise instead of falling back to tox's default behavior. """ cli_dest = 'tox_pyenv_fallback' halp = ('If `pyenv which {basepython}` exits non-zero when looking ' 'up the python executable, do not allow fallback to tox\'s ' 'built-in default logic.') # Add a command-line option. tox_pyenv_group = parser.argparser.add_argument_group( title='{0} plugin options'.format(__title__), ) tox_pyenv_group.add_argument( '--tox-pyenv-no-fallback', '-F', dest=cli_dest, default=True, action='store_false', help=halp ) def _pyenv_fallback(testenv_config, value): cli_says = getattr(testenv_config.config.option, cli_dest) return cli_says or value # Add an equivalent tox.ini [testenv] section option. parser.add_testenv_attribute( name=cli_dest, type="bool", postprocess=_pyenv_fallback, default=False, help=('If `pyenv which {basepython}` exits non-zero when looking ' 'up the python executable, allow fallback to tox\'s ' 'built-in default logic.'), )
python
def _setup_no_fallback(parser): """Add the option, --tox-pyenv-no-fallback. If this option is set, do not allow fallback to tox's built-in strategy for looking up python executables if the call to `pyenv which` by this plugin fails. This will allow the error to raise instead of falling back to tox's default behavior. """ cli_dest = 'tox_pyenv_fallback' halp = ('If `pyenv which {basepython}` exits non-zero when looking ' 'up the python executable, do not allow fallback to tox\'s ' 'built-in default logic.') # Add a command-line option. tox_pyenv_group = parser.argparser.add_argument_group( title='{0} plugin options'.format(__title__), ) tox_pyenv_group.add_argument( '--tox-pyenv-no-fallback', '-F', dest=cli_dest, default=True, action='store_false', help=halp ) def _pyenv_fallback(testenv_config, value): cli_says = getattr(testenv_config.config.option, cli_dest) return cli_says or value # Add an equivalent tox.ini [testenv] section option. parser.add_testenv_attribute( name=cli_dest, type="bool", postprocess=_pyenv_fallback, default=False, help=('If `pyenv which {basepython}` exits non-zero when looking ' 'up the python executable, allow fallback to tox\'s ' 'built-in default logic.'), )
[ "def", "_setup_no_fallback", "(", "parser", ")", ":", "cli_dest", "=", "'tox_pyenv_fallback'", "halp", "=", "(", "'If `pyenv which {basepython}` exits non-zero when looking '", "'up the python executable, do not allow fallback to tox\\'s '", "'built-in default logic.'", ")", "# Add a command-line option.", "tox_pyenv_group", "=", "parser", ".", "argparser", ".", "add_argument_group", "(", "title", "=", "'{0} plugin options'", ".", "format", "(", "__title__", ")", ",", ")", "tox_pyenv_group", ".", "add_argument", "(", "'--tox-pyenv-no-fallback'", ",", "'-F'", ",", "dest", "=", "cli_dest", ",", "default", "=", "True", ",", "action", "=", "'store_false'", ",", "help", "=", "halp", ")", "def", "_pyenv_fallback", "(", "testenv_config", ",", "value", ")", ":", "cli_says", "=", "getattr", "(", "testenv_config", ".", "config", ".", "option", ",", "cli_dest", ")", "return", "cli_says", "or", "value", "# Add an equivalent tox.ini [testenv] section option.", "parser", ".", "add_testenv_attribute", "(", "name", "=", "cli_dest", ",", "type", "=", "\"bool\"", ",", "postprocess", "=", "_pyenv_fallback", ",", "default", "=", "False", ",", "help", "=", "(", "'If `pyenv which {basepython}` exits non-zero when looking '", "'up the python executable, allow fallback to tox\\'s '", "'built-in default logic.'", ")", ",", ")" ]
Add the option, --tox-pyenv-no-fallback. If this option is set, do not allow fallback to tox's built-in strategy for looking up python executables if the call to `pyenv which` by this plugin fails. This will allow the error to raise instead of falling back to tox's default behavior.
[ "Add", "the", "option", "--", "tox", "-", "pyenv", "-", "no", "-", "fallback", "." ]
train
https://github.com/samstav/tox-pyenv/blob/7391610c2c4f1c95abde2e8638763b1b24e604cd/tox_pyenv.py#L109-L147
alanjcastonguay/pyforce
src/pyforce/xmlclient.py
Client.sendEmail
def sendEmail(self, emails, massType='SingleEmailMessage'): """ Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com/us /developer/docs/api/Content/sforce_api_calls_sendemail.htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION. """ return SendEmailRequest( self.__serverUrl, self.sessionId, emails, massType ).post(self.__conn)
python
def sendEmail(self, emails, massType='SingleEmailMessage'): """ Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com/us /developer/docs/api/Content/sforce_api_calls_sendemail.htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION. """ return SendEmailRequest( self.__serverUrl, self.sessionId, emails, massType ).post(self.__conn)
[ "def", "sendEmail", "(", "self", ",", "emails", ",", "massType", "=", "'SingleEmailMessage'", ")", ":", "return", "SendEmailRequest", "(", "self", ".", "__serverUrl", ",", "self", ".", "sessionId", ",", "emails", ",", "massType", ")", ".", "post", "(", "self", ".", "__conn", ")" ]
Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com/us /developer/docs/api/Content/sforce_api_calls_sendemail.htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION.
[ "Send", "one", "or", "more", "emails", "from", "Salesforce", "." ]
train
https://github.com/alanjcastonguay/pyforce/blob/d69a73c62725f411aa7c7588f3b231249935c068/src/pyforce/xmlclient.py#L233-L254
alanjcastonguay/pyforce
src/pyforce/xmltramp.py
quote
def quote(myitem, elt=True): '''URL encode string''' if elt and '<' in myitem and len(myitem) > 24 and myitem.find(']]>') == -1: return '<![CDATA[%s]]>' % (myitem) else: myitem = myitem.replace('&', '&amp;').\ replace('<', '&lt;').replace(']]>', ']]&gt;') if not elt: myitem = myitem.replace('"', '&quot;') return myitem
python
def quote(myitem, elt=True): '''URL encode string''' if elt and '<' in myitem and len(myitem) > 24 and myitem.find(']]>') == -1: return '<![CDATA[%s]]>' % (myitem) else: myitem = myitem.replace('&', '&amp;').\ replace('<', '&lt;').replace(']]>', ']]&gt;') if not elt: myitem = myitem.replace('"', '&quot;') return myitem
[ "def", "quote", "(", "myitem", ",", "elt", "=", "True", ")", ":", "if", "elt", "and", "'<'", "in", "myitem", "and", "len", "(", "myitem", ")", ">", "24", "and", "myitem", ".", "find", "(", "']]>'", ")", "==", "-", "1", ":", "return", "'<![CDATA[%s]]>'", "%", "(", "myitem", ")", "else", ":", "myitem", "=", "myitem", ".", "replace", "(", "'&'", ",", "'&amp;'", ")", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", ".", "replace", "(", "']]>'", ",", "']]&gt;'", ")", "if", "not", "elt", ":", "myitem", "=", "myitem", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", "return", "myitem" ]
URL encode string
[ "URL", "encode", "string" ]
train
https://github.com/alanjcastonguay/pyforce/blob/d69a73c62725f411aa7c7588f3b231249935c068/src/pyforce/xmltramp.py#L37-L46
alanjcastonguay/pyforce
src/pyforce/pyforce.py
_doPrep
def _doPrep(field_dict): """ _doPrep is makes changes in-place. Do some prep work converting python types into formats that Salesforce will accept. This includes converting lists of strings to "apple;orange;pear". Dicts will be converted to embedded objects None or empty list values will be Null-ed """ fieldsToNull = [] for key, value in field_dict.items(): if value is None: fieldsToNull.append(key) field_dict[key] = [] if hasattr(value, '__iter__'): if len(value) == 0: fieldsToNull.append(key) elif isinstance(value, dict): innerCopy = copy.deepcopy(value) _doPrep(innerCopy) field_dict[key] = innerCopy else: field_dict[key] = ";".join(value) if 'fieldsToNull' in field_dict: raise ValueError( "fieldsToNull should be populated by the client, not the caller." ) field_dict['fieldsToNull'] = fieldsToNull
python
def _doPrep(field_dict): """ _doPrep is makes changes in-place. Do some prep work converting python types into formats that Salesforce will accept. This includes converting lists of strings to "apple;orange;pear". Dicts will be converted to embedded objects None or empty list values will be Null-ed """ fieldsToNull = [] for key, value in field_dict.items(): if value is None: fieldsToNull.append(key) field_dict[key] = [] if hasattr(value, '__iter__'): if len(value) == 0: fieldsToNull.append(key) elif isinstance(value, dict): innerCopy = copy.deepcopy(value) _doPrep(innerCopy) field_dict[key] = innerCopy else: field_dict[key] = ";".join(value) if 'fieldsToNull' in field_dict: raise ValueError( "fieldsToNull should be populated by the client, not the caller." ) field_dict['fieldsToNull'] = fieldsToNull
[ "def", "_doPrep", "(", "field_dict", ")", ":", "fieldsToNull", "=", "[", "]", "for", "key", ",", "value", "in", "field_dict", ".", "items", "(", ")", ":", "if", "value", "is", "None", ":", "fieldsToNull", ".", "append", "(", "key", ")", "field_dict", "[", "key", "]", "=", "[", "]", "if", "hasattr", "(", "value", ",", "'__iter__'", ")", ":", "if", "len", "(", "value", ")", "==", "0", ":", "fieldsToNull", ".", "append", "(", "key", ")", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "innerCopy", "=", "copy", ".", "deepcopy", "(", "value", ")", "_doPrep", "(", "innerCopy", ")", "field_dict", "[", "key", "]", "=", "innerCopy", "else", ":", "field_dict", "[", "key", "]", "=", "\";\"", ".", "join", "(", "value", ")", "if", "'fieldsToNull'", "in", "field_dict", ":", "raise", "ValueError", "(", "\"fieldsToNull should be populated by the client, not the caller.\"", ")", "field_dict", "[", "'fieldsToNull'", "]", "=", "fieldsToNull" ]
_doPrep is makes changes in-place. Do some prep work converting python types into formats that Salesforce will accept. This includes converting lists of strings to "apple;orange;pear". Dicts will be converted to embedded objects None or empty list values will be Null-ed
[ "_doPrep", "is", "makes", "changes", "in", "-", "place", ".", "Do", "some", "prep", "work", "converting", "python", "types", "into", "formats", "that", "Salesforce", "will", "accept", ".", "This", "includes", "converting", "lists", "of", "strings", "to", "apple", ";", "orange", ";", "pear", ".", "Dicts", "will", "be", "converted", "to", "embedded", "objects", "None", "or", "empty", "list", "values", "will", "be", "Null", "-", "ed" ]
train
https://github.com/alanjcastonguay/pyforce/blob/d69a73c62725f411aa7c7588f3b231249935c068/src/pyforce/pyforce.py#L528-L555
alanjcastonguay/pyforce
src/pyforce/pyforce.py
_prepareSObjects
def _prepareSObjects(sObjects): '''Prepare a SObject''' sObjectsCopy = copy.deepcopy(sObjects) if isinstance(sObjectsCopy, dict): # If root element is a dict, then this is a single object not an array _doPrep(sObjectsCopy) else: # else this is an array, and each elelment should be prepped. for listitems in sObjectsCopy: _doPrep(listitems) return sObjectsCopy
python
def _prepareSObjects(sObjects): '''Prepare a SObject''' sObjectsCopy = copy.deepcopy(sObjects) if isinstance(sObjectsCopy, dict): # If root element is a dict, then this is a single object not an array _doPrep(sObjectsCopy) else: # else this is an array, and each elelment should be prepped. for listitems in sObjectsCopy: _doPrep(listitems) return sObjectsCopy
[ "def", "_prepareSObjects", "(", "sObjects", ")", ":", "sObjectsCopy", "=", "copy", ".", "deepcopy", "(", "sObjects", ")", "if", "isinstance", "(", "sObjectsCopy", ",", "dict", ")", ":", "# If root element is a dict, then this is a single object not an array", "_doPrep", "(", "sObjectsCopy", ")", "else", ":", "# else this is an array, and each elelment should be prepped.", "for", "listitems", "in", "sObjectsCopy", ":", "_doPrep", "(", "listitems", ")", "return", "sObjectsCopy" ]
Prepare a SObject
[ "Prepare", "a", "SObject" ]
train
https://github.com/alanjcastonguay/pyforce/blob/d69a73c62725f411aa7c7588f3b231249935c068/src/pyforce/pyforce.py#L562-L572
alanjcastonguay/pyforce
src/pyforce/pyforce.py
Client.sendEmail
def sendEmail(self, emails, mass_type='SingleEmailMessage'): """ Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com /us/developer/docs/api/Content/sforce_api_calls_sendemail .htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION. """ preparedEmails = _prepareSObjects(emails) if isinstance(preparedEmails, dict): # If root element is a dict, then this is a single object not an # array del preparedEmails['fieldsToNull'] else: # else this is an array, and each elelment should be prepped. for listitems in preparedEmails: del listitems['fieldsToNull'] res = BaseClient.sendEmail(self, preparedEmails, mass_type) if type(res) not in (TupleType, ListType): res = [res] data = list() for resu in res: d = dict() data.append(d) d['success'] = success = _bool(resu[_tPartnerNS.success]) if not success: d['errors'] = [_extractError(e) for e in resu[_tPartnerNS.errors,]] else: d['errors'] = list() return data
python
def sendEmail(self, emails, mass_type='SingleEmailMessage'): """ Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com /us/developer/docs/api/Content/sforce_api_calls_sendemail .htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION. """ preparedEmails = _prepareSObjects(emails) if isinstance(preparedEmails, dict): # If root element is a dict, then this is a single object not an # array del preparedEmails['fieldsToNull'] else: # else this is an array, and each elelment should be prepped. for listitems in preparedEmails: del listitems['fieldsToNull'] res = BaseClient.sendEmail(self, preparedEmails, mass_type) if type(res) not in (TupleType, ListType): res = [res] data = list() for resu in res: d = dict() data.append(d) d['success'] = success = _bool(resu[_tPartnerNS.success]) if not success: d['errors'] = [_extractError(e) for e in resu[_tPartnerNS.errors,]] else: d['errors'] = list() return data
[ "def", "sendEmail", "(", "self", ",", "emails", ",", "mass_type", "=", "'SingleEmailMessage'", ")", ":", "preparedEmails", "=", "_prepareSObjects", "(", "emails", ")", "if", "isinstance", "(", "preparedEmails", ",", "dict", ")", ":", "# If root element is a dict, then this is a single object not an", "# array", "del", "preparedEmails", "[", "'fieldsToNull'", "]", "else", ":", "# else this is an array, and each elelment should be prepped.", "for", "listitems", "in", "preparedEmails", ":", "del", "listitems", "[", "'fieldsToNull'", "]", "res", "=", "BaseClient", ".", "sendEmail", "(", "self", ",", "preparedEmails", ",", "mass_type", ")", "if", "type", "(", "res", ")", "not", "in", "(", "TupleType", ",", "ListType", ")", ":", "res", "=", "[", "res", "]", "data", "=", "list", "(", ")", "for", "resu", "in", "res", ":", "d", "=", "dict", "(", ")", "data", ".", "append", "(", "d", ")", "d", "[", "'success'", "]", "=", "success", "=", "_bool", "(", "resu", "[", "_tPartnerNS", ".", "success", "]", ")", "if", "not", "success", ":", "d", "[", "'errors'", "]", "=", "[", "_extractError", "(", "e", ")", "for", "e", "in", "resu", "[", "_tPartnerNS", ".", "errors", ",", "]", "]", "else", ":", "d", "[", "'errors'", "]", "=", "list", "(", ")", "return", "data" ]
Send one or more emails from Salesforce. Parameters: emails - a dictionary or list of dictionaries, each representing a single email as described by https://www.salesforce.com /us/developer/docs/api/Content/sforce_api_calls_sendemail .htm massType - 'SingleEmailMessage' or 'MassEmailMessage'. MassEmailMessage is used for mailmerge of up to 250 recepients in a single pass. Note: Newly created Salesforce Sandboxes default to System email only. In this situation, sendEmail() will fail with NO_MASS_MAIL_PERMISSION.
[ "Send", "one", "or", "more", "emails", "from", "Salesforce", "." ]
train
https://github.com/alanjcastonguay/pyforce/blob/d69a73c62725f411aa7c7588f3b231249935c068/src/pyforce/pyforce.py#L238-L278
alanjcastonguay/pyforce
src/pyforce/pyforce.py
Client.queryTypesDescriptions
def queryTypesDescriptions(self, types): """ Given a list of types, construct a dictionary such that each key is a type, and each value is the corresponding sObject for that type. """ types = list(types) if types: types_descs = self.describeSObjects(types) else: types_descs = [] return dict(map(lambda t, d: (t, d), types, types_descs))
python
def queryTypesDescriptions(self, types): """ Given a list of types, construct a dictionary such that each key is a type, and each value is the corresponding sObject for that type. """ types = list(types) if types: types_descs = self.describeSObjects(types) else: types_descs = [] return dict(map(lambda t, d: (t, d), types, types_descs))
[ "def", "queryTypesDescriptions", "(", "self", ",", "types", ")", ":", "types", "=", "list", "(", "types", ")", "if", "types", ":", "types_descs", "=", "self", ".", "describeSObjects", "(", "types", ")", "else", ":", "types_descs", "=", "[", "]", "return", "dict", "(", "map", "(", "lambda", "t", ",", "d", ":", "(", "t", ",", "d", ")", ",", "types", ",", "types_descs", ")", ")" ]
Given a list of types, construct a dictionary such that each key is a type, and each value is the corresponding sObject for that type.
[ "Given", "a", "list", "of", "types", "construct", "a", "dictionary", "such", "that", "each", "key", "is", "a", "type", "and", "each", "value", "is", "the", "corresponding", "sObject", "for", "that", "type", "." ]
train
https://github.com/alanjcastonguay/pyforce/blob/d69a73c62725f411aa7c7588f3b231249935c068/src/pyforce/pyforce.py#L316-L327
miguelgrinberg/flask-paranoid
flask_paranoid/paranoid.py
Paranoid.create_token
def create_token(self): """Create a session protection token for this client. This method generates a session protection token for the cilent, which consists in a hash of the user agent and the IP address. This method can be overriden by subclasses to implement different token generation algorithms. """ user_agent = request.headers.get('User-Agent') if user_agent is None: # pragma: no cover user_agent = 'no user agent' user_agent = user_agent.encode('utf-8') base = self._get_remote_addr() + b'|' + user_agent h = sha256() h.update(base) return h.hexdigest()
python
def create_token(self): """Create a session protection token for this client. This method generates a session protection token for the cilent, which consists in a hash of the user agent and the IP address. This method can be overriden by subclasses to implement different token generation algorithms. """ user_agent = request.headers.get('User-Agent') if user_agent is None: # pragma: no cover user_agent = 'no user agent' user_agent = user_agent.encode('utf-8') base = self._get_remote_addr() + b'|' + user_agent h = sha256() h.update(base) return h.hexdigest()
[ "def", "create_token", "(", "self", ")", ":", "user_agent", "=", "request", ".", "headers", ".", "get", "(", "'User-Agent'", ")", "if", "user_agent", "is", "None", ":", "# pragma: no cover", "user_agent", "=", "'no user agent'", "user_agent", "=", "user_agent", ".", "encode", "(", "'utf-8'", ")", "base", "=", "self", ".", "_get_remote_addr", "(", ")", "+", "b'|'", "+", "user_agent", "h", "=", "sha256", "(", ")", "h", ".", "update", "(", "base", ")", "return", "h", ".", "hexdigest", "(", ")" ]
Create a session protection token for this client. This method generates a session protection token for the cilent, which consists in a hash of the user agent and the IP address. This method can be overriden by subclasses to implement different token generation algorithms.
[ "Create", "a", "session", "protection", "token", "for", "this", "client", "." ]
train
https://github.com/miguelgrinberg/flask-paranoid/blob/ec6205756d55edd1b135249b9bb345871fef0977/flask_paranoid/paranoid.py#L63-L78
miguelgrinberg/flask-paranoid
flask_paranoid/paranoid.py
Paranoid.clear_session
def clear_session(self, response): """Clear the session. This method is invoked when the session is found to be invalid. Subclasses can override this method to implement a custom session reset. """ session.clear() # if flask-login is installed, we try to clear the # "remember me" cookie, just in case it is set if 'flask_login' in sys.modules: remember_cookie = current_app.config.get('REMEMBER_COOKIE', 'remember_token') response.set_cookie(remember_cookie, '', expires=0, max_age=0)
python
def clear_session(self, response): """Clear the session. This method is invoked when the session is found to be invalid. Subclasses can override this method to implement a custom session reset. """ session.clear() # if flask-login is installed, we try to clear the # "remember me" cookie, just in case it is set if 'flask_login' in sys.modules: remember_cookie = current_app.config.get('REMEMBER_COOKIE', 'remember_token') response.set_cookie(remember_cookie, '', expires=0, max_age=0)
[ "def", "clear_session", "(", "self", ",", "response", ")", ":", "session", ".", "clear", "(", ")", "# if flask-login is installed, we try to clear the", "# \"remember me\" cookie, just in case it is set", "if", "'flask_login'", "in", "sys", ".", "modules", ":", "remember_cookie", "=", "current_app", ".", "config", ".", "get", "(", "'REMEMBER_COOKIE'", ",", "'remember_token'", ")", "response", ".", "set_cookie", "(", "remember_cookie", ",", "''", ",", "expires", "=", "0", ",", "max_age", "=", "0", ")" ]
Clear the session. This method is invoked when the session is found to be invalid. Subclasses can override this method to implement a custom session reset.
[ "Clear", "the", "session", "." ]
train
https://github.com/miguelgrinberg/flask-paranoid/blob/ec6205756d55edd1b135249b9bb345871fef0977/flask_paranoid/paranoid.py#L99-L113
Atrox/haikunatorpy
haikunator/haikunator.py
Haikunator.haikunate
def haikunate(self, delimiter='-', token_length=4, token_hex=False, token_chars='0123456789'): """ Generate heroku-like random names to use in your python applications :param delimiter: Delimiter :param token_length: TokenLength :param token_hex: TokenHex :param token_chars: TokenChars :type delimiter: str :type token_length: int :type token_hex: bool :type token_chars: str :return: heroku-like random string :rtype: str """ if token_hex: token_chars = '0123456789abcdef' adjective = self._random_element(self._adjectives) noun = self._random_element(self._nouns) token = ''.join(self._random_element(token_chars) for _ in range(token_length)) sections = [adjective, noun, token] return delimiter.join(filter(None, sections))
python
def haikunate(self, delimiter='-', token_length=4, token_hex=False, token_chars='0123456789'): """ Generate heroku-like random names to use in your python applications :param delimiter: Delimiter :param token_length: TokenLength :param token_hex: TokenHex :param token_chars: TokenChars :type delimiter: str :type token_length: int :type token_hex: bool :type token_chars: str :return: heroku-like random string :rtype: str """ if token_hex: token_chars = '0123456789abcdef' adjective = self._random_element(self._adjectives) noun = self._random_element(self._nouns) token = ''.join(self._random_element(token_chars) for _ in range(token_length)) sections = [adjective, noun, token] return delimiter.join(filter(None, sections))
[ "def", "haikunate", "(", "self", ",", "delimiter", "=", "'-'", ",", "token_length", "=", "4", ",", "token_hex", "=", "False", ",", "token_chars", "=", "'0123456789'", ")", ":", "if", "token_hex", ":", "token_chars", "=", "'0123456789abcdef'", "adjective", "=", "self", ".", "_random_element", "(", "self", ".", "_adjectives", ")", "noun", "=", "self", ".", "_random_element", "(", "self", ".", "_nouns", ")", "token", "=", "''", ".", "join", "(", "self", ".", "_random_element", "(", "token_chars", ")", "for", "_", "in", "range", "(", "token_length", ")", ")", "sections", "=", "[", "adjective", ",", "noun", ",", "token", "]", "return", "delimiter", ".", "join", "(", "filter", "(", "None", ",", "sections", ")", ")" ]
Generate heroku-like random names to use in your python applications :param delimiter: Delimiter :param token_length: TokenLength :param token_hex: TokenHex :param token_chars: TokenChars :type delimiter: str :type token_length: int :type token_hex: bool :type token_chars: str :return: heroku-like random string :rtype: str
[ "Generate", "heroku", "-", "like", "random", "names", "to", "use", "in", "your", "python", "applications" ]
train
https://github.com/Atrox/haikunatorpy/blob/47551e8564dc903341706c35852f7b66e7e49a24/haikunator/haikunator.py#L53-L76
ftao/python-ifcfg
src/ifcfg/__init__.py
get_parser_class
def get_parser_class(): """ Returns the parser according to the system platform """ global distro if distro == 'Linux': Parser = parser.LinuxParser if not os.path.exists(Parser.get_command()[0]): Parser = parser.UnixIPParser elif distro in ['Darwin', 'MacOSX']: Parser = parser.MacOSXParser elif distro == 'Windows': # For some strange reason, Windows will always be win32, see: # https://stackoverflow.com/a/2145582/405682 Parser = parser.WindowsParser else: Parser = parser.NullParser Log.error("Unknown distro type '%s'." % distro) Log.debug("Distro detected as '%s'" % distro) Log.debug("Using '%s'" % Parser) return Parser
python
def get_parser_class(): """ Returns the parser according to the system platform """ global distro if distro == 'Linux': Parser = parser.LinuxParser if not os.path.exists(Parser.get_command()[0]): Parser = parser.UnixIPParser elif distro in ['Darwin', 'MacOSX']: Parser = parser.MacOSXParser elif distro == 'Windows': # For some strange reason, Windows will always be win32, see: # https://stackoverflow.com/a/2145582/405682 Parser = parser.WindowsParser else: Parser = parser.NullParser Log.error("Unknown distro type '%s'." % distro) Log.debug("Distro detected as '%s'" % distro) Log.debug("Using '%s'" % Parser) return Parser
[ "def", "get_parser_class", "(", ")", ":", "global", "distro", "if", "distro", "==", "'Linux'", ":", "Parser", "=", "parser", ".", "LinuxParser", "if", "not", "os", ".", "path", ".", "exists", "(", "Parser", ".", "get_command", "(", ")", "[", "0", "]", ")", ":", "Parser", "=", "parser", ".", "UnixIPParser", "elif", "distro", "in", "[", "'Darwin'", ",", "'MacOSX'", "]", ":", "Parser", "=", "parser", ".", "MacOSXParser", "elif", "distro", "==", "'Windows'", ":", "# For some strange reason, Windows will always be win32, see:", "# https://stackoverflow.com/a/2145582/405682", "Parser", "=", "parser", ".", "WindowsParser", "else", ":", "Parser", "=", "parser", ".", "NullParser", "Log", ".", "error", "(", "\"Unknown distro type '%s'.\"", "%", "distro", ")", "Log", ".", "debug", "(", "\"Distro detected as '%s'\"", "%", "distro", ")", "Log", ".", "debug", "(", "\"Using '%s'\"", "%", "Parser", ")", "return", "Parser" ]
Returns the parser according to the system platform
[ "Returns", "the", "parser", "according", "to", "the", "system", "platform" ]
train
https://github.com/ftao/python-ifcfg/blob/724a4a103088fee7dc2bc2f63b0b9006a614e1d0/src/ifcfg/__init__.py#L18-L39
ftao/python-ifcfg
src/ifcfg/__init__.py
default_interface
def default_interface(ifconfig=None, route_output=None): """ Return just the default interface device dictionary. :param ifconfig: For mocking actual command output :param route_output: For mocking actual command output """ global Parser return Parser(ifconfig=ifconfig)._default_interface(route_output=route_output)
python
def default_interface(ifconfig=None, route_output=None): """ Return just the default interface device dictionary. :param ifconfig: For mocking actual command output :param route_output: For mocking actual command output """ global Parser return Parser(ifconfig=ifconfig)._default_interface(route_output=route_output)
[ "def", "default_interface", "(", "ifconfig", "=", "None", ",", "route_output", "=", "None", ")", ":", "global", "Parser", "return", "Parser", "(", "ifconfig", "=", "ifconfig", ")", ".", "_default_interface", "(", "route_output", "=", "route_output", ")" ]
Return just the default interface device dictionary. :param ifconfig: For mocking actual command output :param route_output: For mocking actual command output
[ "Return", "just", "the", "default", "interface", "device", "dictionary", "." ]
train
https://github.com/ftao/python-ifcfg/blob/724a4a103088fee7dc2bc2f63b0b9006a614e1d0/src/ifcfg/__init__.py#L69-L77
ftao/python-ifcfg
src/ifcfg/parser.py
Parser.parse
def parse(self, ifconfig=None): # noqa: max-complexity=12 """ Parse ifconfig output into self._interfaces. Optional Arguments: ifconfig The data (stdout) from the ifconfig command. Default is to call exec_cmd(self.get_command()). """ if not ifconfig: ifconfig, __, __ = exec_cmd(self.get_command()) self.ifconfig_data = ifconfig cur = None patterns = self.get_patterns() for line in self.ifconfig_data.splitlines(): for pattern in patterns: m = re.match(pattern, line) if not m: continue groupdict = m.groupdict() # Special treatment to trigger which interface we're # setting for if 'device' is in the line. Presumably the # device of the interface is within the first line of the # device block. if 'device' in groupdict: cur = groupdict['device'] self.add_device(cur) elif cur is None: raise RuntimeError( "Got results that don't belong to a device" ) for k, v in groupdict.items(): if k in self._interfaces[cur]: if self._interfaces[cur][k] is None: self._interfaces[cur][k] = v elif hasattr(self._interfaces[cur][k], 'append'): self._interfaces[cur][k].append(v) elif self._interfaces[cur][k] == v: # Silently ignore if the it's the same value as last. Example: Multiple # inet4 addresses, result in multiple netmasks. Cardinality mismatch continue else: raise RuntimeError( "Tried to add {}={} multiple times to {}, it was already: {}".format( k, v, cur, self._interfaces[cur][k] ) ) else: self._interfaces[cur][k] = v # Copy the first 'inet4' ip address to 'inet' for backwards compatibility for device, device_dict in self._interfaces.items(): if len(device_dict['inet4']) > 0: device_dict['inet'] = device_dict['inet4'][0] # fix it up self._interfaces = self.alter(self._interfaces)
python
def parse(self, ifconfig=None): # noqa: max-complexity=12 """ Parse ifconfig output into self._interfaces. Optional Arguments: ifconfig The data (stdout) from the ifconfig command. Default is to call exec_cmd(self.get_command()). """ if not ifconfig: ifconfig, __, __ = exec_cmd(self.get_command()) self.ifconfig_data = ifconfig cur = None patterns = self.get_patterns() for line in self.ifconfig_data.splitlines(): for pattern in patterns: m = re.match(pattern, line) if not m: continue groupdict = m.groupdict() # Special treatment to trigger which interface we're # setting for if 'device' is in the line. Presumably the # device of the interface is within the first line of the # device block. if 'device' in groupdict: cur = groupdict['device'] self.add_device(cur) elif cur is None: raise RuntimeError( "Got results that don't belong to a device" ) for k, v in groupdict.items(): if k in self._interfaces[cur]: if self._interfaces[cur][k] is None: self._interfaces[cur][k] = v elif hasattr(self._interfaces[cur][k], 'append'): self._interfaces[cur][k].append(v) elif self._interfaces[cur][k] == v: # Silently ignore if the it's the same value as last. Example: Multiple # inet4 addresses, result in multiple netmasks. Cardinality mismatch continue else: raise RuntimeError( "Tried to add {}={} multiple times to {}, it was already: {}".format( k, v, cur, self._interfaces[cur][k] ) ) else: self._interfaces[cur][k] = v # Copy the first 'inet4' ip address to 'inet' for backwards compatibility for device, device_dict in self._interfaces.items(): if len(device_dict['inet4']) > 0: device_dict['inet'] = device_dict['inet4'][0] # fix it up self._interfaces = self.alter(self._interfaces)
[ "def", "parse", "(", "self", ",", "ifconfig", "=", "None", ")", ":", "# noqa: max-complexity=12", "if", "not", "ifconfig", ":", "ifconfig", ",", "__", ",", "__", "=", "exec_cmd", "(", "self", ".", "get_command", "(", ")", ")", "self", ".", "ifconfig_data", "=", "ifconfig", "cur", "=", "None", "patterns", "=", "self", ".", "get_patterns", "(", ")", "for", "line", "in", "self", ".", "ifconfig_data", ".", "splitlines", "(", ")", ":", "for", "pattern", "in", "patterns", ":", "m", "=", "re", ".", "match", "(", "pattern", ",", "line", ")", "if", "not", "m", ":", "continue", "groupdict", "=", "m", ".", "groupdict", "(", ")", "# Special treatment to trigger which interface we're", "# setting for if 'device' is in the line. Presumably the", "# device of the interface is within the first line of the", "# device block.", "if", "'device'", "in", "groupdict", ":", "cur", "=", "groupdict", "[", "'device'", "]", "self", ".", "add_device", "(", "cur", ")", "elif", "cur", "is", "None", ":", "raise", "RuntimeError", "(", "\"Got results that don't belong to a device\"", ")", "for", "k", ",", "v", "in", "groupdict", ".", "items", "(", ")", ":", "if", "k", "in", "self", ".", "_interfaces", "[", "cur", "]", ":", "if", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", "is", "None", ":", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", "=", "v", "elif", "hasattr", "(", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", ",", "'append'", ")", ":", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", ".", "append", "(", "v", ")", "elif", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", "==", "v", ":", "# Silently ignore if the it's the same value as last. Example: Multiple", "# inet4 addresses, result in multiple netmasks. 
Cardinality mismatch", "continue", "else", ":", "raise", "RuntimeError", "(", "\"Tried to add {}={} multiple times to {}, it was already: {}\"", ".", "format", "(", "k", ",", "v", ",", "cur", ",", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", ")", ")", "else", ":", "self", ".", "_interfaces", "[", "cur", "]", "[", "k", "]", "=", "v", "# Copy the first 'inet4' ip address to 'inet' for backwards compatibility", "for", "device", ",", "device_dict", "in", "self", ".", "_interfaces", ".", "items", "(", ")", ":", "if", "len", "(", "device_dict", "[", "'inet4'", "]", ")", ">", "0", ":", "device_dict", "[", "'inet'", "]", "=", "device_dict", "[", "'inet4'", "]", "[", "0", "]", "# fix it up", "self", ".", "_interfaces", "=", "self", ".", "alter", "(", "self", ".", "_interfaces", ")" ]
Parse ifconfig output into self._interfaces. Optional Arguments: ifconfig The data (stdout) from the ifconfig command. Default is to call exec_cmd(self.get_command()).
[ "Parse", "ifconfig", "output", "into", "self", ".", "_interfaces", "." ]
train
https://github.com/ftao/python-ifcfg/blob/724a4a103088fee7dc2bc2f63b0b9006a614e1d0/src/ifcfg/parser.py#L42-L104
ftao/python-ifcfg
src/ifcfg/parser.py
Parser.alter
def alter(self, interfaces): """ Used to provide the ability to alter the interfaces dictionary before it is returned from self.parse(). Required Arguments: interfaces The interfaces dictionary. Returns: interfaces dict """ # fixup some things for device, device_dict in interfaces.items(): if len(device_dict['inet4']) > 0: device_dict['inet'] = device_dict['inet4'][0] if 'inet' in device_dict and not device_dict['inet'] is None: try: host = socket.gethostbyaddr(device_dict['inet'])[0] interfaces[device]['hostname'] = host except (socket.herror, socket.gaierror): interfaces[device]['hostname'] = None # To be sure that hex values and similar are always consistent, we # return everything in lowercase. For instance, Windows writes # MACs in upper-case. for key, device_item in device_dict.items(): if hasattr(device_item, 'lower'): interfaces[device][key] = device_dict[key].lower() return interfaces
python
def alter(self, interfaces): """ Used to provide the ability to alter the interfaces dictionary before it is returned from self.parse(). Required Arguments: interfaces The interfaces dictionary. Returns: interfaces dict """ # fixup some things for device, device_dict in interfaces.items(): if len(device_dict['inet4']) > 0: device_dict['inet'] = device_dict['inet4'][0] if 'inet' in device_dict and not device_dict['inet'] is None: try: host = socket.gethostbyaddr(device_dict['inet'])[0] interfaces[device]['hostname'] = host except (socket.herror, socket.gaierror): interfaces[device]['hostname'] = None # To be sure that hex values and similar are always consistent, we # return everything in lowercase. For instance, Windows writes # MACs in upper-case. for key, device_item in device_dict.items(): if hasattr(device_item, 'lower'): interfaces[device][key] = device_dict[key].lower() return interfaces
[ "def", "alter", "(", "self", ",", "interfaces", ")", ":", "# fixup some things", "for", "device", ",", "device_dict", "in", "interfaces", ".", "items", "(", ")", ":", "if", "len", "(", "device_dict", "[", "'inet4'", "]", ")", ">", "0", ":", "device_dict", "[", "'inet'", "]", "=", "device_dict", "[", "'inet4'", "]", "[", "0", "]", "if", "'inet'", "in", "device_dict", "and", "not", "device_dict", "[", "'inet'", "]", "is", "None", ":", "try", ":", "host", "=", "socket", ".", "gethostbyaddr", "(", "device_dict", "[", "'inet'", "]", ")", "[", "0", "]", "interfaces", "[", "device", "]", "[", "'hostname'", "]", "=", "host", "except", "(", "socket", ".", "herror", ",", "socket", ".", "gaierror", ")", ":", "interfaces", "[", "device", "]", "[", "'hostname'", "]", "=", "None", "# To be sure that hex values and similar are always consistent, we", "# return everything in lowercase. For instance, Windows writes", "# MACs in upper-case.", "for", "key", ",", "device_item", "in", "device_dict", ".", "items", "(", ")", ":", "if", "hasattr", "(", "device_item", ",", "'lower'", ")", ":", "interfaces", "[", "device", "]", "[", "key", "]", "=", "device_dict", "[", "key", "]", ".", "lower", "(", ")", "return", "interfaces" ]
Used to provide the ability to alter the interfaces dictionary before it is returned from self.parse(). Required Arguments: interfaces The interfaces dictionary. Returns: interfaces dict
[ "Used", "to", "provide", "the", "ability", "to", "alter", "the", "interfaces", "dictionary", "before", "it", "is", "returned", "from", "self", ".", "parse", "()", "." ]
train
https://github.com/ftao/python-ifcfg/blob/724a4a103088fee7dc2bc2f63b0b9006a614e1d0/src/ifcfg/parser.py#L106-L137
ftao/python-ifcfg
src/ifcfg/parser.py
UnixIPParser._default_interface
def _default_interface(self, route_output=None): """ :param route_output: For mocking actual output """ if not route_output: out, __, __ = exec_cmd('/sbin/ip route') lines = out.splitlines() else: lines = route_output.split("\n") for line in lines: line = line.split() if 'default' in line: iface = line[4] return self.interfaces.get(iface, None)
python
def _default_interface(self, route_output=None): """ :param route_output: For mocking actual output """ if not route_output: out, __, __ = exec_cmd('/sbin/ip route') lines = out.splitlines() else: lines = route_output.split("\n") for line in lines: line = line.split() if 'default' in line: iface = line[4] return self.interfaces.get(iface, None)
[ "def", "_default_interface", "(", "self", ",", "route_output", "=", "None", ")", ":", "if", "not", "route_output", ":", "out", ",", "__", ",", "__", "=", "exec_cmd", "(", "'/sbin/ip route'", ")", "lines", "=", "out", ".", "splitlines", "(", ")", "else", ":", "lines", "=", "route_output", ".", "split", "(", "\"\\n\"", ")", "for", "line", "in", "lines", ":", "line", "=", "line", ".", "split", "(", ")", "if", "'default'", "in", "line", ":", "iface", "=", "line", "[", "4", "]", "return", "self", ".", "interfaces", ".", "get", "(", "iface", ",", "None", ")" ]
:param route_output: For mocking actual output
[ ":", "param", "route_output", ":", "For", "mocking", "actual", "output" ]
train
https://github.com/ftao/python-ifcfg/blob/724a4a103088fee7dc2bc2f63b0b9006a614e1d0/src/ifcfg/parser.py#L312-L326
fossasia/AYABInterface
AYABInterface/communication/cache.py
NeedlePositionCache.get
def get(self, line_number): """Return the needle positions or None. :param int line_number: the number of the line :rtype: list :return: the needle positions for a specific line specified by :paramref:`line_number` or :obj:`None` if no were given """ if line_number not in self._get_cache: self._get_cache[line_number] = self._get(line_number) return self._get_cache[line_number]
python
def get(self, line_number): """Return the needle positions or None. :param int line_number: the number of the line :rtype: list :return: the needle positions for a specific line specified by :paramref:`line_number` or :obj:`None` if no were given """ if line_number not in self._get_cache: self._get_cache[line_number] = self._get(line_number) return self._get_cache[line_number]
[ "def", "get", "(", "self", ",", "line_number", ")", ":", "if", "line_number", "not", "in", "self", ".", "_get_cache", ":", "self", ".", "_get_cache", "[", "line_number", "]", "=", "self", ".", "_get", "(", "line_number", ")", "return", "self", ".", "_get_cache", "[", "line_number", "]" ]
Return the needle positions or None. :param int line_number: the number of the line :rtype: list :return: the needle positions for a specific line specified by :paramref:`line_number` or :obj:`None` if no were given
[ "Return", "the", "needle", "positions", "or", "None", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/cache.py#L17-L27
fossasia/AYABInterface
AYABInterface/communication/cache.py
NeedlePositionCache.get_bytes
def get_bytes(self, line_number): """Get the bytes representing needle positions or None. :param int line_number: the line number to take the bytes from :rtype: bytes :return: the bytes that represent the message or :obj:`None` if no data is there for the line. Depending on the :attr:`machine`, the length and result may vary. """ if line_number not in self._needle_position_bytes_cache: line = self._get(line_number) if line is None: line_bytes = None else: line_bytes = self._machine.needle_positions_to_bytes(line) self._needle_position_bytes_cache[line_number] = line_bytes return self._needle_position_bytes_cache[line_number]
python
def get_bytes(self, line_number): """Get the bytes representing needle positions or None. :param int line_number: the line number to take the bytes from :rtype: bytes :return: the bytes that represent the message or :obj:`None` if no data is there for the line. Depending on the :attr:`machine`, the length and result may vary. """ if line_number not in self._needle_position_bytes_cache: line = self._get(line_number) if line is None: line_bytes = None else: line_bytes = self._machine.needle_positions_to_bytes(line) self._needle_position_bytes_cache[line_number] = line_bytes return self._needle_position_bytes_cache[line_number]
[ "def", "get_bytes", "(", "self", ",", "line_number", ")", ":", "if", "line_number", "not", "in", "self", ".", "_needle_position_bytes_cache", ":", "line", "=", "self", ".", "_get", "(", "line_number", ")", "if", "line", "is", "None", ":", "line_bytes", "=", "None", "else", ":", "line_bytes", "=", "self", ".", "_machine", ".", "needle_positions_to_bytes", "(", "line", ")", "self", ".", "_needle_position_bytes_cache", "[", "line_number", "]", "=", "line_bytes", "return", "self", ".", "_needle_position_bytes_cache", "[", "line_number", "]" ]
Get the bytes representing needle positions or None. :param int line_number: the line number to take the bytes from :rtype: bytes :return: the bytes that represent the message or :obj:`None` if no data is there for the line. Depending on the :attr:`machine`, the length and result may vary.
[ "Get", "the", "bytes", "representing", "needle", "positions", "or", "None", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/cache.py#L37-L54
fossasia/AYABInterface
AYABInterface/communication/cache.py
NeedlePositionCache.get_line_configuration_message
def get_line_configuration_message(self, line_number): """Return the cnfLine content without id for the line. :param int line_number: the number of the line :rtype: bytes :return: a cnfLine message without id as defined in :ref:`cnfLine` """ if line_number not in self._line_configuration_message_cache: line_bytes = self.get_bytes(line_number) if line_bytes is not None: line_bytes = bytes([line_number & 255]) + line_bytes line_bytes += bytes([self.is_last(line_number)]) line_bytes += crc8(line_bytes).digest() self._line_configuration_message_cache[line_number] = line_bytes del line_bytes line = self._line_configuration_message_cache[line_number] if line is None: # no need to cache a lot of empty lines line = (bytes([line_number & 255]) + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01') line += crc8(line).digest() return line
python
def get_line_configuration_message(self, line_number): """Return the cnfLine content without id for the line. :param int line_number: the number of the line :rtype: bytes :return: a cnfLine message without id as defined in :ref:`cnfLine` """ if line_number not in self._line_configuration_message_cache: line_bytes = self.get_bytes(line_number) if line_bytes is not None: line_bytes = bytes([line_number & 255]) + line_bytes line_bytes += bytes([self.is_last(line_number)]) line_bytes += crc8(line_bytes).digest() self._line_configuration_message_cache[line_number] = line_bytes del line_bytes line = self._line_configuration_message_cache[line_number] if line is None: # no need to cache a lot of empty lines line = (bytes([line_number & 255]) + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01') line += crc8(line).digest() return line
[ "def", "get_line_configuration_message", "(", "self", ",", "line_number", ")", ":", "if", "line_number", "not", "in", "self", ".", "_line_configuration_message_cache", ":", "line_bytes", "=", "self", ".", "get_bytes", "(", "line_number", ")", "if", "line_bytes", "is", "not", "None", ":", "line_bytes", "=", "bytes", "(", "[", "line_number", "&", "255", "]", ")", "+", "line_bytes", "line_bytes", "+=", "bytes", "(", "[", "self", ".", "is_last", "(", "line_number", ")", "]", ")", "line_bytes", "+=", "crc8", "(", "line_bytes", ")", ".", "digest", "(", ")", "self", ".", "_line_configuration_message_cache", "[", "line_number", "]", "=", "line_bytes", "del", "line_bytes", "line", "=", "self", ".", "_line_configuration_message_cache", "[", "line_number", "]", "if", "line", "is", "None", ":", "# no need to cache a lot of empty lines", "line", "=", "(", "bytes", "(", "[", "line_number", "&", "255", "]", ")", "+", "b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00'", "+", "b'\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x01'", ")", "line", "+=", "crc8", "(", "line", ")", ".", "digest", "(", ")", "return", "line" ]
Return the cnfLine content without id for the line. :param int line_number: the number of the line :rtype: bytes :return: a cnfLine message without id as defined in :ref:`cnfLine`
[ "Return", "the", "cnfLine", "content", "without", "id", "for", "the", "line", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/cache.py#L56-L78
fossasia/AYABInterface
AYABInterface/communication/hardware_messages.py
read_message_type
def read_message_type(file): """Read the message type from a file.""" message_byte = file.read(1) if message_byte == b'': return ConnectionClosed message_number = message_byte[0] return _message_types.get(message_number, UnknownMessage)
python
def read_message_type(file): """Read the message type from a file.""" message_byte = file.read(1) if message_byte == b'': return ConnectionClosed message_number = message_byte[0] return _message_types.get(message_number, UnknownMessage)
[ "def", "read_message_type", "(", "file", ")", ":", "message_byte", "=", "file", ".", "read", "(", "1", ")", "if", "message_byte", "==", "b''", ":", "return", "ConnectionClosed", "message_number", "=", "message_byte", "[", "0", "]", "return", "_message_types", ".", "get", "(", "message_number", ",", "UnknownMessage", ")" ]
Read the message type from a file.
[ "Read", "the", "message", "type", "from", "a", "file", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/hardware_messages.py#L485-L491
fossasia/AYABInterface
AYABInterface/communication/hardware_messages.py
FixedSizeMessage.read_end_of_message
def read_end_of_message(self): """Read the b"\\r\\n" at the end of the message.""" read = self._file.read last = read(1) current = read(1) while last != b'' and current != b'' and not \ (last == b'\r' and current == b'\n'): last = current current = read(1)
python
def read_end_of_message(self): """Read the b"\\r\\n" at the end of the message.""" read = self._file.read last = read(1) current = read(1) while last != b'' and current != b'' and not \ (last == b'\r' and current == b'\n'): last = current current = read(1)
[ "def", "read_end_of_message", "(", "self", ")", ":", "read", "=", "self", ".", "_file", ".", "read", "last", "=", "read", "(", "1", ")", "current", "=", "read", "(", "1", ")", "while", "last", "!=", "b''", "and", "current", "!=", "b''", "and", "not", "(", "last", "==", "b'\\r'", "and", "current", "==", "b'\\n'", ")", ":", "last", "=", "current", "current", "=", "read", "(", "1", ")" ]
Read the b"\\r\\n" at the end of the message.
[ "Read", "the", "b", "\\\\", "r", "\\\\", "n", "at", "the", "end", "of", "the", "message", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/hardware_messages.py#L139-L147
fossasia/AYABInterface
AYABInterface/communication/hardware_messages.py
InformationConfirmation._init
def _init(self): """Read the success byte.""" self._api_version = self._file.read(1)[0] self._firmware_version = FirmwareVersion(*self._file.read(2))
python
def _init(self): """Read the success byte.""" self._api_version = self._file.read(1)[0] self._firmware_version = FirmwareVersion(*self._file.read(2))
[ "def", "_init", "(", "self", ")", ":", "self", ".", "_api_version", "=", "self", ".", "_file", ".", "read", "(", "1", ")", "[", "0", "]", "self", ".", "_firmware_version", "=", "FirmwareVersion", "(", "*", "self", ".", "_file", ".", "read", "(", "2", ")", ")" ]
Read the success byte.
[ "Read", "the", "success", "byte", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/hardware_messages.py#L258-L261
fossasia/AYABInterface
AYABInterface/communication/hardware_messages.py
LineRequest._init
def _init(self): """Read the line number.""" self._line_number = next_line( self._communication.last_requested_line_number, self._file.read(1)[0])
python
def _init(self): """Read the line number.""" self._line_number = next_line( self._communication.last_requested_line_number, self._file.read(1)[0])
[ "def", "_init", "(", "self", ")", ":", "self", ".", "_line_number", "=", "next_line", "(", "self", ".", "_communication", ".", "last_requested_line_number", ",", "self", ".", "_file", ".", "read", "(", "1", ")", "[", "0", "]", ")" ]
Read the line number.
[ "Read", "the", "line", "number", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/hardware_messages.py#L342-L346
fossasia/AYABInterface
AYABInterface/communication/hardware_messages.py
StateIndication._init
def _init(self): """Read the success byte.""" self._ready = self._file.read(1) self._hall_left = self._file.read(2) self._hall_right = self._file.read(2) self._carriage_type = self._file.read(1)[0] self._carriage_position = self._file.read(1)[0]
python
def _init(self): """Read the success byte.""" self._ready = self._file.read(1) self._hall_left = self._file.read(2) self._hall_right = self._file.read(2) self._carriage_type = self._file.read(1)[0] self._carriage_position = self._file.read(1)[0]
[ "def", "_init", "(", "self", ")", ":", "self", ".", "_ready", "=", "self", ".", "_file", ".", "read", "(", "1", ")", "self", ".", "_hall_left", "=", "self", ".", "_file", ".", "read", "(", "2", ")", "self", ".", "_hall_right", "=", "self", ".", "_file", ".", "read", "(", "2", ")", "self", ".", "_carriage_type", "=", "self", ".", "_file", ".", "read", "(", "1", ")", "[", "0", "]", "self", ".", "_carriage_position", "=", "self", ".", "_file", ".", "read", "(", "1", ")", "[", "0", "]" ]
Read the success byte.
[ "Read", "the", "success", "byte", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/hardware_messages.py#L375-L381
fossasia/AYABInterface
AYABInterface/communication/hardware_messages.py
Debug._init
def _init(self): """Read the b"\\r\\n" at the end of the message.""" read_values = [] read = self._file.read last = read(1) current = read(1) while last != b'' and current != b'' and not \ (last == b'\r' and current == b'\n'): read_values.append(last) last = current current = read(1) if current == b'' and last != b'\r': read_values.append(last) self._bytes = b''.join(read_values)
python
def _init(self): """Read the b"\\r\\n" at the end of the message.""" read_values = [] read = self._file.read last = read(1) current = read(1) while last != b'' and current != b'' and not \ (last == b'\r' and current == b'\n'): read_values.append(last) last = current current = read(1) if current == b'' and last != b'\r': read_values.append(last) self._bytes = b''.join(read_values)
[ "def", "_init", "(", "self", ")", ":", "read_values", "=", "[", "]", "read", "=", "self", ".", "_file", ".", "read", "last", "=", "read", "(", "1", ")", "current", "=", "read", "(", "1", ")", "while", "last", "!=", "b''", "and", "current", "!=", "b''", "and", "not", "(", "last", "==", "b'\\r'", "and", "current", "==", "b'\\n'", ")", ":", "read_values", ".", "append", "(", "last", ")", "last", "=", "current", "current", "=", "read", "(", "1", ")", "if", "current", "==", "b''", "and", "last", "!=", "b'\\r'", ":", "read_values", ".", "append", "(", "last", ")", "self", ".", "_bytes", "=", "b''", ".", "join", "(", "read_values", ")" ]
Read the b"\\r\\n" at the end of the message.
[ "Read", "the", "b", "\\\\", "r", "\\\\", "n", "at", "the", "end", "of", "the", "message", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/hardware_messages.py#L447-L460
fossasia/AYABInterface
AYABInterface/communication/host_messages.py
Message.send
def send(self): """Send this message to the controller.""" self._file.write(self.as_bytes()) self._file.write(b'\r\n')
python
def send(self): """Send this message to the controller.""" self._file.write(self.as_bytes()) self._file.write(b'\r\n')
[ "def", "send", "(", "self", ")", ":", "self", ".", "_file", ".", "write", "(", "self", ".", "as_bytes", "(", ")", ")", "self", ".", "_file", ".", "write", "(", "b'\\r\\n'", ")" ]
Send this message to the controller.
[ "Send", "this", "message", "to", "the", "controller", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/host_messages.py#L50-L53
fossasia/AYABInterface
AYABInterface/communication/host_messages.py
StartRequest.init
def init(self, left_end_needle, right_end_needle): """Initialize the StartRequest with start and stop needle. :raises TypeError: if the arguments are not integers :raises ValueError: if the values do not match the :ref:`specification <m4-01>` """ if not isinstance(left_end_needle, int): raise TypeError(_left_end_needle_error_message(left_end_needle)) if left_end_needle < 0 or left_end_needle > 198: raise ValueError(_left_end_needle_error_message(left_end_needle)) if not isinstance(right_end_needle, int): raise TypeError(_right_end_needle_error_message(right_end_needle)) if right_end_needle < 1 or right_end_needle > 199: raise ValueError(_right_end_needle_error_message(right_end_needle)) self._left_end_needle = left_end_needle self._right_end_needle = right_end_needle
python
def init(self, left_end_needle, right_end_needle): """Initialize the StartRequest with start and stop needle. :raises TypeError: if the arguments are not integers :raises ValueError: if the values do not match the :ref:`specification <m4-01>` """ if not isinstance(left_end_needle, int): raise TypeError(_left_end_needle_error_message(left_end_needle)) if left_end_needle < 0 or left_end_needle > 198: raise ValueError(_left_end_needle_error_message(left_end_needle)) if not isinstance(right_end_needle, int): raise TypeError(_right_end_needle_error_message(right_end_needle)) if right_end_needle < 1 or right_end_needle > 199: raise ValueError(_right_end_needle_error_message(right_end_needle)) self._left_end_needle = left_end_needle self._right_end_needle = right_end_needle
[ "def", "init", "(", "self", ",", "left_end_needle", ",", "right_end_needle", ")", ":", "if", "not", "isinstance", "(", "left_end_needle", ",", "int", ")", ":", "raise", "TypeError", "(", "_left_end_needle_error_message", "(", "left_end_needle", ")", ")", "if", "left_end_needle", "<", "0", "or", "left_end_needle", ">", "198", ":", "raise", "ValueError", "(", "_left_end_needle_error_message", "(", "left_end_needle", ")", ")", "if", "not", "isinstance", "(", "right_end_needle", ",", "int", ")", ":", "raise", "TypeError", "(", "_right_end_needle_error_message", "(", "right_end_needle", ")", ")", "if", "right_end_needle", "<", "1", "or", "right_end_needle", ">", "199", ":", "raise", "ValueError", "(", "_right_end_needle_error_message", "(", "right_end_needle", ")", ")", "self", ".", "_left_end_needle", "=", "left_end_needle", "self", ".", "_right_end_needle", "=", "right_end_needle" ]
Initialize the StartRequest with start and stop needle. :raises TypeError: if the arguments are not integers :raises ValueError: if the values do not match the :ref:`specification <m4-01>`
[ "Initialize", "the", "StartRequest", "with", "start", "and", "stop", "needle", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/host_messages.py#L83-L100
fossasia/AYABInterface
AYABInterface/communication/host_messages.py
LineConfirmation.content_bytes
def content_bytes(self): """Return the start and stop needle.""" get_message = \ self._communication.needle_positions.get_line_configuration_message return get_message(self._line_number)
python
def content_bytes(self): """Return the start and stop needle.""" get_message = \ self._communication.needle_positions.get_line_configuration_message return get_message(self._line_number)
[ "def", "content_bytes", "(", "self", ")", ":", "get_message", "=", "self", ".", "_communication", ".", "needle_positions", ".", "get_line_configuration_message", "return", "get_message", "(", "self", ".", "_line_number", ")" ]
Return the start and stop needle.
[ "Return", "the", "start", "and", "stop", "needle", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/host_messages.py#L141-L145
fossasia/AYABInterface
AYABInterface/utils.py
sum_all
def sum_all(iterable, start): """Sum up an iterable starting with a start value. In contrast to :func:`sum`, this also works on other types like :class:`lists <list>` and :class:`sets <set>`. """ if hasattr(start, "__add__"): for value in iterable: start += value else: for value in iterable: start |= value return start
python
def sum_all(iterable, start): """Sum up an iterable starting with a start value. In contrast to :func:`sum`, this also works on other types like :class:`lists <list>` and :class:`sets <set>`. """ if hasattr(start, "__add__"): for value in iterable: start += value else: for value in iterable: start |= value return start
[ "def", "sum_all", "(", "iterable", ",", "start", ")", ":", "if", "hasattr", "(", "start", ",", "\"__add__\"", ")", ":", "for", "value", "in", "iterable", ":", "start", "+=", "value", "else", ":", "for", "value", "in", "iterable", ":", "start", "|=", "value", "return", "start" ]
Sum up an iterable starting with a start value. In contrast to :func:`sum`, this also works on other types like :class:`lists <list>` and :class:`sets <set>`.
[ "Sum", "up", "an", "iterable", "starting", "with", "a", "start", "value", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/utils.py#L4-L16
fossasia/AYABInterface
AYABInterface/utils.py
next_line
def next_line(last_line, next_line_8bit): """Compute the next line based on the last line and a 8bit next line. The behaviour of the function is specified in :ref:`reqline`. :param int last_line: the last line that was processed :param int next_line_8bit: the lower 8 bits of the next line :return: the next line closest to :paramref:`last_line` .. seealso:: :ref:`reqline` """ # compute the line without the lowest byte base_line = last_line - (last_line & 255) # compute the three different lines line = base_line + next_line_8bit lower_line = line - 256 upper_line = line + 256 # compute the next line if last_line - lower_line <= line - last_line: return lower_line if upper_line - last_line < last_line - line: return upper_line return line
python
def next_line(last_line, next_line_8bit): """Compute the next line based on the last line and a 8bit next line. The behaviour of the function is specified in :ref:`reqline`. :param int last_line: the last line that was processed :param int next_line_8bit: the lower 8 bits of the next line :return: the next line closest to :paramref:`last_line` .. seealso:: :ref:`reqline` """ # compute the line without the lowest byte base_line = last_line - (last_line & 255) # compute the three different lines line = base_line + next_line_8bit lower_line = line - 256 upper_line = line + 256 # compute the next line if last_line - lower_line <= line - last_line: return lower_line if upper_line - last_line < last_line - line: return upper_line return line
[ "def", "next_line", "(", "last_line", ",", "next_line_8bit", ")", ":", "# compute the line without the lowest byte", "base_line", "=", "last_line", "-", "(", "last_line", "&", "255", ")", "# compute the three different lines", "line", "=", "base_line", "+", "next_line_8bit", "lower_line", "=", "line", "-", "256", "upper_line", "=", "line", "+", "256", "# compute the next line", "if", "last_line", "-", "lower_line", "<=", "line", "-", "last_line", ":", "return", "lower_line", "if", "upper_line", "-", "last_line", "<", "last_line", "-", "line", ":", "return", "upper_line", "return", "line" ]
Compute the next line based on the last line and a 8bit next line. The behaviour of the function is specified in :ref:`reqline`. :param int last_line: the last line that was processed :param int next_line_8bit: the lower 8 bits of the next line :return: the next line closest to :paramref:`last_line` .. seealso:: :ref:`reqline`
[ "Compute", "the", "next", "line", "based", "on", "the", "last", "line", "and", "a", "8bit", "next", "line", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/utils.py#L27-L49
fossasia/AYABInterface
AYABInterface/utils.py
camel_case_to_under_score
def camel_case_to_under_score(camel_case_name): """Return the underscore name of a camel case name. :param str camel_case_name: a name in camel case such as ``"ACamelCaseName"`` :return: the name using underscores, e.g. ``"a_camel_case_name"`` :rtype: str """ result = [] for letter in camel_case_name: if letter.lower() != letter: result.append("_" + letter.lower()) else: result.append(letter.lower()) if result[0].startswith("_"): result[0] = result[0][1:] return "".join(result)
python
def camel_case_to_under_score(camel_case_name): """Return the underscore name of a camel case name. :param str camel_case_name: a name in camel case such as ``"ACamelCaseName"`` :return: the name using underscores, e.g. ``"a_camel_case_name"`` :rtype: str """ result = [] for letter in camel_case_name: if letter.lower() != letter: result.append("_" + letter.lower()) else: result.append(letter.lower()) if result[0].startswith("_"): result[0] = result[0][1:] return "".join(result)
[ "def", "camel_case_to_under_score", "(", "camel_case_name", ")", ":", "result", "=", "[", "]", "for", "letter", "in", "camel_case_name", ":", "if", "letter", ".", "lower", "(", ")", "!=", "letter", ":", "result", ".", "append", "(", "\"_\"", "+", "letter", ".", "lower", "(", ")", ")", "else", ":", "result", ".", "append", "(", "letter", ".", "lower", "(", ")", ")", "if", "result", "[", "0", "]", ".", "startswith", "(", "\"_\"", ")", ":", "result", "[", "0", "]", "=", "result", "[", "0", "]", "[", "1", ":", "]", "return", "\"\"", ".", "join", "(", "result", ")" ]
Return the underscore name of a camel case name. :param str camel_case_name: a name in camel case such as ``"ACamelCaseName"`` :return: the name using underscores, e.g. ``"a_camel_case_name"`` :rtype: str
[ "Return", "the", "underscore", "name", "of", "a", "camel", "case", "name", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/utils.py#L52-L68
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication._message_received
def _message_received(self, message): """Notify the observers about the received message.""" with self.lock: self._state.receive_message(message) for callable in chain(self._on_message_received, self._on_message): callable(message)
python
def _message_received(self, message): """Notify the observers about the received message.""" with self.lock: self._state.receive_message(message) for callable in chain(self._on_message_received, self._on_message): callable(message)
[ "def", "_message_received", "(", "self", ",", "message", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "_state", ".", "receive_message", "(", "message", ")", "for", "callable", "in", "chain", "(", "self", ".", "_on_message_received", ",", "self", ".", "_on_message", ")", ":", "callable", "(", "message", ")" ]
Notify the observers about the received message.
[ "Notify", "the", "observers", "about", "the", "received", "message", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L79-L84
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication.receive_message
def receive_message(self): """Receive a message from the file.""" with self.lock: assert self.can_receive_messages() message_type = self._read_message_type(self._file) message = message_type(self._file, self) self._message_received(message)
python
def receive_message(self): """Receive a message from the file.""" with self.lock: assert self.can_receive_messages() message_type = self._read_message_type(self._file) message = message_type(self._file, self) self._message_received(message)
[ "def", "receive_message", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "assert", "self", ".", "can_receive_messages", "(", ")", "message_type", "=", "self", ".", "_read_message_type", "(", "self", ".", "_file", ")", "message", "=", "message_type", "(", "self", ".", "_file", ",", "self", ")", "self", ".", "_message_received", "(", "message", ")" ]
Receive a message from the file.
[ "Receive", "a", "message", "from", "the", "file", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L96-L102
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication.can_receive_messages
def can_receive_messages(self): """Whether tihs communication is ready to receive messages.] :rtype: bool .. code:: python assert not communication.can_receive_messages() communication.start() assert communication.can_receive_messages() communication.stop() assert not communication.can_receive_messages() """ with self.lock: return not self._state.is_waiting_for_start() and \ not self._state.is_connection_closed()
python
def can_receive_messages(self): """Whether tihs communication is ready to receive messages.] :rtype: bool .. code:: python assert not communication.can_receive_messages() communication.start() assert communication.can_receive_messages() communication.stop() assert not communication.can_receive_messages() """ with self.lock: return not self._state.is_waiting_for_start() and \ not self._state.is_connection_closed()
[ "def", "can_receive_messages", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "return", "not", "self", ".", "_state", ".", "is_waiting_for_start", "(", ")", "and", "not", "self", ".", "_state", ".", "is_connection_closed", "(", ")" ]
Whether tihs communication is ready to receive messages.] :rtype: bool .. code:: python assert not communication.can_receive_messages() communication.start() assert communication.can_receive_messages() communication.stop() assert not communication.can_receive_messages()
[ "Whether", "tihs", "communication", "is", "ready", "to", "receive", "messages", ".", "]" ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L104-L120
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication.stop
def stop(self): """Stop the communication with the shield.""" with self.lock: self._message_received(ConnectionClosed(self._file, self))
python
def stop(self): """Stop the communication with the shield.""" with self.lock: self._message_received(ConnectionClosed(self._file, self))
[ "def", "stop", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "_message_received", "(", "ConnectionClosed", "(", "self", ".", "_file", ",", "self", ")", ")" ]
Stop the communication with the shield.
[ "Stop", "the", "communication", "with", "the", "shield", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L122-L125
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication.send
def send(self, host_message_class, *args): """Send a host message. :param type host_message_class: a subclass of :class:`AYABImterface.communication.host_messages.Message` :param args: additional arguments that shall be passed to the :paramref:`host_message_class` as arguments """ message = host_message_class(self._file, self, *args) with self.lock: message.send() for callable in self._on_message: callable(message)
python
def send(self, host_message_class, *args): """Send a host message. :param type host_message_class: a subclass of :class:`AYABImterface.communication.host_messages.Message` :param args: additional arguments that shall be passed to the :paramref:`host_message_class` as arguments """ message = host_message_class(self._file, self, *args) with self.lock: message.send() for callable in self._on_message: callable(message)
[ "def", "send", "(", "self", ",", "host_message_class", ",", "*", "args", ")", ":", "message", "=", "host_message_class", "(", "self", ".", "_file", ",", "self", ",", "*", "args", ")", "with", "self", ".", "lock", ":", "message", ".", "send", "(", ")", "for", "callable", "in", "self", ".", "_on_message", ":", "callable", "(", "message", ")" ]
Send a host message. :param type host_message_class: a subclass of :class:`AYABImterface.communication.host_messages.Message` :param args: additional arguments that shall be passed to the :paramref:`host_message_class` as arguments
[ "Send", "a", "host", "message", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L138-L150
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication.state
def state(self, new_state): """Set the state.""" with self.lock: self._state.exit() self._state = new_state self._state.enter()
python
def state(self, new_state): """Set the state.""" with self.lock: self._state.exit() self._state = new_state self._state.enter()
[ "def", "state", "(", "self", ",", "new_state", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "_state", ".", "exit", "(", ")", "self", ".", "_state", "=", "new_state", "self", ".", "_state", ".", "enter", "(", ")" ]
Set the state.
[ "Set", "the", "state", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L175-L180
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication.parallelize
def parallelize(self, seconds_to_wait=2): """Start a parallel thread for receiving messages. If :meth:`start` was no called before, start will be called in the thread. The thread calls :meth:`receive_message` until the :attr:`state` :meth:`~AYABInterface.communication.states.State.is_connection_closed`. :param float seconds_to_wait: A time in seconds to wait with the parallel execution. This is useful to allow the controller time to initialize. .. seealso:: :attr:`lock`, :meth:`runs_in_parallel` """ with self.lock: thread = Thread(target=self._parallel_receive_loop, args=(seconds_to_wait,)) thread.deamon = True thread.start() self._thread = thread
python
def parallelize(self, seconds_to_wait=2): """Start a parallel thread for receiving messages. If :meth:`start` was no called before, start will be called in the thread. The thread calls :meth:`receive_message` until the :attr:`state` :meth:`~AYABInterface.communication.states.State.is_connection_closed`. :param float seconds_to_wait: A time in seconds to wait with the parallel execution. This is useful to allow the controller time to initialize. .. seealso:: :attr:`lock`, :meth:`runs_in_parallel` """ with self.lock: thread = Thread(target=self._parallel_receive_loop, args=(seconds_to_wait,)) thread.deamon = True thread.start() self._thread = thread
[ "def", "parallelize", "(", "self", ",", "seconds_to_wait", "=", "2", ")", ":", "with", "self", ".", "lock", ":", "thread", "=", "Thread", "(", "target", "=", "self", ".", "_parallel_receive_loop", ",", "args", "=", "(", "seconds_to_wait", ",", ")", ")", "thread", ".", "deamon", "=", "True", "thread", ".", "start", "(", ")", "self", ".", "_thread", "=", "thread" ]
Start a parallel thread for receiving messages. If :meth:`start` was no called before, start will be called in the thread. The thread calls :meth:`receive_message` until the :attr:`state` :meth:`~AYABInterface.communication.states.State.is_connection_closed`. :param float seconds_to_wait: A time in seconds to wait with the parallel execution. This is useful to allow the controller time to initialize. .. seealso:: :attr:`lock`, :meth:`runs_in_parallel`
[ "Start", "a", "parallel", "thread", "for", "receiving", "messages", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L244-L263
fossasia/AYABInterface
AYABInterface/communication/__init__.py
Communication._parallel_receive_loop
def _parallel_receive_loop(self, seconds_to_wait): """Run the receiving in parallel.""" sleep(seconds_to_wait) with self._lock: self._number_of_threads_receiving_messages += 1 try: with self._lock: if self.state.is_waiting_for_start(): self.start() while True: with self.lock: if self.state.is_connection_closed(): return self.receive_message() finally: with self._lock: self._number_of_threads_receiving_messages -= 1
python
def _parallel_receive_loop(self, seconds_to_wait): """Run the receiving in parallel.""" sleep(seconds_to_wait) with self._lock: self._number_of_threads_receiving_messages += 1 try: with self._lock: if self.state.is_waiting_for_start(): self.start() while True: with self.lock: if self.state.is_connection_closed(): return self.receive_message() finally: with self._lock: self._number_of_threads_receiving_messages -= 1
[ "def", "_parallel_receive_loop", "(", "self", ",", "seconds_to_wait", ")", ":", "sleep", "(", "seconds_to_wait", ")", "with", "self", ".", "_lock", ":", "self", ".", "_number_of_threads_receiving_messages", "+=", "1", "try", ":", "with", "self", ".", "_lock", ":", "if", "self", ".", "state", ".", "is_waiting_for_start", "(", ")", ":", "self", ".", "start", "(", ")", "while", "True", ":", "with", "self", ".", "lock", ":", "if", "self", ".", "state", ".", "is_connection_closed", "(", ")", ":", "return", "self", ".", "receive_message", "(", ")", "finally", ":", "with", "self", ".", "_lock", ":", "self", ".", "_number_of_threads_receiving_messages", "-=", "1" ]
Run the receiving in parallel.
[ "Run", "the", "receiving", "in", "parallel", "." ]
train
https://github.com/fossasia/AYABInterface/blob/e2065eed8daf17b2936f6ca5e488c9bfb850914e/AYABInterface/communication/__init__.py#L265-L281