repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
sequencelengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
sequencelengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.set_base_url
def set_base_url(self, platform: str = "prod"):
    """Set Isogeo base URLs according to platform.

    :param str platform: platform to use. Options:

      * prod [DEFAULT]
      * qa

      (a legacy ``int`` platform is mentioned in older docs but is NOT
      supported here and raises ValueError)

    :returns: tuple of (platform, api_url, app_url, csw_url, mng_url,
      oc_url, ssl)
    :raises ValueError: if platform is not 'prod' or 'qa'
    """
    platform = platform.lower()
    self.platform = platform
    if platform == "prod":
        # production uses valid certificates, so TLS verification stays on
        ssl = True
        logging.debug("Using production platform.")
    elif platform == "qa":
        # QA endpoints use non-production certificates
        ssl = False
        logging.debug("Using Quality Assurance platform (reduced perfs).")
    else:
        logging.error(
            "Platform must be one of: {}".format(" | ".join(self.API_URLS.keys()))
        )
        # keep the (code, message) args shape for callers inspecting e.args
        raise ValueError(
            3,
            "Platform must be one of: {}".format(" | ".join(self.API_URLS.keys())),
        )
    # method ending — platform was already lowercased above
    return (
        platform,
        self.API_URLS.get(platform),
        self.APP_URLS.get(platform),
        self.CSW_URLS.get(platform),
        self.MNG_URLS.get(platform),
        self.OC_URLS.get(platform),
        ssl,
    )
python
def set_base_url(self, platform: str = "prod"): """Set Isogeo base URLs according to platform. :param str platform: platform to use. Options: * prod [DEFAULT] * qa * int """ platform = platform.lower() self.platform = platform if platform == "prod": ssl = True logging.debug("Using production platform.") elif platform == "qa": ssl = False logging.debug("Using Quality Assurance platform (reduced perfs).") else: logging.error( "Platform must be one of: {}".format(" | ".join(self.API_URLS.keys())) ) raise ValueError( 3, "Platform must be one of: {}".format(" | ".join(self.API_URLS.keys())), ) # method ending return ( platform.lower(), self.API_URLS.get(platform), self.APP_URLS.get(platform), self.CSW_URLS.get(platform), self.MNG_URLS.get(platform), self.OC_URLS.get(platform), ssl, )
[ "def", "set_base_url", "(", "self", ",", "platform", ":", "str", "=", "\"prod\"", ")", ":", "platform", "=", "platform", ".", "lower", "(", ")", "self", ".", "platform", "=", "platform", "if", "platform", "==", "\"prod\"", ":", "ssl", "=", "True", "logging", ".", "debug", "(", "\"Using production platform.\"", ")", "elif", "platform", "==", "\"qa\"", ":", "ssl", "=", "False", "logging", ".", "debug", "(", "\"Using Quality Assurance platform (reduced perfs).\"", ")", "else", ":", "logging", ".", "error", "(", "\"Platform must be one of: {}\"", ".", "format", "(", "\" | \"", ".", "join", "(", "self", ".", "API_URLS", ".", "keys", "(", ")", ")", ")", ")", "raise", "ValueError", "(", "3", ",", "\"Platform must be one of: {}\"", ".", "format", "(", "\" | \"", ".", "join", "(", "self", ".", "API_URLS", ".", "keys", "(", ")", ")", ")", ",", ")", "# method ending", "return", "(", "platform", ".", "lower", "(", ")", ",", "self", ".", "API_URLS", ".", "get", "(", "platform", ")", ",", "self", ".", "APP_URLS", ".", "get", "(", "platform", ")", ",", "self", ".", "CSW_URLS", ".", "get", "(", "platform", ")", ",", "self", ".", "MNG_URLS", ".", "get", "(", "platform", ")", ",", "self", ".", "OC_URLS", ".", "get", "(", "platform", ")", ",", "ssl", ",", ")" ]
Set Isogeo base URLs according to platform. :param str platform: platform to use. Options: * prod [DEFAULT] * qa * int
[ "Set", "Isogeo", "base", "URLs", "according", "to", "platform", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L121-L155
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.convert_uuid
def convert_uuid(self, in_uuid: str = str, mode: int = 0):
    """Convert a metadata UUID to its URI equivalent. And conversely.

    :param str in_uuid: UUID or URN to convert
    :param int mode: conversion direction. Options:

      * 0 to HEX
      * 1 to URN (RFC4122)
      * 2 to URN (Isogeo specific style)

    :returns: converted UUID as str
    :raises TypeError: if in_uuid is not a str or mode is not an int
    :raises ValueError: if in_uuid is not a valid UUID or mode not in 0|1|2
    """
    # parameters check
    # NOTE(review): the default for in_uuid is the ``str`` type object itself,
    # which always fails this check — callers must pass a value explicitly.
    if not isinstance(in_uuid, str):
        raise TypeError("'in_uuid' expected a str value.")
    if not checker.check_is_uuid(in_uuid):
        raise ValueError("{} is not a correct UUID".format(in_uuid))
    # mode annotation fixed: it is compared to ints and validated as int
    if not isinstance(mode, int):
        raise TypeError("'mode' expects an integer value")
    # handle Isogeo specific UUID in XML exports
    if "isogeo:metadata" in in_uuid:
        in_uuid = "urn:uuid:{}".format(in_uuid.split(":")[-1])
        logging.debug("Isogeo UUID URN spotted: {}".format(in_uuid))
    # operate
    if mode == 0:
        return uuid.UUID(in_uuid).hex
    elif mode == 1:
        return uuid.UUID(in_uuid).urn
    elif mode == 2:
        urn = uuid.UUID(in_uuid).urn
        return "urn:isogeo:metadata:uuid:{}".format(urn.split(":")[2])
    else:
        raise ValueError("'mode' must be one of: 0 | 1 | 2")
python
def convert_uuid(self, in_uuid: str = str, mode: bool = 0): """Convert a metadata UUID to its URI equivalent. And conversely. :param str in_uuid: UUID or URI to convert :param int mode: conversion direction. Options: * 0 to HEX * 1 to URN (RFC4122) * 2 to URN (Isogeo specific style) """ # parameters check if not isinstance(in_uuid, str): raise TypeError("'in_uuid' expected a str value.") else: pass if not checker.check_is_uuid(in_uuid): raise ValueError("{} is not a correct UUID".format(in_uuid)) else: pass if not isinstance(mode, int): raise TypeError("'mode' expects an integer value") else: pass # handle Isogeo specific UUID in XML exports if "isogeo:metadata" in in_uuid: in_uuid = "urn:uuid:{}".format(in_uuid.split(":")[-1]) logging.debug("Isogeo UUUID URN spotted: {}".format(in_uuid)) else: pass # operate if mode == 0: return uuid.UUID(in_uuid).hex elif mode == 1: return uuid.UUID(in_uuid).urn elif mode == 2: urn = uuid.UUID(in_uuid).urn return "urn:isogeo:metadata:uuid:{}".format(urn.split(":")[2]) else: raise ValueError("'mode' must be one of: 0 | 1 | 2")
[ "def", "convert_uuid", "(", "self", ",", "in_uuid", ":", "str", "=", "str", ",", "mode", ":", "bool", "=", "0", ")", ":", "# parameters check", "if", "not", "isinstance", "(", "in_uuid", ",", "str", ")", ":", "raise", "TypeError", "(", "\"'in_uuid' expected a str value.\"", ")", "else", ":", "pass", "if", "not", "checker", ".", "check_is_uuid", "(", "in_uuid", ")", ":", "raise", "ValueError", "(", "\"{} is not a correct UUID\"", ".", "format", "(", "in_uuid", ")", ")", "else", ":", "pass", "if", "not", "isinstance", "(", "mode", ",", "int", ")", ":", "raise", "TypeError", "(", "\"'mode' expects an integer value\"", ")", "else", ":", "pass", "# handle Isogeo specific UUID in XML exports", "if", "\"isogeo:metadata\"", "in", "in_uuid", ":", "in_uuid", "=", "\"urn:uuid:{}\"", ".", "format", "(", "in_uuid", ".", "split", "(", "\":\"", ")", "[", "-", "1", "]", ")", "logging", ".", "debug", "(", "\"Isogeo UUUID URN spotted: {}\"", ".", "format", "(", "in_uuid", ")", ")", "else", ":", "pass", "# operate", "if", "mode", "==", "0", ":", "return", "uuid", ".", "UUID", "(", "in_uuid", ")", ".", "hex", "elif", "mode", "==", "1", ":", "return", "uuid", ".", "UUID", "(", "in_uuid", ")", ".", "urn", "elif", "mode", "==", "2", ":", "urn", "=", "uuid", ".", "UUID", "(", "in_uuid", ")", ".", "urn", "return", "\"urn:isogeo:metadata:uuid:{}\"", ".", "format", "(", "urn", ".", "split", "(", "\":\"", ")", "[", "2", "]", ")", "else", ":", "raise", "ValueError", "(", "\"'mode' must be one of: 0 | 1 | 2\"", ")" ]
Convert a metadata UUID to its URI equivalent. And conversely. :param str in_uuid: UUID or URI to convert :param int mode: conversion direction. Options: * 0 to HEX * 1 to URN (RFC4122) * 2 to URN (Isogeo specific style)
[ "Convert", "a", "metadata", "UUID", "to", "its", "URI", "equivalent", ".", "And", "conversely", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L157-L196
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.encoded_words_to_text
def encoded_words_to_text(self, in_encoded_words: str):
    """Decode an RFC 2047 'encoded word' into text.

    Pulls out the character set, encoding, and encoded text from the input,
    decodes the encoded text into a byte string (base64 for 'B', quopri for
    'Q'), then decodes that byte string using the character set.

    See:

      - https://github.com/isogeo/isogeo-api-py-minsdk/issues/32
      - https://dmorgan.info/posts/encoded-word-syntax/

    :param str in_encoded_words: base64 or quopri encoded character string.
    :returns: decoded text, or the input unchanged when it is not an
      encoded word
    """
    # handle RFC2047 quoting
    if '"' in in_encoded_words:
        in_encoded_words = in_encoded_words.strip('"')
    # regex — BUG FIX: '[B|Q]' also matched a literal '|' as the encoding,
    # which would have left byte_string unbound below
    encoded_word_regex = r"=\?(.+)\?([BQ])\?(.+)\?="
    # pull out
    try:
        charset, encoding, encoded_text = re.match(
            encoded_word_regex, in_encoded_words
        ).groups()
    except AttributeError:
        logging.debug("Input text was not encoded into base64 or quopri")
        return in_encoded_words
    # decode depending on encoding
    if encoding == "B":
        byte_string = base64.b64decode(encoded_text)
    elif encoding == "Q":
        byte_string = quopri.decodestring(encoded_text)
    return byte_string.decode(charset)
python
def encoded_words_to_text(self, in_encoded_words: str): """Pull out the character set, encoding, and encoded text from the input encoded words. Next, it decodes the encoded words into a byte string, using either the quopri module or base64 module as determined by the encoding. Finally, it decodes the byte string using the character set and returns the result. See: - https://github.com/isogeo/isogeo-api-py-minsdk/issues/32 - https://dmorgan.info/posts/encoded-word-syntax/ :param str in_encoded_words: base64 or quori encoded character string. """ # handle RFC2047 quoting if '"' in in_encoded_words: in_encoded_words = in_encoded_words.strip('"') # regex encoded_word_regex = r"=\?{1}(.+)\?{1}([B|Q])\?{1}(.+)\?{1}=" # pull out try: charset, encoding, encoded_text = re.match( encoded_word_regex, in_encoded_words ).groups() except AttributeError: logging.debug("Input text was not encoded into base64 or quori") return in_encoded_words # decode depending on encoding if encoding == "B": byte_string = base64.b64decode(encoded_text) elif encoding == "Q": byte_string = quopri.decodestring(encoded_text) return byte_string.decode(charset)
[ "def", "encoded_words_to_text", "(", "self", ",", "in_encoded_words", ":", "str", ")", ":", "# handle RFC2047 quoting", "if", "'\"'", "in", "in_encoded_words", ":", "in_encoded_words", "=", "in_encoded_words", ".", "strip", "(", "'\"'", ")", "# regex", "encoded_word_regex", "=", "r\"=\\?{1}(.+)\\?{1}([B|Q])\\?{1}(.+)\\?{1}=\"", "# pull out", "try", ":", "charset", ",", "encoding", ",", "encoded_text", "=", "re", ".", "match", "(", "encoded_word_regex", ",", "in_encoded_words", ")", ".", "groups", "(", ")", "except", "AttributeError", ":", "logging", ".", "debug", "(", "\"Input text was not encoded into base64 or quori\"", ")", "return", "in_encoded_words", "# decode depending on encoding", "if", "encoding", "==", "\"B\"", ":", "byte_string", "=", "base64", ".", "b64decode", "(", "encoded_text", ")", "elif", "encoding", "==", "\"Q\"", ":", "byte_string", "=", "quopri", ".", "decodestring", "(", "encoded_text", ")", "return", "byte_string", ".", "decode", "(", "charset", ")" ]
Pull out the character set, encoding, and encoded text from the input encoded words. Next, it decodes the encoded words into a byte string, using either the quopri module or base64 module as determined by the encoding. Finally, it decodes the byte string using the character set and returns the result. See: - https://github.com/isogeo/isogeo-api-py-minsdk/issues/32 - https://dmorgan.info/posts/encoded-word-syntax/ :param str in_encoded_words: base64 or quori encoded character string.
[ "Pull", "out", "the", "character", "set", "encoding", "and", "encoded", "text", "from", "the", "input", "encoded", "words", ".", "Next", "it", "decodes", "the", "encoded", "words", "into", "a", "byte", "string", "using", "either", "the", "quopri", "module", "or", "base64", "module", "as", "determined", "by", "the", "encoding", ".", "Finally", "it", "decodes", "the", "byte", "string", "using", "the", "character", "set", "and", "returns", "the", "result", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L198-L231
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.get_isogeo_version
def get_isogeo_version(self, component: str = "api", prot: str = "https"):
    """Get Isogeo components versions. Authentication not required.

    :param str component: which platform component. Options:

      * api [default]
      * db
      * app
    """
    # resolve the '/about' URL for the requested component
    if component == "api":
        version_url = "{}://v1.{}.isogeo.com/about".format(prot, self.api_url)
    elif component == "db":
        version_url = "{}://v1.{}.isogeo.com/about/database".format(
            prot, self.api_url
        )
    elif component == "app":
        # the app component only has known endpoints on prod and qa
        if self.platform == "prod":
            version_url = "https://app.isogeo.com/about"
        elif self.platform == "qa":
            version_url = "https://qa-isogeo-app.azurewebsites.net/about"
        else:
            raise ValueError(
                "Component value must be one of: " "api [default], db, app."
            )
    else:
        raise ValueError(
            "Component value must be one of: " "api [default], db, app."
        )
    # ask the platform and validate the response
    version_req = requests.get(version_url, proxies=self.proxies, verify=self.ssl)
    checker.check_api_response(version_req)
    # extract the version number from the JSON payload
    return version_req.json().get("version")
python
def get_isogeo_version(self, component: str = "api", prot: str = "https"): """Get Isogeo components versions. Authentication not required. :param str component: which platform component. Options: * api [default] * db * app """ # which component if component == "api": version_url = "{}://v1.{}.isogeo.com/about".format(prot, self.api_url) elif component == "db": version_url = "{}://v1.{}.isogeo.com/about/database".format( prot, self.api_url ) elif component == "app" and self.platform == "prod": version_url = "https://app.isogeo.com/about" elif component == "app" and self.platform == "qa": version_url = "https://qa-isogeo-app.azurewebsites.net/about" else: raise ValueError( "Component value must be one of: " "api [default], db, app." ) # send request version_req = requests.get(version_url, proxies=self.proxies, verify=self.ssl) # checking response checker.check_api_response(version_req) # end of method return version_req.json().get("version")
[ "def", "get_isogeo_version", "(", "self", ",", "component", ":", "str", "=", "\"api\"", ",", "prot", ":", "str", "=", "\"https\"", ")", ":", "# which component", "if", "component", "==", "\"api\"", ":", "version_url", "=", "\"{}://v1.{}.isogeo.com/about\"", ".", "format", "(", "prot", ",", "self", ".", "api_url", ")", "elif", "component", "==", "\"db\"", ":", "version_url", "=", "\"{}://v1.{}.isogeo.com/about/database\"", ".", "format", "(", "prot", ",", "self", ".", "api_url", ")", "elif", "component", "==", "\"app\"", "and", "self", ".", "platform", "==", "\"prod\"", ":", "version_url", "=", "\"https://app.isogeo.com/about\"", "elif", "component", "==", "\"app\"", "and", "self", ".", "platform", "==", "\"qa\"", ":", "version_url", "=", "\"https://qa-isogeo-app.azurewebsites.net/about\"", "else", ":", "raise", "ValueError", "(", "\"Component value must be one of: \"", "\"api [default], db, app.\"", ")", "# send request", "version_req", "=", "requests", ".", "get", "(", "version_url", ",", "proxies", "=", "self", ".", "proxies", ",", "verify", "=", "self", ".", "ssl", ")", "# checking response", "checker", ".", "check_api_response", "(", "version_req", ")", "# end of method", "return", "version_req", ".", "json", "(", ")", ".", "get", "(", "\"version\"", ")" ]
Get Isogeo components versions. Authentication not required. :param str component: which platform component. Options: * api [default] * db * app
[ "Get", "Isogeo", "components", "versions", ".", "Authentication", "not", "required", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L233-L266
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.get_edit_url
def get_edit_url(
    self,
    md_id: str = None,
    md_type: str = None,
    owner_id: str = None,
    tab: str = "identification",
):
    """Constructs the edition URL of a metadata.

    :param str md_id: metadata/resource UUID
    :param str owner_id: owner UUID
    :param str tab: target tab in the web form
    """
    # both identifiers must be valid UUIDs
    if not (checker.check_is_uuid(md_id) and checker.check_is_uuid(owner_id)):
        raise ValueError("One of md_id or owner_id is not a correct UUID.")
    # tab consistency check (return value intentionally unused)
    checker.check_edit_tab(tab, md_type=md_type)
    # assemble the application URL for the current platform
    app_base = self.APP_URLS.get(self.platform)
    return "{}/groups/{}/resources/{}/{}".format(app_base, owner_id, md_id, tab)
python
def get_edit_url( self, md_id: str = None, md_type: str = None, owner_id: str = None, tab: str = "identification", ): """Constructs the edition URL of a metadata. :param str md_id: metadata/resource UUID :param str owner_id: owner UUID :param str tab: target tab in the web form """ # checks inputs if not checker.check_is_uuid(md_id) or not checker.check_is_uuid(owner_id): raise ValueError("One of md_id or owner_id is not a correct UUID.") else: pass if checker.check_edit_tab(tab, md_type=md_type): pass # construct URL return ( "{}" "/groups/{}" "/resources/{}" "/{}".format(self.APP_URLS.get(self.platform), owner_id, md_id, tab) )
[ "def", "get_edit_url", "(", "self", ",", "md_id", ":", "str", "=", "None", ",", "md_type", ":", "str", "=", "None", ",", "owner_id", ":", "str", "=", "None", ",", "tab", ":", "str", "=", "\"identification\"", ",", ")", ":", "# checks inputs", "if", "not", "checker", ".", "check_is_uuid", "(", "md_id", ")", "or", "not", "checker", ".", "check_is_uuid", "(", "owner_id", ")", ":", "raise", "ValueError", "(", "\"One of md_id or owner_id is not a correct UUID.\"", ")", "else", ":", "pass", "if", "checker", ".", "check_edit_tab", "(", "tab", ",", "md_type", "=", "md_type", ")", ":", "pass", "# construct URL", "return", "(", "\"{}\"", "\"/groups/{}\"", "\"/resources/{}\"", "\"/{}\"", ".", "format", "(", "self", ".", "APP_URLS", ".", "get", "(", "self", ".", "platform", ")", ",", "owner_id", ",", "md_id", ",", "tab", ")", ")" ]
Constructs the edition URL of a metadata. :param str md_id: metadata/resource UUID :param str owner_id: owner UUID :param str tab: target tab in the web form
[ "Constructs", "the", "edition", "URL", "of", "a", "metadata", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L269-L295
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.get_view_url
def get_view_url(self, webapp: str = "oc", **kwargs):
    """Constructs the view URL of a metadata.

    :param str webapp: web app destination
    :param dict kwargs: web app specific parameters. For example see WEBAPPS
    :returns: formatted view URL
    :raises ValueError: if the webapp is not registered
    :raises TypeError: if required webapp arguments are missing
    """
    # build webapp URL depending on chosen webapp
    if webapp in self.WEBAPPS:
        webapp_args = self.WEBAPPS.get(webapp).get("args")
        # check kwargs parameters
        if set(webapp_args) <= set(kwargs):
            # construct and return url
            url = self.WEBAPPS.get(webapp).get("url")
            return url.format(**kwargs)
        else:
            raise TypeError(
                "'{}' webapp expects {} argument(s): {}."
                " Args passed: {}".format(
                    webapp, len(webapp_args), webapp_args, kwargs
                )
            )
    else:
        # BUG FIX: webapp name and known-webapps list were swapped in the
        # original format() call, producing a garbled error message
        raise ValueError(
            "'{}' is not a recognized webapp among: {}."
            " Try to register it.".format(webapp, self.WEBAPPS.keys())
        )
python
def get_view_url(self, webapp: str = "oc", **kwargs): """Constructs the view URL of a metadata. :param str webapp: web app destination :param dict kwargs: web app specific parameters. For example see WEBAPPS """ # build wbeapp URL depending on choosen webapp if webapp in self.WEBAPPS: webapp_args = self.WEBAPPS.get(webapp).get("args") # check kwargs parameters if set(webapp_args) <= set(kwargs): # construct and return url url = self.WEBAPPS.get(webapp).get("url") return url.format(**kwargs) else: raise TypeError( "'{}' webapp expects {} argument(s): {}." " Args passed: {}".format( webapp, len(webapp_args), webapp_args, kwargs ) ) else: raise ValueError( "'{}' is not a recognized webapp among: {}." " Try to register it.".format(self.WEBAPPS.keys(), webapp) )
[ "def", "get_view_url", "(", "self", ",", "webapp", ":", "str", "=", "\"oc\"", ",", "*", "*", "kwargs", ")", ":", "# build wbeapp URL depending on choosen webapp", "if", "webapp", "in", "self", ".", "WEBAPPS", ":", "webapp_args", "=", "self", ".", "WEBAPPS", ".", "get", "(", "webapp", ")", ".", "get", "(", "\"args\"", ")", "# check kwargs parameters", "if", "set", "(", "webapp_args", ")", "<=", "set", "(", "kwargs", ")", ":", "# construct and return url", "url", "=", "self", ".", "WEBAPPS", ".", "get", "(", "webapp", ")", ".", "get", "(", "\"url\"", ")", "return", "url", ".", "format", "(", "*", "*", "kwargs", ")", "else", ":", "raise", "TypeError", "(", "\"'{}' webapp expects {} argument(s): {}.\"", "\" Args passed: {}\"", ".", "format", "(", "webapp", ",", "len", "(", "webapp_args", ")", ",", "webapp_args", ",", "kwargs", ")", ")", "else", ":", "raise", "ValueError", "(", "\"'{}' is not a recognized webapp among: {}.\"", "\" Try to register it.\"", ".", "format", "(", "self", ".", "WEBAPPS", ".", "keys", "(", ")", ",", "webapp", ")", ")" ]
Constructs the view URL of a metadata. :param str webapp: web app destination :param dict kwargs: web app specific parameters. For example see WEBAPPS
[ "Constructs", "the", "view", "URL", "of", "a", "metadata", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L297-L323
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.register_webapp
def register_webapp(self, webapp_name: str, webapp_args: list, webapp_url: str):
    """Register a new WEBAPP to use with the view URL builder.

    :param str webapp_name: name of the web app to register
    :param list webapp_args: dynamic arguments to complete the URL.
      Typically 'md_id'.
    :param str webapp_url: URL of the web app to register with
      args tags to replace. Example:
      'https://www.ppige-npdc.fr/portail/geocatalogue?uuid={md_id}'
    :raises ValueError: if an argument has no '{arg}' placeholder in the URL
    """
    # check parameters — BUG FIX: the original tested the bare argument name
    # ('md_id' in url), which accepted URLs lacking the '{md_id}' placeholder
    # and then silently produced wrong URLs in str.format()
    for arg in webapp_args:
        if "{" + arg + "}" not in webapp_url:
            raise ValueError(
                "Inconsistent web app arguments and URL."
                " It should contain arguments to replace"
                " dynamically. Example: 'http://webapp.com"
                "/isogeo?metadata={md_id}'"
            )
    # register
    self.WEBAPPS[webapp_name] = {"args": webapp_args, "url": webapp_url}
python
def register_webapp(self, webapp_name: str, webapp_args: list, webapp_url: str): """Register a new WEBAPP to use with the view URL builder. :param str webapp_name: name of the web app to register :param list webapp_args: dynamic arguments to complete the URL. Typically 'md_id'. :param str webapp_url: URL of the web app to register with args tags to replace. Example: 'https://www.ppige-npdc.fr/portail/geocatalogue?uuid={md_id}' """ # check parameters for arg in webapp_args: if arg not in webapp_url: raise ValueError( "Inconsistent web app arguments and URL." " It should contain arguments to replace" " dynamically. Example: 'http://webapp.com" "/isogeo?metadata={md_id}'" ) # register self.WEBAPPS[webapp_name] = {"args": webapp_args, "url": webapp_url}
[ "def", "register_webapp", "(", "self", ",", "webapp_name", ":", "str", ",", "webapp_args", ":", "list", ",", "webapp_url", ":", "str", ")", ":", "# check parameters", "for", "arg", "in", "webapp_args", ":", "if", "arg", "not", "in", "webapp_url", ":", "raise", "ValueError", "(", "\"Inconsistent web app arguments and URL.\"", "\" It should contain arguments to replace\"", "\" dynamically. Example: 'http://webapp.com\"", "\"/isogeo?metadata={md_id}'\"", ")", "# register", "self", ".", "WEBAPPS", "[", "webapp_name", "]", "=", "{", "\"args\"", ":", "webapp_args", ",", "\"url\"", ":", "webapp_url", "}" ]
Register a new WEBAPP to use with the view URL builder. :param str webapp_name: name of the web app to register :param list webapp_args: dynamic arguments to complete the URL. Typically 'md_id'. :param str webapp_url: URL of the web app to register with args tags to replace. Example: 'https://www.ppige-npdc.fr/portail/geocatalogue?uuid={md_id}'
[ "Register", "a", "new", "WEBAPP", "to", "use", "with", "the", "view", "URL", "builder", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L325-L345
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.get_url_base_from_url_token
def get_url_base_from_url_token(
    self, url_api_token: str = "https://id.api.isogeo.com/oauth/token"
):
    """Returns the Isogeo API root URL (which is not included into
    credentials file) from the token URL (which is always included).

    :param url_api_token str: url to Isogeo API ID token generator
    """
    parsed = urlparse(url_api_token)
    # drop the path and the 'id.' subdomain marker to obtain the API root
    root_netloc = parsed.netloc.replace("id.", "")
    return parsed._replace(path="", netloc=root_netloc).geturl()
python
def get_url_base_from_url_token( self, url_api_token: str = "https://id.api.isogeo.com/oauth/token" ): """Returns the Isogeo API root URL (which is not included into credentials file) from the token URL (which is always included). :param url_api_token str: url to Isogeo API ID token generator """ in_parsed = urlparse(url_api_token) api_url_base = in_parsed._replace( path="", netloc=in_parsed.netloc.replace("id.", "") ) return api_url_base.geturl()
[ "def", "get_url_base_from_url_token", "(", "self", ",", "url_api_token", ":", "str", "=", "\"https://id.api.isogeo.com/oauth/token\"", ")", ":", "in_parsed", "=", "urlparse", "(", "url_api_token", ")", "api_url_base", "=", "in_parsed", ".", "_replace", "(", "path", "=", "\"\"", ",", "netloc", "=", "in_parsed", ".", "netloc", ".", "replace", "(", "\"id.\"", ",", "\"\"", ")", ")", "return", "api_url_base", ".", "geturl", "(", ")" ]
Returns the Isogeo API root URL (which is not included into credentials file) from the token URL (which is always included). :param url_api_token str: url to Isogeo API ID token generator
[ "Returns", "the", "Isogeo", "API", "root", "URL", "(", "which", "is", "not", "included", "into", "credentials", "file", ")", "from", "the", "token", "URL", "(", "which", "is", "always", "included", ")", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L347-L359
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.pages_counter
def pages_counter(self, total: int, page_size: int = 100) -> int:
    """Simple helper to handle pagination. Returns the number of pages for a
    given number of results.

    :param int total: count of metadata in a search request
    :param int page_size: count of metadata to display in each page
    :returns: number of pages (always at least 1)
    """
    if total <= page_size:
        return 1
    # ceiling division in pure integer arithmetic — the original float
    # division (total / page_size) could lose precision for very large totals
    return -(-total // page_size)
python
def pages_counter(self, total: int, page_size: int = 100) -> int: """Simple helper to handle pagination. Returns the number of pages for a given number of results. :param int total: count of metadata in a search request :param int page_size: count of metadata to display in each page """ if total <= page_size: count_pages = 1 else: if (total % page_size) == 0: count_pages = total / page_size else: count_pages = (total / page_size) + 1 # method ending return int(count_pages)
[ "def", "pages_counter", "(", "self", ",", "total", ":", "int", ",", "page_size", ":", "int", "=", "100", ")", "->", "int", ":", "if", "total", "<=", "page_size", ":", "count_pages", "=", "1", "else", ":", "if", "(", "total", "%", "page_size", ")", "==", "0", ":", "count_pages", "=", "total", "/", "page_size", "else", ":", "count_pages", "=", "(", "total", "/", "page_size", ")", "+", "1", "# method ending", "return", "int", "(", "count_pages", ")" ]
Simple helper to handle pagination. Returns the number of pages for a given number of results. :param int total: count of metadata in a search request :param int page_size: count of metadata to display in each page
[ "Simple", "helper", "to", "handle", "pagination", ".", "Returns", "the", "number", "of", "pages", "for", "a", "given", "number", "of", "results", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L362-L377
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.tags_to_dict
def tags_to_dict(self, tags=dict, prev_query=dict, duplicated: str = "rename"):
    """Reverse search tags dictionary to values as keys.
    Useful to populate filters comboboxes for example.

    :param dict tags: tags dictionary from a search request
    :param dict prev_query: query parameters returned after a search request.
      Typically `search.get("query")`.
    :param str duplicated: what to do about duplicated tags label. Values:

      * ignore - last tag parsed survives
      * merge - add duplicated in value as separated list (sep = '||')
      * rename [default] - if duplicated tag labels are part of different
        workgroup, so the tag label is renamed with workgroup.

    :returns: tuple of (search tags as dicts, query tags as dicts)
    :raises ValueError: if duplicated is not ignore | merge | rename
    """
    # for rename option, map workgroup uuid -> workgroup label
    if duplicated == "rename":
        wgs = {k.split(":")[1]: v for k, v in tags.items() if k.startswith("owner")}
    elif duplicated in ("ignore", "merge"):
        wgs = None
    else:
        raise ValueError(
            "Duplicated value is not an accepted value."
            " Please refer to __doc__ method."
        )

    def _store(bucket: dict, label: str, tag: str):
        """Store a label -> tag pair, applying the duplicate policy."""
        if label in bucket and duplicated == "merge":
            # concatenate duplicated tags under the same label
            bucket[label] += "||" + tag
            return
        if label in bucket and duplicated == "rename":
            # suffix the label with the owning workgroup name (or a uuid stub)
            parts = tag.split(":")
            tag_uuid = parts[1] if checker.check_is_uuid(parts[1]) else parts[2]
            repl = wgs.get(tag_uuid, tag_uuid[:5])
            bucket["{} ({})".format(label, repl)] = tag
            return
        if label in bucket:
            logging.debug(
                "Duplicated tag label: {}. Last tag is retained.".format(label)
            )
        bucket[label] = tag

    def _route(out: dict, tag: str, label: str):
        """Dispatch one tag into the right sub-dictionary of `out`."""
        if tag.startswith("action"):
            out["actions"][label] = tag
        elif tag.startswith("catalog"):
            _store(out["catalogs"], label, tag)
        elif tag.startswith("contact"):
            _store(out["contacts"], label, tag)
        elif tag.startswith("coordinate-system"):
            out["srs"][label] = tag
        elif tag.startswith("data-source"):
            _store(out["data-sources"], label, tag)
        elif tag.startswith("format"):
            out["formats"][label] = tag
        elif tag.startswith("keyword:in"):
            out["inspires"][label] = tag
        elif tag.startswith("keyword:is"):
            out["keywords"][label] = tag
        elif tag.startswith("license"):
            _store(out["licenses"], label, tag)
        elif tag.startswith("owner"):
            out["owners"][label] = tag
        elif tag.startswith("provider"):
            # providers are particular because their value is always null:
            # use the uuid part of the tag itself as the label
            out["providers"][tag.split(":")[1]] = tag
        elif tag.startswith("share"):
            out["shares"][label] = tag
        elif tag.startswith("type"):
            out["types"][label] = tag
        else:
            # ignored tags
            logging.debug("A tag has been ignored during parsing: {}".format(tag))

    def _new_buckets() -> dict:
        """Fresh output structure, one sub-dict per tag family."""
        return {
            "actions": {},
            "catalogs": {},
            "contacts": {},
            "data-sources": {},
            "formats": {},
            "inspires": {},
            "keywords": {},
            "licenses": {},
            "owners": {},
            "providers": {},
            "shares": {},
            "srs": {},
            "types": {},
        }

    # -- SEARCH TAGS -------------
    tags_as_dicts = _new_buckets()
    for k, v in sorted(tags.items()):
        _route(tags_as_dicts, k, v)

    # -- QUERY TAGS -------------
    # handle share case: shares come in a separate list, re-inject as a tag
    if prev_query.get("_shares"):
        prev_query.get("_tags").append(
            "share:{}".format(prev_query.get("_shares")[0])
        )
    logging.debug(prev_query)
    query_as_dicts = {
        "_tags": _new_buckets(),
        "_shares": prev_query.get("_shares"),
        "_terms": prev_query.get("_terms"),
    }
    # BUG FIX: the original query loop reused stale `k`/`v` variables left
    # over from the search loop in its duplicate handling and provider
    # branches; it now consistently uses the query tag `t` and its label
    # looked up in `tags`.
    for t in prev_query.get("_tags"):
        _route(query_as_dicts["_tags"], t, tags.get(t))

    # return the output
    return tags_as_dicts, query_as_dicts
python
def tags_to_dict(self, tags=dict, prev_query=dict, duplicated: str = "rename"): """Reverse search tags dictionary to values as keys. Useful to populate filters comboboxes for example. :param dict tags: tags dictionary from a search request :param dict prev_query: query parameters returned after a search request. Typically `search.get("query")`. :param str duplicated: what to do about duplicated tags label. Values: * ignore - last tag parsed survives * merge - add duplicated in value as separated list (sep = '||') * rename [default] - if duplicated tag labels are part of different workgroup, so the tag label is renamed with workgroup. """ # for rename option, get workgroups if duplicated == "rename": wgs = {k.split(":")[1]: v for k, v in tags.items() if k.startswith("owner")} # wgs = list(filter(lambda x[1]: x[0].startswith("owner"), tags.items())) elif duplicated == "ignore" or duplicated == "merge": wgs = None else: raise ValueError( "Duplicated value is not an accepted value." " Please refer to __doc__ method." 
) # inner function def _duplicate_mng( target_dict: dict, duplicate, mode: str = duplicated, workgroups: dict = wgs ): if mode == "merge": target_dict[duplicate[0]] += "||" + duplicate[1] elif mode == "rename": # get workgroup uuid if checker.check_is_uuid(k.split(":")[1]): k_uuid = k.split(":")[1] else: k_uuid = k.split(":")[2] # match with workgroups owners if k_uuid in workgroups: repl = workgroups.get(k_uuid) else: repl = k_uuid[:5] target_dict["{} ({})".format(duplicate[0], repl)] = duplicate[1] else: pass return # -- SEARCH TAGS ------------- # output dicts structure tags_as_dicts = { "actions": {}, "catalogs": {}, "contacts": {}, "data-sources": {}, "formats": {}, "inspires": {}, "keywords": {}, "licenses": {}, "owners": {}, "providers": {}, "shares": {}, "srs": {}, "types": {}, } # parsing tags and storing each one in a dict for k, v in sorted(tags.items()): if k.startswith("action"): tags_as_dicts.get("actions")[v] = k continue elif k.startswith("catalog"): if v in tags_as_dicts.get("catalogs") and duplicated != "ignore": _duplicate_mng(tags_as_dicts.get("catalogs"), (v, k)) else: logging.debug( "Duplicated catalog name: {}. Last catalog is retained.".format( v ) ) tags_as_dicts.get("catalogs")[v] = k continue elif k.startswith("contact"): if v in tags_as_dicts.get("contacts") and duplicated != "ignore": _duplicate_mng(tags_as_dicts.get("contacts"), (v, k)) else: logging.debug( "Duplicated contact name: {}. Last contact is retained.".format( v ) ) tags_as_dicts.get("contacts")[v] = k continue elif k.startswith("coordinate-system"): tags_as_dicts.get("srs")[v] = k continue elif k.startswith("data-source"): if v in tags_as_dicts.get("data-sources") and duplicated != "ignore": _duplicate_mng(tags_as_dicts.get("data-sources"), (v, k)) else: logging.debug( "Duplicated data-source name: {}. 
Last data-source is retained.".format( v ) ) tags_as_dicts.get("data-sources")[v] = k continue elif k.startswith("format"): tags_as_dicts.get("formats")[v] = k continue elif k.startswith("keyword:in"): tags_as_dicts.get("inspires")[v] = k continue elif k.startswith("keyword:is"): tags_as_dicts.get("keywords")[v] = k continue elif k.startswith("license"): if v in tags_as_dicts.get("licenses") and duplicated != "ignore": _duplicate_mng(tags_as_dicts.get("licenses"), (v, k)) else: logging.debug( "Duplicated license name: {}. Last license is retained.".format( v ) ) tags_as_dicts.get("licenses")[v] = k continue elif k.startswith("owner"): tags_as_dicts.get("owners")[v] = k continue elif k.startswith("provider"): # providers are particular bcause its value is always null. tags_as_dicts.get("providers")[k.split(":")[1]] = k continue elif k.startswith("share"): tags_as_dicts.get("shares")[v] = k continue elif k.startswith("type"): tags_as_dicts.get("types")[v] = k continue # ignored tags else: logging.debug("A tag has been ignored during parsing: {}".format(k)) # -- QUERY TAGS ------------- # handle share case if prev_query.get("_shares"): prev_query.get("_tags").append( "share:{}".format(prev_query.get("_shares")[0]) ) else: pass # output dict struture logging.debug(prev_query) query_as_dicts = { "_tags": { "actions": {}, "catalogs": {}, "contacts": {}, "data-sources": {}, "formats": {}, "inspires": {}, "keywords": {}, "licenses": {}, "owners": {}, "providers": {}, "shares": {}, "srs": {}, "types": {}, }, "_shares": prev_query.get("_shares"), "_terms": prev_query.get("_terms"), } # parsing and matching tags query_tags = query_as_dicts.get("_tags") for t in prev_query.get("_tags"): if t.startswith("action"): query_tags.get("actions")[tags.get(t)] = t continue elif t.startswith("catalog"): if v in query_tags.get("catalogs") and duplicated != "ignore": _duplicate_mng(query_tags.get("catalogs"), (v, k)) else: logging.debug( "Duplicated catalog name: {}. 
Last catalog is retained.".format( v ) ) query_tags.get("catalogs")[tags.get(t)] = t continue elif t.startswith("contact"): if v in query_tags.get("contacts") and duplicated != "ignore": _duplicate_mng(query_tags.get("contacts"), (v, k)) else: logging.debug( "Duplicated contact name: {}. Last contact is retained.".format( v ) ) query_tags.get("contacts")[tags.get(t)] = t continue elif t.startswith("coordinate-system"): query_tags.get("srs")[tags.get(t)] = t continue elif t.startswith("data-source"): if v in query_tags.get("data-sources") and duplicated != "ignore": _duplicate_mng(query_tags.get("data-sources"), (v, k)) else: logging.debug( "Duplicated data-source name: {}. Last data-source is retained.".format( v ) ) query_tags.get("data-sources")[tags.get(t)] = t continue elif t.startswith("format"): query_tags.get("formats")[tags.get(t)] = t continue elif t.startswith("keyword:in"): query_tags.get("inspires")[tags.get(t)] = t continue elif t.startswith("keyword:is"): query_tags.get("keywords")[tags.get(t)] = t continue elif t.startswith("license"): if v in query_tags.get("licenses") and duplicated != "ignore": _duplicate_mng(query_tags.get("licenses"), (v, k)) else: logging.debug( "Duplicated license name: {}. Last license is retained.".format( v ) ) query_tags.get("licenses")[tags.get(t)] = t continue elif t.startswith("owner"): query_tags.get("owners")[tags.get(t)] = t continue elif t.startswith("provider"): # providers are particular bcause its value is always null. query_tags.get("providers")[k.split(":")[1]] = k continue elif t.startswith("share"): query_tags.get("shares")[tags.get(t)] = t continue elif t.startswith("type"): query_tags.get("types")[tags.get(t)] = t continue # ignored tags else: logging.debug( "A query tag has been ignored during parsing: {}".format(t) ) # return the output return tags_as_dicts, query_as_dicts
[ "def", "tags_to_dict", "(", "self", ",", "tags", "=", "dict", ",", "prev_query", "=", "dict", ",", "duplicated", ":", "str", "=", "\"rename\"", ")", ":", "# for rename option, get workgroups", "if", "duplicated", "==", "\"rename\"", ":", "wgs", "=", "{", "k", ".", "split", "(", "\":\"", ")", "[", "1", "]", ":", "v", "for", "k", ",", "v", "in", "tags", ".", "items", "(", ")", "if", "k", ".", "startswith", "(", "\"owner\"", ")", "}", "# wgs = list(filter(lambda x[1]: x[0].startswith(\"owner\"), tags.items()))", "elif", "duplicated", "==", "\"ignore\"", "or", "duplicated", "==", "\"merge\"", ":", "wgs", "=", "None", "else", ":", "raise", "ValueError", "(", "\"Duplicated value is not an accepted value.\"", "\" Please refer to __doc__ method.\"", ")", "# inner function", "def", "_duplicate_mng", "(", "target_dict", ":", "dict", ",", "duplicate", ",", "mode", ":", "str", "=", "duplicated", ",", "workgroups", ":", "dict", "=", "wgs", ")", ":", "if", "mode", "==", "\"merge\"", ":", "target_dict", "[", "duplicate", "[", "0", "]", "]", "+=", "\"||\"", "+", "duplicate", "[", "1", "]", "elif", "mode", "==", "\"rename\"", ":", "# get workgroup uuid", "if", "checker", ".", "check_is_uuid", "(", "k", ".", "split", "(", "\":\"", ")", "[", "1", "]", ")", ":", "k_uuid", "=", "k", ".", "split", "(", "\":\"", ")", "[", "1", "]", "else", ":", "k_uuid", "=", "k", ".", "split", "(", "\":\"", ")", "[", "2", "]", "# match with workgroups owners", "if", "k_uuid", "in", "workgroups", ":", "repl", "=", "workgroups", ".", "get", "(", "k_uuid", ")", "else", ":", "repl", "=", "k_uuid", "[", ":", "5", "]", "target_dict", "[", "\"{} ({})\"", ".", "format", "(", "duplicate", "[", "0", "]", ",", "repl", ")", "]", "=", "duplicate", "[", "1", "]", "else", ":", "pass", "return", "# -- SEARCH TAGS -------------", "# output dicts structure", "tags_as_dicts", "=", "{", "\"actions\"", ":", "{", "}", ",", "\"catalogs\"", ":", "{", "}", ",", "\"contacts\"", ":", "{", "}", ",", "\"data-sources\"", ":", "{", 
"}", ",", "\"formats\"", ":", "{", "}", ",", "\"inspires\"", ":", "{", "}", ",", "\"keywords\"", ":", "{", "}", ",", "\"licenses\"", ":", "{", "}", ",", "\"owners\"", ":", "{", "}", ",", "\"providers\"", ":", "{", "}", ",", "\"shares\"", ":", "{", "}", ",", "\"srs\"", ":", "{", "}", ",", "\"types\"", ":", "{", "}", ",", "}", "# parsing tags and storing each one in a dict", "for", "k", ",", "v", "in", "sorted", "(", "tags", ".", "items", "(", ")", ")", ":", "if", "k", ".", "startswith", "(", "\"action\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"actions\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"catalog\"", ")", ":", "if", "v", "in", "tags_as_dicts", ".", "get", "(", "\"catalogs\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "tags_as_dicts", ".", "get", "(", "\"catalogs\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated catalog name: {}. Last catalog is retained.\"", ".", "format", "(", "v", ")", ")", "tags_as_dicts", ".", "get", "(", "\"catalogs\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"contact\"", ")", ":", "if", "v", "in", "tags_as_dicts", ".", "get", "(", "\"contacts\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "tags_as_dicts", ".", "get", "(", "\"contacts\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated contact name: {}. 
Last contact is retained.\"", ".", "format", "(", "v", ")", ")", "tags_as_dicts", ".", "get", "(", "\"contacts\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"coordinate-system\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"srs\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"data-source\"", ")", ":", "if", "v", "in", "tags_as_dicts", ".", "get", "(", "\"data-sources\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "tags_as_dicts", ".", "get", "(", "\"data-sources\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated data-source name: {}. Last data-source is retained.\"", ".", "format", "(", "v", ")", ")", "tags_as_dicts", ".", "get", "(", "\"data-sources\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"format\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"formats\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"keyword:in\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"inspires\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"keyword:is\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"keywords\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"license\"", ")", ":", "if", "v", "in", "tags_as_dicts", ".", "get", "(", "\"licenses\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "tags_as_dicts", ".", "get", "(", "\"licenses\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated license name: {}. 
Last license is retained.\"", ".", "format", "(", "v", ")", ")", "tags_as_dicts", ".", "get", "(", "\"licenses\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"owner\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"owners\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"provider\"", ")", ":", "# providers are particular bcause its value is always null.", "tags_as_dicts", ".", "get", "(", "\"providers\"", ")", "[", "k", ".", "split", "(", "\":\"", ")", "[", "1", "]", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"share\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"shares\"", ")", "[", "v", "]", "=", "k", "continue", "elif", "k", ".", "startswith", "(", "\"type\"", ")", ":", "tags_as_dicts", ".", "get", "(", "\"types\"", ")", "[", "v", "]", "=", "k", "continue", "# ignored tags", "else", ":", "logging", ".", "debug", "(", "\"A tag has been ignored during parsing: {}\"", ".", "format", "(", "k", ")", ")", "# -- QUERY TAGS -------------", "# handle share case", "if", "prev_query", ".", "get", "(", "\"_shares\"", ")", ":", "prev_query", ".", "get", "(", "\"_tags\"", ")", ".", "append", "(", "\"share:{}\"", ".", "format", "(", "prev_query", ".", "get", "(", "\"_shares\"", ")", "[", "0", "]", ")", ")", "else", ":", "pass", "# output dict struture", "logging", ".", "debug", "(", "prev_query", ")", "query_as_dicts", "=", "{", "\"_tags\"", ":", "{", "\"actions\"", ":", "{", "}", ",", "\"catalogs\"", ":", "{", "}", ",", "\"contacts\"", ":", "{", "}", ",", "\"data-sources\"", ":", "{", "}", ",", "\"formats\"", ":", "{", "}", ",", "\"inspires\"", ":", "{", "}", ",", "\"keywords\"", ":", "{", "}", ",", "\"licenses\"", ":", "{", "}", ",", "\"owners\"", ":", "{", "}", ",", "\"providers\"", ":", "{", "}", ",", "\"shares\"", ":", "{", "}", ",", "\"srs\"", ":", "{", "}", ",", "\"types\"", ":", "{", "}", ",", "}", ",", "\"_shares\"", ":", "prev_query", ".", "get", "(", "\"_shares\"", 
")", ",", "\"_terms\"", ":", "prev_query", ".", "get", "(", "\"_terms\"", ")", ",", "}", "# parsing and matching tags", "query_tags", "=", "query_as_dicts", ".", "get", "(", "\"_tags\"", ")", "for", "t", "in", "prev_query", ".", "get", "(", "\"_tags\"", ")", ":", "if", "t", ".", "startswith", "(", "\"action\"", ")", ":", "query_tags", ".", "get", "(", "\"actions\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"catalog\"", ")", ":", "if", "v", "in", "query_tags", ".", "get", "(", "\"catalogs\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "query_tags", ".", "get", "(", "\"catalogs\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated catalog name: {}. Last catalog is retained.\"", ".", "format", "(", "v", ")", ")", "query_tags", ".", "get", "(", "\"catalogs\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"contact\"", ")", ":", "if", "v", "in", "query_tags", ".", "get", "(", "\"contacts\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "query_tags", ".", "get", "(", "\"contacts\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated contact name: {}. 
Last contact is retained.\"", ".", "format", "(", "v", ")", ")", "query_tags", ".", "get", "(", "\"contacts\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"coordinate-system\"", ")", ":", "query_tags", ".", "get", "(", "\"srs\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"data-source\"", ")", ":", "if", "v", "in", "query_tags", ".", "get", "(", "\"data-sources\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "query_tags", ".", "get", "(", "\"data-sources\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated data-source name: {}. Last data-source is retained.\"", ".", "format", "(", "v", ")", ")", "query_tags", ".", "get", "(", "\"data-sources\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"format\"", ")", ":", "query_tags", ".", "get", "(", "\"formats\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"keyword:in\"", ")", ":", "query_tags", ".", "get", "(", "\"inspires\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"keyword:is\"", ")", ":", "query_tags", ".", "get", "(", "\"keywords\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"license\"", ")", ":", "if", "v", "in", "query_tags", ".", "get", "(", "\"licenses\"", ")", "and", "duplicated", "!=", "\"ignore\"", ":", "_duplicate_mng", "(", "query_tags", ".", "get", "(", "\"licenses\"", ")", ",", "(", "v", ",", "k", ")", ")", "else", ":", "logging", ".", "debug", "(", "\"Duplicated license name: {}. 
Last license is retained.\"", ".", "format", "(", "v", ")", ")", "query_tags", ".", "get", "(", "\"licenses\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"owner\"", ")", ":", "query_tags", ".", "get", "(", "\"owners\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"provider\"", ")", ":", "# providers are particular bcause its value is always null.", "query_tags", ".", "get", "(", "\"providers\"", ")", "[", "k", ".", "split", "(", "\":\"", ")", "[", "1", "]", "]", "=", "k", "continue", "elif", "t", ".", "startswith", "(", "\"share\"", ")", ":", "query_tags", ".", "get", "(", "\"shares\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "elif", "t", ".", "startswith", "(", "\"type\"", ")", ":", "query_tags", ".", "get", "(", "\"types\"", ")", "[", "tags", ".", "get", "(", "t", ")", "]", "=", "t", "continue", "# ignored tags", "else", ":", "logging", ".", "debug", "(", "\"A query tag has been ignored during parsing: {}\"", ".", "format", "(", "t", ")", ")", "# return the output", "return", "tags_as_dicts", ",", "query_as_dicts" ]
Reverse search tags dictionary to values as keys. Useful to populate filters comboboxes for example. :param dict tags: tags dictionary from a search request :param dict prev_query: query parameters returned after a search request. Typically `search.get("query")`. :param str duplicated: what to do about duplicated tags label. Values: * ignore - last tag parsed survives * merge - add duplicated in value as separated list (sep = '||') * rename [default] - if duplicated tag labels are part of different workgroup, so the tag label is renamed with workgroup.
[ "Reverse", "search", "tags", "dictionary", "to", "values", "as", "keys", ".", "Useful", "to", "populate", "filters", "comboboxes", "for", "example", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L379-L634
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.share_extender
def share_extender(self, share: dict, results_filtered: dict): """Extend share model with additional informations. :param dict share: share returned by API :param dict results_filtered: filtered search result """ # add share administration URL creator_id = share.get("_creator").get("_tag")[6:] share["admin_url"] = "{}/groups/{}/admin/shares/{}".format( self.app_url, creator_id, share.get("_id") ) # check if OpenCatalog is activated opencat_url = "{}/s/{}/{}".format( self.oc_url, share.get("_id"), share.get("urlToken") ) if requests.head(opencat_url): share["oc_url"] = opencat_url else: pass # add metadata ids list share["mds_ids"] = (i.get("_id") for i in results_filtered) return share
python
def share_extender(self, share: dict, results_filtered: dict): """Extend share model with additional informations. :param dict share: share returned by API :param dict results_filtered: filtered search result """ # add share administration URL creator_id = share.get("_creator").get("_tag")[6:] share["admin_url"] = "{}/groups/{}/admin/shares/{}".format( self.app_url, creator_id, share.get("_id") ) # check if OpenCatalog is activated opencat_url = "{}/s/{}/{}".format( self.oc_url, share.get("_id"), share.get("urlToken") ) if requests.head(opencat_url): share["oc_url"] = opencat_url else: pass # add metadata ids list share["mds_ids"] = (i.get("_id") for i in results_filtered) return share
[ "def", "share_extender", "(", "self", ",", "share", ":", "dict", ",", "results_filtered", ":", "dict", ")", ":", "# add share administration URL", "creator_id", "=", "share", ".", "get", "(", "\"_creator\"", ")", ".", "get", "(", "\"_tag\"", ")", "[", "6", ":", "]", "share", "[", "\"admin_url\"", "]", "=", "\"{}/groups/{}/admin/shares/{}\"", ".", "format", "(", "self", ".", "app_url", ",", "creator_id", ",", "share", ".", "get", "(", "\"_id\"", ")", ")", "# check if OpenCatalog is activated", "opencat_url", "=", "\"{}/s/{}/{}\"", ".", "format", "(", "self", ".", "oc_url", ",", "share", ".", "get", "(", "\"_id\"", ")", ",", "share", ".", "get", "(", "\"urlToken\"", ")", ")", "if", "requests", ".", "head", "(", "opencat_url", ")", ":", "share", "[", "\"oc_url\"", "]", "=", "opencat_url", "else", ":", "pass", "# add metadata ids list", "share", "[", "\"mds_ids\"", "]", "=", "(", "i", ".", "get", "(", "\"_id\"", ")", "for", "i", "in", "results_filtered", ")", "return", "share" ]
Extend share model with additional informations. :param dict share: share returned by API :param dict results_filtered: filtered search result
[ "Extend", "share", "model", "with", "additional", "informations", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L637-L659
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/utils.py
IsogeoUtils.credentials_loader
def credentials_loader(self, in_credentials: str = "client_secrets.json") -> dict: """Loads API credentials from a file, JSON or INI. :param str in_credentials: path to the credentials file. By default, look for a client_secrets.json file. """ accepted_extensions = (".ini", ".json") # checks if not path.isfile(in_credentials): raise IOError("Credentials file doesn't exist: {}".format(in_credentials)) else: in_credentials = path.normpath(in_credentials) if path.splitext(in_credentials)[1] not in accepted_extensions: raise ValueError( "Extension of credentials file must be one of {}".format( accepted_extensions ) ) else: kind = path.splitext(in_credentials)[1] # load, check and set if kind == ".json": with open(in_credentials, "r") as f: in_auth = json.loads(f.read()) # check structure heads = ("installed", "web") if not set(in_auth).intersection(set(heads)): raise ValueError( "Input JSON structure is not as expected." " First key must be one of: {}".format(heads) ) # set if "web" in in_auth: # json structure for group application auth_settings = in_auth.get("web") out_auth = { "auth_mode": "group", "client_id": auth_settings.get("client_id"), "client_secret": auth_settings.get("client_secret"), # if not specified, must be a former file then set classic scope "scopes": auth_settings.get("scopes", ["resources:read"]), "uri_auth": auth_settings.get("auth_uri"), "uri_token": auth_settings.get("token_uri"), "uri_base": self.get_url_base_from_url_token( auth_settings.get("token_uri") ), "uri_redirect": None, } else: # assuming in_auth == 'installed' auth_settings = in_auth.get("installed") out_auth = { "auth_mode": "user", "client_id": auth_settings.get("client_id"), "client_secret": auth_settings.get("client_secret"), # if not specified, must be a former file then set classic scope "scopes": auth_settings.get("scopes", ["resources:read"]), "uri_auth": auth_settings.get("auth_uri"), "uri_token": auth_settings.get("token_uri"), "uri_base": self.get_url_base_from_url_token( 
auth_settings.get("token_uri") ), "uri_redirect": auth_settings.get("redirect_uris", None), } else: # assuming file is an .ini ini_parser = ConfigParser() ini_parser.read(in_credentials) # check structure if "auth" in ini_parser._sections: auth_settings = ini_parser["auth"] else: raise ValueError( "Input INI structure is not as expected." " Section of credentials must be named: auth" ) # set out_auth = { "auth_mode": auth_settings.get("CLIENT_TYPE"), "client_id": auth_settings.get("CLIENT_ID"), "client_secret": auth_settings.get("CLIENT_SECRET"), "uri_auth": auth_settings.get("URI_AUTH"), "uri_token": auth_settings.get("URI_TOKEN"), "uri_base": self.get_url_base_from_url_token( auth_settings.get("URI_TOKEN") ), "uri_redirect": auth_settings.get("URI_REDIRECT"), } # method ending return out_auth
python
def credentials_loader(self, in_credentials: str = "client_secrets.json") -> dict: """Loads API credentials from a file, JSON or INI. :param str in_credentials: path to the credentials file. By default, look for a client_secrets.json file. """ accepted_extensions = (".ini", ".json") # checks if not path.isfile(in_credentials): raise IOError("Credentials file doesn't exist: {}".format(in_credentials)) else: in_credentials = path.normpath(in_credentials) if path.splitext(in_credentials)[1] not in accepted_extensions: raise ValueError( "Extension of credentials file must be one of {}".format( accepted_extensions ) ) else: kind = path.splitext(in_credentials)[1] # load, check and set if kind == ".json": with open(in_credentials, "r") as f: in_auth = json.loads(f.read()) # check structure heads = ("installed", "web") if not set(in_auth).intersection(set(heads)): raise ValueError( "Input JSON structure is not as expected." " First key must be one of: {}".format(heads) ) # set if "web" in in_auth: # json structure for group application auth_settings = in_auth.get("web") out_auth = { "auth_mode": "group", "client_id": auth_settings.get("client_id"), "client_secret": auth_settings.get("client_secret"), # if not specified, must be a former file then set classic scope "scopes": auth_settings.get("scopes", ["resources:read"]), "uri_auth": auth_settings.get("auth_uri"), "uri_token": auth_settings.get("token_uri"), "uri_base": self.get_url_base_from_url_token( auth_settings.get("token_uri") ), "uri_redirect": None, } else: # assuming in_auth == 'installed' auth_settings = in_auth.get("installed") out_auth = { "auth_mode": "user", "client_id": auth_settings.get("client_id"), "client_secret": auth_settings.get("client_secret"), # if not specified, must be a former file then set classic scope "scopes": auth_settings.get("scopes", ["resources:read"]), "uri_auth": auth_settings.get("auth_uri"), "uri_token": auth_settings.get("token_uri"), "uri_base": self.get_url_base_from_url_token( 
auth_settings.get("token_uri") ), "uri_redirect": auth_settings.get("redirect_uris", None), } else: # assuming file is an .ini ini_parser = ConfigParser() ini_parser.read(in_credentials) # check structure if "auth" in ini_parser._sections: auth_settings = ini_parser["auth"] else: raise ValueError( "Input INI structure is not as expected." " Section of credentials must be named: auth" ) # set out_auth = { "auth_mode": auth_settings.get("CLIENT_TYPE"), "client_id": auth_settings.get("CLIENT_ID"), "client_secret": auth_settings.get("CLIENT_SECRET"), "uri_auth": auth_settings.get("URI_AUTH"), "uri_token": auth_settings.get("URI_TOKEN"), "uri_base": self.get_url_base_from_url_token( auth_settings.get("URI_TOKEN") ), "uri_redirect": auth_settings.get("URI_REDIRECT"), } # method ending return out_auth
[ "def", "credentials_loader", "(", "self", ",", "in_credentials", ":", "str", "=", "\"client_secrets.json\"", ")", "->", "dict", ":", "accepted_extensions", "=", "(", "\".ini\"", ",", "\".json\"", ")", "# checks", "if", "not", "path", ".", "isfile", "(", "in_credentials", ")", ":", "raise", "IOError", "(", "\"Credentials file doesn't exist: {}\"", ".", "format", "(", "in_credentials", ")", ")", "else", ":", "in_credentials", "=", "path", ".", "normpath", "(", "in_credentials", ")", "if", "path", ".", "splitext", "(", "in_credentials", ")", "[", "1", "]", "not", "in", "accepted_extensions", ":", "raise", "ValueError", "(", "\"Extension of credentials file must be one of {}\"", ".", "format", "(", "accepted_extensions", ")", ")", "else", ":", "kind", "=", "path", ".", "splitext", "(", "in_credentials", ")", "[", "1", "]", "# load, check and set", "if", "kind", "==", "\".json\"", ":", "with", "open", "(", "in_credentials", ",", "\"r\"", ")", "as", "f", ":", "in_auth", "=", "json", ".", "loads", "(", "f", ".", "read", "(", ")", ")", "# check structure", "heads", "=", "(", "\"installed\"", ",", "\"web\"", ")", "if", "not", "set", "(", "in_auth", ")", ".", "intersection", "(", "set", "(", "heads", ")", ")", ":", "raise", "ValueError", "(", "\"Input JSON structure is not as expected.\"", "\" First key must be one of: {}\"", ".", "format", "(", "heads", ")", ")", "# set", "if", "\"web\"", "in", "in_auth", ":", "# json structure for group application", "auth_settings", "=", "in_auth", ".", "get", "(", "\"web\"", ")", "out_auth", "=", "{", "\"auth_mode\"", ":", "\"group\"", ",", "\"client_id\"", ":", "auth_settings", ".", "get", "(", "\"client_id\"", ")", ",", "\"client_secret\"", ":", "auth_settings", ".", "get", "(", "\"client_secret\"", ")", ",", "# if not specified, must be a former file then set classic scope", "\"scopes\"", ":", "auth_settings", ".", "get", "(", "\"scopes\"", ",", "[", "\"resources:read\"", "]", ")", ",", "\"uri_auth\"", ":", "auth_settings", ".", 
"get", "(", "\"auth_uri\"", ")", ",", "\"uri_token\"", ":", "auth_settings", ".", "get", "(", "\"token_uri\"", ")", ",", "\"uri_base\"", ":", "self", ".", "get_url_base_from_url_token", "(", "auth_settings", ".", "get", "(", "\"token_uri\"", ")", ")", ",", "\"uri_redirect\"", ":", "None", ",", "}", "else", ":", "# assuming in_auth == 'installed'", "auth_settings", "=", "in_auth", ".", "get", "(", "\"installed\"", ")", "out_auth", "=", "{", "\"auth_mode\"", ":", "\"user\"", ",", "\"client_id\"", ":", "auth_settings", ".", "get", "(", "\"client_id\"", ")", ",", "\"client_secret\"", ":", "auth_settings", ".", "get", "(", "\"client_secret\"", ")", ",", "# if not specified, must be a former file then set classic scope", "\"scopes\"", ":", "auth_settings", ".", "get", "(", "\"scopes\"", ",", "[", "\"resources:read\"", "]", ")", ",", "\"uri_auth\"", ":", "auth_settings", ".", "get", "(", "\"auth_uri\"", ")", ",", "\"uri_token\"", ":", "auth_settings", ".", "get", "(", "\"token_uri\"", ")", ",", "\"uri_base\"", ":", "self", ".", "get_url_base_from_url_token", "(", "auth_settings", ".", "get", "(", "\"token_uri\"", ")", ")", ",", "\"uri_redirect\"", ":", "auth_settings", ".", "get", "(", "\"redirect_uris\"", ",", "None", ")", ",", "}", "else", ":", "# assuming file is an .ini", "ini_parser", "=", "ConfigParser", "(", ")", "ini_parser", ".", "read", "(", "in_credentials", ")", "# check structure", "if", "\"auth\"", "in", "ini_parser", ".", "_sections", ":", "auth_settings", "=", "ini_parser", "[", "\"auth\"", "]", "else", ":", "raise", "ValueError", "(", "\"Input INI structure is not as expected.\"", "\" Section of credentials must be named: auth\"", ")", "# set", "out_auth", "=", "{", "\"auth_mode\"", ":", "auth_settings", ".", "get", "(", "\"CLIENT_TYPE\"", ")", ",", "\"client_id\"", ":", "auth_settings", ".", "get", "(", "\"CLIENT_ID\"", ")", ",", "\"client_secret\"", ":", "auth_settings", ".", "get", "(", "\"CLIENT_SECRET\"", ")", ",", "\"uri_auth\"", ":", 
"auth_settings", ".", "get", "(", "\"URI_AUTH\"", ")", ",", "\"uri_token\"", ":", "auth_settings", ".", "get", "(", "\"URI_TOKEN\"", ")", ",", "\"uri_base\"", ":", "self", ".", "get_url_base_from_url_token", "(", "auth_settings", ".", "get", "(", "\"URI_TOKEN\"", ")", ")", ",", "\"uri_redirect\"", ":", "auth_settings", ".", "get", "(", "\"URI_REDIRECT\"", ")", ",", "}", "# method ending", "return", "out_auth" ]
Loads API credentials from a file, JSON or INI. :param str in_credentials: path to the credentials file. By default, look for a client_secrets.json file.
[ "Loads", "API", "credentials", "from", "a", "file", "JSON", "or", "INI", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/utils.py#L662-L751
novopl/peltak
src/peltak/extra/pypi/commands.py
configure
def configure(username, password): # type: (str, str) -> None """ Generate .pypirc config with the given credentials. Example: $ peltak pypi configure my_pypi_user my_pypi_pass """ from peltak.extra.pypi import logic logic.gen_pypirc(username, password)
python
def configure(username, password): # type: (str, str) -> None """ Generate .pypirc config with the given credentials. Example: $ peltak pypi configure my_pypi_user my_pypi_pass """ from peltak.extra.pypi import logic logic.gen_pypirc(username, password)
[ "def", "configure", "(", "username", ",", "password", ")", ":", "# type: (str, str) -> None", "from", "peltak", ".", "extra", ".", "pypi", "import", "logic", "logic", ".", "gen_pypirc", "(", "username", ",", "password", ")" ]
Generate .pypirc config with the given credentials. Example: $ peltak pypi configure my_pypi_user my_pypi_pass
[ "Generate", ".", "pypirc", "config", "with", "the", "given", "credentials", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/extra/pypi/commands.py#L58-L69
isogeo/isogeo-api-py-minsdk
isogeo_pysdk/translator.py
IsogeoTranslator.tr
def tr(self, subdomain: str, string_to_translate: str = "") -> str: """Returns translation of string passed. :param str subdomain: subpart of strings dictionary. Must be one of self.translations.keys() i.e. 'restrictions' :param str string_to_translate: string you want to translate """ if subdomain not in self.translations.keys(): raise ValueError( "'{}' is not a correct subdomain." " Must be one of {}".format(subdomain, self.translations.keys()) ) else: pass # translate str_translated = self.translations.get( subdomain, {"error": "Subdomain not found: {}".format(subdomain)} ).get(string_to_translate, "String not found") # end of method return str_translated
python
def tr(self, subdomain: str, string_to_translate: str = "") -> str: """Returns translation of string passed. :param str subdomain: subpart of strings dictionary. Must be one of self.translations.keys() i.e. 'restrictions' :param str string_to_translate: string you want to translate """ if subdomain not in self.translations.keys(): raise ValueError( "'{}' is not a correct subdomain." " Must be one of {}".format(subdomain, self.translations.keys()) ) else: pass # translate str_translated = self.translations.get( subdomain, {"error": "Subdomain not found: {}".format(subdomain)} ).get(string_to_translate, "String not found") # end of method return str_translated
[ "def", "tr", "(", "self", ",", "subdomain", ":", "str", ",", "string_to_translate", ":", "str", "=", "\"\"", ")", "->", "str", ":", "if", "subdomain", "not", "in", "self", ".", "translations", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"'{}' is not a correct subdomain.\"", "\" Must be one of {}\"", ".", "format", "(", "subdomain", ",", "self", ".", "translations", ".", "keys", "(", ")", ")", ")", "else", ":", "pass", "# translate", "str_translated", "=", "self", ".", "translations", ".", "get", "(", "subdomain", ",", "{", "\"error\"", ":", "\"Subdomain not found: {}\"", ".", "format", "(", "subdomain", ")", "}", ")", ".", "get", "(", "string_to_translate", ",", "\"String not found\"", ")", "# end of method", "return", "str_translated" ]
Returns translation of string passed. :param str subdomain: subpart of strings dictionary. Must be one of self.translations.keys() i.e. 'restrictions' :param str string_to_translate: string you want to translate
[ "Returns", "translation", "of", "string", "passed", "." ]
train
https://github.com/isogeo/isogeo-api-py-minsdk/blob/57a604be92c7767b26abd247012cc1a584b386a0/isogeo_pysdk/translator.py#L197-L217
praekeltfoundation/seed-identity-store
identities/models.py
optout_saved
def optout_saved(sender, instance, **kwargs): """ This is a duplicte of the view code for DRF to stop future internal Django implementations breaking. """ if instance.identity is None: # look up using the address_type and address identities = Identity.objects.filter_by_addr( instance.address_type, instance.address ) if identities.count() == 1: instance.identity = identities[0]
python
def optout_saved(sender, instance, **kwargs): """ This is a duplicte of the view code for DRF to stop future internal Django implementations breaking. """ if instance.identity is None: # look up using the address_type and address identities = Identity.objects.filter_by_addr( instance.address_type, instance.address ) if identities.count() == 1: instance.identity = identities[0]
[ "def", "optout_saved", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "if", "instance", ".", "identity", "is", "None", ":", "# look up using the address_type and address", "identities", "=", "Identity", ".", "objects", ".", "filter_by_addr", "(", "instance", ".", "address_type", ",", "instance", ".", "address", ")", "if", "identities", ".", "count", "(", ")", "==", "1", ":", "instance", ".", "identity", "=", "identities", "[", "0", "]" ]
This is a duplicte of the view code for DRF to stop future internal Django implementations breaking.
[ "This", "is", "a", "duplicte", "of", "the", "view", "code", "for", "DRF", "to", "stop", "future", "internal", "Django", "implementations", "breaking", "." ]
train
https://github.com/praekeltfoundation/seed-identity-store/blob/194e5756b5a74ebce9798c390de958cf5305b105/identities/models.py#L397-L408
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
get_ec2_client
def get_ec2_client(region_name=None, aws_access_key_id=None, aws_secret_access_key=None): """Gets an EC2 client :return: boto3.client object :raises: AWSAPIError """ log = logging.getLogger(mod_logger + '.get_ec2_client') # Connect to EC2 API try: client = boto3.client('ec2', region_name=region_name, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key) except ClientError: _, ex, trace = sys.exc_info() msg = 'There was a problem connecting to EC2, please check AWS CLI and boto configuration, ensure ' \ 'credentials and region are set appropriately.\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Successfully created an EC2 client') return client
python
def get_ec2_client(region_name=None, aws_access_key_id=None, aws_secret_access_key=None): """Gets an EC2 client :return: boto3.client object :raises: AWSAPIError """ log = logging.getLogger(mod_logger + '.get_ec2_client') # Connect to EC2 API try: client = boto3.client('ec2', region_name=region_name, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key) except ClientError: _, ex, trace = sys.exc_info() msg = 'There was a problem connecting to EC2, please check AWS CLI and boto configuration, ensure ' \ 'credentials and region are set appropriately.\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Successfully created an EC2 client') return client
[ "def", "get_ec2_client", "(", "region_name", "=", "None", ",", "aws_access_key_id", "=", "None", ",", "aws_secret_access_key", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "mod_logger", "+", "'.get_ec2_client'", ")", "# Connect to EC2 API", "try", ":", "client", "=", "boto3", ".", "client", "(", "'ec2'", ",", "region_name", "=", "region_name", ",", "aws_access_key_id", "=", "aws_access_key_id", ",", "aws_secret_access_key", "=", "aws_secret_access_key", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem connecting to EC2, please check AWS CLI and boto configuration, ensure '", "'credentials and region are set appropriately.\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "debug", "(", "'Successfully created an EC2 client'", ")", "return", "client" ]
Gets an EC2 client :return: boto3.client object :raises: AWSAPIError
[ "Gets", "an", "EC2", "client" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L954-L973
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.get_vpc_id
def get_vpc_id(self): """Gets the VPC ID for this EC2 instance :return: String instance ID or None """ log = logging.getLogger(self.cls_logger + '.get_vpc_id') # Exit if not running on AWS if not self.is_aws: log.info('This machine is not running in AWS, exiting...') return if self.instance_id is None: log.error('Unable to get the Instance ID for this machine') return log.info('Found Instance ID: {i}'.format(i=self.instance_id)) log.info('Querying AWS to get the VPC ID...') try: response = self.client.describe_instances( DryRun=False, InstanceIds=[self.instance_id]) except ClientError as ex: log.error('Unable to query AWS to get info for instance {i}\n{e}'.format( i=self.instance_id, e=ex)) return # Get the VPC ID from the response try: vpc_id = response['Reservations'][0]['Instances'][0]['VpcId'] except KeyError as ex: log.error('Unable to get VPC ID from response: {r}\n{e}'.format(r=response, e=ex)) return log.info('Found VPC ID: {v}'.format(v=vpc_id)) return vpc_id
python
def get_vpc_id(self): """Gets the VPC ID for this EC2 instance :return: String instance ID or None """ log = logging.getLogger(self.cls_logger + '.get_vpc_id') # Exit if not running on AWS if not self.is_aws: log.info('This machine is not running in AWS, exiting...') return if self.instance_id is None: log.error('Unable to get the Instance ID for this machine') return log.info('Found Instance ID: {i}'.format(i=self.instance_id)) log.info('Querying AWS to get the VPC ID...') try: response = self.client.describe_instances( DryRun=False, InstanceIds=[self.instance_id]) except ClientError as ex: log.error('Unable to query AWS to get info for instance {i}\n{e}'.format( i=self.instance_id, e=ex)) return # Get the VPC ID from the response try: vpc_id = response['Reservations'][0]['Instances'][0]['VpcId'] except KeyError as ex: log.error('Unable to get VPC ID from response: {r}\n{e}'.format(r=response, e=ex)) return log.info('Found VPC ID: {v}'.format(v=vpc_id)) return vpc_id
[ "def", "get_vpc_id", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_vpc_id'", ")", "# Exit if not running on AWS", "if", "not", "self", ".", "is_aws", ":", "log", ".", "info", "(", "'This machine is not running in AWS, exiting...'", ")", "return", "if", "self", ".", "instance_id", "is", "None", ":", "log", ".", "error", "(", "'Unable to get the Instance ID for this machine'", ")", "return", "log", ".", "info", "(", "'Found Instance ID: {i}'", ".", "format", "(", "i", "=", "self", ".", "instance_id", ")", ")", "log", ".", "info", "(", "'Querying AWS to get the VPC ID...'", ")", "try", ":", "response", "=", "self", ".", "client", ".", "describe_instances", "(", "DryRun", "=", "False", ",", "InstanceIds", "=", "[", "self", ".", "instance_id", "]", ")", "except", "ClientError", "as", "ex", ":", "log", ".", "error", "(", "'Unable to query AWS to get info for instance {i}\\n{e}'", ".", "format", "(", "i", "=", "self", ".", "instance_id", ",", "e", "=", "ex", ")", ")", "return", "# Get the VPC ID from the response", "try", ":", "vpc_id", "=", "response", "[", "'Reservations'", "]", "[", "0", "]", "[", "'Instances'", "]", "[", "0", "]", "[", "'VpcId'", "]", "except", "KeyError", "as", "ex", ":", "log", ".", "error", "(", "'Unable to get VPC ID from response: {r}\\n{e}'", ".", "format", "(", "r", "=", "response", ",", "e", "=", "ex", ")", ")", "return", "log", ".", "info", "(", "'Found VPC ID: {v}'", ".", "format", "(", "v", "=", "vpc_id", ")", ")", "return", "vpc_id" ]
Gets the VPC ID for this EC2 instance :return: String instance ID or None
[ "Gets", "the", "VPC", "ID", "for", "this", "EC2", "instance" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L73-L107
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.get_eni_id
def get_eni_id(self, interface=1): """Given an interface number, gets the AWS elastic network interface associated with the interface. :param interface: Integer associated to the interface/device number :return: String Elastic Network Interface ID or None if not found :raises OSError, AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.get_eni_id') # Get the instance-id if self.instance_id is None: msg = 'Instance ID not found for this machine' log.error(msg) raise OSError(msg) log.info('Found instance ID: {i}'.format(i=self.instance_id)) log.debug('Querying EC2 instances...') try: response = self.client.describe_instances( DryRun=False, InstanceIds=[self.instance_id] ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query EC2 for instances\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace log.debug('Found instance info: {r}'.format(r=response)) # Find the ENI ID log.info('Looking for the ENI ID to alias...') eni_id = None try: for reservation in response['Reservations']: for instance in reservation['Instances']: if instance['InstanceId'] == self.instance_id: for network_interface in instance['NetworkInterfaces']: if network_interface['Attachment']['DeviceIndex'] == interface: eni_id = network_interface['NetworkInterfaceId'] except KeyError: _, ex, trace = sys.exc_info() msg = 'ENI ID not found in AWS response for interface: {i}'.format(i=interface) log.error(msg) raise EC2UtilError, msg, trace log.info('Found ENI ID: {e}'.format(e=eni_id)) return eni_id
python
def get_eni_id(self, interface=1): """Given an interface number, gets the AWS elastic network interface associated with the interface. :param interface: Integer associated to the interface/device number :return: String Elastic Network Interface ID or None if not found :raises OSError, AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.get_eni_id') # Get the instance-id if self.instance_id is None: msg = 'Instance ID not found for this machine' log.error(msg) raise OSError(msg) log.info('Found instance ID: {i}'.format(i=self.instance_id)) log.debug('Querying EC2 instances...') try: response = self.client.describe_instances( DryRun=False, InstanceIds=[self.instance_id] ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query EC2 for instances\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace log.debug('Found instance info: {r}'.format(r=response)) # Find the ENI ID log.info('Looking for the ENI ID to alias...') eni_id = None try: for reservation in response['Reservations']: for instance in reservation['Instances']: if instance['InstanceId'] == self.instance_id: for network_interface in instance['NetworkInterfaces']: if network_interface['Attachment']['DeviceIndex'] == interface: eni_id = network_interface['NetworkInterfaceId'] except KeyError: _, ex, trace = sys.exc_info() msg = 'ENI ID not found in AWS response for interface: {i}'.format(i=interface) log.error(msg) raise EC2UtilError, msg, trace log.info('Found ENI ID: {e}'.format(e=eni_id)) return eni_id
[ "def", "get_eni_id", "(", "self", ",", "interface", "=", "1", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_eni_id'", ")", "# Get the instance-id", "if", "self", ".", "instance_id", "is", "None", ":", "msg", "=", "'Instance ID not found for this machine'", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "log", ".", "info", "(", "'Found instance ID: {i}'", ".", "format", "(", "i", "=", "self", ".", "instance_id", ")", ")", "log", ".", "debug", "(", "'Querying EC2 instances...'", ")", "try", ":", "response", "=", "self", ".", "client", ".", "describe_instances", "(", "DryRun", "=", "False", ",", "InstanceIds", "=", "[", "self", ".", "instance_id", "]", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to query EC2 for instances\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "log", ".", "debug", "(", "'Found instance info: {r}'", ".", "format", "(", "r", "=", "response", ")", ")", "# Find the ENI ID", "log", ".", "info", "(", "'Looking for the ENI ID to alias...'", ")", "eni_id", "=", "None", "try", ":", "for", "reservation", "in", "response", "[", "'Reservations'", "]", ":", "for", "instance", "in", "reservation", "[", "'Instances'", "]", ":", "if", "instance", "[", "'InstanceId'", "]", "==", "self", ".", "instance_id", ":", "for", "network_interface", "in", "instance", "[", "'NetworkInterfaces'", "]", ":", "if", "network_interface", "[", "'Attachment'", "]", "[", "'DeviceIndex'", "]", "==", "interface", ":", "eni_id", "=", "network_interface", "[", "'NetworkInterfaceId'", "]", "except", "KeyError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'ENI ID not found in AWS response for interface: {i}'", ".", "format", "(", "i", "=", "interface", ")", "log", ".", "error", 
"(", "msg", ")", "raise", "EC2UtilError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Found ENI ID: {e}'", ".", "format", "(", "e", "=", "eni_id", ")", ")", "return", "eni_id" ]
Given an interface number, gets the AWS elastic network interface associated with the interface. :param interface: Integer associated to the interface/device number :return: String Elastic Network Interface ID or None if not found :raises OSError, AWSAPIError, EC2UtilError
[ "Given", "an", "interface", "number", "gets", "the", "AWS", "elastic", "network", "interface", "associated", "with", "the", "interface", "." ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L109-L156
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.add_secondary_ip
def add_secondary_ip(self, ip_address, interface=1): """Adds an IP address as a secondary IP address :param ip_address: String IP address to add as a secondary IP :param interface: Integer associated to the interface/device number :return: None :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.add_secondary_ip') # Get the ENI ID eni_id = self.get_eni_id(interface) # Verify the ENI ID was found if eni_id is None: msg = 'Unable to find the corresponding ENI ID for interface: {i}'. \ format(i=interface) log.error(msg) raise EC2UtilError(msg) else: log.info('Found ENI ID: {e}'.format(e=eni_id)) # Assign the secondary IP address log.info('Attempting to assign the secondary IP address...') try: self.client.assign_private_ip_addresses( NetworkInterfaceId=eni_id, PrivateIpAddresses=[ ip_address, ], AllowReassignment=True ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to assign secondary IP address\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace log.info('Successfully added secondary IP address {s} to ENI ID {e} on interface {i}'.format( s=ip_address, e=eni_id, i=interface))
python
def add_secondary_ip(self, ip_address, interface=1): """Adds an IP address as a secondary IP address :param ip_address: String IP address to add as a secondary IP :param interface: Integer associated to the interface/device number :return: None :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.add_secondary_ip') # Get the ENI ID eni_id = self.get_eni_id(interface) # Verify the ENI ID was found if eni_id is None: msg = 'Unable to find the corresponding ENI ID for interface: {i}'. \ format(i=interface) log.error(msg) raise EC2UtilError(msg) else: log.info('Found ENI ID: {e}'.format(e=eni_id)) # Assign the secondary IP address log.info('Attempting to assign the secondary IP address...') try: self.client.assign_private_ip_addresses( NetworkInterfaceId=eni_id, PrivateIpAddresses=[ ip_address, ], AllowReassignment=True ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to assign secondary IP address\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace log.info('Successfully added secondary IP address {s} to ENI ID {e} on interface {i}'.format( s=ip_address, e=eni_id, i=interface))
[ "def", "add_secondary_ip", "(", "self", ",", "ip_address", ",", "interface", "=", "1", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.add_secondary_ip'", ")", "# Get the ENI ID", "eni_id", "=", "self", ".", "get_eni_id", "(", "interface", ")", "# Verify the ENI ID was found", "if", "eni_id", "is", "None", ":", "msg", "=", "'Unable to find the corresponding ENI ID for interface: {i}'", ".", "format", "(", "i", "=", "interface", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "else", ":", "log", ".", "info", "(", "'Found ENI ID: {e}'", ".", "format", "(", "e", "=", "eni_id", ")", ")", "# Assign the secondary IP address", "log", ".", "info", "(", "'Attempting to assign the secondary IP address...'", ")", "try", ":", "self", ".", "client", ".", "assign_private_ip_addresses", "(", "NetworkInterfaceId", "=", "eni_id", ",", "PrivateIpAddresses", "=", "[", "ip_address", ",", "]", ",", "AllowReassignment", "=", "True", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to assign secondary IP address\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Successfully added secondary IP address {s} to ENI ID {e} on interface {i}'", ".", "format", "(", "s", "=", "ip_address", ",", "e", "=", "eni_id", ",", "i", "=", "interface", ")", ")" ]
Adds an IP address as a secondary IP address :param ip_address: String IP address to add as a secondary IP :param interface: Integer associated to the interface/device number :return: None :raises: AWSAPIError, EC2UtilError
[ "Adds", "an", "IP", "address", "as", "a", "secondary", "IP", "address" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L158-L196
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.associate_elastic_ip
def associate_elastic_ip(self, allocation_id, interface=1, private_ip=None): """Given an elastic IP address and an interface number, associates the elastic IP to the interface number on this host. :param allocation_id: String ID for the elastic IP :param interface: Integer associated to the interface/device number :param private_ip: String IP address of the private IP address to assign :return: None :raises: OSError, AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.associate_elastic_ip') if private_ip is None: log.info('No private IP address provided, getting the primary IP' 'address on interface {i}...'.format(i=interface)) private_ip = get_ip_addresses()['eth{i}'.format(i=interface)] log.info('Associating Elastic IP {e} on interface {i} on IP {p}'.format( e=allocation_id, i=interface, p=private_ip)) # Get the ENI ID log.info('Getting the ENI ID for interface: {i}'.format(i=interface)) eni_id = self.get_eni_id(interface) # Verify the ENI ID was found if eni_id is None: msg = 'Unable to find the corresponding ENI ID for interface: {i}'. \ format(i=interface) log.error(msg) raise OSError(msg) else: log.info('Found ENI ID: {e}'.format(e=eni_id)) # Assign the secondary IP address log.info('Attempting to assign the secondary IP address...') try: response = self.client.associate_address( NetworkInterfaceId=eni_id, AllowReassociation=True, AllocationId=allocation_id, PrivateIpAddress=private_ip ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to attach elastic IP address {a} to interface {i}\n{e}'.format( a=allocation_id, i=interface, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'associate_address returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) log.info('Successfully associated elastic IP address ID {a} to interface {i} on ENI ID {e}'.format( a=allocation_id, i=interface, e=eni_id))
python
def associate_elastic_ip(self, allocation_id, interface=1, private_ip=None): """Given an elastic IP address and an interface number, associates the elastic IP to the interface number on this host. :param allocation_id: String ID for the elastic IP :param interface: Integer associated to the interface/device number :param private_ip: String IP address of the private IP address to assign :return: None :raises: OSError, AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.associate_elastic_ip') if private_ip is None: log.info('No private IP address provided, getting the primary IP' 'address on interface {i}...'.format(i=interface)) private_ip = get_ip_addresses()['eth{i}'.format(i=interface)] log.info('Associating Elastic IP {e} on interface {i} on IP {p}'.format( e=allocation_id, i=interface, p=private_ip)) # Get the ENI ID log.info('Getting the ENI ID for interface: {i}'.format(i=interface)) eni_id = self.get_eni_id(interface) # Verify the ENI ID was found if eni_id is None: msg = 'Unable to find the corresponding ENI ID for interface: {i}'. \ format(i=interface) log.error(msg) raise OSError(msg) else: log.info('Found ENI ID: {e}'.format(e=eni_id)) # Assign the secondary IP address log.info('Attempting to assign the secondary IP address...') try: response = self.client.associate_address( NetworkInterfaceId=eni_id, AllowReassociation=True, AllocationId=allocation_id, PrivateIpAddress=private_ip ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to attach elastic IP address {a} to interface {i}\n{e}'.format( a=allocation_id, i=interface, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'associate_address returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) log.info('Successfully associated elastic IP address ID {a} to interface {i} on ENI ID {e}'.format( a=allocation_id, i=interface, e=eni_id))
[ "def", "associate_elastic_ip", "(", "self", ",", "allocation_id", ",", "interface", "=", "1", ",", "private_ip", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.associate_elastic_ip'", ")", "if", "private_ip", "is", "None", ":", "log", ".", "info", "(", "'No private IP address provided, getting the primary IP'", "'address on interface {i}...'", ".", "format", "(", "i", "=", "interface", ")", ")", "private_ip", "=", "get_ip_addresses", "(", ")", "[", "'eth{i}'", ".", "format", "(", "i", "=", "interface", ")", "]", "log", ".", "info", "(", "'Associating Elastic IP {e} on interface {i} on IP {p}'", ".", "format", "(", "e", "=", "allocation_id", ",", "i", "=", "interface", ",", "p", "=", "private_ip", ")", ")", "# Get the ENI ID", "log", ".", "info", "(", "'Getting the ENI ID for interface: {i}'", ".", "format", "(", "i", "=", "interface", ")", ")", "eni_id", "=", "self", ".", "get_eni_id", "(", "interface", ")", "# Verify the ENI ID was found", "if", "eni_id", "is", "None", ":", "msg", "=", "'Unable to find the corresponding ENI ID for interface: {i}'", ".", "format", "(", "i", "=", "interface", ")", "log", ".", "error", "(", "msg", ")", "raise", "OSError", "(", "msg", ")", "else", ":", "log", ".", "info", "(", "'Found ENI ID: {e}'", ".", "format", "(", "e", "=", "eni_id", ")", ")", "# Assign the secondary IP address", "log", ".", "info", "(", "'Attempting to assign the secondary IP address...'", ")", "try", ":", "response", "=", "self", ".", "client", ".", "associate_address", "(", "NetworkInterfaceId", "=", "eni_id", ",", "AllowReassociation", "=", "True", ",", "AllocationId", "=", "allocation_id", ",", "PrivateIpAddress", "=", "private_ip", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to attach elastic IP address {a} to interface {i}\\n{e}'", ".", "format", "(", "a", "=", "allocation_id", ",", "i", "=", "interface", ",", "e", 
"=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "code", "=", "response", "[", "'ResponseMetadata'", "]", "[", "'HTTPStatusCode'", "]", "if", "code", "!=", "200", ":", "msg", "=", "'associate_address returned invalid code: {c}'", ".", "format", "(", "c", "=", "code", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", "(", "msg", ")", "log", ".", "info", "(", "'Successfully associated elastic IP address ID {a} to interface {i} on ENI ID {e}'", ".", "format", "(", "a", "=", "allocation_id", ",", "i", "=", "interface", ",", "e", "=", "eni_id", ")", ")" ]
Given an elastic IP address and an interface number, associates the elastic IP to the interface number on this host. :param allocation_id: String ID for the elastic IP :param interface: Integer associated to the interface/device number :param private_ip: String IP address of the private IP address to assign :return: None :raises: OSError, AWSAPIError, EC2UtilError
[ "Given", "an", "elastic", "IP", "address", "and", "an", "interface", "number", "associates", "the", "elastic", "IP", "to", "the", "interface", "number", "on", "this", "host", "." ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L198-L254
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.allocate_elastic_ip
def allocate_elastic_ip(self): """Allocates an elastic IP address :return: Dict with allocation ID and Public IP that were created :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.allocate_elastic_ip') # Attempt to allocate a new elastic IP log.info('Attempting to allocate an elastic IP...') try: response = self.client.allocate_address( DryRun=False, Domain='vpc' ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to allocate a new elastic IP address\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace allocation_id = response['AllocationId'] public_ip = response['PublicIp'] log.info('Allocated Elastic IP with ID {a} and Public IP address {p}'. format(a=allocation_id, p=public_ip)) # Verify the Address was allocated successfully log.info('Verifying the elastic IP address was allocated and is available ' 'for use...') ready = False verification_timer = [2]*60 + [5]*60 + [10]*18 num_checks = len(verification_timer) for i in range(0, num_checks): wait_time = verification_timer[i] try: self.client.describe_addresses( DryRun=False, AllocationIds=[allocation_id] ) except ClientError: _, ex, trace = sys.exc_info() log.info('Elastic IP address {p} with Allocation ID {a} is not available for use, trying again in ' '{w} sec...\n{e}'.format(p=public_ip, a=allocation_id, w=wait_time, e=str(ex))) time.sleep(wait_time) else: log.info('Elastic IP {p} with Allocation ID {a} is available for use'.format( p=public_ip, a=allocation_id)) ready = True break if ready: return {'AllocationId': allocation_id, 'PublicIp': public_ip} else: msg = 'Unable to verify existence of new Elastic IP {p} with Allocation ID: {a}'. \ format(p=public_ip, a=allocation_id) log.error(msg) raise EC2UtilError(msg)
python
def allocate_elastic_ip(self): """Allocates an elastic IP address :return: Dict with allocation ID and Public IP that were created :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.allocate_elastic_ip') # Attempt to allocate a new elastic IP log.info('Attempting to allocate an elastic IP...') try: response = self.client.allocate_address( DryRun=False, Domain='vpc' ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to allocate a new elastic IP address\n{e}'.format(e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace allocation_id = response['AllocationId'] public_ip = response['PublicIp'] log.info('Allocated Elastic IP with ID {a} and Public IP address {p}'. format(a=allocation_id, p=public_ip)) # Verify the Address was allocated successfully log.info('Verifying the elastic IP address was allocated and is available ' 'for use...') ready = False verification_timer = [2]*60 + [5]*60 + [10]*18 num_checks = len(verification_timer) for i in range(0, num_checks): wait_time = verification_timer[i] try: self.client.describe_addresses( DryRun=False, AllocationIds=[allocation_id] ) except ClientError: _, ex, trace = sys.exc_info() log.info('Elastic IP address {p} with Allocation ID {a} is not available for use, trying again in ' '{w} sec...\n{e}'.format(p=public_ip, a=allocation_id, w=wait_time, e=str(ex))) time.sleep(wait_time) else: log.info('Elastic IP {p} with Allocation ID {a} is available for use'.format( p=public_ip, a=allocation_id)) ready = True break if ready: return {'AllocationId': allocation_id, 'PublicIp': public_ip} else: msg = 'Unable to verify existence of new Elastic IP {p} with Allocation ID: {a}'. \ format(p=public_ip, a=allocation_id) log.error(msg) raise EC2UtilError(msg)
[ "def", "allocate_elastic_ip", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.allocate_elastic_ip'", ")", "# Attempt to allocate a new elastic IP", "log", ".", "info", "(", "'Attempting to allocate an elastic IP...'", ")", "try", ":", "response", "=", "self", ".", "client", ".", "allocate_address", "(", "DryRun", "=", "False", ",", "Domain", "=", "'vpc'", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to allocate a new elastic IP address\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "allocation_id", "=", "response", "[", "'AllocationId'", "]", "public_ip", "=", "response", "[", "'PublicIp'", "]", "log", ".", "info", "(", "'Allocated Elastic IP with ID {a} and Public IP address {p}'", ".", "format", "(", "a", "=", "allocation_id", ",", "p", "=", "public_ip", ")", ")", "# Verify the Address was allocated successfully", "log", ".", "info", "(", "'Verifying the elastic IP address was allocated and is available '", "'for use...'", ")", "ready", "=", "False", "verification_timer", "=", "[", "2", "]", "*", "60", "+", "[", "5", "]", "*", "60", "+", "[", "10", "]", "*", "18", "num_checks", "=", "len", "(", "verification_timer", ")", "for", "i", "in", "range", "(", "0", ",", "num_checks", ")", ":", "wait_time", "=", "verification_timer", "[", "i", "]", "try", ":", "self", ".", "client", ".", "describe_addresses", "(", "DryRun", "=", "False", ",", "AllocationIds", "=", "[", "allocation_id", "]", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "log", ".", "info", "(", "'Elastic IP address {p} with Allocation ID {a} is not available for use, trying again in '", "'{w} sec...\\n{e}'", ".", "format", "(", "p", "=", "public_ip", ",", "a", "=", "allocation_id", ",", "w", "=", 
"wait_time", ",", "e", "=", "str", "(", "ex", ")", ")", ")", "time", ".", "sleep", "(", "wait_time", ")", "else", ":", "log", ".", "info", "(", "'Elastic IP {p} with Allocation ID {a} is available for use'", ".", "format", "(", "p", "=", "public_ip", ",", "a", "=", "allocation_id", ")", ")", "ready", "=", "True", "break", "if", "ready", ":", "return", "{", "'AllocationId'", ":", "allocation_id", ",", "'PublicIp'", ":", "public_ip", "}", "else", ":", "msg", "=", "'Unable to verify existence of new Elastic IP {p} with Allocation ID: {a}'", ".", "format", "(", "p", "=", "public_ip", ",", "a", "=", "allocation_id", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")" ]
Allocates an elastic IP address :return: Dict with allocation ID and Public IP that were created :raises: AWSAPIError, EC2UtilError
[ "Allocates", "an", "elastic", "IP", "address" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L256-L311
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.attach_new_eni
def attach_new_eni(self, subnet_name, security_group_ids, device_index=2, allocation_id=None, description=''): """Creates a new Elastic Network Interface on the Subnet matching the subnet_name, with Security Group identified by the security_group_name, then attaches an Elastic IP address if specified in the allocation_id parameter, and finally attaches the new ENI to the EC2 instance instance_id at device index device_index. :param subnet_name: String name of the subnet :param security_group_ids: List of str IDs of the security groups :param device_index: Integer device index :param allocation_id: String ID of the elastic IP address :param description: String description :return: None :raises: EC2UtilError, AWSAPIError """ log = logging.getLogger(self.cls_logger + '.attach_new_eni') log.info('Attempting to attach a new network interface to this instance...') # Validate args if not isinstance(security_group_ids, list): msg = 'security_group_name argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(subnet_name, basestring): msg = 'subnet_name argument is not a string' log.error(msg) raise EC2UtilError(msg) if allocation_id is not None: if not isinstance(allocation_id, basestring): msg = 'allocation_id argument is not a string' log.error(msg) raise EC2UtilError(msg) try: device_index = int(device_index) except ValueError: _, ex, trace = sys.exc_info() msg = 'device_index argument is not an int\n{e}'.format(e=str(ex)) log.error(msg) raise EC2UtilError, msg, trace # Get the instance ID and VPC ID for this machine if self.instance_id is None or self.vpc_id is None: msg = 'Unable to obtain instance ID or VPC ID' log.error(msg) raise EC2UtilError(msg) # Get the subnet ID by name log.info('Looking up the subnet ID by name: {n}'.format(n=subnet_name)) filters = [ {'Name': 'vpc-id', 'Values': [self.vpc_id]}, {'Name': 'tag-key', 'Values': ['Name']}, {'Name': 'tag-value', 'Values': [subnet_name]}] try: response = self.client.describe_subnets( 
DryRun=False, Filters=filters ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to find subnet by name {n} in VPC {v}\n{e}'.format(n=subnet_name, v=self.vpc_id, e=str(ex)) log.error(msg) raise EC2UtilError, msg, trace if len(response['Subnets']) < 1: msg = 'No subnets found with name {n} in VPC {v}'.format(n=subnet_name, v=self.vpc_id) log.error(msg) raise EC2UtilError(msg) elif len(response['Subnets']) > 1: msg = 'More than 1 subnet found in VPC {v} with name {n}'.format(n=subnet_name, v=self.vpc_id) log.error(msg) raise EC2UtilError(msg) subnet_id = response['Subnets'][0]['SubnetId'] log.info('Found Subnet ID: {s}'.format(s=subnet_id)) # Create the ENI log.info('Attempting to create the Elastic Network Interface on subnet: {s}, with Security Groups: {g}'.format( s=subnet_id, g=security_group_ids)) try: response = self.client.create_network_interface( DryRun=False, SubnetId=subnet_id, Description=description, Groups=security_group_ids) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to create a network interface on Subnet {s} using Security Groups {g}\n{e}'.format( s=subnet_id, g=security_group_ids, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'create_network_interface returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) try: eni_id = response['NetworkInterface']['NetworkInterfaceId'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to parse ENI ID from response: {r}\n{e}'.format(r=response, e=str(ex)) log.error(msg) raise EC2UtilError, msg, trace log.info('Created ENI ID: {eni}'.format(eni=eni_id)) # Verify the ENI was created successfully log.info('Verifying the ENI was created and is available for use...') ready = False num_checks = 60 for _ in range(num_checks): try: self.client.describe_network_interfaces( DryRun=False, NetworkInterfaceIds=[eni_id] ) except ClientError as ex: log.info('ENI ID {eni} is 
not available for use, trying again in 1 sec...\n{e}'.format( eni=eni_id, e=ex)) time.sleep(2) else: log.info('ENI ID {eni} is available for use'.format(eni=eni_id)) ready = True break if not ready: msg = 'Unable to verify existence of new ENI ID: {eni}'.format(eni=eni_id) log.error(msg) raise EC2UtilError(msg) # If an allocation_id is specified, attach the elastic IP to the new ENI if allocation_id is not None: log.info('Attempting to attach elastic IP {a} to ENI {e}'.format(a=allocation_id, e=eni_id)) try: response = self.client.associate_address( AllocationId=allocation_id, DryRun=False, NetworkInterfaceId=eni_id, AllowReassociation=True) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to associate Elastic IP {a} to ENI {eni}\n{e}'.format( a=allocation_id, eni=eni_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'associate_address returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) log.info('Successfully attached Elastic IP {a} to ENI ID {eni}'.format( eni=eni_id, a=allocation_id)) # Attach the ENI to this EC2 instance log.info('Attempting to attach ENI ID {eni} to instance ID {i}'.format( eni=eni_id, i=self.instance_id)) try: response = self.client.attach_network_interface( DryRun=False, NetworkInterfaceId=eni_id, InstanceId=self.instance_id, DeviceIndex=device_index) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to attach ENI ID {eni} to instance {i} at device index {d}\n{e}'.format( eni=eni_id, i=self.instance_id, d=device_index, e=ex) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'attach_network_interface returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) log.info('Successfully attached ENI ID {eni} to EC2 instance ID {i}'.format( eni=eni_id, i=self.instance_id))
python
def attach_new_eni(self, subnet_name, security_group_ids, device_index=2, allocation_id=None, description=''): """Creates a new Elastic Network Interface on the Subnet matching the subnet_name, with Security Group identified by the security_group_name, then attaches an Elastic IP address if specified in the allocation_id parameter, and finally attaches the new ENI to the EC2 instance instance_id at device index device_index. :param subnet_name: String name of the subnet :param security_group_ids: List of str IDs of the security groups :param device_index: Integer device index :param allocation_id: String ID of the elastic IP address :param description: String description :return: None :raises: EC2UtilError, AWSAPIError """ log = logging.getLogger(self.cls_logger + '.attach_new_eni') log.info('Attempting to attach a new network interface to this instance...') # Validate args if not isinstance(security_group_ids, list): msg = 'security_group_name argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(subnet_name, basestring): msg = 'subnet_name argument is not a string' log.error(msg) raise EC2UtilError(msg) if allocation_id is not None: if not isinstance(allocation_id, basestring): msg = 'allocation_id argument is not a string' log.error(msg) raise EC2UtilError(msg) try: device_index = int(device_index) except ValueError: _, ex, trace = sys.exc_info() msg = 'device_index argument is not an int\n{e}'.format(e=str(ex)) log.error(msg) raise EC2UtilError, msg, trace # Get the instance ID and VPC ID for this machine if self.instance_id is None or self.vpc_id is None: msg = 'Unable to obtain instance ID or VPC ID' log.error(msg) raise EC2UtilError(msg) # Get the subnet ID by name log.info('Looking up the subnet ID by name: {n}'.format(n=subnet_name)) filters = [ {'Name': 'vpc-id', 'Values': [self.vpc_id]}, {'Name': 'tag-key', 'Values': ['Name']}, {'Name': 'tag-value', 'Values': [subnet_name]}] try: response = self.client.describe_subnets( 
DryRun=False, Filters=filters ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to find subnet by name {n} in VPC {v}\n{e}'.format(n=subnet_name, v=self.vpc_id, e=str(ex)) log.error(msg) raise EC2UtilError, msg, trace if len(response['Subnets']) < 1: msg = 'No subnets found with name {n} in VPC {v}'.format(n=subnet_name, v=self.vpc_id) log.error(msg) raise EC2UtilError(msg) elif len(response['Subnets']) > 1: msg = 'More than 1 subnet found in VPC {v} with name {n}'.format(n=subnet_name, v=self.vpc_id) log.error(msg) raise EC2UtilError(msg) subnet_id = response['Subnets'][0]['SubnetId'] log.info('Found Subnet ID: {s}'.format(s=subnet_id)) # Create the ENI log.info('Attempting to create the Elastic Network Interface on subnet: {s}, with Security Groups: {g}'.format( s=subnet_id, g=security_group_ids)) try: response = self.client.create_network_interface( DryRun=False, SubnetId=subnet_id, Description=description, Groups=security_group_ids) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to create a network interface on Subnet {s} using Security Groups {g}\n{e}'.format( s=subnet_id, g=security_group_ids, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'create_network_interface returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) try: eni_id = response['NetworkInterface']['NetworkInterfaceId'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to parse ENI ID from response: {r}\n{e}'.format(r=response, e=str(ex)) log.error(msg) raise EC2UtilError, msg, trace log.info('Created ENI ID: {eni}'.format(eni=eni_id)) # Verify the ENI was created successfully log.info('Verifying the ENI was created and is available for use...') ready = False num_checks = 60 for _ in range(num_checks): try: self.client.describe_network_interfaces( DryRun=False, NetworkInterfaceIds=[eni_id] ) except ClientError as ex: log.info('ENI ID {eni} is 
not available for use, trying again in 1 sec...\n{e}'.format( eni=eni_id, e=ex)) time.sleep(2) else: log.info('ENI ID {eni} is available for use'.format(eni=eni_id)) ready = True break if not ready: msg = 'Unable to verify existence of new ENI ID: {eni}'.format(eni=eni_id) log.error(msg) raise EC2UtilError(msg) # If an allocation_id is specified, attach the elastic IP to the new ENI if allocation_id is not None: log.info('Attempting to attach elastic IP {a} to ENI {e}'.format(a=allocation_id, e=eni_id)) try: response = self.client.associate_address( AllocationId=allocation_id, DryRun=False, NetworkInterfaceId=eni_id, AllowReassociation=True) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to associate Elastic IP {a} to ENI {eni}\n{e}'.format( a=allocation_id, eni=eni_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'associate_address returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) log.info('Successfully attached Elastic IP {a} to ENI ID {eni}'.format( eni=eni_id, a=allocation_id)) # Attach the ENI to this EC2 instance log.info('Attempting to attach ENI ID {eni} to instance ID {i}'.format( eni=eni_id, i=self.instance_id)) try: response = self.client.attach_network_interface( DryRun=False, NetworkInterfaceId=eni_id, InstanceId=self.instance_id, DeviceIndex=device_index) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to attach ENI ID {eni} to instance {i} at device index {d}\n{e}'.format( eni=eni_id, i=self.instance_id, d=device_index, e=ex) log.error(msg) raise AWSAPIError, msg, trace code = response['ResponseMetadata']['HTTPStatusCode'] if code != 200: msg = 'attach_network_interface returned invalid code: {c}'.format(c=code) log.error(msg) raise AWSAPIError(msg) log.info('Successfully attached ENI ID {eni} to EC2 instance ID {i}'.format( eni=eni_id, i=self.instance_id))
[ "def", "attach_new_eni", "(", "self", ",", "subnet_name", ",", "security_group_ids", ",", "device_index", "=", "2", ",", "allocation_id", "=", "None", ",", "description", "=", "''", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.attach_new_eni'", ")", "log", ".", "info", "(", "'Attempting to attach a new network interface to this instance...'", ")", "# Validate args", "if", "not", "isinstance", "(", "security_group_ids", ",", "list", ")", ":", "msg", "=", "'security_group_name argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "if", "not", "isinstance", "(", "subnet_name", ",", "basestring", ")", ":", "msg", "=", "'subnet_name argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "if", "allocation_id", "is", "not", "None", ":", "if", "not", "isinstance", "(", "allocation_id", ",", "basestring", ")", ":", "msg", "=", "'allocation_id argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "try", ":", "device_index", "=", "int", "(", "device_index", ")", "except", "ValueError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'device_index argument is not an int\\n{e}'", ".", "format", "(", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", ",", "msg", ",", "trace", "# Get the instance ID and VPC ID for this machine", "if", "self", ".", "instance_id", "is", "None", "or", "self", ".", "vpc_id", "is", "None", ":", "msg", "=", "'Unable to obtain instance ID or VPC ID'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "# Get the subnet ID by name", "log", ".", "info", "(", "'Looking up the subnet ID by name: {n}'", ".", "format", "(", "n", "=", "subnet_name", ")", ")", "filters", "=", "[", "{", "'Name'", ":", "'vpc-id'", ",", "'Values'", ":", "[", 
"self", ".", "vpc_id", "]", "}", ",", "{", "'Name'", ":", "'tag-key'", ",", "'Values'", ":", "[", "'Name'", "]", "}", ",", "{", "'Name'", ":", "'tag-value'", ",", "'Values'", ":", "[", "subnet_name", "]", "}", "]", "try", ":", "response", "=", "self", ".", "client", ".", "describe_subnets", "(", "DryRun", "=", "False", ",", "Filters", "=", "filters", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to find subnet by name {n} in VPC {v}\\n{e}'", ".", "format", "(", "n", "=", "subnet_name", ",", "v", "=", "self", ".", "vpc_id", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", ",", "msg", ",", "trace", "if", "len", "(", "response", "[", "'Subnets'", "]", ")", "<", "1", ":", "msg", "=", "'No subnets found with name {n} in VPC {v}'", ".", "format", "(", "n", "=", "subnet_name", ",", "v", "=", "self", ".", "vpc_id", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "elif", "len", "(", "response", "[", "'Subnets'", "]", ")", ">", "1", ":", "msg", "=", "'More than 1 subnet found in VPC {v} with name {n}'", ".", "format", "(", "n", "=", "subnet_name", ",", "v", "=", "self", ".", "vpc_id", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "subnet_id", "=", "response", "[", "'Subnets'", "]", "[", "0", "]", "[", "'SubnetId'", "]", "log", ".", "info", "(", "'Found Subnet ID: {s}'", ".", "format", "(", "s", "=", "subnet_id", ")", ")", "# Create the ENI", "log", ".", "info", "(", "'Attempting to create the Elastic Network Interface on subnet: {s}, with Security Groups: {g}'", ".", "format", "(", "s", "=", "subnet_id", ",", "g", "=", "security_group_ids", ")", ")", "try", ":", "response", "=", "self", ".", "client", ".", "create_network_interface", "(", "DryRun", "=", "False", ",", "SubnetId", "=", "subnet_id", ",", "Description", "=", "description", ",", "Groups", "=", 
"security_group_ids", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to create a network interface on Subnet {s} using Security Groups {g}\\n{e}'", ".", "format", "(", "s", "=", "subnet_id", ",", "g", "=", "security_group_ids", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "code", "=", "response", "[", "'ResponseMetadata'", "]", "[", "'HTTPStatusCode'", "]", "if", "code", "!=", "200", ":", "msg", "=", "'create_network_interface returned invalid code: {c}'", ".", "format", "(", "c", "=", "code", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", "(", "msg", ")", "try", ":", "eni_id", "=", "response", "[", "'NetworkInterface'", "]", "[", "'NetworkInterfaceId'", "]", "except", "KeyError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to parse ENI ID from response: {r}\\n{e}'", ".", "format", "(", "r", "=", "response", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", ",", "msg", ",", "trace", "log", ".", "info", "(", "'Created ENI ID: {eni}'", ".", "format", "(", "eni", "=", "eni_id", ")", ")", "# Verify the ENI was created successfully", "log", ".", "info", "(", "'Verifying the ENI was created and is available for use...'", ")", "ready", "=", "False", "num_checks", "=", "60", "for", "_", "in", "range", "(", "num_checks", ")", ":", "try", ":", "self", ".", "client", ".", "describe_network_interfaces", "(", "DryRun", "=", "False", ",", "NetworkInterfaceIds", "=", "[", "eni_id", "]", ")", "except", "ClientError", "as", "ex", ":", "log", ".", "info", "(", "'ENI ID {eni} is not available for use, trying again in 1 sec...\\n{e}'", ".", "format", "(", "eni", "=", "eni_id", ",", "e", "=", "ex", ")", ")", "time", ".", "sleep", "(", "2", ")", "else", ":", "log", ".", "info", "(", "'ENI ID {eni} is 
available for use'", ".", "format", "(", "eni", "=", "eni_id", ")", ")", "ready", "=", "True", "break", "if", "not", "ready", ":", "msg", "=", "'Unable to verify existence of new ENI ID: {eni}'", ".", "format", "(", "eni", "=", "eni_id", ")", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "# If an allocation_id is specified, attach the elastic IP to the new ENI", "if", "allocation_id", "is", "not", "None", ":", "log", ".", "info", "(", "'Attempting to attach elastic IP {a} to ENI {e}'", ".", "format", "(", "a", "=", "allocation_id", ",", "e", "=", "eni_id", ")", ")", "try", ":", "response", "=", "self", ".", "client", ".", "associate_address", "(", "AllocationId", "=", "allocation_id", ",", "DryRun", "=", "False", ",", "NetworkInterfaceId", "=", "eni_id", ",", "AllowReassociation", "=", "True", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to associate Elastic IP {a} to ENI {eni}\\n{e}'", ".", "format", "(", "a", "=", "allocation_id", ",", "eni", "=", "eni_id", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "code", "=", "response", "[", "'ResponseMetadata'", "]", "[", "'HTTPStatusCode'", "]", "if", "code", "!=", "200", ":", "msg", "=", "'associate_address returned invalid code: {c}'", ".", "format", "(", "c", "=", "code", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", "(", "msg", ")", "log", ".", "info", "(", "'Successfully attached Elastic IP {a} to ENI ID {eni}'", ".", "format", "(", "eni", "=", "eni_id", ",", "a", "=", "allocation_id", ")", ")", "# Attach the ENI to this EC2 instance", "log", ".", "info", "(", "'Attempting to attach ENI ID {eni} to instance ID {i}'", ".", "format", "(", "eni", "=", "eni_id", ",", "i", "=", "self", ".", "instance_id", ")", ")", "try", ":", "response", "=", "self", ".", "client", ".", "attach_network_interface", "(", 
"DryRun", "=", "False", ",", "NetworkInterfaceId", "=", "eni_id", ",", "InstanceId", "=", "self", ".", "instance_id", ",", "DeviceIndex", "=", "device_index", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to attach ENI ID {eni} to instance {i} at device index {d}\\n{e}'", ".", "format", "(", "eni", "=", "eni_id", ",", "i", "=", "self", ".", "instance_id", ",", "d", "=", "device_index", ",", "e", "=", "ex", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "code", "=", "response", "[", "'ResponseMetadata'", "]", "[", "'HTTPStatusCode'", "]", "if", "code", "!=", "200", ":", "msg", "=", "'attach_network_interface returned invalid code: {c}'", ".", "format", "(", "c", "=", "code", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", "(", "msg", ")", "log", ".", "info", "(", "'Successfully attached ENI ID {eni} to EC2 instance ID {i}'", ".", "format", "(", "eni", "=", "eni_id", ",", "i", "=", "self", ".", "instance_id", ")", ")" ]
Creates a new Elastic Network Interface on the Subnet matching the subnet_name, with Security Group identified by the security_group_name, then attaches an Elastic IP address if specified in the allocation_id parameter, and finally attaches the new ENI to the EC2 instance instance_id at device index device_index. :param subnet_name: String name of the subnet :param security_group_ids: List of str IDs of the security groups :param device_index: Integer device index :param allocation_id: String ID of the elastic IP address :param description: String description :return: None :raises: EC2UtilError, AWSAPIError
[ "Creates", "a", "new", "Elastic", "Network", "Interface", "on", "the", "Subnet", "matching", "the", "subnet_name", "with", "Security", "Group", "identified", "by", "the", "security_group_name", "then", "attaches", "an", "Elastic", "IP", "address", "if", "specified", "in", "the", "allocation_id", "parameter", "and", "finally", "attaches", "the", "new", "ENI", "to", "the", "EC2", "instance", "instance_id", "at", "device", "index", "device_index", "." ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L313-L489
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.get_elastic_ips
def get_elastic_ips(self): """Returns the elastic IP info for this instance any are attached :return: (dict) Info about the Elastic IPs :raises AWSAPIError """ log = logging.getLogger(self.cls_logger + '.get_elastic_ips') instance_id = get_instance_id() if instance_id is None: log.error('Unable to get the Instance ID for this machine') return log.info('Found Instance ID: {i}'.format(i=instance_id)) log.info('Querying AWS for info about instance ID {i}...'.format(i=instance_id)) try: instance_info = self.client.describe_instances(DryRun=False, InstanceIds=[instance_id]) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS to get info for instance {i}\n{e}'.format(i=instance_id, e=ex) log.error(msg) raise AWSAPIError, msg, trace # Get the list of Public/Elastic IPs for this instance public_ips = [] for network_interface in instance_info['Reservations'][0]['Instances'][0]['NetworkInterfaces']: network_interface_id = network_interface['NetworkInterfaceId'] log.info('Checking ENI: {n}...'.format(n=network_interface_id)) try: public_ips.append(network_interface['Association']['PublicIp']) except KeyError: log.info('No Public IP found for Network Interface ID: {n}'.format(n=network_interface_id)) else: log.info('Found public IP for Network Interface ID {n}: {p}'.format( n=network_interface_id, p=network_interface['Association']['PublicIp'])) # Return if no Public/Elastic IPs found if len(public_ips) == 0: log.info('No Elastic IPs found for this instance: {i}'.format(i=instance_id)) return else: log.info('Found Public IPs: {p}'.format(p=public_ips)) # Get info for each Public/Elastic IP try: address_info = self.client.describe_addresses(DryRun=False, PublicIps=public_ips) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS to get info for addresses {p}\n{e}'.format(p=public_ips, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace if not address_info: msg = 'No address info return for Public IPs: {p}'.format(p=public_ips) 
log.error(msg) raise AWSAPIError(msg) return address_info
python
def get_elastic_ips(self): """Returns the elastic IP info for this instance any are attached :return: (dict) Info about the Elastic IPs :raises AWSAPIError """ log = logging.getLogger(self.cls_logger + '.get_elastic_ips') instance_id = get_instance_id() if instance_id is None: log.error('Unable to get the Instance ID for this machine') return log.info('Found Instance ID: {i}'.format(i=instance_id)) log.info('Querying AWS for info about instance ID {i}...'.format(i=instance_id)) try: instance_info = self.client.describe_instances(DryRun=False, InstanceIds=[instance_id]) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS to get info for instance {i}\n{e}'.format(i=instance_id, e=ex) log.error(msg) raise AWSAPIError, msg, trace # Get the list of Public/Elastic IPs for this instance public_ips = [] for network_interface in instance_info['Reservations'][0]['Instances'][0]['NetworkInterfaces']: network_interface_id = network_interface['NetworkInterfaceId'] log.info('Checking ENI: {n}...'.format(n=network_interface_id)) try: public_ips.append(network_interface['Association']['PublicIp']) except KeyError: log.info('No Public IP found for Network Interface ID: {n}'.format(n=network_interface_id)) else: log.info('Found public IP for Network Interface ID {n}: {p}'.format( n=network_interface_id, p=network_interface['Association']['PublicIp'])) # Return if no Public/Elastic IPs found if len(public_ips) == 0: log.info('No Elastic IPs found for this instance: {i}'.format(i=instance_id)) return else: log.info('Found Public IPs: {p}'.format(p=public_ips)) # Get info for each Public/Elastic IP try: address_info = self.client.describe_addresses(DryRun=False, PublicIps=public_ips) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS to get info for addresses {p}\n{e}'.format(p=public_ips, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace if not address_info: msg = 'No address info return for Public IPs: {p}'.format(p=public_ips) 
log.error(msg) raise AWSAPIError(msg) return address_info
[ "def", "get_elastic_ips", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_elastic_ips'", ")", "instance_id", "=", "get_instance_id", "(", ")", "if", "instance_id", "is", "None", ":", "log", ".", "error", "(", "'Unable to get the Instance ID for this machine'", ")", "return", "log", ".", "info", "(", "'Found Instance ID: {i}'", ".", "format", "(", "i", "=", "instance_id", ")", ")", "log", ".", "info", "(", "'Querying AWS for info about instance ID {i}...'", ".", "format", "(", "i", "=", "instance_id", ")", ")", "try", ":", "instance_info", "=", "self", ".", "client", ".", "describe_instances", "(", "DryRun", "=", "False", ",", "InstanceIds", "=", "[", "instance_id", "]", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to query AWS to get info for instance {i}\\n{e}'", ".", "format", "(", "i", "=", "instance_id", ",", "e", "=", "ex", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "# Get the list of Public/Elastic IPs for this instance", "public_ips", "=", "[", "]", "for", "network_interface", "in", "instance_info", "[", "'Reservations'", "]", "[", "0", "]", "[", "'Instances'", "]", "[", "0", "]", "[", "'NetworkInterfaces'", "]", ":", "network_interface_id", "=", "network_interface", "[", "'NetworkInterfaceId'", "]", "log", ".", "info", "(", "'Checking ENI: {n}...'", ".", "format", "(", "n", "=", "network_interface_id", ")", ")", "try", ":", "public_ips", ".", "append", "(", "network_interface", "[", "'Association'", "]", "[", "'PublicIp'", "]", ")", "except", "KeyError", ":", "log", ".", "info", "(", "'No Public IP found for Network Interface ID: {n}'", ".", "format", "(", "n", "=", "network_interface_id", ")", ")", "else", ":", "log", ".", "info", "(", "'Found public IP for Network Interface ID {n}: {p}'", ".", "format", "(", "n", "=", "network_interface_id", ",", "p", "=", 
"network_interface", "[", "'Association'", "]", "[", "'PublicIp'", "]", ")", ")", "# Return if no Public/Elastic IPs found", "if", "len", "(", "public_ips", ")", "==", "0", ":", "log", ".", "info", "(", "'No Elastic IPs found for this instance: {i}'", ".", "format", "(", "i", "=", "instance_id", ")", ")", "return", "else", ":", "log", ".", "info", "(", "'Found Public IPs: {p}'", ".", "format", "(", "p", "=", "public_ips", ")", ")", "# Get info for each Public/Elastic IP", "try", ":", "address_info", "=", "self", ".", "client", ".", "describe_addresses", "(", "DryRun", "=", "False", ",", "PublicIps", "=", "public_ips", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to query AWS to get info for addresses {p}\\n{e}'", ".", "format", "(", "p", "=", "public_ips", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "if", "not", "address_info", ":", "msg", "=", "'No address info return for Public IPs: {p}'", ".", "format", "(", "p", "=", "public_ips", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", "(", "msg", ")", "return", "address_info" ]
Returns the elastic IP info for this instance any are attached :return: (dict) Info about the Elastic IPs :raises AWSAPIError
[ "Returns", "the", "elastic", "IP", "info", "for", "this", "instance", "any", "are", "attached" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L491-L546
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.disassociate_elastic_ips
def disassociate_elastic_ips(self): """For each attached Elastic IP, disassociate it :return: None :raises AWSAPIError """ log = logging.getLogger(self.cls_logger + '.disassociate_elastic_ips') try: address_info = self.get_elastic_ips() except AWSAPIError: _, ex, trace = sys.exc_info() msg = 'Unable to determine Elastic IPs on this EC2 instance' log.error(msg) raise AWSAPIError, msg, trace # Return is no elastic IPs were found if not address_info: log.info('No elastic IPs found to disassociate') return # Disassociate each Elastic IP for address in address_info['Addresses']: association_id = address['AssociationId'] public_ip = address['PublicIp'] log.info('Attempting to disassociate address {p} from Association ID: {a}'.format( p=public_ip, a=association_id)) try: self.client.disassociate_address(PublicIp=public_ip, AssociationId=association_id) except ClientError: _, ex, trace = sys.exc_info() msg = 'There was a problem disassociating Public IP {p} from Association ID {a}'.format( p=public_ip, a=association_id) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Successfully disassociated Public IP: {p}'.format(p=public_ip))
python
def disassociate_elastic_ips(self): """For each attached Elastic IP, disassociate it :return: None :raises AWSAPIError """ log = logging.getLogger(self.cls_logger + '.disassociate_elastic_ips') try: address_info = self.get_elastic_ips() except AWSAPIError: _, ex, trace = sys.exc_info() msg = 'Unable to determine Elastic IPs on this EC2 instance' log.error(msg) raise AWSAPIError, msg, trace # Return is no elastic IPs were found if not address_info: log.info('No elastic IPs found to disassociate') return # Disassociate each Elastic IP for address in address_info['Addresses']: association_id = address['AssociationId'] public_ip = address['PublicIp'] log.info('Attempting to disassociate address {p} from Association ID: {a}'.format( p=public_ip, a=association_id)) try: self.client.disassociate_address(PublicIp=public_ip, AssociationId=association_id) except ClientError: _, ex, trace = sys.exc_info() msg = 'There was a problem disassociating Public IP {p} from Association ID {a}'.format( p=public_ip, a=association_id) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Successfully disassociated Public IP: {p}'.format(p=public_ip))
[ "def", "disassociate_elastic_ips", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.disassociate_elastic_ips'", ")", "try", ":", "address_info", "=", "self", ".", "get_elastic_ips", "(", ")", "except", "AWSAPIError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to determine Elastic IPs on this EC2 instance'", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "# Return is no elastic IPs were found", "if", "not", "address_info", ":", "log", ".", "info", "(", "'No elastic IPs found to disassociate'", ")", "return", "# Disassociate each Elastic IP", "for", "address", "in", "address_info", "[", "'Addresses'", "]", ":", "association_id", "=", "address", "[", "'AssociationId'", "]", "public_ip", "=", "address", "[", "'PublicIp'", "]", "log", ".", "info", "(", "'Attempting to disassociate address {p} from Association ID: {a}'", ".", "format", "(", "p", "=", "public_ip", ",", "a", "=", "association_id", ")", ")", "try", ":", "self", ".", "client", ".", "disassociate_address", "(", "PublicIp", "=", "public_ip", ",", "AssociationId", "=", "association_id", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'There was a problem disassociating Public IP {p} from Association ID {a}'", ".", "format", "(", "p", "=", "public_ip", ",", "a", "=", "association_id", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Successfully disassociated Public IP: {p}'", ".", "format", "(", "p", "=", "public_ip", ")", ")" ]
For each attached Elastic IP, disassociate it :return: None :raises AWSAPIError
[ "For", "each", "attached", "Elastic", "IP", "disassociate", "it" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L548-L584
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.create_security_group
def create_security_group(self, name, description='', vpc_id=None): """Creates a new Security Group with the specified name, description, in the specified vpc_id if provided. If vpc_id is not provided, use self.vpc_id :param name: (str) Security Group Name :param description: (str) Security Group Description :param vpc_id: (str) VPC ID to create the Security Group :return: (str) Security Group ID :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.create_security_group') # Validate args if not isinstance(name, basestring): msg = 'name argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(description, basestring): msg = 'description argument is not a string' log.error(msg) raise EC2UtilError(msg) if vpc_id is None and self.vpc_id is not None: vpc_id = self.vpc_id else: msg = 'Unable to determine VPC ID to use to create the Security Group' log.error(msg) raise EC2UtilError(msg) # See if a Security Group already exists with the same name log.info('Checking for an existing security group with name {n} in VPC: {v}'.format(n=name, v=vpc_id)) filters = [{ 'Name': 'vpc-id', 'Values': [vpc_id] }, { 'Name': 'group-name', 'Values': [name] }] try: response = self.client.describe_security_groups(DryRun=False, Filters=filters) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query Security Groups to determine if {n} exists in VPC ID {v}\n{e}'.format( n=name, v=vpc_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Found Security Group: {r}'.format(r=response)) if len(response['SecurityGroups']) == 1: log.info('Found an existing security group with name {n} in VPC: {v}'.format(n=name, v=vpc_id)) try: group_id = response['SecurityGroups'][0]['GroupId'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to determine the Security Group GroupId from response: {r}\n{e}'.format( r=response, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Found 
existing Security Group with GroupId: {g}'.format(g=group_id)) return group_id else: log.info('No existing Security Group with name {n} found in VPC: {v}'.format(n=name, v=vpc_id)) # Create a new Security Group log.info('Attempting to create a Security Group with name <{n}>, description <{d}>, in VPC: {v}'.format( n=name, d=description, v=vpc_id)) try: response = self.client.create_security_group( DryRun=False, GroupName=name, Description=description, VpcId=vpc_id ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to create Security Group <{n}> in VPC: {v}'.format(n=name, v=vpc_id) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Successfully created Security Group <{n}> in VPC: {v}'.format(n=name, v=vpc_id)) return response['GroupId']
python
def create_security_group(self, name, description='', vpc_id=None): """Creates a new Security Group with the specified name, description, in the specified vpc_id if provided. If vpc_id is not provided, use self.vpc_id :param name: (str) Security Group Name :param description: (str) Security Group Description :param vpc_id: (str) VPC ID to create the Security Group :return: (str) Security Group ID :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.create_security_group') # Validate args if not isinstance(name, basestring): msg = 'name argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(description, basestring): msg = 'description argument is not a string' log.error(msg) raise EC2UtilError(msg) if vpc_id is None and self.vpc_id is not None: vpc_id = self.vpc_id else: msg = 'Unable to determine VPC ID to use to create the Security Group' log.error(msg) raise EC2UtilError(msg) # See if a Security Group already exists with the same name log.info('Checking for an existing security group with name {n} in VPC: {v}'.format(n=name, v=vpc_id)) filters = [{ 'Name': 'vpc-id', 'Values': [vpc_id] }, { 'Name': 'group-name', 'Values': [name] }] try: response = self.client.describe_security_groups(DryRun=False, Filters=filters) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query Security Groups to determine if {n} exists in VPC ID {v}\n{e}'.format( n=name, v=vpc_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Found Security Group: {r}'.format(r=response)) if len(response['SecurityGroups']) == 1: log.info('Found an existing security group with name {n} in VPC: {v}'.format(n=name, v=vpc_id)) try: group_id = response['SecurityGroups'][0]['GroupId'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to determine the Security Group GroupId from response: {r}\n{e}'.format( r=response, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Found 
existing Security Group with GroupId: {g}'.format(g=group_id)) return group_id else: log.info('No existing Security Group with name {n} found in VPC: {v}'.format(n=name, v=vpc_id)) # Create a new Security Group log.info('Attempting to create a Security Group with name <{n}>, description <{d}>, in VPC: {v}'.format( n=name, d=description, v=vpc_id)) try: response = self.client.create_security_group( DryRun=False, GroupName=name, Description=description, VpcId=vpc_id ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to create Security Group <{n}> in VPC: {v}'.format(n=name, v=vpc_id) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Successfully created Security Group <{n}> in VPC: {v}'.format(n=name, v=vpc_id)) return response['GroupId']
[ "def", "create_security_group", "(", "self", ",", "name", ",", "description", "=", "''", ",", "vpc_id", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.create_security_group'", ")", "# Validate args", "if", "not", "isinstance", "(", "name", ",", "basestring", ")", ":", "msg", "=", "'name argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "if", "not", "isinstance", "(", "description", ",", "basestring", ")", ":", "msg", "=", "'description argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "if", "vpc_id", "is", "None", "and", "self", ".", "vpc_id", "is", "not", "None", ":", "vpc_id", "=", "self", ".", "vpc_id", "else", ":", "msg", "=", "'Unable to determine VPC ID to use to create the Security Group'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "# See if a Security Group already exists with the same name", "log", ".", "info", "(", "'Checking for an existing security group with name {n} in VPC: {v}'", ".", "format", "(", "n", "=", "name", ",", "v", "=", "vpc_id", ")", ")", "filters", "=", "[", "{", "'Name'", ":", "'vpc-id'", ",", "'Values'", ":", "[", "vpc_id", "]", "}", ",", "{", "'Name'", ":", "'group-name'", ",", "'Values'", ":", "[", "name", "]", "}", "]", "try", ":", "response", "=", "self", ".", "client", ".", "describe_security_groups", "(", "DryRun", "=", "False", ",", "Filters", "=", "filters", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to query Security Groups to determine if {n} exists in VPC ID {v}\\n{e}'", ".", "format", "(", "n", "=", "name", ",", "v", "=", "vpc_id", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "debug", "(", "'Found Security Group: {r}'", ".", 
"format", "(", "r", "=", "response", ")", ")", "if", "len", "(", "response", "[", "'SecurityGroups'", "]", ")", "==", "1", ":", "log", ".", "info", "(", "'Found an existing security group with name {n} in VPC: {v}'", ".", "format", "(", "n", "=", "name", ",", "v", "=", "vpc_id", ")", ")", "try", ":", "group_id", "=", "response", "[", "'SecurityGroups'", "]", "[", "0", "]", "[", "'GroupId'", "]", "except", "KeyError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to determine the Security Group GroupId from response: {r}\\n{e}'", ".", "format", "(", "r", "=", "response", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Found existing Security Group with GroupId: {g}'", ".", "format", "(", "g", "=", "group_id", ")", ")", "return", "group_id", "else", ":", "log", ".", "info", "(", "'No existing Security Group with name {n} found in VPC: {v}'", ".", "format", "(", "n", "=", "name", ",", "v", "=", "vpc_id", ")", ")", "# Create a new Security Group", "log", ".", "info", "(", "'Attempting to create a Security Group with name <{n}>, description <{d}>, in VPC: {v}'", ".", "format", "(", "n", "=", "name", ",", "d", "=", "description", ",", "v", "=", "vpc_id", ")", ")", "try", ":", "response", "=", "self", ".", "client", ".", "create_security_group", "(", "DryRun", "=", "False", ",", "GroupName", "=", "name", ",", "Description", "=", "description", ",", "VpcId", "=", "vpc_id", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to create Security Group <{n}> in VPC: {v}'", ".", "format", "(", "n", "=", "name", ",", "v", "=", "vpc_id", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Successfully created Security Group <{n}> in VPC: {v}'", ".", "format", 
"(", "n", "=", "name", ",", "v", "=", "vpc_id", ")", ")", "return", "response", "[", "'GroupId'", "]" ]
Creates a new Security Group with the specified name, description, in the specified vpc_id if provided. If vpc_id is not provided, use self.vpc_id :param name: (str) Security Group Name :param description: (str) Security Group Description :param vpc_id: (str) VPC ID to create the Security Group :return: (str) Security Group ID :raises: AWSAPIError, EC2UtilError
[ "Creates", "a", "new", "Security", "Group", "with", "the", "specified", "name", "description", "in", "the", "specified", "vpc_id", "if", "provided", ".", "If", "vpc_id", "is", "not", "provided", "use", "self", ".", "vpc_id" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L586-L667
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.list_security_groups_in_vpc
def list_security_groups_in_vpc(self, vpc_id=None): """Lists security groups in the VPC. If vpc_id is not provided, use self.vpc_id :param vpc_id: (str) VPC ID to list security groups for :return: (list) Security Group info :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.list_security_groups_in_vpc') if vpc_id is None and self.vpc_id is not None: vpc_id = self.vpc_id else: msg = 'Unable to determine VPC ID to use to create the Security Group' log.error(msg) raise EC2UtilError(msg) # Create a filter on the VPC ID filters = [ { 'Name': 'vpc-id', 'Values': [vpc_id] } ] # Get a list of security groups in the VPC log.info('Querying for a list of security groups in VPC ID: {v}'.format(v=vpc_id)) try: security_groups = self.client.describe_security_groups(DryRun=False, Filters=filters) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS for a list of security groups in VPC ID: {v}\n{e}'.format( v=vpc_id, e=str(ex)) raise AWSAPIError, msg, trace return security_groups
python
def list_security_groups_in_vpc(self, vpc_id=None): """Lists security groups in the VPC. If vpc_id is not provided, use self.vpc_id :param vpc_id: (str) VPC ID to list security groups for :return: (list) Security Group info :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.list_security_groups_in_vpc') if vpc_id is None and self.vpc_id is not None: vpc_id = self.vpc_id else: msg = 'Unable to determine VPC ID to use to create the Security Group' log.error(msg) raise EC2UtilError(msg) # Create a filter on the VPC ID filters = [ { 'Name': 'vpc-id', 'Values': [vpc_id] } ] # Get a list of security groups in the VPC log.info('Querying for a list of security groups in VPC ID: {v}'.format(v=vpc_id)) try: security_groups = self.client.describe_security_groups(DryRun=False, Filters=filters) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS for a list of security groups in VPC ID: {v}\n{e}'.format( v=vpc_id, e=str(ex)) raise AWSAPIError, msg, trace return security_groups
[ "def", "list_security_groups_in_vpc", "(", "self", ",", "vpc_id", "=", "None", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.list_security_groups_in_vpc'", ")", "if", "vpc_id", "is", "None", "and", "self", ".", "vpc_id", "is", "not", "None", ":", "vpc_id", "=", "self", ".", "vpc_id", "else", ":", "msg", "=", "'Unable to determine VPC ID to use to create the Security Group'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "# Create a filter on the VPC ID", "filters", "=", "[", "{", "'Name'", ":", "'vpc-id'", ",", "'Values'", ":", "[", "vpc_id", "]", "}", "]", "# Get a list of security groups in the VPC", "log", ".", "info", "(", "'Querying for a list of security groups in VPC ID: {v}'", ".", "format", "(", "v", "=", "vpc_id", ")", ")", "try", ":", "security_groups", "=", "self", ".", "client", ".", "describe_security_groups", "(", "DryRun", "=", "False", ",", "Filters", "=", "filters", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to query AWS for a list of security groups in VPC ID: {v}\\n{e}'", ".", "format", "(", "v", "=", "vpc_id", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "return", "security_groups" ]
Lists security groups in the VPC. If vpc_id is not provided, use self.vpc_id :param vpc_id: (str) VPC ID to list security groups for :return: (list) Security Group info :raises: AWSAPIError, EC2UtilError
[ "Lists", "security", "groups", "in", "the", "VPC", ".", "If", "vpc_id", "is", "not", "provided", "use", "self", ".", "vpc_id" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L669-L701
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.configure_security_group_ingress
def configure_security_group_ingress(self, security_group_id, port, desired_cidr_blocks): """Configures the security group ID allowing access only to the specified CIDR blocks, for the specified port number. :param security_group_id: (str) Security Group ID :param port: (str) TCP Port number :param desired_cidr_blocks: (list) List of desired CIDR blocks, e.g. 192.168.1.2/32 :return: None :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.configure_security_group_ingress') # Validate args if not isinstance(security_group_id, basestring): msg = 'security_group_id argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(port, basestring): msg = 'port argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(desired_cidr_blocks, list): msg = 'desired_cidr_blocks argument is not a list' log.error(msg) raise EC2UtilError(msg) log.info('Configuring Security Group <{g}> on port {p} to allow: {r}'.format( g=security_group_id, p=port, r=desired_cidr_blocks )) log.debug('Querying AWS for info on Security Group ID: {g}...'.format(g=security_group_id)) try: security_group_info = self.client.describe_security_groups(DryRun=False, GroupIds=[security_group_id]) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS for Security Group ID: {g}\n{e}'.format(g=security_group_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Found Security Group: {g}'.format(g=security_group_info)) try: ingress_rules = security_group_info['SecurityGroups'][0]['IpPermissions'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to get list of ingress rules for Security Group ID: {g}\n{e}'.format( g=security_group_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Found ingress rules: {r}'.format(r=ingress_rules)) # Evaluate each rule against the provided port and IP address list log.debug('Setting ingress rules...') for ingress_rule in 
ingress_rules: log.debug('Evaluating ingress rule: {r}'.format(r=ingress_rule)) if ingress_rule['ToPort'] != int(port): log.debug('Skipping rule not matching port: {p}'.format(p=port)) continue log.info('Removing existing rules from Security Group {g} for port: {p}...'.format( g=security_group_id, p=port)) try: self.client.revoke_security_group_ingress( DryRun=False, GroupId=security_group_id, IpPermissions=[ingress_rule]) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to remove existing Security Group rules for port {p} from Security Group: ' \ '{g}\n{e}'.format(p=port, g=security_group_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace # Build ingress rule based on the provided CIDR block list desired_ip_permissions = [ { 'IpProtocol': 'tcp', 'FromPort': int(port), 'ToPort': int(port), 'UserIdGroupPairs': [], 'IpRanges': [], 'PrefixListIds': [] } ] # Add IP rules for desired_cidr_block in desired_cidr_blocks: log.debug('Adding ingress for CIDR block: {b}'.format(b=desired_cidr_block)) cidr_block_entry = { 'CidrIp': desired_cidr_block } desired_ip_permissions[0]['IpRanges'].append(cidr_block_entry) # Add the ingress rule log.debug('Adding ingress rule: {r}'.format(r=desired_ip_permissions)) try: self.client.authorize_security_group_ingress( DryRun=False, GroupId=security_group_id, IpPermissions=desired_ip_permissions ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to authorize Security Group ingress rule for Security Group {g}: {r}\n{e}'.format( g=security_group_id, r=desired_ip_permissions, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Successfully added ingress rule for Security Group {g} on port: {p}'.format( g=security_group_id, p=port))
python
def configure_security_group_ingress(self, security_group_id, port, desired_cidr_blocks): """Configures the security group ID allowing access only to the specified CIDR blocks, for the specified port number. :param security_group_id: (str) Security Group ID :param port: (str) TCP Port number :param desired_cidr_blocks: (list) List of desired CIDR blocks, e.g. 192.168.1.2/32 :return: None :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.configure_security_group_ingress') # Validate args if not isinstance(security_group_id, basestring): msg = 'security_group_id argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(port, basestring): msg = 'port argument is not a string' log.error(msg) raise EC2UtilError(msg) if not isinstance(desired_cidr_blocks, list): msg = 'desired_cidr_blocks argument is not a list' log.error(msg) raise EC2UtilError(msg) log.info('Configuring Security Group <{g}> on port {p} to allow: {r}'.format( g=security_group_id, p=port, r=desired_cidr_blocks )) log.debug('Querying AWS for info on Security Group ID: {g}...'.format(g=security_group_id)) try: security_group_info = self.client.describe_security_groups(DryRun=False, GroupIds=[security_group_id]) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to query AWS for Security Group ID: {g}\n{e}'.format(g=security_group_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Found Security Group: {g}'.format(g=security_group_info)) try: ingress_rules = security_group_info['SecurityGroups'][0]['IpPermissions'] except KeyError: _, ex, trace = sys.exc_info() msg = 'Unable to get list of ingress rules for Security Group ID: {g}\n{e}'.format( g=security_group_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.debug('Found ingress rules: {r}'.format(r=ingress_rules)) # Evaluate each rule against the provided port and IP address list log.debug('Setting ingress rules...') for ingress_rule in 
ingress_rules: log.debug('Evaluating ingress rule: {r}'.format(r=ingress_rule)) if ingress_rule['ToPort'] != int(port): log.debug('Skipping rule not matching port: {p}'.format(p=port)) continue log.info('Removing existing rules from Security Group {g} for port: {p}...'.format( g=security_group_id, p=port)) try: self.client.revoke_security_group_ingress( DryRun=False, GroupId=security_group_id, IpPermissions=[ingress_rule]) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to remove existing Security Group rules for port {p} from Security Group: ' \ '{g}\n{e}'.format(p=port, g=security_group_id, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace # Build ingress rule based on the provided CIDR block list desired_ip_permissions = [ { 'IpProtocol': 'tcp', 'FromPort': int(port), 'ToPort': int(port), 'UserIdGroupPairs': [], 'IpRanges': [], 'PrefixListIds': [] } ] # Add IP rules for desired_cidr_block in desired_cidr_blocks: log.debug('Adding ingress for CIDR block: {b}'.format(b=desired_cidr_block)) cidr_block_entry = { 'CidrIp': desired_cidr_block } desired_ip_permissions[0]['IpRanges'].append(cidr_block_entry) # Add the ingress rule log.debug('Adding ingress rule: {r}'.format(r=desired_ip_permissions)) try: self.client.authorize_security_group_ingress( DryRun=False, GroupId=security_group_id, IpPermissions=desired_ip_permissions ) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to authorize Security Group ingress rule for Security Group {g}: {r}\n{e}'.format( g=security_group_id, r=desired_ip_permissions, e=str(ex)) log.error(msg) raise AWSAPIError, msg, trace else: log.info('Successfully added ingress rule for Security Group {g} on port: {p}'.format( g=security_group_id, p=port))
[ "def", "configure_security_group_ingress", "(", "self", ",", "security_group_id", ",", "port", ",", "desired_cidr_blocks", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.configure_security_group_ingress'", ")", "# Validate args", "if", "not", "isinstance", "(", "security_group_id", ",", "basestring", ")", ":", "msg", "=", "'security_group_id argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "if", "not", "isinstance", "(", "port", ",", "basestring", ")", ":", "msg", "=", "'port argument is not a string'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "if", "not", "isinstance", "(", "desired_cidr_blocks", ",", "list", ")", ":", "msg", "=", "'desired_cidr_blocks argument is not a list'", "log", ".", "error", "(", "msg", ")", "raise", "EC2UtilError", "(", "msg", ")", "log", ".", "info", "(", "'Configuring Security Group <{g}> on port {p} to allow: {r}'", ".", "format", "(", "g", "=", "security_group_id", ",", "p", "=", "port", ",", "r", "=", "desired_cidr_blocks", ")", ")", "log", ".", "debug", "(", "'Querying AWS for info on Security Group ID: {g}...'", ".", "format", "(", "g", "=", "security_group_id", ")", ")", "try", ":", "security_group_info", "=", "self", ".", "client", ".", "describe_security_groups", "(", "DryRun", "=", "False", ",", "GroupIds", "=", "[", "security_group_id", "]", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to query AWS for Security Group ID: {g}\\n{e}'", ".", "format", "(", "g", "=", "security_group_id", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "debug", "(", "'Found Security Group: {g}'", ".", "format", "(", "g", "=", "security_group_info", ")", ")", "try", ":", "ingress_rules", "=", "security_group_info", "[", 
"'SecurityGroups'", "]", "[", "0", "]", "[", "'IpPermissions'", "]", "except", "KeyError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to get list of ingress rules for Security Group ID: {g}\\n{e}'", ".", "format", "(", "g", "=", "security_group_id", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "debug", "(", "'Found ingress rules: {r}'", ".", "format", "(", "r", "=", "ingress_rules", ")", ")", "# Evaluate each rule against the provided port and IP address list", "log", ".", "debug", "(", "'Setting ingress rules...'", ")", "for", "ingress_rule", "in", "ingress_rules", ":", "log", ".", "debug", "(", "'Evaluating ingress rule: {r}'", ".", "format", "(", "r", "=", "ingress_rule", ")", ")", "if", "ingress_rule", "[", "'ToPort'", "]", "!=", "int", "(", "port", ")", ":", "log", ".", "debug", "(", "'Skipping rule not matching port: {p}'", ".", "format", "(", "p", "=", "port", ")", ")", "continue", "log", ".", "info", "(", "'Removing existing rules from Security Group {g} for port: {p}...'", ".", "format", "(", "g", "=", "security_group_id", ",", "p", "=", "port", ")", ")", "try", ":", "self", ".", "client", ".", "revoke_security_group_ingress", "(", "DryRun", "=", "False", ",", "GroupId", "=", "security_group_id", ",", "IpPermissions", "=", "[", "ingress_rule", "]", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to remove existing Security Group rules for port {p} from Security Group: '", "'{g}\\n{e}'", ".", "format", "(", "p", "=", "port", ",", "g", "=", "security_group_id", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "# Build ingress rule based on the provided CIDR block list", "desired_ip_permissions", "=", "[", "{", "'IpProtocol'", ":", "'tcp'", 
",", "'FromPort'", ":", "int", "(", "port", ")", ",", "'ToPort'", ":", "int", "(", "port", ")", ",", "'UserIdGroupPairs'", ":", "[", "]", ",", "'IpRanges'", ":", "[", "]", ",", "'PrefixListIds'", ":", "[", "]", "}", "]", "# Add IP rules", "for", "desired_cidr_block", "in", "desired_cidr_blocks", ":", "log", ".", "debug", "(", "'Adding ingress for CIDR block: {b}'", ".", "format", "(", "b", "=", "desired_cidr_block", ")", ")", "cidr_block_entry", "=", "{", "'CidrIp'", ":", "desired_cidr_block", "}", "desired_ip_permissions", "[", "0", "]", "[", "'IpRanges'", "]", ".", "append", "(", "cidr_block_entry", ")", "# Add the ingress rule", "log", ".", "debug", "(", "'Adding ingress rule: {r}'", ".", "format", "(", "r", "=", "desired_ip_permissions", ")", ")", "try", ":", "self", ".", "client", ".", "authorize_security_group_ingress", "(", "DryRun", "=", "False", ",", "GroupId", "=", "security_group_id", ",", "IpPermissions", "=", "desired_ip_permissions", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to authorize Security Group ingress rule for Security Group {g}: {r}\\n{e}'", ".", "format", "(", "g", "=", "security_group_id", ",", "r", "=", "desired_ip_permissions", ",", "e", "=", "str", "(", "ex", ")", ")", "log", ".", "error", "(", "msg", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace", "else", ":", "log", ".", "info", "(", "'Successfully added ingress rule for Security Group {g} on port: {p}'", ".", "format", "(", "g", "=", "security_group_id", ",", "p", "=", "port", ")", ")" ]
Configures the security group ID allowing access only to the specified CIDR blocks, for the specified port number. :param security_group_id: (str) Security Group ID :param port: (str) TCP Port number :param desired_cidr_blocks: (list) List of desired CIDR blocks, e.g. 192.168.1.2/32 :return: None :raises: AWSAPIError, EC2UtilError
[ "Configures", "the", "security", "group", "ID", "allowing", "access", "only", "to", "the", "specified", "CIDR", "blocks", "for", "the", "specified", "port", "number", "." ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L703-L810
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.revoke_security_group_ingress
def revoke_security_group_ingress(self, security_group_id, ingress_rules): """Revokes all ingress rules for a security group bu ID :param security_group_id: (str) Security Group ID :param port: (str) TCP Port number :param ingress_rules: (list) List of IP permissions (see AWS API docs re: IpPermissions) :return: None :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.revoke_security_group_ingress') log.info('Revoking ingress rules from security group: {g}'.format(g=security_group_id)) try: self.client.revoke_security_group_ingress( DryRun=False, GroupId=security_group_id, IpPermissions=ingress_rules) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to remove existing Security Group rules for port from Security Group: {g}\n{e}'.format( g=security_group_id, e=str(ex)) raise AWSAPIError, msg, trace
python
def revoke_security_group_ingress(self, security_group_id, ingress_rules): """Revokes all ingress rules for a security group bu ID :param security_group_id: (str) Security Group ID :param port: (str) TCP Port number :param ingress_rules: (list) List of IP permissions (see AWS API docs re: IpPermissions) :return: None :raises: AWSAPIError, EC2UtilError """ log = logging.getLogger(self.cls_logger + '.revoke_security_group_ingress') log.info('Revoking ingress rules from security group: {g}'.format(g=security_group_id)) try: self.client.revoke_security_group_ingress( DryRun=False, GroupId=security_group_id, IpPermissions=ingress_rules) except ClientError: _, ex, trace = sys.exc_info() msg = 'Unable to remove existing Security Group rules for port from Security Group: {g}\n{e}'.format( g=security_group_id, e=str(ex)) raise AWSAPIError, msg, trace
[ "def", "revoke_security_group_ingress", "(", "self", ",", "security_group_id", ",", "ingress_rules", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.revoke_security_group_ingress'", ")", "log", ".", "info", "(", "'Revoking ingress rules from security group: {g}'", ".", "format", "(", "g", "=", "security_group_id", ")", ")", "try", ":", "self", ".", "client", ".", "revoke_security_group_ingress", "(", "DryRun", "=", "False", ",", "GroupId", "=", "security_group_id", ",", "IpPermissions", "=", "ingress_rules", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'Unable to remove existing Security Group rules for port from Security Group: {g}\\n{e}'", ".", "format", "(", "g", "=", "security_group_id", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "AWSAPIError", ",", "msg", ",", "trace" ]
Revokes all ingress rules for a security group bu ID :param security_group_id: (str) Security Group ID :param port: (str) TCP Port number :param ingress_rules: (list) List of IP permissions (see AWS API docs re: IpPermissions) :return: None :raises: AWSAPIError, EC2UtilError
[ "Revokes", "all", "ingress", "rules", "for", "a", "security", "group", "bu", "ID" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L812-L832
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.launch_instance
def launch_instance(self, ami_id, key_name, subnet_id, security_group_id=None, security_group_list=None, user_data_script_path=None, instance_type='t2.small', root_device_name='/dev/xvda'): """Launches an EC2 instance with the specified parameters, intended to launch an instance for creation of a CONS3RT template. :param ami_id: (str) ID of the AMI to launch from :param key_name: (str) Name of the key-pair to use :param subnet_id: (str) IF of the VPC subnet to attach the instance to :param security_group_id: (str) ID of the security group, of not provided the default will be applied appended to security_group_list if provided :param security_group_id_list: (list) of IDs of the security group, if not provided the default will be applied :param user_data_script_path: (str) Path to the user-data script to run :param instance_type: (str) Instance Type (e.g. t2.micro) :param root_device_name: (str) The device name for the root volume :return: """ log = logging.getLogger(self.cls_logger + '.launch_instance') log.info('Launching with AMI ID: {a}'.format(a=ami_id)) log.info('Launching with Key Pair: {k}'.format(k=key_name)) if security_group_list: if not isinstance(security_group_list, list): raise EC2UtilError('security_group_list must be a list') if security_group_id and security_group_list: security_group_list.append(security_group_id) elif security_group_id and not security_group_list: security_group_list = [security_group_id] log.info('Launching with security group list: {s}'.format(s=security_group_list)) user_data = None if user_data_script_path is not None: if os.path.isfile(user_data_script_path): with open(user_data_script_path, 'r') as f: user_data = f.read() monitoring = {'Enabled': False} block_device_mappings = [ { 'DeviceName': root_device_name, 'Ebs': { 'VolumeSize': 100, 'DeleteOnTermination': True } } ] log.info('Attempting to launch the EC2 instance now...') try: response = self.client.run_instances( DryRun=False, ImageId=ami_id, MinCount=1, MaxCount=1, 
KeyName=key_name, SecurityGroupIds=security_group_list, UserData=user_data, InstanceType=instance_type, Monitoring=monitoring, SubnetId=subnet_id, InstanceInitiatedShutdownBehavior='stop', BlockDeviceMappings=block_device_mappings ) except ClientError: _, ex, trace = sys.exc_info() msg = '{n}: There was a problem launching the EC2 instance\n{e}'.format(n=ex.__class__.__name__, e=str(ex)) raise EC2UtilError, msg, trace instance_id = response['Instances'][0]['InstanceId'] output = { 'InstanceId': instance_id, 'InstanceInfo': response['Instances'][0] } return output
python
def launch_instance(self, ami_id, key_name, subnet_id, security_group_id=None, security_group_list=None, user_data_script_path=None, instance_type='t2.small', root_device_name='/dev/xvda'): """Launches an EC2 instance with the specified parameters, intended to launch an instance for creation of a CONS3RT template. :param ami_id: (str) ID of the AMI to launch from :param key_name: (str) Name of the key-pair to use :param subnet_id: (str) IF of the VPC subnet to attach the instance to :param security_group_id: (str) ID of the security group, of not provided the default will be applied appended to security_group_list if provided :param security_group_id_list: (list) of IDs of the security group, if not provided the default will be applied :param user_data_script_path: (str) Path to the user-data script to run :param instance_type: (str) Instance Type (e.g. t2.micro) :param root_device_name: (str) The device name for the root volume :return: """ log = logging.getLogger(self.cls_logger + '.launch_instance') log.info('Launching with AMI ID: {a}'.format(a=ami_id)) log.info('Launching with Key Pair: {k}'.format(k=key_name)) if security_group_list: if not isinstance(security_group_list, list): raise EC2UtilError('security_group_list must be a list') if security_group_id and security_group_list: security_group_list.append(security_group_id) elif security_group_id and not security_group_list: security_group_list = [security_group_id] log.info('Launching with security group list: {s}'.format(s=security_group_list)) user_data = None if user_data_script_path is not None: if os.path.isfile(user_data_script_path): with open(user_data_script_path, 'r') as f: user_data = f.read() monitoring = {'Enabled': False} block_device_mappings = [ { 'DeviceName': root_device_name, 'Ebs': { 'VolumeSize': 100, 'DeleteOnTermination': True } } ] log.info('Attempting to launch the EC2 instance now...') try: response = self.client.run_instances( DryRun=False, ImageId=ami_id, MinCount=1, MaxCount=1, 
KeyName=key_name, SecurityGroupIds=security_group_list, UserData=user_data, InstanceType=instance_type, Monitoring=monitoring, SubnetId=subnet_id, InstanceInitiatedShutdownBehavior='stop', BlockDeviceMappings=block_device_mappings ) except ClientError: _, ex, trace = sys.exc_info() msg = '{n}: There was a problem launching the EC2 instance\n{e}'.format(n=ex.__class__.__name__, e=str(ex)) raise EC2UtilError, msg, trace instance_id = response['Instances'][0]['InstanceId'] output = { 'InstanceId': instance_id, 'InstanceInfo': response['Instances'][0] } return output
[ "def", "launch_instance", "(", "self", ",", "ami_id", ",", "key_name", ",", "subnet_id", ",", "security_group_id", "=", "None", ",", "security_group_list", "=", "None", ",", "user_data_script_path", "=", "None", ",", "instance_type", "=", "'t2.small'", ",", "root_device_name", "=", "'/dev/xvda'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.launch_instance'", ")", "log", ".", "info", "(", "'Launching with AMI ID: {a}'", ".", "format", "(", "a", "=", "ami_id", ")", ")", "log", ".", "info", "(", "'Launching with Key Pair: {k}'", ".", "format", "(", "k", "=", "key_name", ")", ")", "if", "security_group_list", ":", "if", "not", "isinstance", "(", "security_group_list", ",", "list", ")", ":", "raise", "EC2UtilError", "(", "'security_group_list must be a list'", ")", "if", "security_group_id", "and", "security_group_list", ":", "security_group_list", ".", "append", "(", "security_group_id", ")", "elif", "security_group_id", "and", "not", "security_group_list", ":", "security_group_list", "=", "[", "security_group_id", "]", "log", ".", "info", "(", "'Launching with security group list: {s}'", ".", "format", "(", "s", "=", "security_group_list", ")", ")", "user_data", "=", "None", "if", "user_data_script_path", "is", "not", "None", ":", "if", "os", ".", "path", ".", "isfile", "(", "user_data_script_path", ")", ":", "with", "open", "(", "user_data_script_path", ",", "'r'", ")", "as", "f", ":", "user_data", "=", "f", ".", "read", "(", ")", "monitoring", "=", "{", "'Enabled'", ":", "False", "}", "block_device_mappings", "=", "[", "{", "'DeviceName'", ":", "root_device_name", ",", "'Ebs'", ":", "{", "'VolumeSize'", ":", "100", ",", "'DeleteOnTermination'", ":", "True", "}", "}", "]", "log", ".", "info", "(", "'Attempting to launch the EC2 instance now...'", ")", "try", ":", "response", "=", "self", ".", "client", ".", "run_instances", "(", "DryRun", "=", "False", ",", "ImageId", "=", "ami_id", ",", "MinCount", "=", "1", ",", 
"MaxCount", "=", "1", ",", "KeyName", "=", "key_name", ",", "SecurityGroupIds", "=", "security_group_list", ",", "UserData", "=", "user_data", ",", "InstanceType", "=", "instance_type", ",", "Monitoring", "=", "monitoring", ",", "SubnetId", "=", "subnet_id", ",", "InstanceInitiatedShutdownBehavior", "=", "'stop'", ",", "BlockDeviceMappings", "=", "block_device_mappings", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'{n}: There was a problem launching the EC2 instance\\n{e}'", ".", "format", "(", "n", "=", "ex", ".", "__class__", ".", "__name__", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "EC2UtilError", ",", "msg", ",", "trace", "instance_id", "=", "response", "[", "'Instances'", "]", "[", "0", "]", "[", "'InstanceId'", "]", "output", "=", "{", "'InstanceId'", ":", "instance_id", ",", "'InstanceInfo'", ":", "response", "[", "'Instances'", "]", "[", "0", "]", "}", "return", "output" ]
Launches an EC2 instance with the specified parameters, intended to launch an instance for creation of a CONS3RT template. :param ami_id: (str) ID of the AMI to launch from :param key_name: (str) Name of the key-pair to use :param subnet_id: (str) IF of the VPC subnet to attach the instance to :param security_group_id: (str) ID of the security group, of not provided the default will be applied appended to security_group_list if provided :param security_group_id_list: (list) of IDs of the security group, if not provided the default will be applied :param user_data_script_path: (str) Path to the user-data script to run :param instance_type: (str) Instance Type (e.g. t2.micro) :param root_device_name: (str) The device name for the root volume :return:
[ "Launches", "an", "EC2", "instance", "with", "the", "specified", "parameters", "intended", "to", "launch", "an", "instance", "for", "creation", "of", "a", "CONS3RT", "template", "." ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L834-L903
cons3rt/pycons3rt
pycons3rt/awsapi/ec2util.py
EC2Util.get_ec2_instances
def get_ec2_instances(self): """Describes the EC2 instances :return: dict containing EC2 instance data :raises: EC2UtilError """ log = logging.getLogger(self.cls_logger + '.get_ec2_instances') log.info('Describing EC2 instances...') try: response = self.client.describe_instances() except ClientError: _, ex, trace = sys.exc_info() msg = '{n}: There was a problem describing EC2 instances\n{e}'.format(n=ex.__class__.__name__, e=str(ex)) raise EC2UtilError, msg, trace return response
python
def get_ec2_instances(self): """Describes the EC2 instances :return: dict containing EC2 instance data :raises: EC2UtilError """ log = logging.getLogger(self.cls_logger + '.get_ec2_instances') log.info('Describing EC2 instances...') try: response = self.client.describe_instances() except ClientError: _, ex, trace = sys.exc_info() msg = '{n}: There was a problem describing EC2 instances\n{e}'.format(n=ex.__class__.__name__, e=str(ex)) raise EC2UtilError, msg, trace return response
[ "def", "get_ec2_instances", "(", "self", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "self", ".", "cls_logger", "+", "'.get_ec2_instances'", ")", "log", ".", "info", "(", "'Describing EC2 instances...'", ")", "try", ":", "response", "=", "self", ".", "client", ".", "describe_instances", "(", ")", "except", "ClientError", ":", "_", ",", "ex", ",", "trace", "=", "sys", ".", "exc_info", "(", ")", "msg", "=", "'{n}: There was a problem describing EC2 instances\\n{e}'", ".", "format", "(", "n", "=", "ex", ".", "__class__", ".", "__name__", ",", "e", "=", "str", "(", "ex", ")", ")", "raise", "EC2UtilError", ",", "msg", ",", "trace", "return", "response" ]
Describes the EC2 instances :return: dict containing EC2 instance data :raises: EC2UtilError
[ "Describes", "the", "EC2", "instances" ]
train
https://github.com/cons3rt/pycons3rt/blob/f004ab3a35c5bff2f698131fef3b2a8ed5a7596d/pycons3rt/awsapi/ec2util.py#L905-L919
novopl/peltak
src/peltak/commands/docs.py
docs_cli
def docs_cli(ctx, recreate, gen_index, run_doctests): # type: (click.Context, bool, bool, bool) -> None """ Build project documentation. This command will run sphinx-refdoc first to generate the reference documentation for the code base. Then it will run sphinx to generate the final docs. You can configure the directory that stores the docs source (index.rst, conf.py, etc.) using the DOC_SRC_PATH conf variable. In case you need it, the sphinx build directory is located in ``BUILD_DIR/docs``. The reference documentation will be generated for all directories listed under 'REFDOC_PATHS conf variable. By default it is empty so no reference docs are generated. Sample Config:: \b build_dir: '.build' docs: path: 'docs' reference: - 'src/mypkg' Examples:: \b $ peltak docs # Generate docs for the project $ peltak docs --no-index # Skip main reference index $ peltak docs --recreate --no-index # Build docs from clean slate """ if ctx.invoked_subcommand: return from peltak.logic import docs docs.docs(recreate, gen_index, run_doctests)
python
def docs_cli(ctx, recreate, gen_index, run_doctests): # type: (click.Context, bool, bool, bool) -> None """ Build project documentation. This command will run sphinx-refdoc first to generate the reference documentation for the code base. Then it will run sphinx to generate the final docs. You can configure the directory that stores the docs source (index.rst, conf.py, etc.) using the DOC_SRC_PATH conf variable. In case you need it, the sphinx build directory is located in ``BUILD_DIR/docs``. The reference documentation will be generated for all directories listed under 'REFDOC_PATHS conf variable. By default it is empty so no reference docs are generated. Sample Config:: \b build_dir: '.build' docs: path: 'docs' reference: - 'src/mypkg' Examples:: \b $ peltak docs # Generate docs for the project $ peltak docs --no-index # Skip main reference index $ peltak docs --recreate --no-index # Build docs from clean slate """ if ctx.invoked_subcommand: return from peltak.logic import docs docs.docs(recreate, gen_index, run_doctests)
[ "def", "docs_cli", "(", "ctx", ",", "recreate", ",", "gen_index", ",", "run_doctests", ")", ":", "# type: (click.Context, bool, bool, bool) -> None", "if", "ctx", ".", "invoked_subcommand", ":", "return", "from", "peltak", ".", "logic", "import", "docs", "docs", ".", "docs", "(", "recreate", ",", "gen_index", ",", "run_doctests", ")" ]
Build project documentation. This command will run sphinx-refdoc first to generate the reference documentation for the code base. Then it will run sphinx to generate the final docs. You can configure the directory that stores the docs source (index.rst, conf.py, etc.) using the DOC_SRC_PATH conf variable. In case you need it, the sphinx build directory is located in ``BUILD_DIR/docs``. The reference documentation will be generated for all directories listed under 'REFDOC_PATHS conf variable. By default it is empty so no reference docs are generated. Sample Config:: \b build_dir: '.build' docs: path: 'docs' reference: - 'src/mypkg' Examples:: \b $ peltak docs # Generate docs for the project $ peltak docs --no-index # Skip main reference index $ peltak docs --recreate --no-index # Build docs from clean slate
[ "Build", "project", "documentation", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/commands/docs.py#L52-L88
Josef-Friedrich/phrydy
phrydy/utils.py
as_string
def as_string(value): """Convert a value to a Unicode object for matching with a query. None becomes the empty string. Bytestrings are silently decoded. """ if six.PY2: buffer_types = buffer, memoryview # noqa: F821 else: buffer_types = memoryview if value is None: return u'' elif isinstance(value, buffer_types): return bytes(value).decode('utf8', 'ignore') elif isinstance(value, bytes): return value.decode('utf8', 'ignore') else: return six.text_type(value)
python
def as_string(value): """Convert a value to a Unicode object for matching with a query. None becomes the empty string. Bytestrings are silently decoded. """ if six.PY2: buffer_types = buffer, memoryview # noqa: F821 else: buffer_types = memoryview if value is None: return u'' elif isinstance(value, buffer_types): return bytes(value).decode('utf8', 'ignore') elif isinstance(value, bytes): return value.decode('utf8', 'ignore') else: return six.text_type(value)
[ "def", "as_string", "(", "value", ")", ":", "if", "six", ".", "PY2", ":", "buffer_types", "=", "buffer", ",", "memoryview", "# noqa: F821", "else", ":", "buffer_types", "=", "memoryview", "if", "value", "is", "None", ":", "return", "u''", "elif", "isinstance", "(", "value", ",", "buffer_types", ")", ":", "return", "bytes", "(", "value", ")", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "elif", "isinstance", "(", "value", ",", "bytes", ")", ":", "return", "value", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "else", ":", "return", "six", ".", "text_type", "(", "value", ")" ]
Convert a value to a Unicode object for matching with a query. None becomes the empty string. Bytestrings are silently decoded.
[ "Convert", "a", "value", "to", "a", "Unicode", "object", "for", "matching", "with", "a", "query", ".", "None", "becomes", "the", "empty", "string", ".", "Bytestrings", "are", "silently", "decoded", "." ]
train
https://github.com/Josef-Friedrich/phrydy/blob/aa13755155977b4776e49f79984f9968ac1d74dc/phrydy/utils.py#L23-L39
Josef-Friedrich/phrydy
phrydy/utils.py
displayable_path
def displayable_path(path, separator=u'; '): """Attempts to decode a bytestring path to a unicode object for the purpose of displaying it to the user. If the `path` argument is a list or a tuple, the elements are joined with `separator`. """ if isinstance(path, (list, tuple)): return separator.join(displayable_path(p) for p in path) elif isinstance(path, six.text_type): return path elif not isinstance(path, bytes): # A non-string object: just get its unicode representation. return six.text_type(path) try: return path.decode(_fsencoding(), 'ignore') except (UnicodeError, LookupError): return path.decode('utf8', 'ignore')
python
def displayable_path(path, separator=u'; '): """Attempts to decode a bytestring path to a unicode object for the purpose of displaying it to the user. If the `path` argument is a list or a tuple, the elements are joined with `separator`. """ if isinstance(path, (list, tuple)): return separator.join(displayable_path(p) for p in path) elif isinstance(path, six.text_type): return path elif not isinstance(path, bytes): # A non-string object: just get its unicode representation. return six.text_type(path) try: return path.decode(_fsencoding(), 'ignore') except (UnicodeError, LookupError): return path.decode('utf8', 'ignore')
[ "def", "displayable_path", "(", "path", ",", "separator", "=", "u'; '", ")", ":", "if", "isinstance", "(", "path", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "separator", ".", "join", "(", "displayable_path", "(", "p", ")", "for", "p", "in", "path", ")", "elif", "isinstance", "(", "path", ",", "six", ".", "text_type", ")", ":", "return", "path", "elif", "not", "isinstance", "(", "path", ",", "bytes", ")", ":", "# A non-string object: just get its unicode representation.", "return", "six", ".", "text_type", "(", "path", ")", "try", ":", "return", "path", ".", "decode", "(", "_fsencoding", "(", ")", ",", "'ignore'", ")", "except", "(", "UnicodeError", ",", "LookupError", ")", ":", "return", "path", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")" ]
Attempts to decode a bytestring path to a unicode object for the purpose of displaying it to the user. If the `path` argument is a list or a tuple, the elements are joined with `separator`.
[ "Attempts", "to", "decode", "a", "bytestring", "path", "to", "a", "unicode", "object", "for", "the", "purpose", "of", "displaying", "it", "to", "the", "user", ".", "If", "the", "path", "argument", "is", "a", "list", "or", "a", "tuple", "the", "elements", "are", "joined", "with", "separator", "." ]
train
https://github.com/Josef-Friedrich/phrydy/blob/aa13755155977b4776e49f79984f9968ac1d74dc/phrydy/utils.py#L42-L58
Josef-Friedrich/phrydy
phrydy/utils.py
syspath
def syspath(path, prefix=True): """Convert a path for use by the operating system. In particular, paths on Windows must receive a magic prefix and must be converted to Unicode before they are sent to the OS. To disable the magic prefix on Windows, set `prefix` to False---but only do this if you *really* know what you're doing. """ # Don't do anything if we're not on windows if os.path.__name__ != 'ntpath': return path if not isinstance(path, six.text_type): # Beets currently represents Windows paths internally with UTF-8 # arbitrarily. But earlier versions used MBCS because it is # reported as the FS encoding by Windows. Try both. try: path = path.decode('utf8') except UnicodeError: # The encoding should always be MBCS, Windows' broken # Unicode representation. encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() path = path.decode(encoding, 'replace') # Add the magic prefix if it isn't already there. # http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx if prefix and not path.startswith(WINDOWS_MAGIC_PREFIX): if path.startswith(u'\\\\'): # UNC path. Final path should look like \\?\UNC\... path = u'UNC' + path[1:] path = WINDOWS_MAGIC_PREFIX + path return path
python
def syspath(path, prefix=True): """Convert a path for use by the operating system. In particular, paths on Windows must receive a magic prefix and must be converted to Unicode before they are sent to the OS. To disable the magic prefix on Windows, set `prefix` to False---but only do this if you *really* know what you're doing. """ # Don't do anything if we're not on windows if os.path.__name__ != 'ntpath': return path if not isinstance(path, six.text_type): # Beets currently represents Windows paths internally with UTF-8 # arbitrarily. But earlier versions used MBCS because it is # reported as the FS encoding by Windows. Try both. try: path = path.decode('utf8') except UnicodeError: # The encoding should always be MBCS, Windows' broken # Unicode representation. encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() path = path.decode(encoding, 'replace') # Add the magic prefix if it isn't already there. # http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx if prefix and not path.startswith(WINDOWS_MAGIC_PREFIX): if path.startswith(u'\\\\'): # UNC path. Final path should look like \\?\UNC\... path = u'UNC' + path[1:] path = WINDOWS_MAGIC_PREFIX + path return path
[ "def", "syspath", "(", "path", ",", "prefix", "=", "True", ")", ":", "# Don't do anything if we're not on windows", "if", "os", ".", "path", ".", "__name__", "!=", "'ntpath'", ":", "return", "path", "if", "not", "isinstance", "(", "path", ",", "six", ".", "text_type", ")", ":", "# Beets currently represents Windows paths internally with UTF-8", "# arbitrarily. But earlier versions used MBCS because it is", "# reported as the FS encoding by Windows. Try both.", "try", ":", "path", "=", "path", ".", "decode", "(", "'utf8'", ")", "except", "UnicodeError", ":", "# The encoding should always be MBCS, Windows' broken", "# Unicode representation.", "encoding", "=", "sys", ".", "getfilesystemencoding", "(", ")", "or", "sys", ".", "getdefaultencoding", "(", ")", "path", "=", "path", ".", "decode", "(", "encoding", ",", "'replace'", ")", "# Add the magic prefix if it isn't already there.", "# http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx", "if", "prefix", "and", "not", "path", ".", "startswith", "(", "WINDOWS_MAGIC_PREFIX", ")", ":", "if", "path", ".", "startswith", "(", "u'\\\\\\\\'", ")", ":", "# UNC path. Final path should look like \\\\?\\UNC\\...", "path", "=", "u'UNC'", "+", "path", "[", "1", ":", "]", "path", "=", "WINDOWS_MAGIC_PREFIX", "+", "path", "return", "path" ]
Convert a path for use by the operating system. In particular, paths on Windows must receive a magic prefix and must be converted to Unicode before they are sent to the OS. To disable the magic prefix on Windows, set `prefix` to False---but only do this if you *really* know what you're doing.
[ "Convert", "a", "path", "for", "use", "by", "the", "operating", "system", ".", "In", "particular", "paths", "on", "Windows", "must", "receive", "a", "magic", "prefix", "and", "must", "be", "converted", "to", "Unicode", "before", "they", "are", "sent", "to", "the", "OS", ".", "To", "disable", "the", "magic", "prefix", "on", "Windows", "set", "prefix", "to", "False", "---", "but", "only", "do", "this", "if", "you", "*", "really", "*", "know", "what", "you", "re", "doing", "." ]
train
https://github.com/Josef-Friedrich/phrydy/blob/aa13755155977b4776e49f79984f9968ac1d74dc/phrydy/utils.py#L61-L92
cfobel/pygtk3-helpers
pygtk3_helpers/ui/notebook.py
NotebookManagerView.on_new
def on_new(self, button): ''' Copy selected notebook template to notebook directory. ## Notes ## - An exception is raised if the parent of the selected file is the notebook directory. - If notebook with same name already exists in notebook directory, offer is made to overwrite (the new copy of the file is renamed with a count if overwrite is not selected). ''' buttons = (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK) dialog = Gtk.FileChooserDialog("Select notebook template", self.parent, Gtk.FileChooserAction.OPEN, buttons) add_filters(dialog, [{'name': 'IPython notebook (*.ipynb)', 'pattern': '*.ipynb'}]) response = dialog.run() if response == Gtk.ResponseType.OK: selected_path = path(dialog.get_filename()) output_path = self.notebook_dir.joinpath(selected_path.name) overwrite = False if output_path.isfile(): response = yesno('%s already exists. Overwrite?' % output_path.name, 'Overwrite?') if response == Gtk.ResponseType.YES: overwrite = True else: counter = 1 renamed_path = output_path while renamed_path.isfile(): new_name = '%s (%d)%s' % (output_path.namebase, counter, output_path.ext) renamed_path = output_path.parent.joinpath(new_name) counter += 1 output_path = renamed_path self.notebook_manager.launch_from_template(selected_path, overwrite=overwrite, output_name=output_path.name, notebook_dir=self.notebook_dir) dialog.destroy()
python
def on_new(self, button): ''' Copy selected notebook template to notebook directory. ## Notes ## - An exception is raised if the parent of the selected file is the notebook directory. - If notebook with same name already exists in notebook directory, offer is made to overwrite (the new copy of the file is renamed with a count if overwrite is not selected). ''' buttons = (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK) dialog = Gtk.FileChooserDialog("Select notebook template", self.parent, Gtk.FileChooserAction.OPEN, buttons) add_filters(dialog, [{'name': 'IPython notebook (*.ipynb)', 'pattern': '*.ipynb'}]) response = dialog.run() if response == Gtk.ResponseType.OK: selected_path = path(dialog.get_filename()) output_path = self.notebook_dir.joinpath(selected_path.name) overwrite = False if output_path.isfile(): response = yesno('%s already exists. Overwrite?' % output_path.name, 'Overwrite?') if response == Gtk.ResponseType.YES: overwrite = True else: counter = 1 renamed_path = output_path while renamed_path.isfile(): new_name = '%s (%d)%s' % (output_path.namebase, counter, output_path.ext) renamed_path = output_path.parent.joinpath(new_name) counter += 1 output_path = renamed_path self.notebook_manager.launch_from_template(selected_path, overwrite=overwrite, output_name=output_path.name, notebook_dir=self.notebook_dir) dialog.destroy()
[ "def", "on_new", "(", "self", ",", "button", ")", ":", "buttons", "=", "(", "Gtk", ".", "STOCK_CANCEL", ",", "Gtk", ".", "ResponseType", ".", "CANCEL", ",", "Gtk", ".", "STOCK_OPEN", ",", "Gtk", ".", "ResponseType", ".", "OK", ")", "dialog", "=", "Gtk", ".", "FileChooserDialog", "(", "\"Select notebook template\"", ",", "self", ".", "parent", ",", "Gtk", ".", "FileChooserAction", ".", "OPEN", ",", "buttons", ")", "add_filters", "(", "dialog", ",", "[", "{", "'name'", ":", "'IPython notebook (*.ipynb)'", ",", "'pattern'", ":", "'*.ipynb'", "}", "]", ")", "response", "=", "dialog", ".", "run", "(", ")", "if", "response", "==", "Gtk", ".", "ResponseType", ".", "OK", ":", "selected_path", "=", "path", "(", "dialog", ".", "get_filename", "(", ")", ")", "output_path", "=", "self", ".", "notebook_dir", ".", "joinpath", "(", "selected_path", ".", "name", ")", "overwrite", "=", "False", "if", "output_path", ".", "isfile", "(", ")", ":", "response", "=", "yesno", "(", "'%s already exists. Overwrite?'", "%", "output_path", ".", "name", ",", "'Overwrite?'", ")", "if", "response", "==", "Gtk", ".", "ResponseType", ".", "YES", ":", "overwrite", "=", "True", "else", ":", "counter", "=", "1", "renamed_path", "=", "output_path", "while", "renamed_path", ".", "isfile", "(", ")", ":", "new_name", "=", "'%s (%d)%s'", "%", "(", "output_path", ".", "namebase", ",", "counter", ",", "output_path", ".", "ext", ")", "renamed_path", "=", "output_path", ".", "parent", ".", "joinpath", "(", "new_name", ")", "counter", "+=", "1", "output_path", "=", "renamed_path", "self", ".", "notebook_manager", ".", "launch_from_template", "(", "selected_path", ",", "overwrite", "=", "overwrite", ",", "output_name", "=", "output_path", ".", "name", ",", "notebook_dir", "=", "self", ".", "notebook_dir", ")", "dialog", ".", "destroy", "(", ")" ]
Copy selected notebook template to notebook directory. ## Notes ## - An exception is raised if the parent of the selected file is the notebook directory. - If notebook with same name already exists in notebook directory, offer is made to overwrite (the new copy of the file is renamed with a count if overwrite is not selected).
[ "Copy", "selected", "notebook", "template", "to", "notebook", "directory", "." ]
train
https://github.com/cfobel/pygtk3-helpers/blob/ae793cb34a5c1bbe40cc83bb8a6084f0eeed2519/pygtk3_helpers/ui/notebook.py#L52-L94
kajala/django-jutil
jutil/management/commands/bank_const_se.py
se_iban_load_map
def se_iban_load_map(filename: str) -> list: """ Loads Swedish monetary institution codes in CSV format. :param filename: CSV file name of the BIC definitions. Columns: Institution Name, Range Begin-Range End (inclusive), Account digits count :return: List of (bank name, clearing code begin, clearing code end, account digits) """ out = [] name_repl = { 'BNP Paribas Fortis SA/NV, Bankfilial Sverige': 'BNP Paribas Fortis SA/NV', 'Citibank International Plc, Sweden Branch': 'Citibank International Plc', 'Santander Consumer Bank AS (deltar endast i Dataclearingen)': 'Santander Consumer Bank AS', 'Nordax Bank AB (deltar endast i Dataclearingen)': 'Nordax Bank AB', 'Swedbank och fristående Sparbanker, t ex Leksands Sparbank och Roslagsbanken.': 'Swedbank', 'Ålandsbanken Abp (Finland),svensk filial': 'Ålandsbanken Abp', 'SBAB deltar endast i Dataclearingen': 'SBAB', } with open(filename) as fp: for row in csv.reader(fp): if len(row) == 3: name, series, acc_digits = row # pprint([name, series, acc_digits]) # clean up name name = re.sub(r'\n.*', '', name) if name in name_repl: name = name_repl[name] # clean up series ml_acc_digits = acc_digits.split('\n') for i, ser in enumerate(series.split('\n')): begin, end = None, None res = re.match(r'^(\d+)-(\d+).*$', ser) if res: begin, end = res.group(1), res.group(2) if begin is None: res = re.match(r'^(\d{4}).*$', ser) if res: begin = res.group(1) end = begin if begin and end: digits = None try: digits = int(acc_digits) except ValueError: pass if digits is None: try: digits = int(ml_acc_digits[i]) except ValueError: digits = '?' except IndexError: digits = '?' out.append([name, begin, end, digits]) # print('OK!') return out
python
def se_iban_load_map(filename: str) -> list: """ Loads Swedish monetary institution codes in CSV format. :param filename: CSV file name of the BIC definitions. Columns: Institution Name, Range Begin-Range End (inclusive), Account digits count :return: List of (bank name, clearing code begin, clearing code end, account digits) """ out = [] name_repl = { 'BNP Paribas Fortis SA/NV, Bankfilial Sverige': 'BNP Paribas Fortis SA/NV', 'Citibank International Plc, Sweden Branch': 'Citibank International Plc', 'Santander Consumer Bank AS (deltar endast i Dataclearingen)': 'Santander Consumer Bank AS', 'Nordax Bank AB (deltar endast i Dataclearingen)': 'Nordax Bank AB', 'Swedbank och fristående Sparbanker, t ex Leksands Sparbank och Roslagsbanken.': 'Swedbank', 'Ålandsbanken Abp (Finland),svensk filial': 'Ålandsbanken Abp', 'SBAB deltar endast i Dataclearingen': 'SBAB', } with open(filename) as fp: for row in csv.reader(fp): if len(row) == 3: name, series, acc_digits = row # pprint([name, series, acc_digits]) # clean up name name = re.sub(r'\n.*', '', name) if name in name_repl: name = name_repl[name] # clean up series ml_acc_digits = acc_digits.split('\n') for i, ser in enumerate(series.split('\n')): begin, end = None, None res = re.match(r'^(\d+)-(\d+).*$', ser) if res: begin, end = res.group(1), res.group(2) if begin is None: res = re.match(r'^(\d{4}).*$', ser) if res: begin = res.group(1) end = begin if begin and end: digits = None try: digits = int(acc_digits) except ValueError: pass if digits is None: try: digits = int(ml_acc_digits[i]) except ValueError: digits = '?' except IndexError: digits = '?' out.append([name, begin, end, digits]) # print('OK!') return out
[ "def", "se_iban_load_map", "(", "filename", ":", "str", ")", "->", "list", ":", "out", "=", "[", "]", "name_repl", "=", "{", "'BNP Paribas Fortis SA/NV, Bankfilial Sverige'", ":", "'BNP Paribas Fortis SA/NV'", ",", "'Citibank International Plc, Sweden Branch'", ":", "'Citibank International Plc'", ",", "'Santander Consumer Bank AS (deltar endast i Dataclearingen)'", ":", "'Santander Consumer Bank AS'", ",", "'Nordax Bank AB (deltar endast i Dataclearingen)'", ":", "'Nordax Bank AB'", ",", "'Swedbank och fristående Sparbanker, t ex Leksands Sparbank och Roslagsbanken.':", " ", "Swedbank',", "", "'Ålandsbanken Abp (Finland),svensk filial':", " ", "Ålandsbanken Abp',", "", "'SBAB deltar endast i Dataclearingen'", ":", "'SBAB'", ",", "}", "with", "open", "(", "filename", ")", "as", "fp", ":", "for", "row", "in", "csv", ".", "reader", "(", "fp", ")", ":", "if", "len", "(", "row", ")", "==", "3", ":", "name", ",", "series", ",", "acc_digits", "=", "row", "# pprint([name, series, acc_digits])", "# clean up name", "name", "=", "re", ".", "sub", "(", "r'\\n.*'", ",", "''", ",", "name", ")", "if", "name", "in", "name_repl", ":", "name", "=", "name_repl", "[", "name", "]", "# clean up series", "ml_acc_digits", "=", "acc_digits", ".", "split", "(", "'\\n'", ")", "for", "i", ",", "ser", "in", "enumerate", "(", "series", ".", "split", "(", "'\\n'", ")", ")", ":", "begin", ",", "end", "=", "None", ",", "None", "res", "=", "re", ".", "match", "(", "r'^(\\d+)-(\\d+).*$'", ",", "ser", ")", "if", "res", ":", "begin", ",", "end", "=", "res", ".", "group", "(", "1", ")", ",", "res", ".", "group", "(", "2", ")", "if", "begin", "is", "None", ":", "res", "=", "re", ".", "match", "(", "r'^(\\d{4}).*$'", ",", "ser", ")", "if", "res", ":", "begin", "=", "res", ".", "group", "(", "1", ")", "end", "=", "begin", "if", "begin", "and", "end", ":", "digits", "=", "None", "try", ":", "digits", "=", "int", "(", "acc_digits", ")", "except", "ValueError", ":", "pass", "if", "digits", "is", "None", 
":", "try", ":", "digits", "=", "int", "(", "ml_acc_digits", "[", "i", "]", ")", "except", "ValueError", ":", "digits", "=", "'?'", "except", "IndexError", ":", "digits", "=", "'?'", "out", ".", "append", "(", "[", "name", ",", "begin", ",", "end", ",", "digits", "]", ")", "# print('OK!')", "return", "out" ]
Loads Swedish monetary institution codes in CSV format. :param filename: CSV file name of the BIC definitions. Columns: Institution Name, Range Begin-Range End (inclusive), Account digits count :return: List of (bank name, clearing code begin, clearing code end, account digits)
[ "Loads", "Swedish", "monetary", "institution", "codes", "in", "CSV", "format", ".", ":", "param", "filename", ":", "CSV", "file", "name", "of", "the", "BIC", "definitions", ".", "Columns", ":", "Institution", "Name", "Range", "Begin", "-", "Range", "End", "(", "inclusive", ")", "Account", "digits", "count", ":", "return", ":", "List", "of", "(", "bank", "name", "clearing", "code", "begin", "clearing", "code", "end", "account", "digits", ")" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/management/commands/bank_const_se.py#L11-L68
kajala/django-jutil
jutil/admin.py
admin_log
def admin_log(instances, msg: str, who: User=None, **kw): """ Logs an entry to admin logs of model(s). :param instances: Model instance or list of instances :param msg: Message to log :param who: Who did the change :param kw: Optional key-value attributes to append to message :return: None """ from django.contrib.admin.models import LogEntry, CHANGE from django.contrib.admin.options import get_content_type_for_model from django.utils.encoding import force_text # use system user if 'who' is missing if not who: username = settings.DJANGO_SYSTEM_USER if hasattr(settings, 'DJANGO_SYSTEM_USER') else 'system' who, created = User.objects.get_or_create(username=username) # append extra keyword attributes if any att_str = '' for k, v in kw.items(): if hasattr(v, 'pk'): # log only primary key for model instances, not whole str representation v = v.pk att_str += '{}={}'.format(k, v) if not att_str else ', {}={}'.format(k, v) if att_str: att_str = ' [{}]'.format(att_str) msg = str(msg) + att_str if not isinstance(instances, list) and not isinstance(instances, tuple): instances = [instances] for instance in instances: if instance: LogEntry.objects.log_action( user_id=who.pk, content_type_id=get_content_type_for_model(instance).pk, object_id=instance.pk, object_repr=force_text(instance), action_flag=CHANGE, change_message=msg, )
python
def admin_log(instances, msg: str, who: User=None, **kw): """ Logs an entry to admin logs of model(s). :param instances: Model instance or list of instances :param msg: Message to log :param who: Who did the change :param kw: Optional key-value attributes to append to message :return: None """ from django.contrib.admin.models import LogEntry, CHANGE from django.contrib.admin.options import get_content_type_for_model from django.utils.encoding import force_text # use system user if 'who' is missing if not who: username = settings.DJANGO_SYSTEM_USER if hasattr(settings, 'DJANGO_SYSTEM_USER') else 'system' who, created = User.objects.get_or_create(username=username) # append extra keyword attributes if any att_str = '' for k, v in kw.items(): if hasattr(v, 'pk'): # log only primary key for model instances, not whole str representation v = v.pk att_str += '{}={}'.format(k, v) if not att_str else ', {}={}'.format(k, v) if att_str: att_str = ' [{}]'.format(att_str) msg = str(msg) + att_str if not isinstance(instances, list) and not isinstance(instances, tuple): instances = [instances] for instance in instances: if instance: LogEntry.objects.log_action( user_id=who.pk, content_type_id=get_content_type_for_model(instance).pk, object_id=instance.pk, object_repr=force_text(instance), action_flag=CHANGE, change_message=msg, )
[ "def", "admin_log", "(", "instances", ",", "msg", ":", "str", ",", "who", ":", "User", "=", "None", ",", "*", "*", "kw", ")", ":", "from", "django", ".", "contrib", ".", "admin", ".", "models", "import", "LogEntry", ",", "CHANGE", "from", "django", ".", "contrib", ".", "admin", ".", "options", "import", "get_content_type_for_model", "from", "django", ".", "utils", ".", "encoding", "import", "force_text", "# use system user if 'who' is missing", "if", "not", "who", ":", "username", "=", "settings", ".", "DJANGO_SYSTEM_USER", "if", "hasattr", "(", "settings", ",", "'DJANGO_SYSTEM_USER'", ")", "else", "'system'", "who", ",", "created", "=", "User", ".", "objects", ".", "get_or_create", "(", "username", "=", "username", ")", "# append extra keyword attributes if any", "att_str", "=", "''", "for", "k", ",", "v", "in", "kw", ".", "items", "(", ")", ":", "if", "hasattr", "(", "v", ",", "'pk'", ")", ":", "# log only primary key for model instances, not whole str representation", "v", "=", "v", ".", "pk", "att_str", "+=", "'{}={}'", ".", "format", "(", "k", ",", "v", ")", "if", "not", "att_str", "else", "', {}={}'", ".", "format", "(", "k", ",", "v", ")", "if", "att_str", ":", "att_str", "=", "' [{}]'", ".", "format", "(", "att_str", ")", "msg", "=", "str", "(", "msg", ")", "+", "att_str", "if", "not", "isinstance", "(", "instances", ",", "list", ")", "and", "not", "isinstance", "(", "instances", ",", "tuple", ")", ":", "instances", "=", "[", "instances", "]", "for", "instance", "in", "instances", ":", "if", "instance", ":", "LogEntry", ".", "objects", ".", "log_action", "(", "user_id", "=", "who", ".", "pk", ",", "content_type_id", "=", "get_content_type_for_model", "(", "instance", ")", ".", "pk", ",", "object_id", "=", "instance", ".", "pk", ",", "object_repr", "=", "force_text", "(", "instance", ")", ",", "action_flag", "=", "CHANGE", ",", "change_message", "=", "msg", ",", ")" ]
Logs an entry to admin logs of model(s). :param instances: Model instance or list of instances :param msg: Message to log :param who: Who did the change :param kw: Optional key-value attributes to append to message :return: None
[ "Logs", "an", "entry", "to", "admin", "logs", "of", "model", "(", "s", ")", ".", ":", "param", "instances", ":", "Model", "instance", "or", "list", "of", "instances", ":", "param", "msg", ":", "Message", "to", "log", ":", "param", "who", ":", "Who", "did", "the", "change", ":", "param", "kw", ":", "Optional", "key", "-", "value", "attributes", "to", "append", "to", "message", ":", "return", ":", "None" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/admin.py#L15-L55
kajala/django-jutil
jutil/admin.py
ModelAdminBase.kw_changelist_view
def kw_changelist_view(self, request: HttpRequest, extra_context=None, **kw): """ Changelist view which allow key-value arguments. :param request: HttpRequest :param extra_context: Extra context dict :param kw: Key-value dict :return: See changelist_view() """ return self.changelist_view(request, extra_context)
python
def kw_changelist_view(self, request: HttpRequest, extra_context=None, **kw): """ Changelist view which allow key-value arguments. :param request: HttpRequest :param extra_context: Extra context dict :param kw: Key-value dict :return: See changelist_view() """ return self.changelist_view(request, extra_context)
[ "def", "kw_changelist_view", "(", "self", ",", "request", ":", "HttpRequest", ",", "extra_context", "=", "None", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "changelist_view", "(", "request", ",", "extra_context", ")" ]
Changelist view which allow key-value arguments. :param request: HttpRequest :param extra_context: Extra context dict :param kw: Key-value dict :return: See changelist_view()
[ "Changelist", "view", "which", "allow", "key", "-", "value", "arguments", ".", ":", "param", "request", ":", "HttpRequest", ":", "param", "extra_context", ":", "Extra", "context", "dict", ":", "param", "kw", ":", "Key", "-", "value", "dict", ":", "return", ":", "See", "changelist_view", "()" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/admin.py#L65-L73
kajala/django-jutil
jutil/admin.py
ModelAdminBase.history_view
def history_view(self, request, object_id, extra_context=None): from django.template.response import TemplateResponse from django.contrib.admin.options import get_content_type_for_model from django.contrib.admin.utils import unquote from django.core.exceptions import PermissionDenied from django.utils.text import capfirst from django.utils.encoding import force_text from django.utils.translation import ugettext as _ "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: return self._get_obj_does_not_exist_redirect(request, model._meta, object_id) if not self.has_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('-action_time')[:self.max_history_length] context = dict( self.admin_site.each_context(request), title=_('Change history: %s') % force_text(obj), action_list=action_list, module_name=capfirst(force_text(opts.verbose_name_plural)), object=obj, opts=opts, preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context)
python
def history_view(self, request, object_id, extra_context=None): from django.template.response import TemplateResponse from django.contrib.admin.options import get_content_type_for_model from django.contrib.admin.utils import unquote from django.core.exceptions import PermissionDenied from django.utils.text import capfirst from django.utils.encoding import force_text from django.utils.translation import ugettext as _ "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: return self._get_obj_does_not_exist_redirect(request, model._meta, object_id) if not self.has_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('-action_time')[:self.max_history_length] context = dict( self.admin_site.each_context(request), title=_('Change history: %s') % force_text(obj), action_list=action_list, module_name=capfirst(force_text(opts.verbose_name_plural)), object=obj, opts=opts, preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context)
[ "def", "history_view", "(", "self", ",", "request", ",", "object_id", ",", "extra_context", "=", "None", ")", ":", "from", "django", ".", "template", ".", "response", "import", "TemplateResponse", "from", "django", ".", "contrib", ".", "admin", ".", "options", "import", "get_content_type_for_model", "from", "django", ".", "contrib", ".", "admin", ".", "utils", "import", "unquote", "from", "django", ".", "core", ".", "exceptions", "import", "PermissionDenied", "from", "django", ".", "utils", ".", "text", "import", "capfirst", "from", "django", ".", "utils", ".", "encoding", "import", "force_text", "from", "django", ".", "utils", ".", "translation", "import", "ugettext", "as", "_", "from", "django", ".", "contrib", ".", "admin", ".", "models", "import", "LogEntry", "# First check if the user can see this history.", "model", "=", "self", ".", "model", "obj", "=", "self", ".", "get_object", "(", "request", ",", "unquote", "(", "object_id", ")", ")", "if", "obj", "is", "None", ":", "return", "self", ".", "_get_obj_does_not_exist_redirect", "(", "request", ",", "model", ".", "_meta", ",", "object_id", ")", "if", "not", "self", ".", "has_change_permission", "(", "request", ",", "obj", ")", ":", "raise", "PermissionDenied", "# Then get the history for this object.", "opts", "=", "model", ".", "_meta", "app_label", "=", "opts", ".", "app_label", "action_list", "=", "LogEntry", ".", "objects", ".", "filter", "(", "object_id", "=", "unquote", "(", "object_id", ")", ",", "content_type", "=", "get_content_type_for_model", "(", "model", ")", ")", ".", "select_related", "(", ")", ".", "order_by", "(", "'-action_time'", ")", "[", ":", "self", ".", "max_history_length", "]", "context", "=", "dict", "(", "self", ".", "admin_site", ".", "each_context", "(", "request", ")", ",", "title", "=", "_", "(", "'Change history: %s'", ")", "%", "force_text", "(", "obj", ")", ",", "action_list", "=", "action_list", ",", "module_name", "=", "capfirst", "(", "force_text", "(", "opts", ".", 
"verbose_name_plural", ")", ")", ",", "object", "=", "obj", ",", "opts", "=", "opts", ",", "preserved_filters", "=", "self", ".", "get_preserved_filters", "(", "request", ")", ",", ")", "context", ".", "update", "(", "extra_context", "or", "{", "}", ")", "request", ".", "current_app", "=", "self", ".", "admin_site", ".", "name", "return", "TemplateResponse", "(", "request", ",", "self", ".", "object_history_template", "or", "[", "\"admin/%s/%s/object_history.html\"", "%", "(", "app_label", ",", "opts", ".", "model_name", ")", ",", "\"admin/%s/object_history.html\"", "%", "app_label", ",", "\"admin/object_history.html\"", "]", ",", "context", ")" ]
The 'history' admin view for this model.
[ "The", "history", "admin", "view", "for", "this", "model", "." ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/admin.py#L75-L120
kajala/django-jutil
jutil/admin.py
AdminFileDownloadMixin.get_object_by_filename
def get_object_by_filename(self, request, filename): """ Returns owner object by filename (to be downloaded). This can be used to implement custom permission checks. :param request: HttpRequest :param filename: File name of the downloaded object. :return: owner object """ kw = dict() kw[self.file_field] = filename obj = self.get_queryset(request).filter(**kw).first() if not obj: raise Http404(_('File {} not found').format(filename)) return self.get_object(request, obj.id)
python
def get_object_by_filename(self, request, filename): """ Returns owner object by filename (to be downloaded). This can be used to implement custom permission checks. :param request: HttpRequest :param filename: File name of the downloaded object. :return: owner object """ kw = dict() kw[self.file_field] = filename obj = self.get_queryset(request).filter(**kw).first() if not obj: raise Http404(_('File {} not found').format(filename)) return self.get_object(request, obj.id)
[ "def", "get_object_by_filename", "(", "self", ",", "request", ",", "filename", ")", ":", "kw", "=", "dict", "(", ")", "kw", "[", "self", ".", "file_field", "]", "=", "filename", "obj", "=", "self", ".", "get_queryset", "(", "request", ")", ".", "filter", "(", "*", "*", "kw", ")", ".", "first", "(", ")", "if", "not", "obj", ":", "raise", "Http404", "(", "_", "(", "'File {} not found'", ")", ".", "format", "(", "filename", ")", ")", "return", "self", ".", "get_object", "(", "request", ",", "obj", ".", "id", ")" ]
Returns owner object by filename (to be downloaded). This can be used to implement custom permission checks. :param request: HttpRequest :param filename: File name of the downloaded object. :return: owner object
[ "Returns", "owner", "object", "by", "filename", "(", "to", "be", "downloaded", ")", ".", "This", "can", "be", "used", "to", "implement", "custom", "permission", "checks", ".", ":", "param", "request", ":", "HttpRequest", ":", "param", "filename", ":", "File", "name", "of", "the", "downloaded", "object", ".", ":", "return", ":", "owner", "object" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/admin.py#L148-L161
kajala/django-jutil
jutil/admin.py
AdminFileDownloadMixin.get_download_urls
def get_download_urls(self): """ Use like this: def get_urls(self): return self.get_download_urls() + super().get_urls() Returns: File download URLs for this model. """ info = self.model._meta.app_label, self.model._meta.model_name return [ url(r'^.+(' + self.upload_to + '/.+)/$', self.file_download_view, name='%s_%s_file_download' % info), ]
python
def get_download_urls(self): """ Use like this: def get_urls(self): return self.get_download_urls() + super().get_urls() Returns: File download URLs for this model. """ info = self.model._meta.app_label, self.model._meta.model_name return [ url(r'^.+(' + self.upload_to + '/.+)/$', self.file_download_view, name='%s_%s_file_download' % info), ]
[ "def", "get_download_urls", "(", "self", ")", ":", "info", "=", "self", ".", "model", ".", "_meta", ".", "app_label", ",", "self", ".", "model", ".", "_meta", ".", "model_name", "return", "[", "url", "(", "r'^.+('", "+", "self", ".", "upload_to", "+", "'/.+)/$'", ",", "self", ".", "file_download_view", ",", "name", "=", "'%s_%s_file_download'", "%", "info", ")", ",", "]" ]
Use like this: def get_urls(self): return self.get_download_urls() + super().get_urls() Returns: File download URLs for this model.
[ "Use", "like", "this", ":", "def", "get_urls", "(", "self", ")", ":", "return", "self", ".", "get_download_urls", "()", "+", "super", "()", ".", "get_urls", "()" ]
train
https://github.com/kajala/django-jutil/blob/2abd93ebad51042744eaeb1ee1074ed0eb55ad0c/jutil/admin.py#L170-L181
novopl/peltak
src/peltak/core/versioning.py
write
def write(version): # type: (str) -> None """ Write the given version to the VERSION_FILE """ if not is_valid(version): raise ValueError("Invalid version: ".format(version)) storage = get_version_storage() storage.write(version)
python
def write(version): # type: (str) -> None """ Write the given version to the VERSION_FILE """ if not is_valid(version): raise ValueError("Invalid version: ".format(version)) storage = get_version_storage() storage.write(version)
[ "def", "write", "(", "version", ")", ":", "# type: (str) -> None", "if", "not", "is_valid", "(", "version", ")", ":", "raise", "ValueError", "(", "\"Invalid version: \"", ".", "format", "(", "version", ")", ")", "storage", "=", "get_version_storage", "(", ")", "storage", ".", "write", "(", "version", ")" ]
Write the given version to the VERSION_FILE
[ "Write", "the", "given", "version", "to", "the", "VERSION_FILE" ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L76-L83
novopl/peltak
src/peltak/core/versioning.py
bump
def bump(component='patch', exact=None): # type: (str, str) -> Tuple[str, str] """ Bump the given version component. Args: component (str): What part of the version should be bumped. Can be one of: - major - minor - patch exact (str): The exact version that should be set instead of bumping the current one. Returns: tuple(str, str): A tuple of old and bumped version. """ old_ver = current() if exact is None: new_ver = _bump_version(old_ver, component) else: new_ver = exact write(new_ver) return old_ver, new_ver
python
def bump(component='patch', exact=None): # type: (str, str) -> Tuple[str, str] """ Bump the given version component. Args: component (str): What part of the version should be bumped. Can be one of: - major - minor - patch exact (str): The exact version that should be set instead of bumping the current one. Returns: tuple(str, str): A tuple of old and bumped version. """ old_ver = current() if exact is None: new_ver = _bump_version(old_ver, component) else: new_ver = exact write(new_ver) return old_ver, new_ver
[ "def", "bump", "(", "component", "=", "'patch'", ",", "exact", "=", "None", ")", ":", "# type: (str, str) -> Tuple[str, str]", "old_ver", "=", "current", "(", ")", "if", "exact", "is", "None", ":", "new_ver", "=", "_bump_version", "(", "old_ver", ",", "component", ")", "else", ":", "new_ver", "=", "exact", "write", "(", "new_ver", ")", "return", "old_ver", ",", "new_ver" ]
Bump the given version component. Args: component (str): What part of the version should be bumped. Can be one of: - major - minor - patch exact (str): The exact version that should be set instead of bumping the current one. Returns: tuple(str, str): A tuple of old and bumped version.
[ "Bump", "the", "given", "version", "component", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L86-L113
novopl/peltak
src/peltak/core/versioning.py
_bump_version
def _bump_version(version, component='patch'): # type: (str, str) -> str """ Bump the given version component. Args: version (str): The current version. The format is: MAJOR.MINOR[.PATCH]. component (str): What part of the version should be bumped. Can be one of: - major - minor - patch Returns: str: Bumped version as a string. """ if component not in ('major', 'minor', 'patch'): raise ValueError("Invalid version component: {}".format(component)) m = RE_VERSION.match(version) if m is None: raise ValueError("Version must be in MAJOR.MINOR[.PATCH] format") major = m.group('major') minor = m.group('minor') or '0' patch = m.group('patch') or None if patch == '0': patch = None if component == 'major': major = str(int(major) + 1) minor = '0' patch = None elif component == 'minor': minor = str(int(minor) + 1) patch = None else: patch = patch or 0 patch = str(int(patch) + 1) new_ver = '{}.{}'.format(major, minor) if patch is not None: new_ver += '.' + patch return new_ver
python
def _bump_version(version, component='patch'): # type: (str, str) -> str """ Bump the given version component. Args: version (str): The current version. The format is: MAJOR.MINOR[.PATCH]. component (str): What part of the version should be bumped. Can be one of: - major - minor - patch Returns: str: Bumped version as a string. """ if component not in ('major', 'minor', 'patch'): raise ValueError("Invalid version component: {}".format(component)) m = RE_VERSION.match(version) if m is None: raise ValueError("Version must be in MAJOR.MINOR[.PATCH] format") major = m.group('major') minor = m.group('minor') or '0' patch = m.group('patch') or None if patch == '0': patch = None if component == 'major': major = str(int(major) + 1) minor = '0' patch = None elif component == 'minor': minor = str(int(minor) + 1) patch = None else: patch = patch or 0 patch = str(int(patch) + 1) new_ver = '{}.{}'.format(major, minor) if patch is not None: new_ver += '.' + patch return new_ver
[ "def", "_bump_version", "(", "version", ",", "component", "=", "'patch'", ")", ":", "# type: (str, str) -> str", "if", "component", "not", "in", "(", "'major'", ",", "'minor'", ",", "'patch'", ")", ":", "raise", "ValueError", "(", "\"Invalid version component: {}\"", ".", "format", "(", "component", ")", ")", "m", "=", "RE_VERSION", ".", "match", "(", "version", ")", "if", "m", "is", "None", ":", "raise", "ValueError", "(", "\"Version must be in MAJOR.MINOR[.PATCH] format\"", ")", "major", "=", "m", ".", "group", "(", "'major'", ")", "minor", "=", "m", ".", "group", "(", "'minor'", ")", "or", "'0'", "patch", "=", "m", ".", "group", "(", "'patch'", ")", "or", "None", "if", "patch", "==", "'0'", ":", "patch", "=", "None", "if", "component", "==", "'major'", ":", "major", "=", "str", "(", "int", "(", "major", ")", "+", "1", ")", "minor", "=", "'0'", "patch", "=", "None", "elif", "component", "==", "'minor'", ":", "minor", "=", "str", "(", "int", "(", "minor", ")", "+", "1", ")", "patch", "=", "None", "else", ":", "patch", "=", "patch", "or", "0", "patch", "=", "str", "(", "int", "(", "patch", ")", "+", "1", ")", "new_ver", "=", "'{}.{}'", ".", "format", "(", "major", ",", "minor", ")", "if", "patch", "is", "not", "None", ":", "new_ver", "+=", "'.'", "+", "patch", "return", "new_ver" ]
Bump the given version component. Args: version (str): The current version. The format is: MAJOR.MINOR[.PATCH]. component (str): What part of the version should be bumped. Can be one of: - major - minor - patch Returns: str: Bumped version as a string.
[ "Bump", "the", "given", "version", "component", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L116-L164
novopl/peltak
src/peltak/core/versioning.py
get_version_storage
def get_version_storage(): # type: () -> VersionStorage """ Get version storage for the given version file. The storage engine used depends on the extension of the *version_file*. """ version_file = conf.get_path('version_file', 'VERSION') if version_file.endswith('.py'): return PyVersionStorage(version_file) elif version_file.endswith('package.json'): return NodeVersionStorage(version_file) else: return RawVersionStorage(version_file)
python
def get_version_storage(): # type: () -> VersionStorage """ Get version storage for the given version file. The storage engine used depends on the extension of the *version_file*. """ version_file = conf.get_path('version_file', 'VERSION') if version_file.endswith('.py'): return PyVersionStorage(version_file) elif version_file.endswith('package.json'): return NodeVersionStorage(version_file) else: return RawVersionStorage(version_file)
[ "def", "get_version_storage", "(", ")", ":", "# type: () -> VersionStorage", "version_file", "=", "conf", ".", "get_path", "(", "'version_file'", ",", "'VERSION'", ")", "if", "version_file", ".", "endswith", "(", "'.py'", ")", ":", "return", "PyVersionStorage", "(", "version_file", ")", "elif", "version_file", ".", "endswith", "(", "'package.json'", ")", ":", "return", "NodeVersionStorage", "(", "version_file", ")", "else", ":", "return", "RawVersionStorage", "(", "version_file", ")" ]
Get version storage for the given version file. The storage engine used depends on the extension of the *version_file*.
[ "Get", "version", "storage", "for", "the", "given", "version", "file", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L291-L303
novopl/peltak
src/peltak/core/versioning.py
PyVersionStorage.read
def read(self): # type: () -> Optional[str] """ Read the project version from .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and read the version string. """ with open(self.version_file) as fp: content = fp.read() m = RE_PY_VERSION.search(content) if not m: return None else: return m.group('version')
python
def read(self): # type: () -> Optional[str] """ Read the project version from .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and read the version string. """ with open(self.version_file) as fp: content = fp.read() m = RE_PY_VERSION.search(content) if not m: return None else: return m.group('version')
[ "def", "read", "(", "self", ")", ":", "# type: () -> Optional[str]", "with", "open", "(", "self", ".", "version_file", ")", "as", "fp", ":", "content", "=", "fp", ".", "read", "(", ")", "m", "=", "RE_PY_VERSION", ".", "search", "(", "content", ")", "if", "not", "m", ":", "return", "None", "else", ":", "return", "m", ".", "group", "(", "'version'", ")" ]
Read the project version from .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and read the version string.
[ "Read", "the", "project", "version", "from", ".", "py", "file", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L220-L233
novopl/peltak
src/peltak/core/versioning.py
PyVersionStorage.write
def write(self, version): # type: (str) -> None """ Write the project version to .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and substitute the version string for the new version. """ with open(self.version_file) as fp: content = fp.read() ver_statement = "__version__ = '{}'".format(version) new_content = RE_PY_VERSION.sub(ver_statement, content) fs.write_file(self.version_file, new_content)
python
def write(self, version): # type: (str) -> None """ Write the project version to .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and substitute the version string for the new version. """ with open(self.version_file) as fp: content = fp.read() ver_statement = "__version__ = '{}'".format(version) new_content = RE_PY_VERSION.sub(ver_statement, content) fs.write_file(self.version_file, new_content)
[ "def", "write", "(", "self", ",", "version", ")", ":", "# type: (str) -> None", "with", "open", "(", "self", ".", "version_file", ")", "as", "fp", ":", "content", "=", "fp", ".", "read", "(", ")", "ver_statement", "=", "\"__version__ = '{}'\"", ".", "format", "(", "version", ")", "new_content", "=", "RE_PY_VERSION", ".", "sub", "(", "ver_statement", ",", "content", ")", "fs", ".", "write_file", "(", "self", ".", "version_file", ",", "new_content", ")" ]
Write the project version to .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and substitute the version string for the new version.
[ "Write", "the", "project", "version", "to", ".", "py", "file", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L235-L248
novopl/peltak
src/peltak/core/versioning.py
RawVersionStorage.read
def read(self): # type: () -> Optional[str] """ Read the project version from .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and read the version string. """ with open(self.version_file) as fp: version = fp.read().strip() if is_valid(version): return version return None
python
def read(self): # type: () -> Optional[str] """ Read the project version from .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and read the version string. """ with open(self.version_file) as fp: version = fp.read().strip() if is_valid(version): return version return None
[ "def", "read", "(", "self", ")", ":", "# type: () -> Optional[str]", "with", "open", "(", "self", ".", "version_file", ")", "as", "fp", ":", "version", "=", "fp", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "is_valid", "(", "version", ")", ":", "return", "version", "return", "None" ]
Read the project version from .py file. This will regex search in the file for a ``__version__ = VERSION_STRING`` and read the version string.
[ "Read", "the", "project", "version", "from", ".", "py", "file", "." ]
train
https://github.com/novopl/peltak/blob/b627acc019e3665875fe76cdca0a14773b69beaa/src/peltak/core/versioning.py#L253-L266
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
loggray
def loggray(x, a, b): """Auxiliary function that specifies the logarithmic gray scale. a and b are the cutoffs.""" linval = 10.0 + 990.0 * (x-float(a))/(b-a) return (np.log10(linval)-1.0)*0.5 * 255.0
python
def loggray(x, a, b): """Auxiliary function that specifies the logarithmic gray scale. a and b are the cutoffs.""" linval = 10.0 + 990.0 * (x-float(a))/(b-a) return (np.log10(linval)-1.0)*0.5 * 255.0
[ "def", "loggray", "(", "x", ",", "a", ",", "b", ")", ":", "linval", "=", "10.0", "+", "990.0", "*", "(", "x", "-", "float", "(", "a", ")", ")", "/", "(", "b", "-", "a", ")", "return", "(", "np", ".", "log10", "(", "linval", ")", "-", "1.0", ")", "*", "0.5", "*", "255.0" ]
Auxiliary function that specifies the logarithmic gray scale. a and b are the cutoffs.
[ "Auxiliary", "function", "that", "specifies", "the", "logarithmic", "gray", "scale", ".", "a", "and", "b", "are", "the", "cutoffs", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L945-L949
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
fromfits
def fromfits(infile, hdu = 0, verbose = True): """ Factory function that reads a FITS file and returns a f2nimage object. Use hdu to specify which HDU you want (primary = 0) """ pixelarray, hdr = ft.getdata(infile, hdu, header=True) pixelarray = np.asarray(pixelarray).transpose() #print pixelarray pixelarrayshape = pixelarray.shape if verbose : print "Input shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]) print "Input file BITPIX : %s" % (hdr["BITPIX"]) pixelarrayshape = np.asarray(pixelarrayshape) if verbose : print "Internal array type :", pixelarray.dtype.name return f2nimage(pixelarray, verbose = verbose)
python
def fromfits(infile, hdu = 0, verbose = True): """ Factory function that reads a FITS file and returns a f2nimage object. Use hdu to specify which HDU you want (primary = 0) """ pixelarray, hdr = ft.getdata(infile, hdu, header=True) pixelarray = np.asarray(pixelarray).transpose() #print pixelarray pixelarrayshape = pixelarray.shape if verbose : print "Input shape : (%i, %i)" % (pixelarrayshape[0], pixelarrayshape[1]) print "Input file BITPIX : %s" % (hdr["BITPIX"]) pixelarrayshape = np.asarray(pixelarrayshape) if verbose : print "Internal array type :", pixelarray.dtype.name return f2nimage(pixelarray, verbose = verbose)
[ "def", "fromfits", "(", "infile", ",", "hdu", "=", "0", ",", "verbose", "=", "True", ")", ":", "pixelarray", ",", "hdr", "=", "ft", ".", "getdata", "(", "infile", ",", "hdu", ",", "header", "=", "True", ")", "pixelarray", "=", "np", ".", "asarray", "(", "pixelarray", ")", ".", "transpose", "(", ")", "#print pixelarray", "pixelarrayshape", "=", "pixelarray", ".", "shape", "if", "verbose", ":", "print", "\"Input shape : (%i, %i)\"", "%", "(", "pixelarrayshape", "[", "0", "]", ",", "pixelarrayshape", "[", "1", "]", ")", "print", "\"Input file BITPIX : %s\"", "%", "(", "hdr", "[", "\"BITPIX\"", "]", ")", "pixelarrayshape", "=", "np", ".", "asarray", "(", "pixelarrayshape", ")", "if", "verbose", ":", "print", "\"Internal array type :\"", ",", "pixelarray", ".", "dtype", ".", "name", "return", "f2nimage", "(", "pixelarray", ",", "verbose", "=", "verbose", ")" ]
Factory function that reads a FITS file and returns a f2nimage object. Use hdu to specify which HDU you want (primary = 0)
[ "Factory", "function", "that", "reads", "a", "FITS", "file", "and", "returns", "a", "f2nimage", "object", ".", "Use", "hdu", "to", "specify", "which", "HDU", "you", "want", "(", "primary", "=", "0", ")" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L954-L972
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
rebin
def rebin(a, newshape): """ Auxiliary function to rebin ndarray data. Source : http://www.scipy.org/Cookbook/Rebinning example usage: >>> a=rand(6,4); b=rebin(a,(3,2)) """ shape = a.shape lenShape = len(shape) factor = np.asarray(shape)/np.asarray(newshape) #print factor evList = ['a.reshape('] + \ ['newshape[%d],factor[%d],'%(i,i) for i in xrange(lenShape)] + \ [')'] + ['.sum(%d)'%(i+1) for i in xrange(lenShape)] + \ ['/factor[%d]'%i for i in xrange(lenShape)] return eval(''.join(evList))
python
def rebin(a, newshape): """ Auxiliary function to rebin ndarray data. Source : http://www.scipy.org/Cookbook/Rebinning example usage: >>> a=rand(6,4); b=rebin(a,(3,2)) """ shape = a.shape lenShape = len(shape) factor = np.asarray(shape)/np.asarray(newshape) #print factor evList = ['a.reshape('] + \ ['newshape[%d],factor[%d],'%(i,i) for i in xrange(lenShape)] + \ [')'] + ['.sum(%d)'%(i+1) for i in xrange(lenShape)] + \ ['/factor[%d]'%i for i in xrange(lenShape)] return eval(''.join(evList))
[ "def", "rebin", "(", "a", ",", "newshape", ")", ":", "shape", "=", "a", ".", "shape", "lenShape", "=", "len", "(", "shape", ")", "factor", "=", "np", ".", "asarray", "(", "shape", ")", "/", "np", ".", "asarray", "(", "newshape", ")", "#print factor", "evList", "=", "[", "'a.reshape('", "]", "+", "[", "'newshape[%d],factor[%d],'", "%", "(", "i", ",", "i", ")", "for", "i", "in", "xrange", "(", "lenShape", ")", "]", "+", "[", "')'", "]", "+", "[", "'.sum(%d)'", "%", "(", "i", "+", "1", ")", "for", "i", "in", "xrange", "(", "lenShape", ")", "]", "+", "[", "'/factor[%d]'", "%", "i", "for", "i", "in", "xrange", "(", "lenShape", ")", "]", "return", "eval", "(", "''", ".", "join", "(", "evList", ")", ")" ]
Auxiliary function to rebin ndarray data. Source : http://www.scipy.org/Cookbook/Rebinning example usage: >>> a=rand(6,4); b=rebin(a,(3,2))
[ "Auxiliary", "function", "to", "rebin", "ndarray", "data", ".", "Source", ":", "http", ":", "//", "www", ".", "scipy", ".", "org", "/", "Cookbook", "/", "Rebinning", "example", "usage", ":", ">>>", "a", "=", "rand", "(", "6", "4", ")", ";", "b", "=", "rebin", "(", "a", "(", "3", "2", "))" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L977-L997
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
compose
def compose(f2nimages, outfile): """ Takes f2nimages and writes them into one single png file, side by side. f2nimages is a list of horizontal lines, where each line is a list of f2nimages. For instance : [ [image1, image2], [image3, image4] ] The sizes of these images have to "match", so that the final result is rectangular. This function is verbose if any of the images is verbose. """ # We start by doing some checks, and try to print out helpfull error messages. verbosity = [] colourmodes = [] for i, line in enumerate(f2nimages): for j, img in enumerate(line): if img.verbose: print "Checking line %i, image %i (verbose)..." % (i+1, j+1) img.checkforpilimage() verbosity.append(img.verbose) colourmodes.append(img.pilimage.mode) verbose = np.any(np.array(verbosity)) # So we set the verbosity used in this function to true if any of the images is verbose. colours = list(set(colourmodes)) # We check if the widths are compatible : widths = [np.sum(np.array([img.pilimage.size[0] for img in line])) for line in f2nimages] if len(set(widths)) != 1 : print "Total widths of the lines :" print widths raise RuntimeError, "The total widths of your lines are not compatible !" totwidth = widths[0] # Similar for the heights : for i, line in enumerate(f2nimages): heights = [img.pilimage.size[1] for img in line] if len(set(heights)) != 1 : print "Heights of the images in line %i :" % (i + 1) print heights raise RuntimeError, "Heights of the images in line %i are not compatible." 
% (i + 1) totheight = np.sum(np.array([line[0].pilimage.size[1] for line in f2nimages])) # Ok, now it should be safe to go for the composition : if verbose: print "Composition size : %i x %i" % (totwidth, totheight) if verbose: print "Colour modes of input : %s" % colours if len(colours) == 1 and colours[0] == "L" : if verbose : print "Builing graylevel composition" compoimg = im.new("L", (totwidth, totheight), 128) else: if verbose : print "Building RGB composition" compoimg = im.new("RGB", (totwidth, totheight), (255, 0, 0)) y = 0 for line in f2nimages: x = 0 for img in line: box = (x, y, x+img.pilimage.size[0], y+img.pilimage.size[1]) #print box compoimg.paste(img.pilimage, box) x += img.pilimage.size[0] y += img.pilimage.size[1] if verbose: print "Writing compositions to %s...\n%i x %i pixels, mode %s" % (outfile, compoimg.size[0], compoimg.size[1], compoimg.mode) compoimg.save(outfile, "PNG")
python
def compose(f2nimages, outfile): """ Takes f2nimages and writes them into one single png file, side by side. f2nimages is a list of horizontal lines, where each line is a list of f2nimages. For instance : [ [image1, image2], [image3, image4] ] The sizes of these images have to "match", so that the final result is rectangular. This function is verbose if any of the images is verbose. """ # We start by doing some checks, and try to print out helpfull error messages. verbosity = [] colourmodes = [] for i, line in enumerate(f2nimages): for j, img in enumerate(line): if img.verbose: print "Checking line %i, image %i (verbose)..." % (i+1, j+1) img.checkforpilimage() verbosity.append(img.verbose) colourmodes.append(img.pilimage.mode) verbose = np.any(np.array(verbosity)) # So we set the verbosity used in this function to true if any of the images is verbose. colours = list(set(colourmodes)) # We check if the widths are compatible : widths = [np.sum(np.array([img.pilimage.size[0] for img in line])) for line in f2nimages] if len(set(widths)) != 1 : print "Total widths of the lines :" print widths raise RuntimeError, "The total widths of your lines are not compatible !" totwidth = widths[0] # Similar for the heights : for i, line in enumerate(f2nimages): heights = [img.pilimage.size[1] for img in line] if len(set(heights)) != 1 : print "Heights of the images in line %i :" % (i + 1) print heights raise RuntimeError, "Heights of the images in line %i are not compatible." 
% (i + 1) totheight = np.sum(np.array([line[0].pilimage.size[1] for line in f2nimages])) # Ok, now it should be safe to go for the composition : if verbose: print "Composition size : %i x %i" % (totwidth, totheight) if verbose: print "Colour modes of input : %s" % colours if len(colours) == 1 and colours[0] == "L" : if verbose : print "Builing graylevel composition" compoimg = im.new("L", (totwidth, totheight), 128) else: if verbose : print "Building RGB composition" compoimg = im.new("RGB", (totwidth, totheight), (255, 0, 0)) y = 0 for line in f2nimages: x = 0 for img in line: box = (x, y, x+img.pilimage.size[0], y+img.pilimage.size[1]) #print box compoimg.paste(img.pilimage, box) x += img.pilimage.size[0] y += img.pilimage.size[1] if verbose: print "Writing compositions to %s...\n%i x %i pixels, mode %s" % (outfile, compoimg.size[0], compoimg.size[1], compoimg.mode) compoimg.save(outfile, "PNG")
[ "def", "compose", "(", "f2nimages", ",", "outfile", ")", ":", "# We start by doing some checks, and try to print out helpfull error messages.", "verbosity", "=", "[", "]", "colourmodes", "=", "[", "]", "for", "i", ",", "line", "in", "enumerate", "(", "f2nimages", ")", ":", "for", "j", ",", "img", "in", "enumerate", "(", "line", ")", ":", "if", "img", ".", "verbose", ":", "print", "\"Checking line %i, image %i (verbose)...\"", "%", "(", "i", "+", "1", ",", "j", "+", "1", ")", "img", ".", "checkforpilimage", "(", ")", "verbosity", ".", "append", "(", "img", ".", "verbose", ")", "colourmodes", ".", "append", "(", "img", ".", "pilimage", ".", "mode", ")", "verbose", "=", "np", ".", "any", "(", "np", ".", "array", "(", "verbosity", ")", ")", "# So we set the verbosity used in this function to true if any of the images is verbose.", "colours", "=", "list", "(", "set", "(", "colourmodes", ")", ")", "# We check if the widths are compatible :", "widths", "=", "[", "np", ".", "sum", "(", "np", ".", "array", "(", "[", "img", ".", "pilimage", ".", "size", "[", "0", "]", "for", "img", "in", "line", "]", ")", ")", "for", "line", "in", "f2nimages", "]", "if", "len", "(", "set", "(", "widths", ")", ")", "!=", "1", ":", "print", "\"Total widths of the lines :\"", "print", "widths", "raise", "RuntimeError", ",", "\"The total widths of your lines are not compatible !\"", "totwidth", "=", "widths", "[", "0", "]", "# Similar for the heights :", "for", "i", ",", "line", "in", "enumerate", "(", "f2nimages", ")", ":", "heights", "=", "[", "img", ".", "pilimage", ".", "size", "[", "1", "]", "for", "img", "in", "line", "]", "if", "len", "(", "set", "(", "heights", ")", ")", "!=", "1", ":", "print", "\"Heights of the images in line %i :\"", "%", "(", "i", "+", "1", ")", "print", "heights", "raise", "RuntimeError", ",", "\"Heights of the images in line %i are not compatible.\"", "%", "(", "i", "+", "1", ")", "totheight", "=", "np", ".", "sum", "(", "np", ".", "array", "(", "[", "line", "[", 
"0", "]", ".", "pilimage", ".", "size", "[", "1", "]", "for", "line", "in", "f2nimages", "]", ")", ")", "# Ok, now it should be safe to go for the composition :", "if", "verbose", ":", "print", "\"Composition size : %i x %i\"", "%", "(", "totwidth", ",", "totheight", ")", "if", "verbose", ":", "print", "\"Colour modes of input : %s\"", "%", "colours", "if", "len", "(", "colours", ")", "==", "1", "and", "colours", "[", "0", "]", "==", "\"L\"", ":", "if", "verbose", ":", "print", "\"Builing graylevel composition\"", "compoimg", "=", "im", ".", "new", "(", "\"L\"", ",", "(", "totwidth", ",", "totheight", ")", ",", "128", ")", "else", ":", "if", "verbose", ":", "print", "\"Building RGB composition\"", "compoimg", "=", "im", ".", "new", "(", "\"RGB\"", ",", "(", "totwidth", ",", "totheight", ")", ",", "(", "255", ",", "0", ",", "0", ")", ")", "y", "=", "0", "for", "line", "in", "f2nimages", ":", "x", "=", "0", "for", "img", "in", "line", ":", "box", "=", "(", "x", ",", "y", ",", "x", "+", "img", ".", "pilimage", ".", "size", "[", "0", "]", ",", "y", "+", "img", ".", "pilimage", ".", "size", "[", "1", "]", ")", "#print box", "compoimg", ".", "paste", "(", "img", ".", "pilimage", ",", "box", ")", "x", "+=", "img", ".", "pilimage", ".", "size", "[", "0", "]", "y", "+=", "img", ".", "pilimage", ".", "size", "[", "1", "]", "if", "verbose", ":", "print", "\"Writing compositions to %s...\\n%i x %i pixels, mode %s\"", "%", "(", "outfile", ",", "compoimg", ".", "size", "[", "0", "]", ",", "compoimg", ".", "size", "[", "1", "]", ",", "compoimg", ".", "mode", ")", "compoimg", ".", "save", "(", "outfile", ",", "\"PNG\"", ")" ]
Takes f2nimages and writes them into one single png file, side by side. f2nimages is a list of horizontal lines, where each line is a list of f2nimages. For instance : [ [image1, image2], [image3, image4] ] The sizes of these images have to "match", so that the final result is rectangular. This function is verbose if any of the images is verbose.
[ "Takes", "f2nimages", "and", "writes", "them", "into", "one", "single", "png", "file", "side", "by", "side", ".", "f2nimages", "is", "a", "list", "of", "horizontal", "lines", "where", "each", "line", "is", "a", "list", "of", "f2nimages", ".", "For", "instance", ":", "[", "[", "image1", "image2", "]", "[", "image3", "image4", "]", "]", "The", "sizes", "of", "these", "images", "have", "to", "match", "so", "that", "the", "final", "result", "is", "rectangular", ".", "This", "function", "is", "verbose", "if", "any", "of", "the", "images", "is", "verbose", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L1001-L1074
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.setzscale
def setzscale(self, z1="auto", z2="auto", nsig=3, samplesizelimit = 10000, border=300): """ We set z1 and z2, according to different algorithms or arguments. For both z1 and z2, give either : - "auto" (default automatic, different between z1 and z2) - "ex" (extrema) - "flat" ("sigma-cuts" around median value, well-suited for flatfields) - numeric value like 1230.34 nsig is the number of sigmas to be rejected (used by auto z1 + both flats) samplesizelimit is the maximum number of pixels to compute statistics on. If your image is larger then samplesizelimit, I will use only samplesizelimit pixels of it. If your image is 3 times border in width and height, I will skip border pixels around the image before doing calculations. This is made to get rid of the overscan and prescan etc. So you can basically leave this at 300, it will only affect images wider then 900 pixels. (300 happens to be a safe value for many telescopes.) You can put border = 0 to deactivate this feature. If you give nothing, the cutoff will not be changed. You should set the z scale directly after cropping the image. """ if self.pilimage != None: raise RuntimeError, "Cannot set z scale anymore, PIL image already exists !" 
if self.numpyarray.shape[0] > 3 * border and self.numpyarray.shape[1] > 3 * border: if border > 0: if self.verbose : print "For the stats I will leave a border of %i pixels" % border calcarray = self.numpyarray[border:-border, border:-border].copy() else: calcarray = self.numpyarray.copy() else: calcarray = self.numpyarray.copy() if self.verbose: print "Image is too small for a border of %i" % (border) # Starting with the simple possibilities : if z1 == "ex" : self.z1 = np.min(calcarray) if self.verbose: print "Setting ex z1 to %f" % self.z1 if z2 == "ex": self.z2 = np.max(calcarray) if self.verbose: print "Setting ex z2 to %f" % self.z2 if type(z1) == type(0) or type(z1) == type(0.0): self.z1 = z1 if self.verbose: print "Setting z1 to %f" % self.z1 if type(z2) == type(0) or type(z2) == type(0.0): self.z2 = z2 if self.verbose: print "Setting z2 to %f" % self.z2 # Now it gets a little more sophisticated. if z1 == "auto" or z2 == "auto" or z1 == "flat" or z2 == "flat": # To speed up, we do not want to do statistics on the full image if it is large. # So we prepare a small random sample of pixels. 
calcarray.shape = calcarray.size # We flatten the 2D array if calcarray.size > samplesizelimit : #selectionindices = np.random.random_integers(low = 0, high = calcarray.size - 1, size=samplesizelimit) selectionindices = np.linspace(0, calcarray.size-1, samplesizelimit).astype(np.int) statsel = calcarray[selectionindices] else : statsel = calcarray #nbrofbins = 10 + int(np.log10(calcarray.size)*10.0) #print "Building histogram with %i bins" % nbrofbins #nbrofbins = 100 #hist = np.histogram(statsel, bins=nbrofbins, range=(self.z1, self.z2), normed=False, weights=None, new=True) medianlevel = np.median(statsel) firststd = np.std(statsel) if z1 == "auto" : # 2 sigma clipping (quick and dirty star removal) : nearskypixvals = statsel[np.logical_and(statsel > medianlevel - 2*firststd, statsel < medianlevel + 2*firststd)] skylevel = np.median(nearskypixvals) secondstd = np.std(nearskypixvals) if self.verbose : print "Sky level at %f +/- %f" % (skylevel, secondstd) self.z1 = skylevel - nsig*secondstd if self.verbose : print "Setting auto z1 to %f, nsig = %i" % (self.z1, nsig) if z2 == "auto" : # Here we want to reject a percentage of high values... 
sortedstatsel = np.sort(statsel) n = round(0.9995 * statsel.size) self.z2 = sortedstatsel[n] if self.verbose : print "Setting auto z2 to %f" % self.z2 if z1 == "flat" : # 5 sigma clipping to get rid of cosmics : nearflatpixvals = statsel[np.logical_and(statsel > medianlevel - 5*firststd, statsel < medianlevel + 5*firststd)] flatlevel = np.median(nearflatpixvals) flatstd = np.std(nearflatpixvals) self.z1 = flatlevel - nsig*flatstd if self.verbose : print "Setting flat z1 : %f, nsig = %i" % (self.z1, nsig) if z2 == "flat" : # symmetric to z1 # 5 sigma clipping to get rid of cosmics : nearflatpixvals = statsel[np.logical_and(statsel > medianlevel - 5*firststd, statsel < medianlevel + 5*firststd)] flatlevel = np.median(nearflatpixvals) flatstd = np.std(nearflatpixvals) self.z2 = flatlevel + nsig*flatstd if self.verbose : print "Setting flat z2 : %f, nsig = %i" % (self.z2, nsig)
python
def setzscale(self, z1="auto", z2="auto", nsig=3, samplesizelimit = 10000, border=300): """ We set z1 and z2, according to different algorithms or arguments. For both z1 and z2, give either : - "auto" (default automatic, different between z1 and z2) - "ex" (extrema) - "flat" ("sigma-cuts" around median value, well-suited for flatfields) - numeric value like 1230.34 nsig is the number of sigmas to be rejected (used by auto z1 + both flats) samplesizelimit is the maximum number of pixels to compute statistics on. If your image is larger then samplesizelimit, I will use only samplesizelimit pixels of it. If your image is 3 times border in width and height, I will skip border pixels around the image before doing calculations. This is made to get rid of the overscan and prescan etc. So you can basically leave this at 300, it will only affect images wider then 900 pixels. (300 happens to be a safe value for many telescopes.) You can put border = 0 to deactivate this feature. If you give nothing, the cutoff will not be changed. You should set the z scale directly after cropping the image. """ if self.pilimage != None: raise RuntimeError, "Cannot set z scale anymore, PIL image already exists !" 
if self.numpyarray.shape[0] > 3 * border and self.numpyarray.shape[1] > 3 * border: if border > 0: if self.verbose : print "For the stats I will leave a border of %i pixels" % border calcarray = self.numpyarray[border:-border, border:-border].copy() else: calcarray = self.numpyarray.copy() else: calcarray = self.numpyarray.copy() if self.verbose: print "Image is too small for a border of %i" % (border) # Starting with the simple possibilities : if z1 == "ex" : self.z1 = np.min(calcarray) if self.verbose: print "Setting ex z1 to %f" % self.z1 if z2 == "ex": self.z2 = np.max(calcarray) if self.verbose: print "Setting ex z2 to %f" % self.z2 if type(z1) == type(0) or type(z1) == type(0.0): self.z1 = z1 if self.verbose: print "Setting z1 to %f" % self.z1 if type(z2) == type(0) or type(z2) == type(0.0): self.z2 = z2 if self.verbose: print "Setting z2 to %f" % self.z2 # Now it gets a little more sophisticated. if z1 == "auto" or z2 == "auto" or z1 == "flat" or z2 == "flat": # To speed up, we do not want to do statistics on the full image if it is large. # So we prepare a small random sample of pixels. 
calcarray.shape = calcarray.size # We flatten the 2D array if calcarray.size > samplesizelimit : #selectionindices = np.random.random_integers(low = 0, high = calcarray.size - 1, size=samplesizelimit) selectionindices = np.linspace(0, calcarray.size-1, samplesizelimit).astype(np.int) statsel = calcarray[selectionindices] else : statsel = calcarray #nbrofbins = 10 + int(np.log10(calcarray.size)*10.0) #print "Building histogram with %i bins" % nbrofbins #nbrofbins = 100 #hist = np.histogram(statsel, bins=nbrofbins, range=(self.z1, self.z2), normed=False, weights=None, new=True) medianlevel = np.median(statsel) firststd = np.std(statsel) if z1 == "auto" : # 2 sigma clipping (quick and dirty star removal) : nearskypixvals = statsel[np.logical_and(statsel > medianlevel - 2*firststd, statsel < medianlevel + 2*firststd)] skylevel = np.median(nearskypixvals) secondstd = np.std(nearskypixvals) if self.verbose : print "Sky level at %f +/- %f" % (skylevel, secondstd) self.z1 = skylevel - nsig*secondstd if self.verbose : print "Setting auto z1 to %f, nsig = %i" % (self.z1, nsig) if z2 == "auto" : # Here we want to reject a percentage of high values... 
sortedstatsel = np.sort(statsel) n = round(0.9995 * statsel.size) self.z2 = sortedstatsel[n] if self.verbose : print "Setting auto z2 to %f" % self.z2 if z1 == "flat" : # 5 sigma clipping to get rid of cosmics : nearflatpixvals = statsel[np.logical_and(statsel > medianlevel - 5*firststd, statsel < medianlevel + 5*firststd)] flatlevel = np.median(nearflatpixvals) flatstd = np.std(nearflatpixvals) self.z1 = flatlevel - nsig*flatstd if self.verbose : print "Setting flat z1 : %f, nsig = %i" % (self.z1, nsig) if z2 == "flat" : # symmetric to z1 # 5 sigma clipping to get rid of cosmics : nearflatpixvals = statsel[np.logical_and(statsel > medianlevel - 5*firststd, statsel < medianlevel + 5*firststd)] flatlevel = np.median(nearflatpixvals) flatstd = np.std(nearflatpixvals) self.z2 = flatlevel + nsig*flatstd if self.verbose : print "Setting flat z2 : %f, nsig = %i" % (self.z2, nsig)
[ "def", "setzscale", "(", "self", ",", "z1", "=", "\"auto\"", ",", "z2", "=", "\"auto\"", ",", "nsig", "=", "3", ",", "samplesizelimit", "=", "10000", ",", "border", "=", "300", ")", ":", "if", "self", ".", "pilimage", "!=", "None", ":", "raise", "RuntimeError", ",", "\"Cannot set z scale anymore, PIL image already exists !\"", "if", "self", ".", "numpyarray", ".", "shape", "[", "0", "]", ">", "3", "*", "border", "and", "self", ".", "numpyarray", ".", "shape", "[", "1", "]", ">", "3", "*", "border", ":", "if", "border", ">", "0", ":", "if", "self", ".", "verbose", ":", "print", "\"For the stats I will leave a border of %i pixels\"", "%", "border", "calcarray", "=", "self", ".", "numpyarray", "[", "border", ":", "-", "border", ",", "border", ":", "-", "border", "]", ".", "copy", "(", ")", "else", ":", "calcarray", "=", "self", ".", "numpyarray", ".", "copy", "(", ")", "else", ":", "calcarray", "=", "self", ".", "numpyarray", ".", "copy", "(", ")", "if", "self", ".", "verbose", ":", "print", "\"Image is too small for a border of %i\"", "%", "(", "border", ")", "# Starting with the simple possibilities :", "if", "z1", "==", "\"ex\"", ":", "self", ".", "z1", "=", "np", ".", "min", "(", "calcarray", ")", "if", "self", ".", "verbose", ":", "print", "\"Setting ex z1 to %f\"", "%", "self", ".", "z1", "if", "z2", "==", "\"ex\"", ":", "self", ".", "z2", "=", "np", ".", "max", "(", "calcarray", ")", "if", "self", ".", "verbose", ":", "print", "\"Setting ex z2 to %f\"", "%", "self", ".", "z2", "if", "type", "(", "z1", ")", "==", "type", "(", "0", ")", "or", "type", "(", "z1", ")", "==", "type", "(", "0.0", ")", ":", "self", ".", "z1", "=", "z1", "if", "self", ".", "verbose", ":", "print", "\"Setting z1 to %f\"", "%", "self", ".", "z1", "if", "type", "(", "z2", ")", "==", "type", "(", "0", ")", "or", "type", "(", "z2", ")", "==", "type", "(", "0.0", ")", ":", "self", ".", "z2", "=", "z2", "if", "self", ".", "verbose", ":", "print", "\"Setting z2 to %f\"", "%", "self", ".", 
"z2", "# Now it gets a little more sophisticated.", "if", "z1", "==", "\"auto\"", "or", "z2", "==", "\"auto\"", "or", "z1", "==", "\"flat\"", "or", "z2", "==", "\"flat\"", ":", "# To speed up, we do not want to do statistics on the full image if it is large.", "# So we prepare a small random sample of pixels.", "calcarray", ".", "shape", "=", "calcarray", ".", "size", "# We flatten the 2D array", "if", "calcarray", ".", "size", ">", "samplesizelimit", ":", "#selectionindices = np.random.random_integers(low = 0, high = calcarray.size - 1, size=samplesizelimit)", "selectionindices", "=", "np", ".", "linspace", "(", "0", ",", "calcarray", ".", "size", "-", "1", ",", "samplesizelimit", ")", ".", "astype", "(", "np", ".", "int", ")", "statsel", "=", "calcarray", "[", "selectionindices", "]", "else", ":", "statsel", "=", "calcarray", "#nbrofbins = 10 + int(np.log10(calcarray.size)*10.0)", "#print \"Building histogram with %i bins\" % nbrofbins", "#nbrofbins = 100", "#hist = np.histogram(statsel, bins=nbrofbins, range=(self.z1, self.z2), normed=False, weights=None, new=True)", "medianlevel", "=", "np", ".", "median", "(", "statsel", ")", "firststd", "=", "np", ".", "std", "(", "statsel", ")", "if", "z1", "==", "\"auto\"", ":", "# 2 sigma clipping (quick and dirty star removal) :", "nearskypixvals", "=", "statsel", "[", "np", ".", "logical_and", "(", "statsel", ">", "medianlevel", "-", "2", "*", "firststd", ",", "statsel", "<", "medianlevel", "+", "2", "*", "firststd", ")", "]", "skylevel", "=", "np", ".", "median", "(", "nearskypixvals", ")", "secondstd", "=", "np", ".", "std", "(", "nearskypixvals", ")", "if", "self", ".", "verbose", ":", "print", "\"Sky level at %f +/- %f\"", "%", "(", "skylevel", ",", "secondstd", ")", "self", ".", "z1", "=", "skylevel", "-", "nsig", "*", "secondstd", "if", "self", ".", "verbose", ":", "print", "\"Setting auto z1 to %f, nsig = %i\"", "%", "(", "self", ".", "z1", ",", "nsig", ")", "if", "z2", "==", "\"auto\"", ":", "# Here we want to 
reject a percentage of high values...", "sortedstatsel", "=", "np", ".", "sort", "(", "statsel", ")", "n", "=", "round", "(", "0.9995", "*", "statsel", ".", "size", ")", "self", ".", "z2", "=", "sortedstatsel", "[", "n", "]", "if", "self", ".", "verbose", ":", "print", "\"Setting auto z2 to %f\"", "%", "self", ".", "z2", "if", "z1", "==", "\"flat\"", ":", "# 5 sigma clipping to get rid of cosmics :", "nearflatpixvals", "=", "statsel", "[", "np", ".", "logical_and", "(", "statsel", ">", "medianlevel", "-", "5", "*", "firststd", ",", "statsel", "<", "medianlevel", "+", "5", "*", "firststd", ")", "]", "flatlevel", "=", "np", ".", "median", "(", "nearflatpixvals", ")", "flatstd", "=", "np", ".", "std", "(", "nearflatpixvals", ")", "self", ".", "z1", "=", "flatlevel", "-", "nsig", "*", "flatstd", "if", "self", ".", "verbose", ":", "print", "\"Setting flat z1 : %f, nsig = %i\"", "%", "(", "self", ".", "z1", ",", "nsig", ")", "if", "z2", "==", "\"flat\"", ":", "# symmetric to z1", "# 5 sigma clipping to get rid of cosmics :", "nearflatpixvals", "=", "statsel", "[", "np", ".", "logical_and", "(", "statsel", ">", "medianlevel", "-", "5", "*", "firststd", ",", "statsel", "<", "medianlevel", "+", "5", "*", "firststd", ")", "]", "flatlevel", "=", "np", ".", "median", "(", "nearflatpixvals", ")", "flatstd", "=", "np", ".", "std", "(", "nearflatpixvals", ")", "self", ".", "z2", "=", "flatlevel", "+", "nsig", "*", "flatstd", "if", "self", ".", "verbose", ":", "print", "\"Setting flat z2 : %f, nsig = %i\"", "%", "(", "self", ".", "z2", ",", "nsig", ")" ]
We set z1 and z2, according to different algorithms or arguments. For both z1 and z2, give either : - "auto" (default automatic, different between z1 and z2) - "ex" (extrema) - "flat" ("sigma-cuts" around median value, well-suited for flatfields) - numeric value like 1230.34 nsig is the number of sigmas to be rejected (used by auto z1 + both flats) samplesizelimit is the maximum number of pixels to compute statistics on. If your image is larger then samplesizelimit, I will use only samplesizelimit pixels of it. If your image is 3 times border in width and height, I will skip border pixels around the image before doing calculations. This is made to get rid of the overscan and prescan etc. So you can basically leave this at 300, it will only affect images wider then 900 pixels. (300 happens to be a safe value for many telescopes.) You can put border = 0 to deactivate this feature. If you give nothing, the cutoff will not be changed. You should set the z scale directly after cropping the image.
[ "We", "set", "z1", "and", "z2", "according", "to", "different", "algorithms", "or", "arguments", ".", "For", "both", "z1", "and", "z2", "give", "either", ":", "-", "auto", "(", "default", "automatic", "different", "between", "z1", "and", "z2", ")", "-", "ex", "(", "extrema", ")", "-", "flat", "(", "sigma", "-", "cuts", "around", "median", "value", "well", "-", "suited", "for", "flatfields", ")", "-", "numeric", "value", "like", "1230", ".", "34", "nsig", "is", "the", "number", "of", "sigmas", "to", "be", "rejected", "(", "used", "by", "auto", "z1", "+", "both", "flats", ")", "samplesizelimit", "is", "the", "maximum", "number", "of", "pixels", "to", "compute", "statistics", "on", ".", "If", "your", "image", "is", "larger", "then", "samplesizelimit", "I", "will", "use", "only", "samplesizelimit", "pixels", "of", "it", ".", "If", "your", "image", "is", "3", "times", "border", "in", "width", "and", "height", "I", "will", "skip", "border", "pixels", "around", "the", "image", "before", "doing", "calculations", ".", "This", "is", "made", "to", "get", "rid", "of", "the", "overscan", "and", "prescan", "etc", ".", "So", "you", "can", "basically", "leave", "this", "at", "300", "it", "will", "only", "affect", "images", "wider", "then", "900", "pixels", ".", "(", "300", "happens", "to", "be", "a", "safe", "value", "for", "many", "telescopes", ".", ")", "You", "can", "put", "border", "=", "0", "to", "deactivate", "this", "feature", ".", "If", "you", "give", "nothing", "the", "cutoff", "will", "not", "be", "changed", ".", "You", "should", "set", "the", "z", "scale", "directly", "after", "cropping", "the", "image", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L173-L311
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.crop
def crop(self, xa, xb, ya, yb): """ Crops the image. Two points : - We use numpy conventions xa = 200 and xb = 400 will give you a width of 200 pixels ! - We crop relative to the current array (i.e. not necessarily to the original array !) This means you can crop several times in a row with xa = 10, it will each time remove 10 pixels in x ! But we update the crop region specifications, so that the object remembers how it was cut. Please give positive integers in compatible ranges, no checks are made. """ if self.pilimage != None: raise RuntimeError, "Cannot crop anymore, PIL image already exists !" if self.verbose: print "Cropping : [%i:%i, %i:%i]" % (xa, xb, ya, yb) self.numpyarray = self.numpyarray[xa:xb, ya:yb] self.xa += xa self.ya += ya self.xb = self.xa + (xb - xa) self.yb = self.ya + (yb - ya)
python
def crop(self, xa, xb, ya, yb): """ Crops the image. Two points : - We use numpy conventions xa = 200 and xb = 400 will give you a width of 200 pixels ! - We crop relative to the current array (i.e. not necessarily to the original array !) This means you can crop several times in a row with xa = 10, it will each time remove 10 pixels in x ! But we update the crop region specifications, so that the object remembers how it was cut. Please give positive integers in compatible ranges, no checks are made. """ if self.pilimage != None: raise RuntimeError, "Cannot crop anymore, PIL image already exists !" if self.verbose: print "Cropping : [%i:%i, %i:%i]" % (xa, xb, ya, yb) self.numpyarray = self.numpyarray[xa:xb, ya:yb] self.xa += xa self.ya += ya self.xb = self.xa + (xb - xa) self.yb = self.ya + (yb - ya)
[ "def", "crop", "(", "self", ",", "xa", ",", "xb", ",", "ya", ",", "yb", ")", ":", "if", "self", ".", "pilimage", "!=", "None", ":", "raise", "RuntimeError", ",", "\"Cannot crop anymore, PIL image already exists !\"", "if", "self", ".", "verbose", ":", "print", "\"Cropping : [%i:%i, %i:%i]\"", "%", "(", "xa", ",", "xb", ",", "ya", ",", "yb", ")", "self", ".", "numpyarray", "=", "self", ".", "numpyarray", "[", "xa", ":", "xb", ",", "ya", ":", "yb", "]", "self", ".", "xa", "+=", "xa", "self", ".", "ya", "+=", "ya", "self", ".", "xb", "=", "self", ".", "xa", "+", "(", "xb", "-", "xa", ")", "self", ".", "yb", "=", "self", ".", "ya", "+", "(", "yb", "-", "ya", ")" ]
Crops the image. Two points : - We use numpy conventions xa = 200 and xb = 400 will give you a width of 200 pixels ! - We crop relative to the current array (i.e. not necessarily to the original array !) This means you can crop several times in a row with xa = 10, it will each time remove 10 pixels in x ! But we update the crop region specifications, so that the object remembers how it was cut. Please give positive integers in compatible ranges, no checks are made.
[ "Crops", "the", "image", ".", "Two", "points", ":", "-", "We", "use", "numpy", "conventions", "xa", "=", "200", "and", "xb", "=", "400", "will", "give", "you", "a", "width", "of", "200", "pixels", "!", "-", "We", "crop", "relative", "to", "the", "current", "array", "(", "i", ".", "e", ".", "not", "necessarily", "to", "the", "original", "array", "!", ")", "This", "means", "you", "can", "crop", "several", "times", "in", "a", "row", "with", "xa", "=", "10", "it", "will", "each", "time", "remove", "10", "pixels", "in", "x", "!", "But", "we", "update", "the", "crop", "region", "specifications", "so", "that", "the", "object", "remembers", "how", "it", "was", "cut", ".", "Please", "give", "positive", "integers", "in", "compatible", "ranges", "no", "checks", "are", "made", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L343-L369
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.irafcrop
def irafcrop(self, irafcropstring): """ This is a wrapper around crop(), similar to iraf imcopy, using iraf conventions (100:199 will be 100 pixels, not 99). """ irafcropstring = irafcropstring[1:-1] # removing the [ ] ranges = irafcropstring.split(",") xr = ranges[0].split(":") yr = ranges[1].split(":") xmin = int(xr[0]) xmax = int(xr[1])+1 ymin = int(yr[0]) ymax = int(yr[1])+1 self.crop(xmin, xmax, ymin, ymax)
python
def irafcrop(self, irafcropstring): """ This is a wrapper around crop(), similar to iraf imcopy, using iraf conventions (100:199 will be 100 pixels, not 99). """ irafcropstring = irafcropstring[1:-1] # removing the [ ] ranges = irafcropstring.split(",") xr = ranges[0].split(":") yr = ranges[1].split(":") xmin = int(xr[0]) xmax = int(xr[1])+1 ymin = int(yr[0]) ymax = int(yr[1])+1 self.crop(xmin, xmax, ymin, ymax)
[ "def", "irafcrop", "(", "self", ",", "irafcropstring", ")", ":", "irafcropstring", "=", "irafcropstring", "[", "1", ":", "-", "1", "]", "# removing the [ ]", "ranges", "=", "irafcropstring", ".", "split", "(", "\",\"", ")", "xr", "=", "ranges", "[", "0", "]", ".", "split", "(", "\":\"", ")", "yr", "=", "ranges", "[", "1", "]", ".", "split", "(", "\":\"", ")", "xmin", "=", "int", "(", "xr", "[", "0", "]", ")", "xmax", "=", "int", "(", "xr", "[", "1", "]", ")", "+", "1", "ymin", "=", "int", "(", "yr", "[", "0", "]", ")", "ymax", "=", "int", "(", "yr", "[", "1", "]", ")", "+", "1", "self", ".", "crop", "(", "xmin", ",", "xmax", ",", "ymin", ",", "ymax", ")" ]
This is a wrapper around crop(), similar to iraf imcopy, using iraf conventions (100:199 will be 100 pixels, not 99).
[ "This", "is", "a", "wrapper", "around", "crop", "()", "similar", "to", "iraf", "imcopy", "using", "iraf", "conventions", "(", "100", ":", "199", "will", "be", "100", "pixels", "not", "99", ")", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L375-L388
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.rebin
def rebin(self, factor): """ I robustly rebin your image by a given factor. You simply specify a factor, and I will eventually take care of a crop to bring the image to interger-multiple-of-your-factor dimensions. Note that if you crop your image before, you must directly crop to compatible dimensions ! We update the binfactor, this allows you to draw on the image later, still using the orignial pixel coordinates. Here we work on the numpy array. """ if self.pilimage != None: raise RuntimeError, "Cannot rebin anymore, PIL image already exists !" if type(factor) != type(0): raise RuntimeError, "Rebin factor must be an integer !" if factor < 1: return origshape = np.asarray(self.numpyarray.shape) neededshape = origshape - (origshape % factor) if not (origshape == neededshape).all(): if self.verbose : print "Rebinning %ix%i : I have to crop from %s to %s" % (factor, factor, origshape, neededshape) self.crop(0, neededshape[0], 0, neededshape[1]) else: if self.verbose : print "Rebinning %ix%i : I do not need to crop" % (factor, factor) self.numpyarray = rebin(self.numpyarray, neededshape/factor) # we call the rebin function defined below # The integer division neededshape/factor is ok, we checked for this above. self.binfactor = int(self.binfactor * factor)
python
def rebin(self, factor): """ I robustly rebin your image by a given factor. You simply specify a factor, and I will eventually take care of a crop to bring the image to interger-multiple-of-your-factor dimensions. Note that if you crop your image before, you must directly crop to compatible dimensions ! We update the binfactor, this allows you to draw on the image later, still using the orignial pixel coordinates. Here we work on the numpy array. """ if self.pilimage != None: raise RuntimeError, "Cannot rebin anymore, PIL image already exists !" if type(factor) != type(0): raise RuntimeError, "Rebin factor must be an integer !" if factor < 1: return origshape = np.asarray(self.numpyarray.shape) neededshape = origshape - (origshape % factor) if not (origshape == neededshape).all(): if self.verbose : print "Rebinning %ix%i : I have to crop from %s to %s" % (factor, factor, origshape, neededshape) self.crop(0, neededshape[0], 0, neededshape[1]) else: if self.verbose : print "Rebinning %ix%i : I do not need to crop" % (factor, factor) self.numpyarray = rebin(self.numpyarray, neededshape/factor) # we call the rebin function defined below # The integer division neededshape/factor is ok, we checked for this above. self.binfactor = int(self.binfactor * factor)
[ "def", "rebin", "(", "self", ",", "factor", ")", ":", "if", "self", ".", "pilimage", "!=", "None", ":", "raise", "RuntimeError", ",", "\"Cannot rebin anymore, PIL image already exists !\"", "if", "type", "(", "factor", ")", "!=", "type", "(", "0", ")", ":", "raise", "RuntimeError", ",", "\"Rebin factor must be an integer !\"", "if", "factor", "<", "1", ":", "return", "origshape", "=", "np", ".", "asarray", "(", "self", ".", "numpyarray", ".", "shape", ")", "neededshape", "=", "origshape", "-", "(", "origshape", "%", "factor", ")", "if", "not", "(", "origshape", "==", "neededshape", ")", ".", "all", "(", ")", ":", "if", "self", ".", "verbose", ":", "print", "\"Rebinning %ix%i : I have to crop from %s to %s\"", "%", "(", "factor", ",", "factor", ",", "origshape", ",", "neededshape", ")", "self", ".", "crop", "(", "0", ",", "neededshape", "[", "0", "]", ",", "0", ",", "neededshape", "[", "1", "]", ")", "else", ":", "if", "self", ".", "verbose", ":", "print", "\"Rebinning %ix%i : I do not need to crop\"", "%", "(", "factor", ",", "factor", ")", "self", ".", "numpyarray", "=", "rebin", "(", "self", ".", "numpyarray", ",", "neededshape", "/", "factor", ")", "# we call the rebin function defined below", "# The integer division neededshape/factor is ok, we checked for this above.", "self", ".", "binfactor", "=", "int", "(", "self", ".", "binfactor", "*", "factor", ")" ]
I robustly rebin your image by a given factor. You simply specify a factor, and I will eventually take care of a crop to bring the image to interger-multiple-of-your-factor dimensions. Note that if you crop your image before, you must directly crop to compatible dimensions ! We update the binfactor, this allows you to draw on the image later, still using the orignial pixel coordinates. Here we work on the numpy array.
[ "I", "robustly", "rebin", "your", "image", "by", "a", "given", "factor", ".", "You", "simply", "specify", "a", "factor", "and", "I", "will", "eventually", "take", "care", "of", "a", "crop", "to", "bring", "the", "image", "to", "interger", "-", "multiple", "-", "of", "-", "your", "-", "factor", "dimensions", ".", "Note", "that", "if", "you", "crop", "your", "image", "before", "you", "must", "directly", "crop", "to", "compatible", "dimensions", "!", "We", "update", "the", "binfactor", "this", "allows", "you", "to", "draw", "on", "the", "image", "later", "still", "using", "the", "orignial", "pixel", "coordinates", ".", "Here", "we", "work", "on", "the", "numpy", "array", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L391-L423
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.makepilimage
def makepilimage(self, scale = "log", negative = False): """ Makes a PIL image out of the array, respecting the z1 and z2 cutoffs. By default we use a log scaling identical to iraf's, and produce an image of mode "L", i.e. grayscale. But some drawings or colourscales will change the mode to "RGB" later, if you choose your own colours. If you choose scale = "clog" or "clin", you get hue values (aka rainbow colours). """ if scale == "log" or scale == "lin": self.negative = negative numpyarrayshape = self.numpyarray.shape calcarray = self.numpyarray.copy() #calcarray.ravel() # does not change in place in fact ! calcarray = calcarray.clip(min = self.z1, max = self.z2) if scale == "log": calcarray = np.array(map(lambda x: loggray(x, self.z1, self.z2), calcarray)) else : calcarray = np.array(map(lambda x: lingray(x, self.z1, self.z2), calcarray)) calcarray.shape = numpyarrayshape bwarray = np.zeros(numpyarrayshape, dtype=np.uint8) calcarray.round(out=bwarray) if negative: if self.verbose: print "Using negative scale" bwarray = 255 - bwarray if self.verbose: print "PIL range : [%i, %i]" % (np.min(bwarray), np.max(bwarray)) # We flip it so that (0, 0) is back in the bottom left corner as in ds9 # We do this here, so that you can write on the image from left to right :-) self.pilimage = imop.flip(im.fromarray(bwarray.transpose())) if self.verbose: print "PIL image made with scale : %s" % scale return 0 if scale == "clog" or scale == "clin": """ rainbow ! 
Algorithm for HSV to RGB from http://www.cs.rit.edu/~ncs/color/t_convert.html, by Eugene Vishnevsky Same stuff then for f2n in C h is from 0 to 360 (hue) s from 0 to 1 (saturation) v from 0 to 1 (brightness) """ self.negative = False calcarray = self.numpyarray.transpose() if scale == "clin": calcarray = (calcarray.clip(min = self.z1, max = self.z2)-self.z1)/(self.z2 - self.z1) # 0 to 1 if scale == "clog": calcarray = 10.0 + 990.0 * (calcarray.clip(min = self.z1, max = self.z2)-self.z1)/(self.z2 - self.z1) # 10 to 1000 calcarray = (np.log10(calcarray)-1.0)*0.5 # 0 to 1 #calcarray = calcarray * 359.0 # This is now our "hue value", 0 to 360 calcarray = (1.0-calcarray) * 300.0 # I limit this to not go into red again # The order of colours is Violet < Blue < Green < Yellow < Red # We prepare the output arrays rcalcarray = np.ones(calcarray.shape) gcalcarray = np.ones(calcarray.shape) bcalcarray = np.ones(calcarray.shape) h = calcarray/60.0 # sector 0 to 5 i = np.floor(h).astype(np.int) v = 1.0 * np.ones(calcarray.shape) s = 1.0 * np.ones(calcarray.shape) f = h - i # factorial part of h, this is an array p = v * ( 1.0 - s ) q = v * ( 1.0 - s * f ) t = v * ( 1.0 - s * ( 1.0 - f ) ) # sector 0: indices = (i == 0) rcalcarray[indices] = 255.0 * v[indices] gcalcarray[indices] = 255.0 * t[indices] bcalcarray[indices] = 255.0 * p[indices] # sector 1: indices = (i == 1) rcalcarray[indices] = 255.0 * q[indices] gcalcarray[indices] = 255.0 * v[indices] bcalcarray[indices] = 255.0 * p[indices] # sector 2: indices = (i == 2) rcalcarray[indices] = 255.0 * p[indices] gcalcarray[indices] = 255.0 * v[indices] bcalcarray[indices] = 255.0 * t[indices] # sector 3: indices = (i == 3) rcalcarray[indices] = 255.0 * p[indices] gcalcarray[indices] = 255.0 * q[indices] bcalcarray[indices] = 255.0 * v[indices] # sector 4: indices = (i == 4) rcalcarray[indices] = 255.0 * t[indices] gcalcarray[indices] = 255.0 * p[indices] bcalcarray[indices] = 255.0 * v[indices] # sector 5: indices = (i == 5) 
rcalcarray[indices] = 255.0 * v[indices] gcalcarray[indices] = 255.0 * p[indices] bcalcarray[indices] = 255.0 * q[indices] rarray = np.zeros(calcarray.shape, dtype=np.uint8) garray = np.zeros(calcarray.shape, dtype=np.uint8) barray = np.zeros(calcarray.shape, dtype=np.uint8) rcalcarray.round(out=rarray) gcalcarray.round(out=garray) bcalcarray.round(out=barray) carray = np.dstack((rarray,garray,barray)) self.pilimage = imop.flip(im.fromarray(carray, "RGB")) if self.verbose: print "PIL image made with scale : %s" % scale return 0 raise RuntimeError, "I don't know your colourscale, choose lin log clin or clog !"
python
def makepilimage(self, scale = "log", negative = False): """ Makes a PIL image out of the array, respecting the z1 and z2 cutoffs. By default we use a log scaling identical to iraf's, and produce an image of mode "L", i.e. grayscale. But some drawings or colourscales will change the mode to "RGB" later, if you choose your own colours. If you choose scale = "clog" or "clin", you get hue values (aka rainbow colours). """ if scale == "log" or scale == "lin": self.negative = negative numpyarrayshape = self.numpyarray.shape calcarray = self.numpyarray.copy() #calcarray.ravel() # does not change in place in fact ! calcarray = calcarray.clip(min = self.z1, max = self.z2) if scale == "log": calcarray = np.array(map(lambda x: loggray(x, self.z1, self.z2), calcarray)) else : calcarray = np.array(map(lambda x: lingray(x, self.z1, self.z2), calcarray)) calcarray.shape = numpyarrayshape bwarray = np.zeros(numpyarrayshape, dtype=np.uint8) calcarray.round(out=bwarray) if negative: if self.verbose: print "Using negative scale" bwarray = 255 - bwarray if self.verbose: print "PIL range : [%i, %i]" % (np.min(bwarray), np.max(bwarray)) # We flip it so that (0, 0) is back in the bottom left corner as in ds9 # We do this here, so that you can write on the image from left to right :-) self.pilimage = imop.flip(im.fromarray(bwarray.transpose())) if self.verbose: print "PIL image made with scale : %s" % scale return 0 if scale == "clog" or scale == "clin": """ rainbow ! 
Algorithm for HSV to RGB from http://www.cs.rit.edu/~ncs/color/t_convert.html, by Eugene Vishnevsky Same stuff then for f2n in C h is from 0 to 360 (hue) s from 0 to 1 (saturation) v from 0 to 1 (brightness) """ self.negative = False calcarray = self.numpyarray.transpose() if scale == "clin": calcarray = (calcarray.clip(min = self.z1, max = self.z2)-self.z1)/(self.z2 - self.z1) # 0 to 1 if scale == "clog": calcarray = 10.0 + 990.0 * (calcarray.clip(min = self.z1, max = self.z2)-self.z1)/(self.z2 - self.z1) # 10 to 1000 calcarray = (np.log10(calcarray)-1.0)*0.5 # 0 to 1 #calcarray = calcarray * 359.0 # This is now our "hue value", 0 to 360 calcarray = (1.0-calcarray) * 300.0 # I limit this to not go into red again # The order of colours is Violet < Blue < Green < Yellow < Red # We prepare the output arrays rcalcarray = np.ones(calcarray.shape) gcalcarray = np.ones(calcarray.shape) bcalcarray = np.ones(calcarray.shape) h = calcarray/60.0 # sector 0 to 5 i = np.floor(h).astype(np.int) v = 1.0 * np.ones(calcarray.shape) s = 1.0 * np.ones(calcarray.shape) f = h - i # factorial part of h, this is an array p = v * ( 1.0 - s ) q = v * ( 1.0 - s * f ) t = v * ( 1.0 - s * ( 1.0 - f ) ) # sector 0: indices = (i == 0) rcalcarray[indices] = 255.0 * v[indices] gcalcarray[indices] = 255.0 * t[indices] bcalcarray[indices] = 255.0 * p[indices] # sector 1: indices = (i == 1) rcalcarray[indices] = 255.0 * q[indices] gcalcarray[indices] = 255.0 * v[indices] bcalcarray[indices] = 255.0 * p[indices] # sector 2: indices = (i == 2) rcalcarray[indices] = 255.0 * p[indices] gcalcarray[indices] = 255.0 * v[indices] bcalcarray[indices] = 255.0 * t[indices] # sector 3: indices = (i == 3) rcalcarray[indices] = 255.0 * p[indices] gcalcarray[indices] = 255.0 * q[indices] bcalcarray[indices] = 255.0 * v[indices] # sector 4: indices = (i == 4) rcalcarray[indices] = 255.0 * t[indices] gcalcarray[indices] = 255.0 * p[indices] bcalcarray[indices] = 255.0 * v[indices] # sector 5: indices = (i == 5) 
rcalcarray[indices] = 255.0 * v[indices] gcalcarray[indices] = 255.0 * p[indices] bcalcarray[indices] = 255.0 * q[indices] rarray = np.zeros(calcarray.shape, dtype=np.uint8) garray = np.zeros(calcarray.shape, dtype=np.uint8) barray = np.zeros(calcarray.shape, dtype=np.uint8) rcalcarray.round(out=rarray) gcalcarray.round(out=garray) bcalcarray.round(out=barray) carray = np.dstack((rarray,garray,barray)) self.pilimage = imop.flip(im.fromarray(carray, "RGB")) if self.verbose: print "PIL image made with scale : %s" % scale return 0 raise RuntimeError, "I don't know your colourscale, choose lin log clin or clog !"
[ "def", "makepilimage", "(", "self", ",", "scale", "=", "\"log\"", ",", "negative", "=", "False", ")", ":", "if", "scale", "==", "\"log\"", "or", "scale", "==", "\"lin\"", ":", "self", ".", "negative", "=", "negative", "numpyarrayshape", "=", "self", ".", "numpyarray", ".", "shape", "calcarray", "=", "self", ".", "numpyarray", ".", "copy", "(", ")", "#calcarray.ravel() # does not change in place in fact !", "calcarray", "=", "calcarray", ".", "clip", "(", "min", "=", "self", ".", "z1", ",", "max", "=", "self", ".", "z2", ")", "if", "scale", "==", "\"log\"", ":", "calcarray", "=", "np", ".", "array", "(", "map", "(", "lambda", "x", ":", "loggray", "(", "x", ",", "self", ".", "z1", ",", "self", ".", "z2", ")", ",", "calcarray", ")", ")", "else", ":", "calcarray", "=", "np", ".", "array", "(", "map", "(", "lambda", "x", ":", "lingray", "(", "x", ",", "self", ".", "z1", ",", "self", ".", "z2", ")", ",", "calcarray", ")", ")", "calcarray", ".", "shape", "=", "numpyarrayshape", "bwarray", "=", "np", ".", "zeros", "(", "numpyarrayshape", ",", "dtype", "=", "np", ".", "uint8", ")", "calcarray", ".", "round", "(", "out", "=", "bwarray", ")", "if", "negative", ":", "if", "self", ".", "verbose", ":", "print", "\"Using negative scale\"", "bwarray", "=", "255", "-", "bwarray", "if", "self", ".", "verbose", ":", "print", "\"PIL range : [%i, %i]\"", "%", "(", "np", ".", "min", "(", "bwarray", ")", ",", "np", ".", "max", "(", "bwarray", ")", ")", "# We flip it so that (0, 0) is back in the bottom left corner as in ds9", "# We do this here, so that you can write on the image from left to right :-) ", "self", ".", "pilimage", "=", "imop", ".", "flip", "(", "im", ".", "fromarray", "(", "bwarray", ".", "transpose", "(", ")", ")", ")", "if", "self", ".", "verbose", ":", "print", "\"PIL image made with scale : %s\"", "%", "scale", "return", "0", "if", "scale", "==", "\"clog\"", "or", "scale", "==", "\"clin\"", ":", "\"\"\"\n rainbow !\n Algorithm for HSV to RGB from 
http://www.cs.rit.edu/~ncs/color/t_convert.html, by Eugene Vishnevsky\n Same stuff then for f2n in C\n \n h is from 0 to 360 (hue)\n s from 0 to 1 (saturation)\n v from 0 to 1 (brightness) \n \"\"\"", "self", ".", "negative", "=", "False", "calcarray", "=", "self", ".", "numpyarray", ".", "transpose", "(", ")", "if", "scale", "==", "\"clin\"", ":", "calcarray", "=", "(", "calcarray", ".", "clip", "(", "min", "=", "self", ".", "z1", ",", "max", "=", "self", ".", "z2", ")", "-", "self", ".", "z1", ")", "/", "(", "self", ".", "z2", "-", "self", ".", "z1", ")", "# 0 to 1", "if", "scale", "==", "\"clog\"", ":", "calcarray", "=", "10.0", "+", "990.0", "*", "(", "calcarray", ".", "clip", "(", "min", "=", "self", ".", "z1", ",", "max", "=", "self", ".", "z2", ")", "-", "self", ".", "z1", ")", "/", "(", "self", ".", "z2", "-", "self", ".", "z1", ")", "# 10 to 1000", "calcarray", "=", "(", "np", ".", "log10", "(", "calcarray", ")", "-", "1.0", ")", "*", "0.5", "# 0 to 1 ", "#calcarray = calcarray * 359.0 # This is now our \"hue value\", 0 to 360", "calcarray", "=", "(", "1.0", "-", "calcarray", ")", "*", "300.0", "# I limit this to not go into red again", "# The order of colours is Violet < Blue < Green < Yellow < Red", "# We prepare the output arrays", "rcalcarray", "=", "np", ".", "ones", "(", "calcarray", ".", "shape", ")", "gcalcarray", "=", "np", ".", "ones", "(", "calcarray", ".", "shape", ")", "bcalcarray", "=", "np", ".", "ones", "(", "calcarray", ".", "shape", ")", "h", "=", "calcarray", "/", "60.0", "# sector 0 to 5", "i", "=", "np", ".", "floor", "(", "h", ")", ".", "astype", "(", "np", ".", "int", ")", "v", "=", "1.0", "*", "np", ".", "ones", "(", "calcarray", ".", "shape", ")", "s", "=", "1.0", "*", "np", ".", "ones", "(", "calcarray", ".", "shape", ")", "f", "=", "h", "-", "i", "# factorial part of h, this is an array", "p", "=", "v", "*", "(", "1.0", "-", "s", ")", "q", "=", "v", "*", "(", "1.0", "-", "s", "*", "f", ")", "t", "=", "v", "*", "(", "1.0", "-", 
"s", "*", "(", "1.0", "-", "f", ")", ")", "# sector 0:", "indices", "=", "(", "i", "==", "0", ")", "rcalcarray", "[", "indices", "]", "=", "255.0", "*", "v", "[", "indices", "]", "gcalcarray", "[", "indices", "]", "=", "255.0", "*", "t", "[", "indices", "]", "bcalcarray", "[", "indices", "]", "=", "255.0", "*", "p", "[", "indices", "]", "# sector 1:", "indices", "=", "(", "i", "==", "1", ")", "rcalcarray", "[", "indices", "]", "=", "255.0", "*", "q", "[", "indices", "]", "gcalcarray", "[", "indices", "]", "=", "255.0", "*", "v", "[", "indices", "]", "bcalcarray", "[", "indices", "]", "=", "255.0", "*", "p", "[", "indices", "]", "# sector 2:", "indices", "=", "(", "i", "==", "2", ")", "rcalcarray", "[", "indices", "]", "=", "255.0", "*", "p", "[", "indices", "]", "gcalcarray", "[", "indices", "]", "=", "255.0", "*", "v", "[", "indices", "]", "bcalcarray", "[", "indices", "]", "=", "255.0", "*", "t", "[", "indices", "]", "# sector 3:", "indices", "=", "(", "i", "==", "3", ")", "rcalcarray", "[", "indices", "]", "=", "255.0", "*", "p", "[", "indices", "]", "gcalcarray", "[", "indices", "]", "=", "255.0", "*", "q", "[", "indices", "]", "bcalcarray", "[", "indices", "]", "=", "255.0", "*", "v", "[", "indices", "]", "# sector 4:", "indices", "=", "(", "i", "==", "4", ")", "rcalcarray", "[", "indices", "]", "=", "255.0", "*", "t", "[", "indices", "]", "gcalcarray", "[", "indices", "]", "=", "255.0", "*", "p", "[", "indices", "]", "bcalcarray", "[", "indices", "]", "=", "255.0", "*", "v", "[", "indices", "]", "# sector 5:", "indices", "=", "(", "i", "==", "5", ")", "rcalcarray", "[", "indices", "]", "=", "255.0", "*", "v", "[", "indices", "]", "gcalcarray", "[", "indices", "]", "=", "255.0", "*", "p", "[", "indices", "]", "bcalcarray", "[", "indices", "]", "=", "255.0", "*", "q", "[", "indices", "]", "rarray", "=", "np", ".", "zeros", "(", "calcarray", ".", "shape", ",", "dtype", "=", "np", ".", "uint8", ")", "garray", "=", "np", ".", "zeros", "(", "calcarray", ".", 
"shape", ",", "dtype", "=", "np", ".", "uint8", ")", "barray", "=", "np", ".", "zeros", "(", "calcarray", ".", "shape", ",", "dtype", "=", "np", ".", "uint8", ")", "rcalcarray", ".", "round", "(", "out", "=", "rarray", ")", "gcalcarray", ".", "round", "(", "out", "=", "garray", ")", "bcalcarray", ".", "round", "(", "out", "=", "barray", ")", "carray", "=", "np", ".", "dstack", "(", "(", "rarray", ",", "garray", ",", "barray", ")", ")", "self", ".", "pilimage", "=", "imop", ".", "flip", "(", "im", ".", "fromarray", "(", "carray", ",", "\"RGB\"", ")", ")", "if", "self", ".", "verbose", ":", "print", "\"PIL image made with scale : %s\"", "%", "scale", "return", "0", "raise", "RuntimeError", ",", "\"I don't know your colourscale, choose lin log clin or clog !\"" ]
Makes a PIL image out of the array, respecting the z1 and z2 cutoffs. By default we use a log scaling identical to iraf's, and produce an image of mode "L", i.e. grayscale. But some drawings or colourscales will change the mode to "RGB" later, if you choose your own colours. If you choose scale = "clog" or "clin", you get hue values (aka rainbow colours).
[ "Makes", "a", "PIL", "image", "out", "of", "the", "array", "respecting", "the", "z1", "and", "z2", "cutoffs", ".", "By", "default", "we", "use", "a", "log", "scaling", "identical", "to", "iraf", "s", "and", "produce", "an", "image", "of", "mode", "L", "i", ".", "e", ".", "grayscale", ".", "But", "some", "drawings", "or", "colourscales", "will", "change", "the", "mode", "to", "RGB", "later", "if", "you", "choose", "your", "own", "colours", ".", "If", "you", "choose", "scale", "=", "clog", "or", "clin", "you", "get", "hue", "values", "(", "aka", "rainbow", "colours", ")", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L426-L557
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.drawmask
def drawmask(self, maskarray, colour = 128): """ I draw a mask on the image. Give me a numpy "maskarray" of same size as mine, and I draw on the pilimage all pixels of the maskarray that are True in the maskcolour. By default, colour is gray, to avoid switching to RGB. But if you give for instance (255, 0, 0), I will do the switch. """ self.checkforpilimage() self.changecolourmode(colour) self.makedraw() # Checking size of maskarray : if maskarray.shape[0] != self.pilimage.size[0] or maskarray.shape[1] != self.pilimage.size[1]: raise RuntimeError, "Mask and image must have the same size !" # We make an "L" mode image out of the mask : tmparray = np.zeros(maskarray.shape, dtype=np.uint8) tmparray[maskarray] = 255 maskpil = imop.flip(im.fromarray(tmparray.transpose())) # We make a plain colour image : if type(colour) == type(0) : plainpil = im.new("L", self.pilimage.size, colour) else : plainpil = im.new("RGB", self.pilimage.size, colour) # We switch self to RGB if needed : self.changecolourmode(colour) # And now use the function composite to "blend" our image with the plain colour image : self.pilimage = im.composite(plainpil, self.pilimage, maskpil) # As we have changed the image object, we have to rebuild the draw object : self.draw = None
python
def drawmask(self, maskarray, colour = 128): """ I draw a mask on the image. Give me a numpy "maskarray" of same size as mine, and I draw on the pilimage all pixels of the maskarray that are True in the maskcolour. By default, colour is gray, to avoid switching to RGB. But if you give for instance (255, 0, 0), I will do the switch. """ self.checkforpilimage() self.changecolourmode(colour) self.makedraw() # Checking size of maskarray : if maskarray.shape[0] != self.pilimage.size[0] or maskarray.shape[1] != self.pilimage.size[1]: raise RuntimeError, "Mask and image must have the same size !" # We make an "L" mode image out of the mask : tmparray = np.zeros(maskarray.shape, dtype=np.uint8) tmparray[maskarray] = 255 maskpil = imop.flip(im.fromarray(tmparray.transpose())) # We make a plain colour image : if type(colour) == type(0) : plainpil = im.new("L", self.pilimage.size, colour) else : plainpil = im.new("RGB", self.pilimage.size, colour) # We switch self to RGB if needed : self.changecolourmode(colour) # And now use the function composite to "blend" our image with the plain colour image : self.pilimage = im.composite(plainpil, self.pilimage, maskpil) # As we have changed the image object, we have to rebuild the draw object : self.draw = None
[ "def", "drawmask", "(", "self", ",", "maskarray", ",", "colour", "=", "128", ")", ":", "self", ".", "checkforpilimage", "(", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "# Checking size of maskarray :", "if", "maskarray", ".", "shape", "[", "0", "]", "!=", "self", ".", "pilimage", ".", "size", "[", "0", "]", "or", "maskarray", ".", "shape", "[", "1", "]", "!=", "self", ".", "pilimage", ".", "size", "[", "1", "]", ":", "raise", "RuntimeError", ",", "\"Mask and image must have the same size !\"", "# We make an \"L\" mode image out of the mask :", "tmparray", "=", "np", ".", "zeros", "(", "maskarray", ".", "shape", ",", "dtype", "=", "np", ".", "uint8", ")", "tmparray", "[", "maskarray", "]", "=", "255", "maskpil", "=", "imop", ".", "flip", "(", "im", ".", "fromarray", "(", "tmparray", ".", "transpose", "(", ")", ")", ")", "# We make a plain colour image :", "if", "type", "(", "colour", ")", "==", "type", "(", "0", ")", ":", "plainpil", "=", "im", ".", "new", "(", "\"L\"", ",", "self", ".", "pilimage", ".", "size", ",", "colour", ")", "else", ":", "plainpil", "=", "im", ".", "new", "(", "\"RGB\"", ",", "self", ".", "pilimage", ".", "size", ",", "colour", ")", "# We switch self to RGB if needed :", "self", ".", "changecolourmode", "(", "colour", ")", "# And now use the function composite to \"blend\" our image with the plain colour image :", "self", ".", "pilimage", "=", "im", ".", "composite", "(", "plainpil", ",", "self", ".", "pilimage", ",", "maskpil", ")", "# As we have changed the image object, we have to rebuild the draw object :", "self", ".", "draw", "=", "None" ]
I draw a mask on the image. Give me a numpy "maskarray" of same size as mine, and I draw on the pilimage all pixels of the maskarray that are True in the maskcolour. By default, colour is gray, to avoid switching to RGB. But if you give for instance (255, 0, 0), I will do the switch.
[ "I", "draw", "a", "mask", "on", "the", "image", ".", "Give", "me", "a", "numpy", "maskarray", "of", "same", "size", "as", "mine", "and", "I", "draw", "on", "the", "pilimage", "all", "pixels", "of", "the", "maskarray", "that", "are", "True", "in", "the", "maskcolour", ".", "By", "default", "colour", "is", "gray", "to", "avoid", "switching", "to", "RGB", ".", "But", "if", "you", "give", "for", "instance", "(", "255", "0", "0", ")", "I", "will", "do", "the", "switch", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L559-L594
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.showcutoffs
def showcutoffs(self, redblue = False): """ We use drawmask to visualize pixels above and below the z cutoffs. By default this is done in black (above) and white (below) (and adapts to negative images). But if you choose redblue = True, I use red for above z2 and blue for below z1. """ highmask = self.numpyarray > self.z2 lowmask = self.numpyarray < self.z1 if redblue == False : if self.negative : self.drawmask(highmask, colour = 255) self.drawmask(lowmask, colour = 0) else : self.drawmask(highmask, colour = 0) self.drawmask(lowmask, colour = 255) else : self.drawmask(highmask, colour = (255, 0, 0)) self.drawmask(lowmask, colour = (0, 0, 255))
python
def showcutoffs(self, redblue = False): """ We use drawmask to visualize pixels above and below the z cutoffs. By default this is done in black (above) and white (below) (and adapts to negative images). But if you choose redblue = True, I use red for above z2 and blue for below z1. """ highmask = self.numpyarray > self.z2 lowmask = self.numpyarray < self.z1 if redblue == False : if self.negative : self.drawmask(highmask, colour = 255) self.drawmask(lowmask, colour = 0) else : self.drawmask(highmask, colour = 0) self.drawmask(lowmask, colour = 255) else : self.drawmask(highmask, colour = (255, 0, 0)) self.drawmask(lowmask, colour = (0, 0, 255))
[ "def", "showcutoffs", "(", "self", ",", "redblue", "=", "False", ")", ":", "highmask", "=", "self", ".", "numpyarray", ">", "self", ".", "z2", "lowmask", "=", "self", ".", "numpyarray", "<", "self", ".", "z1", "if", "redblue", "==", "False", ":", "if", "self", ".", "negative", ":", "self", ".", "drawmask", "(", "highmask", ",", "colour", "=", "255", ")", "self", ".", "drawmask", "(", "lowmask", ",", "colour", "=", "0", ")", "else", ":", "self", ".", "drawmask", "(", "highmask", ",", "colour", "=", "0", ")", "self", ".", "drawmask", "(", "lowmask", ",", "colour", "=", "255", ")", "else", ":", "self", ".", "drawmask", "(", "highmask", ",", "colour", "=", "(", "255", ",", "0", ",", "0", ")", ")", "self", ".", "drawmask", "(", "lowmask", ",", "colour", "=", "(", "0", ",", "0", ",", "255", ")", ")" ]
We use drawmask to visualize pixels above and below the z cutoffs. By default this is done in black (above) and white (below) (and adapts to negative images). But if you choose redblue = True, I use red for above z2 and blue for below z1.
[ "We", "use", "drawmask", "to", "visualize", "pixels", "above", "and", "below", "the", "z", "cutoffs", ".", "By", "default", "this", "is", "done", "in", "black", "(", "above", ")", "and", "white", "(", "below", ")", "(", "and", "adapts", "to", "negative", "images", ")", ".", "But", "if", "you", "choose", "redblue", "=", "True", "I", "use", "red", "for", "above", "z2", "and", "blue", "for", "below", "z1", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L598-L617
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.makedraw
def makedraw(self): """Auxiliary method to make a draw object if not yet done. This is also called by changecolourmode, when we go from L to RGB, to get a new draw object. """ if self.draw == None: self.draw = imdw.Draw(self.pilimage)
python
def makedraw(self): """Auxiliary method to make a draw object if not yet done. This is also called by changecolourmode, when we go from L to RGB, to get a new draw object. """ if self.draw == None: self.draw = imdw.Draw(self.pilimage)
[ "def", "makedraw", "(", "self", ")", ":", "if", "self", ".", "draw", "==", "None", ":", "self", ".", "draw", "=", "imdw", ".", "Draw", "(", "self", ".", "pilimage", ")" ]
Auxiliary method to make a draw object if not yet done. This is also called by changecolourmode, when we go from L to RGB, to get a new draw object.
[ "Auxiliary", "method", "to", "make", "a", "draw", "object", "if", "not", "yet", "done", ".", "This", "is", "also", "called", "by", "changecolourmode", "when", "we", "go", "from", "L", "to", "RGB", "to", "get", "a", "new", "draw", "object", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L626-L631
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.defaultcolour
def defaultcolour(self, colour): """ Auxiliary method to choose a default colour. Give me a user provided colour : if it is None, I change it to the default colour, respecting negative. Plus, if the image is in RGB mode and you give me 128 for a gray, I translate this to the expected (128, 128, 128) ... """ if colour == None: if self.negative == True: if self.pilimage.mode == "L" : return 0 else : return (0, 0, 0) else : if self.pilimage.mode == "L" : return 255 else : return (255, 255, 255) else : if self.pilimage.mode == "RGB" and type(colour) == type(0): return (colour, colour, colour) else : return colour
python
def defaultcolour(self, colour): """ Auxiliary method to choose a default colour. Give me a user provided colour : if it is None, I change it to the default colour, respecting negative. Plus, if the image is in RGB mode and you give me 128 for a gray, I translate this to the expected (128, 128, 128) ... """ if colour == None: if self.negative == True: if self.pilimage.mode == "L" : return 0 else : return (0, 0, 0) else : if self.pilimage.mode == "L" : return 255 else : return (255, 255, 255) else : if self.pilimage.mode == "RGB" and type(colour) == type(0): return (colour, colour, colour) else : return colour
[ "def", "defaultcolour", "(", "self", ",", "colour", ")", ":", "if", "colour", "==", "None", ":", "if", "self", ".", "negative", "==", "True", ":", "if", "self", ".", "pilimage", ".", "mode", "==", "\"L\"", ":", "return", "0", "else", ":", "return", "(", "0", ",", "0", ",", "0", ")", "else", ":", "if", "self", ".", "pilimage", ".", "mode", "==", "\"L\"", ":", "return", "255", "else", ":", "return", "(", "255", ",", "255", ",", "255", ")", "else", ":", "if", "self", ".", "pilimage", ".", "mode", "==", "\"RGB\"", "and", "type", "(", "colour", ")", "==", "type", "(", "0", ")", ":", "return", "(", "colour", ",", "colour", ",", "colour", ")", "else", ":", "return", "colour" ]
Auxiliary method to choose a default colour. Give me a user provided colour : if it is None, I change it to the default colour, respecting negative. Plus, if the image is in RGB mode and you give me 128 for a gray, I translate this to the expected (128, 128, 128) ...
[ "Auxiliary", "method", "to", "choose", "a", "default", "colour", ".", "Give", "me", "a", "user", "provided", "colour", ":", "if", "it", "is", "None", "I", "change", "it", "to", "the", "default", "colour", "respecting", "negative", ".", "Plus", "if", "the", "image", "is", "in", "RGB", "mode", "and", "you", "give", "me", "128", "for", "a", "gray", "I", "translate", "this", "to", "the", "expected", "(", "128", "128", "128", ")", "..." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L633-L654
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.loadtitlefont
def loadtitlefont(self): """Auxiliary method to load font if not yet done.""" if self.titlefont == None: # print 'the bloody fonts dir is????', fontsdir # print 'pero esto que hace??', os.path.join(fontsdir, "courR18.pil") # /home/vital/Workspace/pyResources/Scientific_Lib/f2n_fonts/f2n_fonts/courR18.pil # /home/vital/Workspace/pyResources/Scientific_Lib/f2n_fonts self.titlefont = imft.load_path(os.path.join(fontsdir, "courR18.pil"))
python
def loadtitlefont(self): """Auxiliary method to load font if not yet done.""" if self.titlefont == None: # print 'the bloody fonts dir is????', fontsdir # print 'pero esto que hace??', os.path.join(fontsdir, "courR18.pil") # /home/vital/Workspace/pyResources/Scientific_Lib/f2n_fonts/f2n_fonts/courR18.pil # /home/vital/Workspace/pyResources/Scientific_Lib/f2n_fonts self.titlefont = imft.load_path(os.path.join(fontsdir, "courR18.pil"))
[ "def", "loadtitlefont", "(", "self", ")", ":", "if", "self", ".", "titlefont", "==", "None", ":", "# print 'the bloody fonts dir is????', fontsdir", "# print 'pero esto que hace??', os.path.join(fontsdir, \"courR18.pil\")", "# /home/vital/Workspace/pyResources/Scientific_Lib/f2n_fonts/f2n_fonts/courR18.pil", "# /home/vital/Workspace/pyResources/Scientific_Lib/f2n_fonts", "self", ".", "titlefont", "=", "imft", ".", "load_path", "(", "os", ".", "path", ".", "join", "(", "fontsdir", ",", "\"courR18.pil\"", ")", ")" ]
Auxiliary method to load font if not yet done.
[ "Auxiliary", "method", "to", "load", "font", "if", "not", "yet", "done", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L656-L663
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.loadinfofont
def loadinfofont(self): """Auxiliary method to load font if not yet done.""" if self.infofont == None: self.infofont = imft.load_path(os.path.join(fontsdir, "courR10.pil"))
python
def loadinfofont(self): """Auxiliary method to load font if not yet done.""" if self.infofont == None: self.infofont = imft.load_path(os.path.join(fontsdir, "courR10.pil"))
[ "def", "loadinfofont", "(", "self", ")", ":", "if", "self", ".", "infofont", "==", "None", ":", "self", ".", "infofont", "=", "imft", ".", "load_path", "(", "os", ".", "path", ".", "join", "(", "fontsdir", ",", "\"courR10.pil\"", ")", ")" ]
Auxiliary method to load font if not yet done.
[ "Auxiliary", "method", "to", "load", "font", "if", "not", "yet", "done", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L665-L668
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.loadlabelfont
def loadlabelfont(self): """Auxiliary method to load font if not yet done.""" if self.labelfont == None: self.labelfont = imft.load_path(os.path.join(fontsdir, "courR10.pil"))
python
def loadlabelfont(self): """Auxiliary method to load font if not yet done.""" if self.labelfont == None: self.labelfont = imft.load_path(os.path.join(fontsdir, "courR10.pil"))
[ "def", "loadlabelfont", "(", "self", ")", ":", "if", "self", ".", "labelfont", "==", "None", ":", "self", ".", "labelfont", "=", "imft", ".", "load_path", "(", "os", ".", "path", ".", "join", "(", "fontsdir", ",", "\"courR10.pil\"", ")", ")" ]
Auxiliary method to load font if not yet done.
[ "Auxiliary", "method", "to", "load", "font", "if", "not", "yet", "done", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L670-L673
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.changecolourmode
def changecolourmode(self, newcolour): """Auxiliary method to change the colour mode. Give me a colour (either an int, or a 3-tuple, values 0 to 255) and I decide if the image mode has to be switched from "L" to "RGB". """ if type(newcolour) != type(0) and self.pilimage.mode != "RGB": if self.verbose : print "Switching to RGB !" self.pilimage = self.pilimage.convert("RGB") self.draw = None # important, we have to bebuild the draw object. self.makedraw()
python
def changecolourmode(self, newcolour): """Auxiliary method to change the colour mode. Give me a colour (either an int, or a 3-tuple, values 0 to 255) and I decide if the image mode has to be switched from "L" to "RGB". """ if type(newcolour) != type(0) and self.pilimage.mode != "RGB": if self.verbose : print "Switching to RGB !" self.pilimage = self.pilimage.convert("RGB") self.draw = None # important, we have to bebuild the draw object. self.makedraw()
[ "def", "changecolourmode", "(", "self", ",", "newcolour", ")", ":", "if", "type", "(", "newcolour", ")", "!=", "type", "(", "0", ")", "and", "self", ".", "pilimage", ".", "mode", "!=", "\"RGB\"", ":", "if", "self", ".", "verbose", ":", "print", "\"Switching to RGB !\"", "self", ".", "pilimage", "=", "self", ".", "pilimage", ".", "convert", "(", "\"RGB\"", ")", "self", ".", "draw", "=", "None", "# important, we have to bebuild the draw object.", "self", ".", "makedraw", "(", ")" ]
Auxiliary method to change the colour mode. Give me a colour (either an int, or a 3-tuple, values 0 to 255) and I decide if the image mode has to be switched from "L" to "RGB".
[ "Auxiliary", "method", "to", "change", "the", "colour", "mode", ".", "Give", "me", "a", "colour", "(", "either", "an", "int", "or", "a", "3", "-", "tuple", "values", "0", "to", "255", ")", "and", "I", "decide", "if", "the", "image", "mode", "has", "to", "be", "switched", "from", "L", "to", "RGB", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L675-L685
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.upsample
def upsample(self, factor): """ The inverse operation of rebin, applied on the PIL image. Do this before writing text or drawing on the image ! The coordinates will be automatically converted for you """ self.checkforpilimage() if type(factor) != type(0): raise RuntimeError, "Upsample factor must be an integer !" if self.verbose: print "Upsampling by a factor of %i" % factor self.pilimage = self.pilimage.resize((self.pilimage.size[0] * factor, self.pilimage.size[1] * factor)) self.upsamplefactor = factor self.draw = None
python
def upsample(self, factor): """ The inverse operation of rebin, applied on the PIL image. Do this before writing text or drawing on the image ! The coordinates will be automatically converted for you """ self.checkforpilimage() if type(factor) != type(0): raise RuntimeError, "Upsample factor must be an integer !" if self.verbose: print "Upsampling by a factor of %i" % factor self.pilimage = self.pilimage.resize((self.pilimage.size[0] * factor, self.pilimage.size[1] * factor)) self.upsamplefactor = factor self.draw = None
[ "def", "upsample", "(", "self", ",", "factor", ")", ":", "self", ".", "checkforpilimage", "(", ")", "if", "type", "(", "factor", ")", "!=", "type", "(", "0", ")", ":", "raise", "RuntimeError", ",", "\"Upsample factor must be an integer !\"", "if", "self", ".", "verbose", ":", "print", "\"Upsampling by a factor of %i\"", "%", "factor", "self", ".", "pilimage", "=", "self", ".", "pilimage", ".", "resize", "(", "(", "self", ".", "pilimage", ".", "size", "[", "0", "]", "*", "factor", ",", "self", ".", "pilimage", ".", "size", "[", "1", "]", "*", "factor", ")", ")", "self", ".", "upsamplefactor", "=", "factor", "self", ".", "draw", "=", "None" ]
The inverse operation of rebin, applied on the PIL image. Do this before writing text or drawing on the image ! The coordinates will be automatically converted for you
[ "The", "inverse", "operation", "of", "rebin", "applied", "on", "the", "PIL", "image", ".", "Do", "this", "before", "writing", "text", "or", "drawing", "on", "the", "image", "!", "The", "coordinates", "will", "be", "automatically", "converted", "for", "you" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L688-L705
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.pilcoords
def pilcoords(self, (x,y)): """ Converts the coordinates (x,y) of the original array or FITS file to the current coordinates of the PIL image, respecting cropping, rebinning, and upsampling. This is only used once the PIL image is available, for drawing. Note that we also have to take care about the different origin conventions here ! For PIL, (0,0) is top left, so the y axis needs to be inverted. """ pilx = int((x - 1 - self.xa) * float(self.upsamplefactor) / float(self.binfactor)) pily = int((self.yb - y) * float(self.upsamplefactor) / float(self.binfactor)) return (pilx, pily)
python
def pilcoords(self, (x,y)): """ Converts the coordinates (x,y) of the original array or FITS file to the current coordinates of the PIL image, respecting cropping, rebinning, and upsampling. This is only used once the PIL image is available, for drawing. Note that we also have to take care about the different origin conventions here ! For PIL, (0,0) is top left, so the y axis needs to be inverted. """ pilx = int((x - 1 - self.xa) * float(self.upsamplefactor) / float(self.binfactor)) pily = int((self.yb - y) * float(self.upsamplefactor) / float(self.binfactor)) return (pilx, pily)
[ "def", "pilcoords", "(", "self", ",", "(", "x", ",", "y", ")", ")", ":", "pilx", "=", "int", "(", "(", "x", "-", "1", "-", "self", ".", "xa", ")", "*", "float", "(", "self", ".", "upsamplefactor", ")", "/", "float", "(", "self", ".", "binfactor", ")", ")", "pily", "=", "int", "(", "(", "self", ".", "yb", "-", "y", ")", "*", "float", "(", "self", ".", "upsamplefactor", ")", "/", "float", "(", "self", ".", "binfactor", ")", ")", "return", "(", "pilx", ",", "pily", ")" ]
Converts the coordinates (x,y) of the original array or FITS file to the current coordinates of the PIL image, respecting cropping, rebinning, and upsampling. This is only used once the PIL image is available, for drawing. Note that we also have to take care about the different origin conventions here ! For PIL, (0,0) is top left, so the y axis needs to be inverted.
[ "Converts", "the", "coordinates", "(", "x", "y", ")", "of", "the", "original", "array", "or", "FITS", "file", "to", "the", "current", "coordinates", "of", "the", "PIL", "image", "respecting", "cropping", "rebinning", "and", "upsampling", ".", "This", "is", "only", "used", "once", "the", "PIL", "image", "is", "available", "for", "drawing", ".", "Note", "that", "we", "also", "have", "to", "take", "care", "about", "the", "different", "origin", "conventions", "here", "!", "For", "PIL", "(", "0", "0", ")", "is", "top", "left", "so", "the", "y", "axis", "needs", "to", "be", "inverted", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L708-L720
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.pilscale
def pilscale(self, r): """ Converts a "scale" (like an aperture radius) of the original array or FITS file to the current PIL coordinates. """ return r * float(self.upsamplefactor) / float(self.binfactor)
python
def pilscale(self, r): """ Converts a "scale" (like an aperture radius) of the original array or FITS file to the current PIL coordinates. """ return r * float(self.upsamplefactor) / float(self.binfactor)
[ "def", "pilscale", "(", "self", ",", "r", ")", ":", "return", "r", "*", "float", "(", "self", ".", "upsamplefactor", ")", "/", "float", "(", "self", ".", "binfactor", ")" ]
Converts a "scale" (like an aperture radius) of the original array or FITS file to the current PIL coordinates.
[ "Converts", "a", "scale", "(", "like", "an", "aperture", "radius", ")", "of", "the", "original", "array", "or", "FITS", "file", "to", "the", "current", "PIL", "coordinates", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L722-L726
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.drawpoint
def drawpoint(self, x, y, colour = None): """ Most elementary drawing, single pixel, used mainly for testing purposes. Coordinates are those of your initial image ! """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilx, pily) = self.pilcoords((x,y)) self.draw.point((pilx, pily), fill = colour)
python
def drawpoint(self, x, y, colour = None): """ Most elementary drawing, single pixel, used mainly for testing purposes. Coordinates are those of your initial image ! """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilx, pily) = self.pilcoords((x,y)) self.draw.point((pilx, pily), fill = colour)
[ "def", "drawpoint", "(", "self", ",", "x", ",", "y", ",", "colour", "=", "None", ")", ":", "self", ".", "checkforpilimage", "(", ")", "colour", "=", "self", ".", "defaultcolour", "(", "colour", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "(", "pilx", ",", "pily", ")", "=", "self", ".", "pilcoords", "(", "(", "x", ",", "y", ")", ")", "self", ".", "draw", ".", "point", "(", "(", "pilx", ",", "pily", ")", ",", "fill", "=", "colour", ")" ]
Most elementary drawing, single pixel, used mainly for testing purposes. Coordinates are those of your initial image !
[ "Most", "elementary", "drawing", "single", "pixel", "used", "mainly", "for", "testing", "purposes", ".", "Coordinates", "are", "those", "of", "your", "initial", "image", "!" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L729-L741
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.drawcircle
def drawcircle(self, x, y, r = 10, colour = None, label = None): """ Draws a circle centered on (x, y) with radius r. All these are in the coordinates of your initial image ! You give these x and y in the usual ds9 pixels, (0,0) is bottom left. I will convert this into the right PIL coordiates. """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilx, pily) = self.pilcoords((x,y)) pilr = self.pilscale(r) self.draw.ellipse([(pilx-pilr+1, pily-pilr+1), (pilx+pilr+1, pily+pilr+1)], outline = colour) if label != None: # The we write it : self.loadlabelfont() textwidth = self.draw.textsize(label, font = self.labelfont)[0] self.draw.text((pilx - float(textwidth)/2.0 + 2, pily + pilr + 4), label, fill = colour, font = self.labelfont)
python
def drawcircle(self, x, y, r = 10, colour = None, label = None): """ Draws a circle centered on (x, y) with radius r. All these are in the coordinates of your initial image ! You give these x and y in the usual ds9 pixels, (0,0) is bottom left. I will convert this into the right PIL coordiates. """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilx, pily) = self.pilcoords((x,y)) pilr = self.pilscale(r) self.draw.ellipse([(pilx-pilr+1, pily-pilr+1), (pilx+pilr+1, pily+pilr+1)], outline = colour) if label != None: # The we write it : self.loadlabelfont() textwidth = self.draw.textsize(label, font = self.labelfont)[0] self.draw.text((pilx - float(textwidth)/2.0 + 2, pily + pilr + 4), label, fill = colour, font = self.labelfont)
[ "def", "drawcircle", "(", "self", ",", "x", ",", "y", ",", "r", "=", "10", ",", "colour", "=", "None", ",", "label", "=", "None", ")", ":", "self", ".", "checkforpilimage", "(", ")", "colour", "=", "self", ".", "defaultcolour", "(", "colour", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "(", "pilx", ",", "pily", ")", "=", "self", ".", "pilcoords", "(", "(", "x", ",", "y", ")", ")", "pilr", "=", "self", ".", "pilscale", "(", "r", ")", "self", ".", "draw", ".", "ellipse", "(", "[", "(", "pilx", "-", "pilr", "+", "1", ",", "pily", "-", "pilr", "+", "1", ")", ",", "(", "pilx", "+", "pilr", "+", "1", ",", "pily", "+", "pilr", "+", "1", ")", "]", ",", "outline", "=", "colour", ")", "if", "label", "!=", "None", ":", "# The we write it :", "self", ".", "loadlabelfont", "(", ")", "textwidth", "=", "self", ".", "draw", ".", "textsize", "(", "label", ",", "font", "=", "self", ".", "labelfont", ")", "[", "0", "]", "self", ".", "draw", ".", "text", "(", "(", "pilx", "-", "float", "(", "textwidth", ")", "/", "2.0", "+", "2", ",", "pily", "+", "pilr", "+", "4", ")", ",", "label", ",", "fill", "=", "colour", ",", "font", "=", "self", ".", "labelfont", ")" ]
Draws a circle centered on (x, y) with radius r. All these are in the coordinates of your initial image ! You give these x and y in the usual ds9 pixels, (0,0) is bottom left. I will convert this into the right PIL coordiates.
[ "Draws", "a", "circle", "centered", "on", "(", "x", "y", ")", "with", "radius", "r", ".", "All", "these", "are", "in", "the", "coordinates", "of", "your", "initial", "image", "!", "You", "give", "these", "x", "and", "y", "in", "the", "usual", "ds9", "pixels", "(", "0", "0", ")", "is", "bottom", "left", ".", "I", "will", "convert", "this", "into", "the", "right", "PIL", "coordiates", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L744-L765
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.drawrectangle
def drawrectangle(self, xa, xb, ya, yb, colour=None, label = None): """ Draws a 1-pixel wide frame AROUND the region you specify. Same convention as for crop(). """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilxa, pilya) = self.pilcoords((xa,ya)) (pilxb, pilyb) = self.pilcoords((xb,yb)) self.draw.rectangle([(pilxa, pilyb-1), (pilxb+1, pilya)], outline = colour) if label != None: # The we write it : self.loadlabelfont() textwidth = self.draw.textsize(label, font = self.labelfont)[0] self.draw.text(((pilxa + pilxb)/2.0 - float(textwidth)/2.0 + 1, pilya + 2), label, fill = colour, font = self.labelfont)
python
def drawrectangle(self, xa, xb, ya, yb, colour=None, label = None): """ Draws a 1-pixel wide frame AROUND the region you specify. Same convention as for crop(). """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilxa, pilya) = self.pilcoords((xa,ya)) (pilxb, pilyb) = self.pilcoords((xb,yb)) self.draw.rectangle([(pilxa, pilyb-1), (pilxb+1, pilya)], outline = colour) if label != None: # The we write it : self.loadlabelfont() textwidth = self.draw.textsize(label, font = self.labelfont)[0] self.draw.text(((pilxa + pilxb)/2.0 - float(textwidth)/2.0 + 1, pilya + 2), label, fill = colour, font = self.labelfont)
[ "def", "drawrectangle", "(", "self", ",", "xa", ",", "xb", ",", "ya", ",", "yb", ",", "colour", "=", "None", ",", "label", "=", "None", ")", ":", "self", ".", "checkforpilimage", "(", ")", "colour", "=", "self", ".", "defaultcolour", "(", "colour", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "(", "pilxa", ",", "pilya", ")", "=", "self", ".", "pilcoords", "(", "(", "xa", ",", "ya", ")", ")", "(", "pilxb", ",", "pilyb", ")", "=", "self", ".", "pilcoords", "(", "(", "xb", ",", "yb", ")", ")", "self", ".", "draw", ".", "rectangle", "(", "[", "(", "pilxa", ",", "pilyb", "-", "1", ")", ",", "(", "pilxb", "+", "1", ",", "pilya", ")", "]", ",", "outline", "=", "colour", ")", "if", "label", "!=", "None", ":", "# The we write it :", "self", ".", "loadlabelfont", "(", ")", "textwidth", "=", "self", ".", "draw", ".", "textsize", "(", "label", ",", "font", "=", "self", ".", "labelfont", ")", "[", "0", "]", "self", ".", "draw", ".", "text", "(", "(", "(", "pilxa", "+", "pilxb", ")", "/", "2.0", "-", "float", "(", "textwidth", ")", "/", "2.0", "+", "1", ",", "pilya", "+", "2", ")", ",", "label", ",", "fill", "=", "colour", ",", "font", "=", "self", ".", "labelfont", ")" ]
Draws a 1-pixel wide frame AROUND the region you specify. Same convention as for crop().
[ "Draws", "a", "1", "-", "pixel", "wide", "frame", "AROUND", "the", "region", "you", "specify", ".", "Same", "convention", "as", "for", "crop", "()", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L769-L789
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.writetitle
def writetitle(self, titlestring, colour = None): """ We write a title, centered below the image. """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() self.loadtitlefont() imgwidth = self.pilimage.size[0] imgheight = self.pilimage.size[1] textwidth = self.draw.textsize(titlestring, font = self.titlefont)[0] textxpos = imgwidth/2.0 - textwidth/2.0 textypos = imgheight - 30 self.draw.text((textxpos, textypos), titlestring, fill = colour, font = self.titlefont) if self.verbose : print "I've written a title on the image."
python
def writetitle(self, titlestring, colour = None): """ We write a title, centered below the image. """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() self.loadtitlefont() imgwidth = self.pilimage.size[0] imgheight = self.pilimage.size[1] textwidth = self.draw.textsize(titlestring, font = self.titlefont)[0] textxpos = imgwidth/2.0 - textwidth/2.0 textypos = imgheight - 30 self.draw.text((textxpos, textypos), titlestring, fill = colour, font = self.titlefont) if self.verbose : print "I've written a title on the image."
[ "def", "writetitle", "(", "self", ",", "titlestring", ",", "colour", "=", "None", ")", ":", "self", ".", "checkforpilimage", "(", ")", "colour", "=", "self", ".", "defaultcolour", "(", "colour", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "self", ".", "loadtitlefont", "(", ")", "imgwidth", "=", "self", ".", "pilimage", ".", "size", "[", "0", "]", "imgheight", "=", "self", ".", "pilimage", ".", "size", "[", "1", "]", "textwidth", "=", "self", ".", "draw", ".", "textsize", "(", "titlestring", ",", "font", "=", "self", ".", "titlefont", ")", "[", "0", "]", "textxpos", "=", "imgwidth", "/", "2.0", "-", "textwidth", "/", "2.0", "textypos", "=", "imgheight", "-", "30", "self", ".", "draw", ".", "text", "(", "(", "textxpos", ",", "textypos", ")", ",", "titlestring", ",", "fill", "=", "colour", ",", "font", "=", "self", ".", "titlefont", ")", "if", "self", ".", "verbose", ":", "print", "\"I've written a title on the image.\"" ]
We write a title, centered below the image.
[ "We", "write", "a", "title", "centered", "below", "the", "image", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L816-L837
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.writeinfo
def writeinfo(self, linelist, colour = None): """ We add a longer chunk of text on the upper left corner of the image. Provide linelist, a list of strings that will be written one below the other. """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() self.loadinfofont() for i, line in enumerate(linelist): topspacing = 5 + (12 + 5)*i self.draw.text((10, topspacing), line, fill = colour, font = self.infofont) if self.verbose : print "I've written some info on the image."
python
def writeinfo(self, linelist, colour = None): """ We add a longer chunk of text on the upper left corner of the image. Provide linelist, a list of strings that will be written one below the other. """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() self.loadinfofont() for i, line in enumerate(linelist): topspacing = 5 + (12 + 5)*i self.draw.text((10, topspacing), line, fill = colour, font = self.infofont) if self.verbose : print "I've written some info on the image."
[ "def", "writeinfo", "(", "self", ",", "linelist", ",", "colour", "=", "None", ")", ":", "self", ".", "checkforpilimage", "(", ")", "colour", "=", "self", ".", "defaultcolour", "(", "colour", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "self", ".", "loadinfofont", "(", ")", "for", "i", ",", "line", "in", "enumerate", "(", "linelist", ")", ":", "topspacing", "=", "5", "+", "(", "12", "+", "5", ")", "*", "i", "self", ".", "draw", ".", "text", "(", "(", "10", ",", "topspacing", ")", ",", "line", ",", "fill", "=", "colour", ",", "font", "=", "self", ".", "infofont", ")", "if", "self", ".", "verbose", ":", "print", "\"I've written some info on the image.\"" ]
We add a longer chunk of text on the upper left corner of the image. Provide linelist, a list of strings that will be written one below the other.
[ "We", "add", "a", "longer", "chunk", "of", "text", "on", "the", "upper", "left", "corner", "of", "the", "image", ".", "Provide", "linelist", "a", "list", "of", "strings", "that", "will", "be", "written", "one", "below", "the", "other", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L841-L861
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.drawstarslist
def drawstarslist(self, dictlist, r = 10, colour = None): """ Calls drawcircle and writelable for an list of stars. Provide a list of dictionnaries, where each dictionnary contains "name", "x", and "y". """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() for star in dictlist: self.drawcircle(star["x"], star["y"], r = r, colour = colour, label = star["name"]) #self.writelabel(star["x"], star["y"], star["name"], r = r, colour = colour) if self.verbose : print "I've drawn %i stars." % len(dictlist)
python
def drawstarslist(self, dictlist, r = 10, colour = None): """ Calls drawcircle and writelable for an list of stars. Provide a list of dictionnaries, where each dictionnary contains "name", "x", and "y". """ self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() for star in dictlist: self.drawcircle(star["x"], star["y"], r = r, colour = colour, label = star["name"]) #self.writelabel(star["x"], star["y"], star["name"], r = r, colour = colour) if self.verbose : print "I've drawn %i stars." % len(dictlist)
[ "def", "drawstarslist", "(", "self", ",", "dictlist", ",", "r", "=", "10", ",", "colour", "=", "None", ")", ":", "self", ".", "checkforpilimage", "(", ")", "colour", "=", "self", ".", "defaultcolour", "(", "colour", ")", "self", ".", "changecolourmode", "(", "colour", ")", "self", ".", "makedraw", "(", ")", "for", "star", "in", "dictlist", ":", "self", ".", "drawcircle", "(", "star", "[", "\"x\"", "]", ",", "star", "[", "\"y\"", "]", ",", "r", "=", "r", ",", "colour", "=", "colour", ",", "label", "=", "star", "[", "\"name\"", "]", ")", "#self.writelabel(star[\"x\"], star[\"y\"], star[\"name\"], r = r, colour = colour)", "if", "self", ".", "verbose", ":", "print", "\"I've drawn %i stars.\"", "%", "len", "(", "dictlist", ")" ]
Calls drawcircle and writelable for an list of stars. Provide a list of dictionnaries, where each dictionnary contains "name", "x", and "y".
[ "Calls", "drawcircle", "and", "writelable", "for", "an", "list", "of", "stars", ".", "Provide", "a", "list", "of", "dictionnaries", "where", "each", "dictionnary", "contains", "name", "x", "and", "y", "." ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L865-L883
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.drawstarsfile
def drawstarsfile(self, filename, r = 10, colour = None): """ Same as drawstarlist but we read the stars from a file. Here we read a text file of hand picked stars. Same format as for cosmouline, that is : # comment starA 23.4 45.6 [other stuff...] Then we pass this to drawstarlist, """ if not os.path.isfile(filename): print "File does not exist :" print filename print "Line format to write : name x y [other stuff ...]" raise RuntimeError, "Cannot read star catalog." catfile = open(filename, "r") lines = catfile.readlines() catfile.close dictlist=[] # We will append dicts here. for i, line in enumerate(lines): if line[0] == '#' or len(line) < 4: continue elements = line.split() nbelements = len(elements) if nbelements < 3: print "Format error on line", i+1, "of :" print filename print "We want : name x y [other stuff ...]" raise RuntimeError, "Cannot read star catalog." name = elements[0] x = float(elements[1]) y = float(elements[2]) dictlist.append({"name":name, "x":x, "y":y}) if self.verbose : print "I've read %i stars from :" print os.path.split(filename)[1] self.drawstarslist(dictlist, r = r, colour = colour)
python
def drawstarsfile(self, filename, r = 10, colour = None): """ Same as drawstarlist but we read the stars from a file. Here we read a text file of hand picked stars. Same format as for cosmouline, that is : # comment starA 23.4 45.6 [other stuff...] Then we pass this to drawstarlist, """ if not os.path.isfile(filename): print "File does not exist :" print filename print "Line format to write : name x y [other stuff ...]" raise RuntimeError, "Cannot read star catalog." catfile = open(filename, "r") lines = catfile.readlines() catfile.close dictlist=[] # We will append dicts here. for i, line in enumerate(lines): if line[0] == '#' or len(line) < 4: continue elements = line.split() nbelements = len(elements) if nbelements < 3: print "Format error on line", i+1, "of :" print filename print "We want : name x y [other stuff ...]" raise RuntimeError, "Cannot read star catalog." name = elements[0] x = float(elements[1]) y = float(elements[2]) dictlist.append({"name":name, "x":x, "y":y}) if self.verbose : print "I've read %i stars from :" print os.path.split(filename)[1] self.drawstarslist(dictlist, r = r, colour = colour)
[ "def", "drawstarsfile", "(", "self", ",", "filename", ",", "r", "=", "10", ",", "colour", "=", "None", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "print", "\"File does not exist :\"", "print", "filename", "print", "\"Line format to write : name x y [other stuff ...]\"", "raise", "RuntimeError", ",", "\"Cannot read star catalog.\"", "catfile", "=", "open", "(", "filename", ",", "\"r\"", ")", "lines", "=", "catfile", ".", "readlines", "(", ")", "catfile", ".", "close", "dictlist", "=", "[", "]", "# We will append dicts here.", "for", "i", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "if", "line", "[", "0", "]", "==", "'#'", "or", "len", "(", "line", ")", "<", "4", ":", "continue", "elements", "=", "line", ".", "split", "(", ")", "nbelements", "=", "len", "(", "elements", ")", "if", "nbelements", "<", "3", ":", "print", "\"Format error on line\"", ",", "i", "+", "1", ",", "\"of :\"", "print", "filename", "print", "\"We want : name x y [other stuff ...]\"", "raise", "RuntimeError", ",", "\"Cannot read star catalog.\"", "name", "=", "elements", "[", "0", "]", "x", "=", "float", "(", "elements", "[", "1", "]", ")", "y", "=", "float", "(", "elements", "[", "2", "]", ")", "dictlist", ".", "append", "(", "{", "\"name\"", ":", "name", ",", "\"x\"", ":", "x", ",", "\"y\"", ":", "y", "}", ")", "if", "self", ".", "verbose", ":", "print", "\"I've read %i stars from :\"", "print", "os", ".", "path", ".", "split", "(", "filename", ")", "[", "1", "]", "self", ".", "drawstarslist", "(", "dictlist", ",", "r", "=", "r", ",", "colour", "=", "colour", ")" ]
Same as drawstarlist but we read the stars from a file. Here we read a text file of hand picked stars. Same format as for cosmouline, that is : # comment starA 23.4 45.6 [other stuff...] Then we pass this to drawstarlist,
[ "Same", "as", "drawstarlist", "but", "we", "read", "the", "stars", "from", "a", "file", ".", "Here", "we", "read", "a", "text", "file", "of", "hand", "picked", "stars", ".", "Same", "format", "as", "for", "cosmouline", "that", "is", ":", "#", "comment", "starA", "23", ".", "4", "45", ".", "6", "[", "other", "stuff", "...", "]", "Then", "we", "pass", "this", "to", "drawstarlist" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L886-L925
Vital-Fernandez/dazer
bin/lib/Astro_Libraries/f2n.py
f2nimage.tonet
def tonet(self, outfile): """ Writes the PIL image into a png. We do not want to flip the image at this stage, as you might have written on it ! """ self.checkforpilimage() if self.verbose : print "Writing image to %s...\n%i x %i pixels, mode %s" % (outfile, self.pilimage.size[0], self.pilimage.size[1], self.pilimage.mode) self.pilimage.save(outfile, "PNG")
python
def tonet(self, outfile): """ Writes the PIL image into a png. We do not want to flip the image at this stage, as you might have written on it ! """ self.checkforpilimage() if self.verbose : print "Writing image to %s...\n%i x %i pixels, mode %s" % (outfile, self.pilimage.size[0], self.pilimage.size[1], self.pilimage.mode) self.pilimage.save(outfile, "PNG")
[ "def", "tonet", "(", "self", ",", "outfile", ")", ":", "self", ".", "checkforpilimage", "(", ")", "if", "self", ".", "verbose", ":", "print", "\"Writing image to %s...\\n%i x %i pixels, mode %s\"", "%", "(", "outfile", ",", "self", ".", "pilimage", ".", "size", "[", "0", "]", ",", "self", ".", "pilimage", ".", "size", "[", "1", "]", ",", "self", ".", "pilimage", ".", "mode", ")", "self", ".", "pilimage", ".", "save", "(", "outfile", ",", "\"PNG\"", ")" ]
Writes the PIL image into a png. We do not want to flip the image at this stage, as you might have written on it !
[ "Writes", "the", "PIL", "image", "into", "a", "png", ".", "We", "do", "not", "want", "to", "flip", "the", "image", "at", "this", "stage", "as", "you", "might", "have", "written", "on", "it", "!" ]
train
https://github.com/Vital-Fernandez/dazer/blob/3c9ae8ae6d40ea33f22cc20dc11365d6d6e65244/bin/lib/Astro_Libraries/f2n.py#L928-L937
bitesofcode/pyramid_restful
pyramid_restful/api.py
ApiFactory.cors_setup
def cors_setup(self, request): """ Sets up the CORS headers response based on the settings used for the API. :param request: <pyramid.request.Request> """ def cors_headers(request, response): if request.method.lower() == 'options': response.headers.update({ '-'.join([p.capitalize() for p in k.split('_')]): v for k, v in self.cors_options.items() }) else: origin = self.cors_options.get('access_control_allow_origin', '*') expose_headers = self.cors_options.get('access_control_expose_headers', '') response.headers['Access-Control-Allow-Origin'] = origin if expose_headers: response.headers['Access-Control-Expose-Headers'] = expose_headers # setup the CORS supported response request.add_response_callback(cors_headers)
python
def cors_setup(self, request): """ Sets up the CORS headers response based on the settings used for the API. :param request: <pyramid.request.Request> """ def cors_headers(request, response): if request.method.lower() == 'options': response.headers.update({ '-'.join([p.capitalize() for p in k.split('_')]): v for k, v in self.cors_options.items() }) else: origin = self.cors_options.get('access_control_allow_origin', '*') expose_headers = self.cors_options.get('access_control_expose_headers', '') response.headers['Access-Control-Allow-Origin'] = origin if expose_headers: response.headers['Access-Control-Expose-Headers'] = expose_headers # setup the CORS supported response request.add_response_callback(cors_headers)
[ "def", "cors_setup", "(", "self", ",", "request", ")", ":", "def", "cors_headers", "(", "request", ",", "response", ")", ":", "if", "request", ".", "method", ".", "lower", "(", ")", "==", "'options'", ":", "response", ".", "headers", ".", "update", "(", "{", "'-'", ".", "join", "(", "[", "p", ".", "capitalize", "(", ")", "for", "p", "in", "k", ".", "split", "(", "'_'", ")", "]", ")", ":", "v", "for", "k", ",", "v", "in", "self", ".", "cors_options", ".", "items", "(", ")", "}", ")", "else", ":", "origin", "=", "self", ".", "cors_options", ".", "get", "(", "'access_control_allow_origin'", ",", "'*'", ")", "expose_headers", "=", "self", ".", "cors_options", ".", "get", "(", "'access_control_expose_headers'", ",", "''", ")", "response", ".", "headers", "[", "'Access-Control-Allow-Origin'", "]", "=", "origin", "if", "expose_headers", ":", "response", ".", "headers", "[", "'Access-Control-Expose-Headers'", "]", "=", "expose_headers", "# setup the CORS supported response", "request", ".", "add_response_callback", "(", "cors_headers", ")" ]
Sets up the CORS headers response based on the settings used for the API. :param request: <pyramid.request.Request>
[ "Sets", "up", "the", "CORS", "headers", "response", "based", "on", "the", "settings", "used", "for", "the", "API", "." ]
train
https://github.com/bitesofcode/pyramid_restful/blob/0f1eccb2c61b9bd6add03b461d4e4d7901c957da/pyramid_restful/api.py#L67-L87
bitesofcode/pyramid_restful
pyramid_restful/api.py
ApiFactory.factory
def factory(self, request, parent=None, name=None): """ Returns a new service for the given request. :param request | <pyramid.request.Request> :return <pyramid_restful.services.AbstractService> """ traverse = request.matchdict['traverse'] # show documentation at the root path if not traverse: return {} else: service = {} name = name or traverse[0] # look for direct pattern matches traversed = '/' + '/'.join(traverse) service_type = None service_object = None for route, endpoint in self.routes: result = route.match(traversed) if result is not None: request.matchdict = result request.endpoint = endpoint break else: try: service_type, service_object = self.services[name] except KeyError: raise HTTPNotFound() if service_type: if isinstance(service_type, Endpoint): service[name] = service_type elif service_object is None: service[name] = service_type(request) else: service[name] = service_type(request, service_object) request.api_service = service return service
python
def factory(self, request, parent=None, name=None): """ Returns a new service for the given request. :param request | <pyramid.request.Request> :return <pyramid_restful.services.AbstractService> """ traverse = request.matchdict['traverse'] # show documentation at the root path if not traverse: return {} else: service = {} name = name or traverse[0] # look for direct pattern matches traversed = '/' + '/'.join(traverse) service_type = None service_object = None for route, endpoint in self.routes: result = route.match(traversed) if result is not None: request.matchdict = result request.endpoint = endpoint break else: try: service_type, service_object = self.services[name] except KeyError: raise HTTPNotFound() if service_type: if isinstance(service_type, Endpoint): service[name] = service_type elif service_object is None: service[name] = service_type(request) else: service[name] = service_type(request, service_object) request.api_service = service return service
[ "def", "factory", "(", "self", ",", "request", ",", "parent", "=", "None", ",", "name", "=", "None", ")", ":", "traverse", "=", "request", ".", "matchdict", "[", "'traverse'", "]", "# show documentation at the root path", "if", "not", "traverse", ":", "return", "{", "}", "else", ":", "service", "=", "{", "}", "name", "=", "name", "or", "traverse", "[", "0", "]", "# look for direct pattern matches", "traversed", "=", "'/'", "+", "'/'", ".", "join", "(", "traverse", ")", "service_type", "=", "None", "service_object", "=", "None", "for", "route", ",", "endpoint", "in", "self", ".", "routes", ":", "result", "=", "route", ".", "match", "(", "traversed", ")", "if", "result", "is", "not", "None", ":", "request", ".", "matchdict", "=", "result", "request", ".", "endpoint", "=", "endpoint", "break", "else", ":", "try", ":", "service_type", ",", "service_object", "=", "self", ".", "services", "[", "name", "]", "except", "KeyError", ":", "raise", "HTTPNotFound", "(", ")", "if", "service_type", ":", "if", "isinstance", "(", "service_type", ",", "Endpoint", ")", ":", "service", "[", "name", "]", "=", "service_type", "elif", "service_object", "is", "None", ":", "service", "[", "name", "]", "=", "service_type", "(", "request", ")", "else", ":", "service", "[", "name", "]", "=", "service_type", "(", "request", ",", "service_object", ")", "request", ".", "api_service", "=", "service", "return", "service" ]
Returns a new service for the given request. :param request | <pyramid.request.Request> :return <pyramid_restful.services.AbstractService>
[ "Returns", "a", "new", "service", "for", "the", "given", "request", "." ]
train
https://github.com/bitesofcode/pyramid_restful/blob/0f1eccb2c61b9bd6add03b461d4e4d7901c957da/pyramid_restful/api.py#L89-L132
bitesofcode/pyramid_restful
pyramid_restful/api.py
ApiFactory.register
def register(self, service, name=''): """ Exposes a given service to this API. """ # expose a sub-factory if isinstance(service, ApiFactory): self.services[name] = (service.factory, None) # expose a module dynamically as a service elif inspect.ismodule(service): name = name or service.__name__.split('.')[-1] # exclude endpoints with patterns for obj in vars(service).values(): endpoint = getattr(obj, 'endpoint', None) if isinstance(endpoint, Endpoint) and endpoint.pattern: route = Route('', endpoint.pattern) self.routes.append((route, endpoint)) self.services[name] = (ModuleService, service) # expose a class dynamically as a service elif inspect.isclass(service): name = name or service.__name__ self.services[name] = (ClassService, service) # expose an endpoint directly elif isinstance(getattr(service, 'endpoint', None), Endpoint): if service.endpoint.pattern: route = Route('', service.endpoint.pattern) self.routes.append((route, service.endpoint)) else: self.services[service.endpoint.name] = (service.endpoint, None) # expose a scope elif isinstance(service, dict): for srv in service.values(): try: self.register(srv) except RuntimeError: pass # expose a list of services elif isinstance(service, list): for srv in service: try: self.register(srv) except RuntimeError: pass # expose a service directly else: raise RuntimeError('Invalid service provide: {0} ({1}).'.format(service, type(service)))
python
def register(self, service, name=''): """ Exposes a given service to this API. """ # expose a sub-factory if isinstance(service, ApiFactory): self.services[name] = (service.factory, None) # expose a module dynamically as a service elif inspect.ismodule(service): name = name or service.__name__.split('.')[-1] # exclude endpoints with patterns for obj in vars(service).values(): endpoint = getattr(obj, 'endpoint', None) if isinstance(endpoint, Endpoint) and endpoint.pattern: route = Route('', endpoint.pattern) self.routes.append((route, endpoint)) self.services[name] = (ModuleService, service) # expose a class dynamically as a service elif inspect.isclass(service): name = name or service.__name__ self.services[name] = (ClassService, service) # expose an endpoint directly elif isinstance(getattr(service, 'endpoint', None), Endpoint): if service.endpoint.pattern: route = Route('', service.endpoint.pattern) self.routes.append((route, service.endpoint)) else: self.services[service.endpoint.name] = (service.endpoint, None) # expose a scope elif isinstance(service, dict): for srv in service.values(): try: self.register(srv) except RuntimeError: pass # expose a list of services elif isinstance(service, list): for srv in service: try: self.register(srv) except RuntimeError: pass # expose a service directly else: raise RuntimeError('Invalid service provide: {0} ({1}).'.format(service, type(service)))
[ "def", "register", "(", "self", ",", "service", ",", "name", "=", "''", ")", ":", "# expose a sub-factory", "if", "isinstance", "(", "service", ",", "ApiFactory", ")", ":", "self", ".", "services", "[", "name", "]", "=", "(", "service", ".", "factory", ",", "None", ")", "# expose a module dynamically as a service", "elif", "inspect", ".", "ismodule", "(", "service", ")", ":", "name", "=", "name", "or", "service", ".", "__name__", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "# exclude endpoints with patterns", "for", "obj", "in", "vars", "(", "service", ")", ".", "values", "(", ")", ":", "endpoint", "=", "getattr", "(", "obj", ",", "'endpoint'", ",", "None", ")", "if", "isinstance", "(", "endpoint", ",", "Endpoint", ")", "and", "endpoint", ".", "pattern", ":", "route", "=", "Route", "(", "''", ",", "endpoint", ".", "pattern", ")", "self", ".", "routes", ".", "append", "(", "(", "route", ",", "endpoint", ")", ")", "self", ".", "services", "[", "name", "]", "=", "(", "ModuleService", ",", "service", ")", "# expose a class dynamically as a service", "elif", "inspect", ".", "isclass", "(", "service", ")", ":", "name", "=", "name", "or", "service", ".", "__name__", "self", ".", "services", "[", "name", "]", "=", "(", "ClassService", ",", "service", ")", "# expose an endpoint directly", "elif", "isinstance", "(", "getattr", "(", "service", ",", "'endpoint'", ",", "None", ")", ",", "Endpoint", ")", ":", "if", "service", ".", "endpoint", ".", "pattern", ":", "route", "=", "Route", "(", "''", ",", "service", ".", "endpoint", ".", "pattern", ")", "self", ".", "routes", ".", "append", "(", "(", "route", ",", "service", ".", "endpoint", ")", ")", "else", ":", "self", ".", "services", "[", "service", ".", "endpoint", ".", "name", "]", "=", "(", "service", ".", "endpoint", ",", "None", ")", "# expose a scope", "elif", "isinstance", "(", "service", ",", "dict", ")", ":", "for", "srv", "in", "service", ".", "values", "(", ")", ":", "try", ":", "self", ".", "register", "(", 
"srv", ")", "except", "RuntimeError", ":", "pass", "# expose a list of services", "elif", "isinstance", "(", "service", ",", "list", ")", ":", "for", "srv", "in", "service", ":", "try", ":", "self", ".", "register", "(", "srv", ")", "except", "RuntimeError", ":", "pass", "# expose a service directly", "else", ":", "raise", "RuntimeError", "(", "'Invalid service provide: {0} ({1}).'", ".", "format", "(", "service", ",", "type", "(", "service", ")", ")", ")" ]
Exposes a given service to this API.
[ "Exposes", "a", "given", "service", "to", "this", "API", "." ]
train
https://github.com/bitesofcode/pyramid_restful/blob/0f1eccb2c61b9bd6add03b461d4e4d7901c957da/pyramid_restful/api.py#L231-L283
bitesofcode/pyramid_restful
pyramid_restful/api.py
ApiFactory.serve
def serve(self, config, path, route_name=None, permission=None, **view_options): """ Serves this API from the inputted root path """ route_name = route_name or path.replace('/', '.').strip('.') path = path.strip('/') + '*traverse' self.route_name = route_name self.base_permission = permission # configure the route and the path config.add_route(route_name, path, factory=self.factory) config.add_view( self.handle_standard_error, route_name=route_name, renderer='json2', context=StandardError ), config.add_view( self.handle_http_error, route_name=route_name, renderer='json2', context=HTTPException ) config.add_view( self.process, route_name=route_name, renderer='json2', **view_options )
python
def serve(self, config, path, route_name=None, permission=None, **view_options): """ Serves this API from the inputted root path """ route_name = route_name or path.replace('/', '.').strip('.') path = path.strip('/') + '*traverse' self.route_name = route_name self.base_permission = permission # configure the route and the path config.add_route(route_name, path, factory=self.factory) config.add_view( self.handle_standard_error, route_name=route_name, renderer='json2', context=StandardError ), config.add_view( self.handle_http_error, route_name=route_name, renderer='json2', context=HTTPException ) config.add_view( self.process, route_name=route_name, renderer='json2', **view_options )
[ "def", "serve", "(", "self", ",", "config", ",", "path", ",", "route_name", "=", "None", ",", "permission", "=", "None", ",", "*", "*", "view_options", ")", ":", "route_name", "=", "route_name", "or", "path", ".", "replace", "(", "'/'", ",", "'.'", ")", ".", "strip", "(", "'.'", ")", "path", "=", "path", ".", "strip", "(", "'/'", ")", "+", "'*traverse'", "self", ".", "route_name", "=", "route_name", "self", ".", "base_permission", "=", "permission", "# configure the route and the path", "config", ".", "add_route", "(", "route_name", ",", "path", ",", "factory", "=", "self", ".", "factory", ")", "config", ".", "add_view", "(", "self", ".", "handle_standard_error", ",", "route_name", "=", "route_name", ",", "renderer", "=", "'json2'", ",", "context", "=", "StandardError", ")", ",", "config", ".", "add_view", "(", "self", ".", "handle_http_error", ",", "route_name", "=", "route_name", ",", "renderer", "=", "'json2'", ",", "context", "=", "HTTPException", ")", "config", ".", "add_view", "(", "self", ".", "process", ",", "route_name", "=", "route_name", ",", "renderer", "=", "'json2'", ",", "*", "*", "view_options", ")" ]
Serves this API from the inputted root path
[ "Serves", "this", "API", "from", "the", "inputted", "root", "path" ]
train
https://github.com/bitesofcode/pyramid_restful/blob/0f1eccb2c61b9bd6add03b461d4e4d7901c957da/pyramid_restful/api.py#L369-L398
jaredLunde/vital-tools
vital/debug/__init__.py
line
def line(separator="-·-", color=None, padding=None, num=1): """ Prints a line separator the full width of the terminal. @separator: the #str chars to create the line from @color: line color from :mod:vital.debug.colors @padding: adds extra lines to either the top, bottom or both of the line via :func:padd @num: #int number of lines to print .. from vital.debug import line line("__") ____________________________________________________________________ .. """ for x in range(num): columns = get_terminal_width() separator = "".join( separator for x in range(floor(columns/len(separator)))) print(padd(colorize(separator.strip(), color), padding))
python
def line(separator="-·-", color=None, padding=None, num=1): """ Prints a line separator the full width of the terminal. @separator: the #str chars to create the line from @color: line color from :mod:vital.debug.colors @padding: adds extra lines to either the top, bottom or both of the line via :func:padd @num: #int number of lines to print .. from vital.debug import line line("__") ____________________________________________________________________ .. """ for x in range(num): columns = get_terminal_width() separator = "".join( separator for x in range(floor(columns/len(separator)))) print(padd(colorize(separator.strip(), color), padding))
[ "def", "line", "(", "separator", "=", "\"-·-\",", " ", "olor=", "N", "one,", " ", "adding=", "N", "one,", " ", "um=", "1", ")", ":", "", "for", "x", "in", "range", "(", "num", ")", ":", "columns", "=", "get_terminal_width", "(", ")", "separator", "=", "\"\"", ".", "join", "(", "separator", "for", "x", "in", "range", "(", "floor", "(", "columns", "/", "len", "(", "separator", ")", ")", ")", ")", "print", "(", "padd", "(", "colorize", "(", "separator", ".", "strip", "(", ")", ",", "color", ")", ",", "padding", ")", ")" ]
Prints a line separator the full width of the terminal. @separator: the #str chars to create the line from @color: line color from :mod:vital.debug.colors @padding: adds extra lines to either the top, bottom or both of the line via :func:padd @num: #int number of lines to print .. from vital.debug import line line("__") ____________________________________________________________________ ..
[ "Prints", "a", "line", "separator", "the", "full", "width", "of", "the", "terminal", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L103-L122
jaredLunde/vital-tools
vital/debug/__init__.py
padd
def padd(text, padding="top", size=1): """ Adds extra new lines to the top, bottom or both of a String @text: #str text to pad @padding: #str 'top', 'bottom' or 'all' @size: #int number of new lines -> #str padded @text .. from vital.debug import * padd("Hello world") # -> '\\nHello world' padd("Hello world", size=5, padding="all") # -> '\\n\\n\\n\\n\\nHello world\\n\\n\\n\\n\\n' .. """ if padding: padding = padding.lower() pad_all = padding == 'all' padding_top = "" if padding and (padding == 'top' or pad_all): padding_top = "".join("\n" for x in range(size)) padding_bottom = "" if padding and (padding == 'bottom' or pad_all): padding_bottom = "".join("\n" for x in range(size)) return "{}{}{}".format(padding_top, text, padding_bottom) return text
python
def padd(text, padding="top", size=1): """ Adds extra new lines to the top, bottom or both of a String @text: #str text to pad @padding: #str 'top', 'bottom' or 'all' @size: #int number of new lines -> #str padded @text .. from vital.debug import * padd("Hello world") # -> '\\nHello world' padd("Hello world", size=5, padding="all") # -> '\\n\\n\\n\\n\\nHello world\\n\\n\\n\\n\\n' .. """ if padding: padding = padding.lower() pad_all = padding == 'all' padding_top = "" if padding and (padding == 'top' or pad_all): padding_top = "".join("\n" for x in range(size)) padding_bottom = "" if padding and (padding == 'bottom' or pad_all): padding_bottom = "".join("\n" for x in range(size)) return "{}{}{}".format(padding_top, text, padding_bottom) return text
[ "def", "padd", "(", "text", ",", "padding", "=", "\"top\"", ",", "size", "=", "1", ")", ":", "if", "padding", ":", "padding", "=", "padding", ".", "lower", "(", ")", "pad_all", "=", "padding", "==", "'all'", "padding_top", "=", "\"\"", "if", "padding", "and", "(", "padding", "==", "'top'", "or", "pad_all", ")", ":", "padding_top", "=", "\"\"", ".", "join", "(", "\"\\n\"", "for", "x", "in", "range", "(", "size", ")", ")", "padding_bottom", "=", "\"\"", "if", "padding", "and", "(", "padding", "==", "'bottom'", "or", "pad_all", ")", ":", "padding_bottom", "=", "\"\"", ".", "join", "(", "\"\\n\"", "for", "x", "in", "range", "(", "size", ")", ")", "return", "\"{}{}{}\"", ".", "format", "(", "padding_top", ",", "text", ",", "padding_bottom", ")", "return", "text" ]
Adds extra new lines to the top, bottom or both of a String @text: #str text to pad @padding: #str 'top', 'bottom' or 'all' @size: #int number of new lines -> #str padded @text .. from vital.debug import * padd("Hello world") # -> '\\nHello world' padd("Hello world", size=5, padding="all") # -> '\\n\\n\\n\\n\\nHello world\\n\\n\\n\\n\\n' ..
[ "Adds", "extra", "new", "lines", "to", "the", "top", "bottom", "or", "both", "of", "a", "String" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L125-L153
jaredLunde/vital-tools
vital/debug/__init__.py
colorize
def colorize(text, color="BLUE", close=True): """ Colorizes text for terminal outputs @text: #str to colorize @color: #str color from :mod:colors @close: #bool whether or not to reset the color -> #str colorized @text .. from vital.debug import colorize colorize("Hello world", "blue") # -> '\x1b[0;34mHello world\x1b[1;m' colorize("Hello world", "blue", close=False) # -> '\x1b[0;34mHello world' .. """ if color: color = getattr(colors, color.upper()) return color + uncolorize(str(text)) + (colors.RESET if close else "") return text
python
def colorize(text, color="BLUE", close=True): """ Colorizes text for terminal outputs @text: #str to colorize @color: #str color from :mod:colors @close: #bool whether or not to reset the color -> #str colorized @text .. from vital.debug import colorize colorize("Hello world", "blue") # -> '\x1b[0;34mHello world\x1b[1;m' colorize("Hello world", "blue", close=False) # -> '\x1b[0;34mHello world' .. """ if color: color = getattr(colors, color.upper()) return color + uncolorize(str(text)) + (colors.RESET if close else "") return text
[ "def", "colorize", "(", "text", ",", "color", "=", "\"BLUE\"", ",", "close", "=", "True", ")", ":", "if", "color", ":", "color", "=", "getattr", "(", "colors", ",", "color", ".", "upper", "(", ")", ")", "return", "color", "+", "uncolorize", "(", "str", "(", "text", ")", ")", "+", "(", "colors", ".", "RESET", "if", "close", "else", "\"\"", ")", "return", "text" ]
Colorizes text for terminal outputs @text: #str to colorize @color: #str color from :mod:colors @close: #bool whether or not to reset the color -> #str colorized @text .. from vital.debug import colorize colorize("Hello world", "blue") # -> '\x1b[0;34mHello world\x1b[1;m' colorize("Hello world", "blue", close=False) # -> '\x1b[0;34mHello world' ..
[ "Colorizes", "text", "for", "terminal", "outputs" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L156-L177
jaredLunde/vital-tools
vital/debug/__init__.py
bold
def bold(text, close=True): """ Bolds text for terminal outputs @text: #str to bold @close: #bool whether or not to reset the bold flag -> #str bolded @text .. from vital.debug import bold bold("Hello world") # -> '\x1b[1mHello world\x1b[1;m' bold("Hello world", close=False) # -> '\x1b[1mHello world' .. """ return getattr(colors, "BOLD") + str(text) + \ (colors.RESET if close else "")
python
def bold(text, close=True): """ Bolds text for terminal outputs @text: #str to bold @close: #bool whether or not to reset the bold flag -> #str bolded @text .. from vital.debug import bold bold("Hello world") # -> '\x1b[1mHello world\x1b[1;m' bold("Hello world", close=False) # -> '\x1b[1mHello world' .. """ return getattr(colors, "BOLD") + str(text) + \ (colors.RESET if close else "")
[ "def", "bold", "(", "text", ",", "close", "=", "True", ")", ":", "return", "getattr", "(", "colors", ",", "\"BOLD\"", ")", "+", "str", "(", "text", ")", "+", "(", "colors", ".", "RESET", "if", "close", "else", "\"\"", ")" ]
Bolds text for terminal outputs @text: #str to bold @close: #bool whether or not to reset the bold flag -> #str bolded @text .. from vital.debug import bold bold("Hello world") # -> '\x1b[1mHello world\x1b[1;m' bold("Hello world", close=False) # -> '\x1b[1mHello world' ..
[ "Bolds", "text", "for", "terminal", "outputs" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L199-L217
jaredLunde/vital-tools
vital/debug/__init__.py
cut
def cut(text, length=50, replace_with="…"): """ Shortens text to @length, appends @replace_with to end of string if the string length is > @length @text: #str text to shortens @length: #int max length of string @replace_with: #str to replace chars beyond @length with .. from vital.debug import cut cut("Hello world", 8) # -> 'Hello w…' cut("Hello world", 15) # -> 'Hello world' .. """ text_len = len(uncolorize(text)) if text_len > length: replace_len = len(replace_with) color_spans = [ _colors.span() for _colors in _find_colors.finditer(text)] chars = 0 _length = length+1 - replace_len for i, c in enumerate(text): broken = False for span in color_spans: if span[0] <= i < span[1]: broken = True break if broken: continue chars += 1 if chars <= _length: cutoff = i else: break if color_spans: return text[:cutoff] + replace_with + colors.RESET else: return text[:cutoff] + replace_with return text
python
def cut(text, length=50, replace_with="…"): """ Shortens text to @length, appends @replace_with to end of string if the string length is > @length @text: #str text to shortens @length: #int max length of string @replace_with: #str to replace chars beyond @length with .. from vital.debug import cut cut("Hello world", 8) # -> 'Hello w…' cut("Hello world", 15) # -> 'Hello world' .. """ text_len = len(uncolorize(text)) if text_len > length: replace_len = len(replace_with) color_spans = [ _colors.span() for _colors in _find_colors.finditer(text)] chars = 0 _length = length+1 - replace_len for i, c in enumerate(text): broken = False for span in color_spans: if span[0] <= i < span[1]: broken = True break if broken: continue chars += 1 if chars <= _length: cutoff = i else: break if color_spans: return text[:cutoff] + replace_with + colors.RESET else: return text[:cutoff] + replace_with return text
[ "def", "cut", "(", "text", ",", "length", "=", "50", ",", "replace_with", "=", "\"…\"):", "", "", "text_len", "=", "len", "(", "uncolorize", "(", "text", ")", ")", "if", "text_len", ">", "length", ":", "replace_len", "=", "len", "(", "replace_with", ")", "color_spans", "=", "[", "_colors", ".", "span", "(", ")", "for", "_colors", "in", "_find_colors", ".", "finditer", "(", "text", ")", "]", "chars", "=", "0", "_length", "=", "length", "+", "1", "-", "replace_len", "for", "i", ",", "c", "in", "enumerate", "(", "text", ")", ":", "broken", "=", "False", "for", "span", "in", "color_spans", ":", "if", "span", "[", "0", "]", "<=", "i", "<", "span", "[", "1", "]", ":", "broken", "=", "True", "break", "if", "broken", ":", "continue", "chars", "+=", "1", "if", "chars", "<=", "_length", ":", "cutoff", "=", "i", "else", ":", "break", "if", "color_spans", ":", "return", "text", "[", ":", "cutoff", "]", "+", "replace_with", "+", "colors", ".", "RESET", "else", ":", "return", "text", "[", ":", "cutoff", "]", "+", "replace_with", "return", "text" ]
Shortens text to @length, appends @replace_with to end of string if the string length is > @length @text: #str text to shortens @length: #int max length of string @replace_with: #str to replace chars beyond @length with .. from vital.debug import cut cut("Hello world", 8) # -> 'Hello w…' cut("Hello world", 15) # -> 'Hello world' ..
[ "Shortens", "text", "to", "@length", "appends", "@replace_with", "to", "end", "of", "string", "if", "the", "string", "length", "is", ">", "@length" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L226-L267
jaredLunde/vital-tools
vital/debug/__init__.py
flag
def flag(text=None, color=None, padding=None, show=True, brackets='⸨⸩'): """ Wraps @text in parentheses (), optionally colors and pads and prints the text. @text: #str text to (flag) @color: #str color to :func:colorize the text within @padding: #str location of padding from :func:padd @show: #bool whether or not to print the text in addition to returning it -> #str (flagged) text .. from vital.debug import flag flag("Hello world", "blue") # -> (Hello world) # '(\x1b[0;34mHello world\x1b[1;m)' flag("Hello world", "blue", show=False) # -> '(\x1b[0;34mHello world\x1b[1;m)' flag("Hello world", color="blue", padding="all") # -> # (Hello world) # # '\\n(\x1b[0;34mHello world\x1b[1;m)\\n' .. """ _flag = None if text: _flag = padd( "{}{}{}".format( brackets[0], colorize(text, color) if color else text, brackets[1] ), padding ) if not show: return _flag else: print(_flag) return _flag or text
python
def flag(text=None, color=None, padding=None, show=True, brackets='⸨⸩'): """ Wraps @text in parentheses (), optionally colors and pads and prints the text. @text: #str text to (flag) @color: #str color to :func:colorize the text within @padding: #str location of padding from :func:padd @show: #bool whether or not to print the text in addition to returning it -> #str (flagged) text .. from vital.debug import flag flag("Hello world", "blue") # -> (Hello world) # '(\x1b[0;34mHello world\x1b[1;m)' flag("Hello world", "blue", show=False) # -> '(\x1b[0;34mHello world\x1b[1;m)' flag("Hello world", color="blue", padding="all") # -> # (Hello world) # # '\\n(\x1b[0;34mHello world\x1b[1;m)\\n' .. """ _flag = None if text: _flag = padd( "{}{}{}".format( brackets[0], colorize(text, color) if color else text, brackets[1] ), padding ) if not show: return _flag else: print(_flag) return _flag or text
[ "def", "flag", "(", "text", "=", "None", ",", "color", "=", "None", ",", "padding", "=", "None", ",", "show", "=", "True", ",", "brackets", "=", "'⸨⸩'):", "", "", "_flag", "=", "None", "if", "text", ":", "_flag", "=", "padd", "(", "\"{}{}{}\"", ".", "format", "(", "brackets", "[", "0", "]", ",", "colorize", "(", "text", ",", "color", ")", "if", "color", "else", "text", ",", "brackets", "[", "1", "]", ")", ",", "padding", ")", "if", "not", "show", ":", "return", "_flag", "else", ":", "print", "(", "_flag", ")", "return", "_flag", "or", "text" ]
Wraps @text in parentheses (), optionally colors and pads and prints the text. @text: #str text to (flag) @color: #str color to :func:colorize the text within @padding: #str location of padding from :func:padd @show: #bool whether or not to print the text in addition to returning it -> #str (flagged) text .. from vital.debug import flag flag("Hello world", "blue") # -> (Hello world) # '(\x1b[0;34mHello world\x1b[1;m)' flag("Hello world", "blue", show=False) # -> '(\x1b[0;34mHello world\x1b[1;m)' flag("Hello world", color="blue", padding="all") # -> # (Hello world) # # '\\n(\x1b[0;34mHello world\x1b[1;m)\\n' ..
[ "Wraps", "@text", "in", "parentheses", "()", "optionally", "colors", "and", "pads", "and", "prints", "the", "text", "." ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L270-L312
jaredLunde/vital-tools
vital/debug/__init__.py
table_mapping
def table_mapping(data, padding=1, separator=" "): """ Pretty prints a one-dimensional key: value mapping @data: #dict data to pretty print @padding: #int number of spaces to pad the left side of the key with @separator: #str chars to separate the key and value pair with -> #str pretty one dimensional table .. from vital.debug import table_mapping print(table_mapping({"key1": "val1", "key2": "val2"})) # -> \x1b[1m key1\x1b[1;m val1 # \x1b[1m key2\x1b[1;m val2 print(table_mapping({"key1": "val1", "key2": "val2"}, padding=4)) # -> \x1b[1m key1\x1b[1;m val1 # \x1b[1m key2\x1b[1;m val2 print(table_mapping( {"key1": "val1", "key2": "val2"}, padding=4, separator=": ")) # -> \x1b[1m key1\x1b[1;m: val1 # \x1b[1m key2\x1b[1;m: val2 .. """ if data: ml = max(len(k) for k in data.keys())+1 return "\n".join("{}{}{}".format( bold(k.rjust(ml+padding, " ")), separator, v) for k, v in data.items()) return ""
python
def table_mapping(data, padding=1, separator=" "): """ Pretty prints a one-dimensional key: value mapping @data: #dict data to pretty print @padding: #int number of spaces to pad the left side of the key with @separator: #str chars to separate the key and value pair with -> #str pretty one dimensional table .. from vital.debug import table_mapping print(table_mapping({"key1": "val1", "key2": "val2"})) # -> \x1b[1m key1\x1b[1;m val1 # \x1b[1m key2\x1b[1;m val2 print(table_mapping({"key1": "val1", "key2": "val2"}, padding=4)) # -> \x1b[1m key1\x1b[1;m val1 # \x1b[1m key2\x1b[1;m val2 print(table_mapping( {"key1": "val1", "key2": "val2"}, padding=4, separator=": ")) # -> \x1b[1m key1\x1b[1;m: val1 # \x1b[1m key2\x1b[1;m: val2 .. """ if data: ml = max(len(k) for k in data.keys())+1 return "\n".join("{}{}{}".format( bold(k.rjust(ml+padding, " ")), separator, v) for k, v in data.items()) return ""
[ "def", "table_mapping", "(", "data", ",", "padding", "=", "1", ",", "separator", "=", "\" \"", ")", ":", "if", "data", ":", "ml", "=", "max", "(", "len", "(", "k", ")", "for", "k", "in", "data", ".", "keys", "(", ")", ")", "+", "1", "return", "\"\\n\"", ".", "join", "(", "\"{}{}{}\"", ".", "format", "(", "bold", "(", "k", ".", "rjust", "(", "ml", "+", "padding", ",", "\" \"", ")", ")", ",", "separator", ",", "v", ")", "for", "k", ",", "v", "in", "data", ".", "items", "(", ")", ")", "return", "\"\"" ]
Pretty prints a one-dimensional key: value mapping @data: #dict data to pretty print @padding: #int number of spaces to pad the left side of the key with @separator: #str chars to separate the key and value pair with -> #str pretty one dimensional table .. from vital.debug import table_mapping print(table_mapping({"key1": "val1", "key2": "val2"})) # -> \x1b[1m key1\x1b[1;m val1 # \x1b[1m key2\x1b[1;m val2 print(table_mapping({"key1": "val1", "key2": "val2"}, padding=4)) # -> \x1b[1m key1\x1b[1;m val1 # \x1b[1m key2\x1b[1;m val2 print(table_mapping( {"key1": "val1", "key2": "val2"}, padding=4, separator=": ")) # -> \x1b[1m key1\x1b[1;m: val1 # \x1b[1m key2\x1b[1;m: val2 ..
[ "Pretty", "prints", "a", "one", "-", "dimensional", "key", ":", "value", "mapping" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L315-L345
jaredLunde/vital-tools
vital/debug/__init__.py
gen_rand_str
def gen_rand_str(*size, use=None, keyspace=None): """ Generates a random string using random module specified in @use within the @keyspace @*size: #int size range for the length of the string @use: the random module to use @keyspace: #str chars allowed in the random string .. from vital.debug import gen_rand_str gen_rand_str() # -> 'PRCpAq' gen_rand_str(1, 2) # -> 'Y' gen_rand_str(12, keyspace="abcdefg") # -> 'gaaacffbedf' .. """ keyspace = keyspace or (string.ascii_letters + string.digits) keyspace = [char for char in keyspace] use = use or _random use.seed() if size: size = size if len(size) == 2 else (size[0], size[0]) else: size = (10, 12) return ''.join( use.choice(keyspace) for _ in range(use.randint(*size)))
python
def gen_rand_str(*size, use=None, keyspace=None): """ Generates a random string using random module specified in @use within the @keyspace @*size: #int size range for the length of the string @use: the random module to use @keyspace: #str chars allowed in the random string .. from vital.debug import gen_rand_str gen_rand_str() # -> 'PRCpAq' gen_rand_str(1, 2) # -> 'Y' gen_rand_str(12, keyspace="abcdefg") # -> 'gaaacffbedf' .. """ keyspace = keyspace or (string.ascii_letters + string.digits) keyspace = [char for char in keyspace] use = use or _random use.seed() if size: size = size if len(size) == 2 else (size[0], size[0]) else: size = (10, 12) return ''.join( use.choice(keyspace) for _ in range(use.randint(*size)))
[ "def", "gen_rand_str", "(", "*", "size", ",", "use", "=", "None", ",", "keyspace", "=", "None", ")", ":", "keyspace", "=", "keyspace", "or", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "keyspace", "=", "[", "char", "for", "char", "in", "keyspace", "]", "use", "=", "use", "or", "_random", "use", ".", "seed", "(", ")", "if", "size", ":", "size", "=", "size", "if", "len", "(", "size", ")", "==", "2", "else", "(", "size", "[", "0", "]", ",", "size", "[", "0", "]", ")", "else", ":", "size", "=", "(", "10", ",", "12", ")", "return", "''", ".", "join", "(", "use", ".", "choice", "(", "keyspace", ")", "for", "_", "in", "range", "(", "use", ".", "randint", "(", "*", "size", ")", ")", ")" ]
Generates a random string using random module specified in @use within the @keyspace @*size: #int size range for the length of the string @use: the random module to use @keyspace: #str chars allowed in the random string .. from vital.debug import gen_rand_str gen_rand_str() # -> 'PRCpAq' gen_rand_str(1, 2) # -> 'Y' gen_rand_str(12, keyspace="abcdefg") # -> 'gaaacffbedf' ..
[ "Generates", "a", "random", "string", "using", "random", "module", "specified", "in", "@use", "within", "the", "@keyspace" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L348-L378
jaredLunde/vital-tools
vital/debug/__init__.py
get_parent_name
def get_parent_name(obj): """ Gets the name of the object containing @obj and returns as a string @obj: any python object -> #str parent object name or None .. from vital.debug import get_parent_name get_parent_name(get_parent_name) # -> 'vital.debug' get_parent_name(vital.debug) # -> 'vital' get_parent_name(str) # -> 'builtins' .. """ parent_obj = get_parent_obj(obj) parent_name = get_obj_name(parent_obj) if parent_obj else None n = 0 while parent_obj and n < 2500: parent_obj = get_parent_obj(parent_obj) if parent_obj: parent_name = "{}.{}".format(get_obj_name(parent_obj), parent_name) n += 1 if not parent_name or not len(parent_name): parent_name = None objname = get_obj_name(obj) if objname and len(objname.split(".")) > 1: return ".".join(objname.split(".")[:-1]) return None return parent_name
python
def get_parent_name(obj): """ Gets the name of the object containing @obj and returns as a string @obj: any python object -> #str parent object name or None .. from vital.debug import get_parent_name get_parent_name(get_parent_name) # -> 'vital.debug' get_parent_name(vital.debug) # -> 'vital' get_parent_name(str) # -> 'builtins' .. """ parent_obj = get_parent_obj(obj) parent_name = get_obj_name(parent_obj) if parent_obj else None n = 0 while parent_obj and n < 2500: parent_obj = get_parent_obj(parent_obj) if parent_obj: parent_name = "{}.{}".format(get_obj_name(parent_obj), parent_name) n += 1 if not parent_name or not len(parent_name): parent_name = None objname = get_obj_name(obj) if objname and len(objname.split(".")) > 1: return ".".join(objname.split(".")[:-1]) return None return parent_name
[ "def", "get_parent_name", "(", "obj", ")", ":", "parent_obj", "=", "get_parent_obj", "(", "obj", ")", "parent_name", "=", "get_obj_name", "(", "parent_obj", ")", "if", "parent_obj", "else", "None", "n", "=", "0", "while", "parent_obj", "and", "n", "<", "2500", ":", "parent_obj", "=", "get_parent_obj", "(", "parent_obj", ")", "if", "parent_obj", ":", "parent_name", "=", "\"{}.{}\"", ".", "format", "(", "get_obj_name", "(", "parent_obj", ")", ",", "parent_name", ")", "n", "+=", "1", "if", "not", "parent_name", "or", "not", "len", "(", "parent_name", ")", ":", "parent_name", "=", "None", "objname", "=", "get_obj_name", "(", "obj", ")", "if", "objname", "and", "len", "(", "objname", ".", "split", "(", "\".\"", ")", ")", ">", "1", ":", "return", "\".\"", ".", "join", "(", "objname", ".", "split", "(", "\".\"", ")", "[", ":", "-", "1", "]", ")", "return", "None", "return", "parent_name" ]
Gets the name of the object containing @obj and returns as a string @obj: any python object -> #str parent object name or None .. from vital.debug import get_parent_name get_parent_name(get_parent_name) # -> 'vital.debug' get_parent_name(vital.debug) # -> 'vital' get_parent_name(str) # -> 'builtins' ..
[ "Gets", "the", "name", "of", "the", "object", "containing", "@obj", "and", "returns", "as", "a", "string" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L425-L458
jaredLunde/vital-tools
vital/debug/__init__.py
get_class_that_defined_method
def get_class_that_defined_method(meth): """ Gets the class object which defined a given method @meth: a class method -> owner class object """ if inspect.ismethod(meth): for cls in inspect.getmro(meth.__self__.__class__): if cls.__dict__.get(meth.__name__) is meth: return cls meth = meth.__func__ # fallback to __qualname__ parsing if inspect.isfunction(meth): cls = getattr( inspect.getmodule(meth), meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0]) if isinstance(cls, type): return cls return None
python
def get_class_that_defined_method(meth): """ Gets the class object which defined a given method @meth: a class method -> owner class object """ if inspect.ismethod(meth): for cls in inspect.getmro(meth.__self__.__class__): if cls.__dict__.get(meth.__name__) is meth: return cls meth = meth.__func__ # fallback to __qualname__ parsing if inspect.isfunction(meth): cls = getattr( inspect.getmodule(meth), meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0]) if isinstance(cls, type): return cls return None
[ "def", "get_class_that_defined_method", "(", "meth", ")", ":", "if", "inspect", ".", "ismethod", "(", "meth", ")", ":", "for", "cls", "in", "inspect", ".", "getmro", "(", "meth", ".", "__self__", ".", "__class__", ")", ":", "if", "cls", ".", "__dict__", ".", "get", "(", "meth", ".", "__name__", ")", "is", "meth", ":", "return", "cls", "meth", "=", "meth", ".", "__func__", "# fallback to __qualname__ parsing", "if", "inspect", ".", "isfunction", "(", "meth", ")", ":", "cls", "=", "getattr", "(", "inspect", ".", "getmodule", "(", "meth", ")", ",", "meth", ".", "__qualname__", ".", "split", "(", "'.<locals>'", ",", "1", ")", "[", "0", "]", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", ")", "if", "isinstance", "(", "cls", ",", "type", ")", ":", "return", "cls", "return", "None" ]
Gets the class object which defined a given method @meth: a class method -> owner class object
[ "Gets", "the", "class", "object", "which", "defined", "a", "given", "method" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L461-L479
jaredLunde/vital-tools
vital/debug/__init__.py
get_parent_obj
def get_parent_obj(obj): """ Gets the name of the object containing @obj and returns as a string @obj: any python object -> #str parent object name or None .. from vital.debug import get_parent_obj get_parent_obj(get_parent_obj) # -> <module 'vital.debug' from> .. """ try: cls = get_class_that_defined_method(obj) if cls and cls != obj: return cls except AttributeError: pass if hasattr(obj, '__module__') and obj.__module__: try: module = locate(obj.__module__) assert module is not obj return module except Exception: try: module = module.__module__.split('.')[:-1] if len(module): return locate(module) except Exception: pass elif hasattr(obj, '__objclass__') and obj.__objclass__: return obj.__objclass__ try: assert hasattr(obj, '__qualname__') or hasattr(obj, '__name__') objname = obj.__qualname__ if hasattr(obj, '__qualname__') \ else obj.__name__ objname = objname.split(".") assert len(objname) > 1 return locate(".".join(objname[:-1])) except Exception: try: module = importlib.import_module(".".join(objname[:-1])) return module except Exception: pass return None
python
def get_parent_obj(obj): """ Gets the name of the object containing @obj and returns as a string @obj: any python object -> #str parent object name or None .. from vital.debug import get_parent_obj get_parent_obj(get_parent_obj) # -> <module 'vital.debug' from> .. """ try: cls = get_class_that_defined_method(obj) if cls and cls != obj: return cls except AttributeError: pass if hasattr(obj, '__module__') and obj.__module__: try: module = locate(obj.__module__) assert module is not obj return module except Exception: try: module = module.__module__.split('.')[:-1] if len(module): return locate(module) except Exception: pass elif hasattr(obj, '__objclass__') and obj.__objclass__: return obj.__objclass__ try: assert hasattr(obj, '__qualname__') or hasattr(obj, '__name__') objname = obj.__qualname__ if hasattr(obj, '__qualname__') \ else obj.__name__ objname = objname.split(".") assert len(objname) > 1 return locate(".".join(objname[:-1])) except Exception: try: module = importlib.import_module(".".join(objname[:-1])) return module except Exception: pass return None
[ "def", "get_parent_obj", "(", "obj", ")", ":", "try", ":", "cls", "=", "get_class_that_defined_method", "(", "obj", ")", "if", "cls", "and", "cls", "!=", "obj", ":", "return", "cls", "except", "AttributeError", ":", "pass", "if", "hasattr", "(", "obj", ",", "'__module__'", ")", "and", "obj", ".", "__module__", ":", "try", ":", "module", "=", "locate", "(", "obj", ".", "__module__", ")", "assert", "module", "is", "not", "obj", "return", "module", "except", "Exception", ":", "try", ":", "module", "=", "module", ".", "__module__", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", "if", "len", "(", "module", ")", ":", "return", "locate", "(", "module", ")", "except", "Exception", ":", "pass", "elif", "hasattr", "(", "obj", ",", "'__objclass__'", ")", "and", "obj", ".", "__objclass__", ":", "return", "obj", ".", "__objclass__", "try", ":", "assert", "hasattr", "(", "obj", ",", "'__qualname__'", ")", "or", "hasattr", "(", "obj", ",", "'__name__'", ")", "objname", "=", "obj", ".", "__qualname__", "if", "hasattr", "(", "obj", ",", "'__qualname__'", ")", "else", "obj", ".", "__name__", "objname", "=", "objname", ".", "split", "(", "\".\"", ")", "assert", "len", "(", "objname", ")", ">", "1", "return", "locate", "(", "\".\"", ".", "join", "(", "objname", "[", ":", "-", "1", "]", ")", ")", "except", "Exception", ":", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "\".\"", ".", "join", "(", "objname", "[", ":", "-", "1", "]", ")", ")", "return", "module", "except", "Exception", ":", "pass", "return", "None" ]
Gets the name of the object containing @obj and returns as a string @obj: any python object -> #str parent object name or None .. from vital.debug import get_parent_obj get_parent_obj(get_parent_obj) # -> <module 'vital.debug' from> ..
[ "Gets", "the", "name", "of", "the", "object", "containing", "@obj", "and", "returns", "as", "a", "string" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L482-L528
jaredLunde/vital-tools
vital/debug/__init__.py
format_obj_name
def format_obj_name(obj, delim="<>"): """ Formats the object name in a pretty way @obj: any python object @delim: the characters to wrap a parent object name in -> #str formatted name .. from vital.debug import format_obj_name format_obj_name(vital.debug.Timer) # -> 'Timer<vital.debug>' format_obj_name(vital.debug) # -> 'debug<vital>' format_obj_name(vital.debug.Timer.time) # -> 'time<vital.debug.Timer>' .. """ pname = "" parent_name = get_parent_name(obj) if parent_name: pname = "{}{}{}".format(delim[0], get_parent_name(obj), delim[1]) return "{}{}".format(get_obj_name(obj), pname)
python
def format_obj_name(obj, delim="<>"): """ Formats the object name in a pretty way @obj: any python object @delim: the characters to wrap a parent object name in -> #str formatted name .. from vital.debug import format_obj_name format_obj_name(vital.debug.Timer) # -> 'Timer<vital.debug>' format_obj_name(vital.debug) # -> 'debug<vital>' format_obj_name(vital.debug.Timer.time) # -> 'time<vital.debug.Timer>' .. """ pname = "" parent_name = get_parent_name(obj) if parent_name: pname = "{}{}{}".format(delim[0], get_parent_name(obj), delim[1]) return "{}{}".format(get_obj_name(obj), pname)
[ "def", "format_obj_name", "(", "obj", ",", "delim", "=", "\"<>\"", ")", ":", "pname", "=", "\"\"", "parent_name", "=", "get_parent_name", "(", "obj", ")", "if", "parent_name", ":", "pname", "=", "\"{}{}{}\"", ".", "format", "(", "delim", "[", "0", "]", ",", "get_parent_name", "(", "obj", ")", ",", "delim", "[", "1", "]", ")", "return", "\"{}{}\"", ".", "format", "(", "get_obj_name", "(", "obj", ")", ",", "pname", ")" ]
Formats the object name in a pretty way @obj: any python object @delim: the characters to wrap a parent object name in -> #str formatted name .. from vital.debug import format_obj_name format_obj_name(vital.debug.Timer) # -> 'Timer<vital.debug>' format_obj_name(vital.debug) # -> 'debug<vital>' format_obj_name(vital.debug.Timer.time) # -> 'time<vital.debug.Timer>' ..
[ "Formats", "the", "object", "name", "in", "a", "pretty", "way" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L564-L588
jaredLunde/vital-tools
vital/debug/__init__.py
preprX
def preprX(*attributes, address=True, full_name=False, pretty=False, keyless=False, **kwargs): """ `Creates prettier object representations` @*attributes: (#str) instance attributes within the object you wish to display. Attributes can be recursive e.g. |one.two.three| for access to |self.one.two.three| @address: (#bool) |True| to include the memory address @full_name: (#bool) |True| to include the full path to the object vs. the qualified name @pretty: (#bool) |True| to allow bolding and coloring @keyless: (#bool) |True| to display the values of @attributes withotu their attribute names .. class Foo(object): def __init__(self, bar, baz=None): self.bar = bar self.baz = baz __repr__ = prepr('bar', 'baz', address=False) foo = Foo('foobar') repr(foo) .. |<Foo:bar=`foobar`, baz=None>| """ def _format(obj, attribute): try: if keyless: val = getattr_in(obj, attribute) if val is not None: return repr(val) else: return '%s=%s' % (attribute, repr(getattr_in(obj, attribute))) except AttributeError: return None def prep(obj, address=address, full_name=full_name, pretty=pretty, keyless=keyless, **kwargs): if address: address = ":%s" % hex(id(obj)) else: address = "" data = list(filter(lambda x: x is not None, map(lambda a: _format(obj, a), attributes))) if data: data = ':%s' % ', '.join(data) else: data = '' return stdout_encode("<%s%s%s>" % (get_obj_name(obj), data, address)) return prep
python
def preprX(*attributes, address=True, full_name=False, pretty=False, keyless=False, **kwargs): """ `Creates prettier object representations` @*attributes: (#str) instance attributes within the object you wish to display. Attributes can be recursive e.g. |one.two.three| for access to |self.one.two.three| @address: (#bool) |True| to include the memory address @full_name: (#bool) |True| to include the full path to the object vs. the qualified name @pretty: (#bool) |True| to allow bolding and coloring @keyless: (#bool) |True| to display the values of @attributes withotu their attribute names .. class Foo(object): def __init__(self, bar, baz=None): self.bar = bar self.baz = baz __repr__ = prepr('bar', 'baz', address=False) foo = Foo('foobar') repr(foo) .. |<Foo:bar=`foobar`, baz=None>| """ def _format(obj, attribute): try: if keyless: val = getattr_in(obj, attribute) if val is not None: return repr(val) else: return '%s=%s' % (attribute, repr(getattr_in(obj, attribute))) except AttributeError: return None def prep(obj, address=address, full_name=full_name, pretty=pretty, keyless=keyless, **kwargs): if address: address = ":%s" % hex(id(obj)) else: address = "" data = list(filter(lambda x: x is not None, map(lambda a: _format(obj, a), attributes))) if data: data = ':%s' % ', '.join(data) else: data = '' return stdout_encode("<%s%s%s>" % (get_obj_name(obj), data, address)) return prep
[ "def", "preprX", "(", "*", "attributes", ",", "address", "=", "True", ",", "full_name", "=", "False", ",", "pretty", "=", "False", ",", "keyless", "=", "False", ",", "*", "*", "kwargs", ")", ":", "def", "_format", "(", "obj", ",", "attribute", ")", ":", "try", ":", "if", "keyless", ":", "val", "=", "getattr_in", "(", "obj", ",", "attribute", ")", "if", "val", "is", "not", "None", ":", "return", "repr", "(", "val", ")", "else", ":", "return", "'%s=%s'", "%", "(", "attribute", ",", "repr", "(", "getattr_in", "(", "obj", ",", "attribute", ")", ")", ")", "except", "AttributeError", ":", "return", "None", "def", "prep", "(", "obj", ",", "address", "=", "address", ",", "full_name", "=", "full_name", ",", "pretty", "=", "pretty", ",", "keyless", "=", "keyless", ",", "*", "*", "kwargs", ")", ":", "if", "address", ":", "address", "=", "\":%s\"", "%", "hex", "(", "id", "(", "obj", ")", ")", "else", ":", "address", "=", "\"\"", "data", "=", "list", "(", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "map", "(", "lambda", "a", ":", "_format", "(", "obj", ",", "a", ")", ",", "attributes", ")", ")", ")", "if", "data", ":", "data", "=", "':%s'", "%", "', '", ".", "join", "(", "data", ")", "else", ":", "data", "=", "''", "return", "stdout_encode", "(", "\"<%s%s%s>\"", "%", "(", "get_obj_name", "(", "obj", ")", ",", "data", ",", "address", ")", ")", "return", "prep" ]
`Creates prettier object representations` @*attributes: (#str) instance attributes within the object you wish to display. Attributes can be recursive e.g. |one.two.three| for access to |self.one.two.three| @address: (#bool) |True| to include the memory address @full_name: (#bool) |True| to include the full path to the object vs. the qualified name @pretty: (#bool) |True| to allow bolding and coloring @keyless: (#bool) |True| to display the values of @attributes withotu their attribute names .. class Foo(object): def __init__(self, bar, baz=None): self.bar = bar self.baz = baz __repr__ = prepr('bar', 'baz', address=False) foo = Foo('foobar') repr(foo) .. |<Foo:bar=`foobar`, baz=None>|
[ "Creates", "prettier", "object", "representations" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L591-L643
jaredLunde/vital-tools
vital/debug/__init__.py
prepr.add_attrs
def add_attrs(self, *args, _order=[], **kwargs): """ Adds attributes to the __repr__ string @order: optional #list containing order to display kwargs """ for arg in args: if isinstance(arg, (tuple, list)): key, color = arg self.attrs[key] = (None, color) else: self.attrs[arg] = (None, None) if not _order: for key, value in kwargs.items(): self.attrs[key] = (value, None) else: for key in _order: self.attrs[key] = (kwargs[key], None)
python
def add_attrs(self, *args, _order=[], **kwargs): """ Adds attributes to the __repr__ string @order: optional #list containing order to display kwargs """ for arg in args: if isinstance(arg, (tuple, list)): key, color = arg self.attrs[key] = (None, color) else: self.attrs[arg] = (None, None) if not _order: for key, value in kwargs.items(): self.attrs[key] = (value, None) else: for key in _order: self.attrs[key] = (kwargs[key], None)
[ "def", "add_attrs", "(", "self", ",", "*", "args", ",", "_order", "=", "[", "]", ",", "*", "*", "kwargs", ")", ":", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "(", "tuple", ",", "list", ")", ")", ":", "key", ",", "color", "=", "arg", "self", ".", "attrs", "[", "key", "]", "=", "(", "None", ",", "color", ")", "else", ":", "self", ".", "attrs", "[", "arg", "]", "=", "(", "None", ",", "None", ")", "if", "not", "_order", ":", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "self", ".", "attrs", "[", "key", "]", "=", "(", "value", ",", "None", ")", "else", ":", "for", "key", "in", "_order", ":", "self", ".", "attrs", "[", "key", "]", "=", "(", "kwargs", "[", "key", "]", ",", "None", ")" ]
Adds attributes to the __repr__ string @order: optional #list containing order to display kwargs
[ "Adds", "attributes", "to", "the", "__repr__", "string" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L683-L698
jaredLunde/vital-tools
vital/debug/__init__.py
prepr._format_attrs
def _format_attrs(self): """ Formats the self.attrs #OrderedDict """ _bold = bold _colorize = colorize if not self.pretty: _bold = lambda x: x _colorize = lambda x, c: x attrs = [] add_attr = attrs.append if self.doc and hasattr(self.obj, "__doc__"): # Optionally attaches documentation if self.obj.__doc__: add_attr("`{}`".format(self.obj.__doc__.strip())) if self.attrs: # Attach request attributes for key, value in self.attrs.items(): value, color = value try: value = value or \ self._getattrs(getattr, self.obj, key.split(".")) except AttributeError: pass value = _colorize(value, color) if color else value v = None if value is not None: value = "`{}`".format(value) \ if isinstance(value, Look.str_) else value k, v = _bold(key), value else: k, v = _bold(key), str(value) if v: k = '{}='.format(k) if not self._no_keys else '' add_attr("{}{}".format(k, v)) if len(attrs): breaker = "\n " if self.line_break and len(attrs) > 1 else "" return breaker + ((", "+breaker).join(attrs)) + breaker.strip(" ") else: return ""
python
def _format_attrs(self): """ Formats the self.attrs #OrderedDict """ _bold = bold _colorize = colorize if not self.pretty: _bold = lambda x: x _colorize = lambda x, c: x attrs = [] add_attr = attrs.append if self.doc and hasattr(self.obj, "__doc__"): # Optionally attaches documentation if self.obj.__doc__: add_attr("`{}`".format(self.obj.__doc__.strip())) if self.attrs: # Attach request attributes for key, value in self.attrs.items(): value, color = value try: value = value or \ self._getattrs(getattr, self.obj, key.split(".")) except AttributeError: pass value = _colorize(value, color) if color else value v = None if value is not None: value = "`{}`".format(value) \ if isinstance(value, Look.str_) else value k, v = _bold(key), value else: k, v = _bold(key), str(value) if v: k = '{}='.format(k) if not self._no_keys else '' add_attr("{}{}".format(k, v)) if len(attrs): breaker = "\n " if self.line_break and len(attrs) > 1 else "" return breaker + ((", "+breaker).join(attrs)) + breaker.strip(" ") else: return ""
[ "def", "_format_attrs", "(", "self", ")", ":", "_bold", "=", "bold", "_colorize", "=", "colorize", "if", "not", "self", ".", "pretty", ":", "_bold", "=", "lambda", "x", ":", "x", "_colorize", "=", "lambda", "x", ",", "c", ":", "x", "attrs", "=", "[", "]", "add_attr", "=", "attrs", ".", "append", "if", "self", ".", "doc", "and", "hasattr", "(", "self", ".", "obj", ",", "\"__doc__\"", ")", ":", "# Optionally attaches documentation", "if", "self", ".", "obj", ".", "__doc__", ":", "add_attr", "(", "\"`{}`\"", ".", "format", "(", "self", ".", "obj", ".", "__doc__", ".", "strip", "(", ")", ")", ")", "if", "self", ".", "attrs", ":", "# Attach request attributes", "for", "key", ",", "value", "in", "self", ".", "attrs", ".", "items", "(", ")", ":", "value", ",", "color", "=", "value", "try", ":", "value", "=", "value", "or", "self", ".", "_getattrs", "(", "getattr", ",", "self", ".", "obj", ",", "key", ".", "split", "(", "\".\"", ")", ")", "except", "AttributeError", ":", "pass", "value", "=", "_colorize", "(", "value", ",", "color", ")", "if", "color", "else", "value", "v", "=", "None", "if", "value", "is", "not", "None", ":", "value", "=", "\"`{}`\"", ".", "format", "(", "value", ")", "if", "isinstance", "(", "value", ",", "Look", ".", "str_", ")", "else", "value", "k", ",", "v", "=", "_bold", "(", "key", ")", ",", "value", "else", ":", "k", ",", "v", "=", "_bold", "(", "key", ")", ",", "str", "(", "value", ")", "if", "v", ":", "k", "=", "'{}='", ".", "format", "(", "k", ")", "if", "not", "self", ".", "_no_keys", "else", "''", "add_attr", "(", "\"{}{}\"", ".", "format", "(", "k", ",", "v", ")", ")", "if", "len", "(", "attrs", ")", ":", "breaker", "=", "\"\\n \"", "if", "self", ".", "line_break", "and", "len", "(", "attrs", ")", ">", "1", "else", "\"\"", "return", "breaker", "+", "(", "(", "\", \"", "+", "breaker", ")", ".", "join", "(", "attrs", ")", ")", "+", "breaker", ".", "strip", "(", "\" \"", ")", "else", ":", "return", "\"\"" ]
Formats the self.attrs #OrderedDict
[ "Formats", "the", "self", ".", "attrs", "#OrderedDict" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L708-L745
jaredLunde/vital-tools
vital/debug/__init__.py
prepr.format
def format(self): """ Formats the __repr__ string -> #str containing __repr__ output """ _bold = bold _break = "\n " if not self.pretty: _bold = lambda x: x # Attach memory address and return _attrs = self._format_attrs() parent_name = get_parent_name(self.obj) if self.full_name else None self.data = "<{}{}:{}{}>{}".format( parent_name + "." if parent_name else "", get_obj_name(self.obj), _attrs, ":{}".format(hex(id(self.obj))) if self.address else "", _break+self.supplemental if self.supplemental else "") return stdout_encode(self.data)
python
def format(self): """ Formats the __repr__ string -> #str containing __repr__ output """ _bold = bold _break = "\n " if not self.pretty: _bold = lambda x: x # Attach memory address and return _attrs = self._format_attrs() parent_name = get_parent_name(self.obj) if self.full_name else None self.data = "<{}{}:{}{}>{}".format( parent_name + "." if parent_name else "", get_obj_name(self.obj), _attrs, ":{}".format(hex(id(self.obj))) if self.address else "", _break+self.supplemental if self.supplemental else "") return stdout_encode(self.data)
[ "def", "format", "(", "self", ")", ":", "_bold", "=", "bold", "_break", "=", "\"\\n \"", "if", "not", "self", ".", "pretty", ":", "_bold", "=", "lambda", "x", ":", "x", "# Attach memory address and return", "_attrs", "=", "self", ".", "_format_attrs", "(", ")", "parent_name", "=", "get_parent_name", "(", "self", ".", "obj", ")", "if", "self", ".", "full_name", "else", "None", "self", ".", "data", "=", "\"<{}{}:{}{}>{}\"", ".", "format", "(", "parent_name", "+", "\".\"", "if", "parent_name", "else", "\"\"", ",", "get_obj_name", "(", "self", ".", "obj", ")", ",", "_attrs", ",", "\":{}\"", ".", "format", "(", "hex", "(", "id", "(", "self", ".", "obj", ")", ")", ")", "if", "self", ".", "address", "else", "\"\"", ",", "_break", "+", "self", ".", "supplemental", "if", "self", ".", "supplemental", "else", "\"\"", ")", "return", "stdout_encode", "(", "self", ".", "data", ")" ]
Formats the __repr__ string -> #str containing __repr__ output
[ "Formats", "the", "__repr__", "string", "-", ">", "#str", "containing", "__repr__", "output" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L747-L764
jaredLunde/vital-tools
vital/debug/__init__.py
RandData.randstr
def randstr(self): """ -> #str result of :func:gen_rand_str """ return gen_rand_str( 4, 10, use=self.random, keyspace=list(string.ascii_letters))
python
def randstr(self): """ -> #str result of :func:gen_rand_str """ return gen_rand_str( 4, 10, use=self.random, keyspace=list(string.ascii_letters))
[ "def", "randstr", "(", "self", ")", ":", "return", "gen_rand_str", "(", "4", ",", "10", ",", "use", "=", "self", ".", "random", ",", "keyspace", "=", "list", "(", "string", ".", "ascii_letters", ")", ")" ]
-> #str result of :func:gen_rand_str
[ "-", ">", "#str", "result", "of", ":", "func", ":", "gen_rand_str" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L851-L854
jaredLunde/vital-tools
vital/debug/__init__.py
RandData.randbytes
def randbytes(self): """ -> #bytes result of bytes-encoded :func:gen_rand_str """ return gen_rand_str( 10, 30, use=self.random, keyspace=list(self.keyspace) ).encode("utf-8")
python
def randbytes(self): """ -> #bytes result of bytes-encoded :func:gen_rand_str """ return gen_rand_str( 10, 30, use=self.random, keyspace=list(self.keyspace) ).encode("utf-8")
[ "def", "randbytes", "(", "self", ")", ":", "return", "gen_rand_str", "(", "10", ",", "30", ",", "use", "=", "self", ".", "random", ",", "keyspace", "=", "list", "(", "self", ".", "keyspace", ")", ")", ".", "encode", "(", "\"utf-8\"", ")" ]
-> #bytes result of bytes-encoded :func:gen_rand_str
[ "-", ">", "#bytes", "result", "of", "bytes", "-", "encoded", ":", "func", ":", "gen_rand_str" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L857-L861
jaredLunde/vital-tools
vital/debug/__init__.py
RandData.randdomain
def randdomain(self): """ -> a randomized domain-like name """ return '.'.join( rand_readable(3, 6, use=self.random, density=3) for _ in range(self.random.randint(1, 2)) ).lower()
python
def randdomain(self): """ -> a randomized domain-like name """ return '.'.join( rand_readable(3, 6, use=self.random, density=3) for _ in range(self.random.randint(1, 2)) ).lower()
[ "def", "randdomain", "(", "self", ")", ":", "return", "'.'", ".", "join", "(", "rand_readable", "(", "3", ",", "6", ",", "use", "=", "self", ".", "random", ",", "density", "=", "3", ")", "for", "_", "in", "range", "(", "self", ".", "random", ".", "randint", "(", "1", ",", "2", ")", ")", ")", ".", "lower", "(", ")" ]
-> a randomized domain-like name
[ "-", ">", "a", "randomized", "domain", "-", "like", "name" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L864-L869
jaredLunde/vital-tools
vital/debug/__init__.py
RandData.randpath
def randpath(self): """ -> a random URI-like #str path """ return '/'.join( gen_rand_str(3, 10, use=self.random, keyspace=list(self.keyspace)) for _ in range(self.random.randint(0, 3)))
python
def randpath(self): """ -> a random URI-like #str path """ return '/'.join( gen_rand_str(3, 10, use=self.random, keyspace=list(self.keyspace)) for _ in range(self.random.randint(0, 3)))
[ "def", "randpath", "(", "self", ")", ":", "return", "'/'", ".", "join", "(", "gen_rand_str", "(", "3", ",", "10", ",", "use", "=", "self", ".", "random", ",", "keyspace", "=", "list", "(", "self", ".", "keyspace", ")", ")", "for", "_", "in", "range", "(", "self", ".", "random", ".", "randint", "(", "0", ",", "3", ")", ")", ")" ]
-> a random URI-like #str path
[ "-", ">", "a", "random", "URI", "-", "like", "#str", "path" ]
train
https://github.com/jaredLunde/vital-tools/blob/ea924c9bbb6ec22aa66f8095f018b1ee0099ac04/vital/debug/__init__.py#L872-L876